/custom-awscli-1.27.51.tar.gz/custom-awscli-1.27.51/awscli/examples/cognito-idp/update-group.rst
|
**To update a group**
This example updates the description and precedence for MyGroup.
Command::
aws cognito-idp update-group --user-pool-id us-west-2_aaaaaaaaa --group-name MyGroup --description "New description" --precedence 2
Output::
{
"Group": {
"GroupName": "MyGroup",
"UserPoolId": "us-west-2_aaaaaaaaa",
"Description": "New description",
"RoleArn": "arn:aws:iam::111111111111:role/MyRole",
"Precedence": 2,
"LastModifiedDate": 1548800862.812,
"CreationDate": 1548097827.125
}
}
|
PypiClean
|
/python_neuron-0.2.0.tar.gz/python_neuron-0.2.0/docs/installation.rst
|
.. highlight:: shell
============
Installation
============
Stable release
--------------
To install Python Neuron, run this command in your terminal:
.. code-block:: console
$ pip install python_neuron
This is the preferred method to install Python Neuron, as it will always install the most recent stable release.
If you don't have `pip`_ installed, this `Python installation guide`_ can guide
you through the process.
.. _pip: https://pip.pypa.io
.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
From sources
------------
The sources for Python Neuron can be downloaded from the `Github repo`_.
You can either clone the public repository:
.. code-block:: console
$ git clone https://github.com/iris9112/python_neuron
Or download the `tarball`_:
.. code-block:: console
$ curl -OL https://github.com/iris9112/python_neuron/tarball/master
Once you have a copy of the source, you can install it with:
.. code-block:: console
$ python setup.py install
.. _Github repo: https://github.com/iris9112/python_neuron
.. _tarball: https://github.com/iris9112/python_neuron/tarball/master
|
PypiClean
|
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/models/aosp_device_owner_pkcs_certificate_profile.py
|
from __future__ import annotations
from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from . import aosp_device_owner_certificate_profile_base, certificate_store, custom_subject_alternative_name, device_management_certification_authority, managed_device_certificate_state
from . import aosp_device_owner_certificate_profile_base
class AospDeviceOwnerPkcsCertificateProfile(aosp_device_owner_certificate_profile_base.AospDeviceOwnerCertificateProfileBase):
def __init__(self,) -> None:
"""
Instantiates a new AospDeviceOwnerPkcsCertificateProfile and sets the default values.
"""
super().__init__()
self.odata_type = "#microsoft.graph.aospDeviceOwnerPkcsCertificateProfile"
# CertificateStore types
self._certificate_store: Optional[certificate_store.CertificateStore] = None
# PKCS Certificate Template Name
self._certificate_template_name: Optional[str] = None
# PKCS Certification Authority
self._certification_authority: Optional[str] = None
# PKCS Certification Authority Name
self._certification_authority_name: Optional[str] = None
# Device Management Certification Authority Types.
self._certification_authority_type: Optional[device_management_certification_authority.DeviceManagementCertificationAuthority] = None
# Custom Subject Alternative Name Settings. This collection can contain a maximum of 500 elements.
self._custom_subject_alternative_names: Optional[List[custom_subject_alternative_name.CustomSubjectAlternativeName]] = None
# Certificate state for devices. This collection can contain a maximum of 2147483647 elements.
self._managed_device_certificate_states: Optional[List[managed_device_certificate_state.ManagedDeviceCertificateState]] = None
# Custom String that defines the AAD Attribute.
self._subject_alternative_name_format_string: Optional[str] = None
# Custom format to use with SubjectNameFormat = Custom. Example: CN={{EmailAddress}},E={{EmailAddress}},OU=Enterprise Users,O=Contoso Corporation,L=Redmond,ST=WA,C=US
self._subject_name_format_string: Optional[str] = None
@property
def certificate_store(self,) -> Optional[certificate_store.CertificateStore]:
"""
Gets the certificateStore property value. CertificateStore types
Returns: Optional[certificate_store.CertificateStore]
"""
return self._certificate_store
@certificate_store.setter
def certificate_store(self,value: Optional[certificate_store.CertificateStore] = None) -> None:
"""
Sets the certificateStore property value. CertificateStore types
Args:
value: Value to set for the certificate_store property.
"""
self._certificate_store = value
@property
def certificate_template_name(self,) -> Optional[str]:
"""
Gets the certificateTemplateName property value. PKCS Certificate Template Name
Returns: Optional[str]
"""
return self._certificate_template_name
@certificate_template_name.setter
def certificate_template_name(self,value: Optional[str] = None) -> None:
"""
Sets the certificateTemplateName property value. PKCS Certificate Template Name
Args:
value: Value to set for the certificate_template_name property.
"""
self._certificate_template_name = value
@property
def certification_authority(self,) -> Optional[str]:
"""
Gets the certificationAuthority property value. PKCS Certification Authority
Returns: Optional[str]
"""
return self._certification_authority
@certification_authority.setter
def certification_authority(self,value: Optional[str] = None) -> None:
"""
Sets the certificationAuthority property value. PKCS Certification Authority
Args:
value: Value to set for the certification_authority property.
"""
self._certification_authority = value
@property
def certification_authority_name(self,) -> Optional[str]:
"""
Gets the certificationAuthorityName property value. PKCS Certification Authority Name
Returns: Optional[str]
"""
return self._certification_authority_name
@certification_authority_name.setter
def certification_authority_name(self,value: Optional[str] = None) -> None:
"""
Sets the certificationAuthorityName property value. PKCS Certification Authority Name
Args:
value: Value to set for the certification_authority_name property.
"""
self._certification_authority_name = value
@property
def certification_authority_type(self,) -> Optional[device_management_certification_authority.DeviceManagementCertificationAuthority]:
"""
Gets the certificationAuthorityType property value. Device Management Certification Authority Types.
Returns: Optional[device_management_certification_authority.DeviceManagementCertificationAuthority]
"""
return self._certification_authority_type
@certification_authority_type.setter
def certification_authority_type(self,value: Optional[device_management_certification_authority.DeviceManagementCertificationAuthority] = None) -> None:
"""
Sets the certificationAuthorityType property value. Device Management Certification Authority Types.
Args:
value: Value to set for the certification_authority_type property.
"""
self._certification_authority_type = value
@staticmethod
def create_from_discriminator_value(parse_node: Optional[ParseNode] = None) -> AospDeviceOwnerPkcsCertificateProfile:
"""
Creates a new instance of the appropriate class based on discriminator value
Args:
parseNode: The parse node to use to read the discriminator value and create the object
Returns: AospDeviceOwnerPkcsCertificateProfile
"""
if parse_node is None:
raise Exception("parse_node cannot be undefined")
return AospDeviceOwnerPkcsCertificateProfile()
@property
def custom_subject_alternative_names(self,) -> Optional[List[custom_subject_alternative_name.CustomSubjectAlternativeName]]:
"""
Gets the customSubjectAlternativeNames property value. Custom Subject Alternative Name Settings. This collection can contain a maximum of 500 elements.
Returns: Optional[List[custom_subject_alternative_name.CustomSubjectAlternativeName]]
"""
return self._custom_subject_alternative_names
@custom_subject_alternative_names.setter
def custom_subject_alternative_names(self,value: Optional[List[custom_subject_alternative_name.CustomSubjectAlternativeName]] = None) -> None:
"""
Sets the customSubjectAlternativeNames property value. Custom Subject Alternative Name Settings. This collection can contain a maximum of 500 elements.
Args:
value: Value to set for the custom_subject_alternative_names property.
"""
self._custom_subject_alternative_names = value
def get_field_deserializers(self,) -> Dict[str, Callable[[ParseNode], None]]:
"""
The deserialization information for the current model
Returns: Dict[str, Callable[[ParseNode], None]]
"""
from . import aosp_device_owner_certificate_profile_base, certificate_store, custom_subject_alternative_name, device_management_certification_authority, managed_device_certificate_state
fields: Dict[str, Callable[[Any], None]] = {
"certificateStore": lambda n : setattr(self, 'certificate_store', n.get_enum_value(certificate_store.CertificateStore)),
"certificateTemplateName": lambda n : setattr(self, 'certificate_template_name', n.get_str_value()),
"certificationAuthority": lambda n : setattr(self, 'certification_authority', n.get_str_value()),
"certificationAuthorityName": lambda n : setattr(self, 'certification_authority_name', n.get_str_value()),
"certificationAuthorityType": lambda n : setattr(self, 'certification_authority_type', n.get_enum_value(device_management_certification_authority.DeviceManagementCertificationAuthority)),
"customSubjectAlternativeNames": lambda n : setattr(self, 'custom_subject_alternative_names', n.get_collection_of_object_values(custom_subject_alternative_name.CustomSubjectAlternativeName)),
"managedDeviceCertificateStates": lambda n : setattr(self, 'managed_device_certificate_states', n.get_collection_of_object_values(managed_device_certificate_state.ManagedDeviceCertificateState)),
"subjectAlternativeNameFormatString": lambda n : setattr(self, 'subject_alternative_name_format_string', n.get_str_value()),
"subjectNameFormatString": lambda n : setattr(self, 'subject_name_format_string', n.get_str_value()),
}
super_fields = super().get_field_deserializers()
fields.update(super_fields)
return fields
@property
def managed_device_certificate_states(self,) -> Optional[List[managed_device_certificate_state.ManagedDeviceCertificateState]]:
"""
Gets the managedDeviceCertificateStates property value. Certificate state for devices. This collection can contain a maximum of 2147483647 elements.
Returns: Optional[List[managed_device_certificate_state.ManagedDeviceCertificateState]]
"""
return self._managed_device_certificate_states
@managed_device_certificate_states.setter
def managed_device_certificate_states(self,value: Optional[List[managed_device_certificate_state.ManagedDeviceCertificateState]] = None) -> None:
"""
Sets the managedDeviceCertificateStates property value. Certificate state for devices. This collection can contain a maximum of 2147483647 elements.
Args:
value: Value to set for the managed_device_certificate_states property.
"""
self._managed_device_certificate_states = value
def serialize(self,writer: SerializationWriter) -> None:
"""
Serializes information of the current object
Args:
writer: Serialization writer to use to serialize this model
"""
if writer is None:
raise Exception("writer cannot be undefined")
super().serialize(writer)
writer.write_enum_value("certificateStore", self.certificate_store)
writer.write_str_value("certificateTemplateName", self.certificate_template_name)
writer.write_str_value("certificationAuthority", self.certification_authority)
writer.write_str_value("certificationAuthorityName", self.certification_authority_name)
writer.write_enum_value("certificationAuthorityType", self.certification_authority_type)
writer.write_collection_of_object_values("customSubjectAlternativeNames", self.custom_subject_alternative_names)
writer.write_collection_of_object_values("managedDeviceCertificateStates", self.managed_device_certificate_states)
writer.write_str_value("subjectAlternativeNameFormatString", self.subject_alternative_name_format_string)
writer.write_str_value("subjectNameFormatString", self.subject_name_format_string)
@property
def subject_alternative_name_format_string(self,) -> Optional[str]:
"""
Gets the subjectAlternativeNameFormatString property value. Custom String that defines the AAD Attribute.
Returns: Optional[str]
"""
return self._subject_alternative_name_format_string
@subject_alternative_name_format_string.setter
def subject_alternative_name_format_string(self,value: Optional[str] = None) -> None:
"""
Sets the subjectAlternativeNameFormatString property value. Custom String that defines the AAD Attribute.
Args:
value: Value to set for the subject_alternative_name_format_string property.
"""
self._subject_alternative_name_format_string = value
@property
def subject_name_format_string(self,) -> Optional[str]:
"""
Gets the subjectNameFormatString property value. Custom format to use with SubjectNameFormat = Custom. Example: CN={{EmailAddress}},E={{EmailAddress}},OU=Enterprise Users,O=Contoso Corporation,L=Redmond,ST=WA,C=US
Returns: Optional[str]
"""
return self._subject_name_format_string
@subject_name_format_string.setter
def subject_name_format_string(self,value: Optional[str] = None) -> None:
"""
Sets the subjectNameFormatString property value. Custom format to use with SubjectNameFormat = Custom. Example: CN={{EmailAddress}},E={{EmailAddress}},OU=Enterprise Users,O=Contoso Corporation,L=Redmond,ST=WA,C=US
Args:
value: Value to set for the subject_name_format_string property.
"""
self._subject_name_format_string = value
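# --- Usage sketch (not part of the generated SDK file) ---
# A minimal, hedged example of exercising the accessors defined above; only
# properties shown in this class are used, and the values are illustrative.
#
#   profile = AospDeviceOwnerPkcsCertificateProfile()
#   profile.certificate_template_name = "UserCert"
#   profile.certification_authority = "ca.contoso.example"
#   profile.certification_authority_name = "Contoso CA"
#   profile.odata_type  # "#microsoft.graph.aospDeviceOwnerPkcsCertificateProfile"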
|
PypiClean
|
/ktumon-1.0.1.tar.gz/ktumon-1.0.1/README.md
|
# KTU Mon
## Introduction
KTU Mon is an application that I developed as a desperate attempt to keep myself up to date with the recent notifications published by KTU.
Once upon a time I missed registering for a few backlog exams because I put way too much trust in the elected UUCs of my college. I had hoped they would continue providing updates through their Instagram page, only to realize one day that they'd deleted the whole damn account and I had missed my backlog exams.
Those UUCs were useless, to say the least (nothing but initial enthusiasm).
Although I have dropped out of this university and no longer need the app, I have decided to add useless enhancements and publish this application as a package so that other students studying in KTU might find it useful.
Since this is open-source code, you can try to understand the engineering behind this over-engineered web scraper, and please feel free to report bugs, submit fixes, contribute improvements, etc.
Thanks,
Anantha
## Installation & Usage
> The recommended way to install is through pipx
1. Install pipx with pip
```
python -m pip install --user pipx
```
2. Make sure that pipx is on PATH by going to its install folder ```C:\Users\{USER_NAME}\AppData\Roaming\Python\Python3x\Scripts``` and executing
```
pipx ensurepath
```
3. Restart the terminal and run pipx by typing ```pipx```
4. If that didn't work, manually add the ```pipx.exe``` file in ```C:\Users\{USER_NAME}\AppData\Roaming\Python\Python3x\Scripts``` to PATH
5. After that, you can install KTU Mon by typing the following into the terminal
```
pipx install ktumon
```
6. After installing, you can run it by typing ```ktumon``` in the terminal.
> It is recommended that you do not run it immediately. Instead, set it up to run at startup and then restart the system.
7. In order for the application to run at startup, you'll need to add the executable ```ktumon.exe``` from the location ```C:\Users\{USER_NAME}\AppData\Roaming\Python\Python3x\Scripts``` to the Startup folder.
8. Press the Windows logo key + R to open the Run prompt and type ```shell:startup```; this will open the Startup folder. Paste a shortcut to the ```ktumon.exe``` executable in there.
9. Now restart the system.
The first run will take some time to set up the local database and fetch the latest data from KTU, so please be patient. You'll get a desktop notification once the application has finished setting up.
## Technical Stuff
The application runs as a system tray icon using Pystray. A local SQLite database handles all persistence.
We spawn new threads to run the timer that checks for notifications at set intervals.
The application's Web GUI runs on the Uvicorn ASGI web server with FastAPI as the web framework and the Jinja2 templating engine; the frontend is built entirely with the Bootstrap CSS framework.
The Pystray process spawns a new process to run Uvicorn on demand. The spawned Uvicorn process automatically opens the default browser to show the Web GUI.
A WebSocket connection between the frontend and the backend determines whether to terminate the spawned Uvicorn process: it is terminated as soon as the user closes the Web GUI tab or the browser itself. A minimal sketch of this tray-plus-on-demand-server pattern follows.
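The app module path ```ktumon.web:app```, the icon image, and the port below are illustrative assumptions, not the actual KTU Mon internals:
```
import multiprocessing

import pystray
import uvicorn
from PIL import Image


def run_server():
    # Spawned process: serve the FastAPI app (module path is an assumption)
    uvicorn.run("ktumon.web:app", host="127.0.0.1", port=8000)


def open_gui(icon, item):
    # Tray menu callback: launch Uvicorn in a separate process on demand
    multiprocessing.Process(target=run_server, daemon=True).start()


if __name__ == "__main__":
    tray = pystray.Icon(
        "ktumon",
        Image.new("RGB", (64, 64), "white"),  # placeholder icon image
        menu=pystray.Menu(pystray.MenuItem("Open GUI", open_gui)),
    )
    tray.run()  # blocks in the system tray until quit
```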
## Development & Contribution
In order to develop and contribute, fork this project and clone it to your local machine.
We use Visual Studio Code for development and recommend you use it as well.
You should have Python 3.11 or above.
We recommend working within a virtual environment to isolate the packages.
You can install ```virtualenv``` for that purpose
```
pipx install virtualenv
```
Then within the cloned repository run the following command to create a virtual environment
```
virtualenv venv
```
Then change the interpreter within Visual Studio Code to point to the Python interpreter contained in the created virtual environment.
Now create a new terminal in Visual Studio Code and you'll see that the virtual environment is activated.
> You can see ```(venv)``` in the shell prompt of the terminal.
Then type in the following command to install the project as an editable module
```
pip install -e .[dev]
```
After making changes to the code, you can run it at any time by executing ```ktumon```
While modifying the code you usually won't have to rebuild and reinstall the editable module; that is the advantage of this approach.
Now you're all set!
The following command is used to build the distribution packages
```
python -m build
```
|
PypiClean
|
/iflag-1.0.1.tar.gz/iflag-1.0.1/README.md
|
# iflag
A Python library for the Itron / Actaris IFLAG and Corus protocol
## Installing
Install via pip, python 3.6+ only
```pip install iflag```
## About
iflag is a library focused on reading and writing data to devices using the IFLAG or
Corus protocol, mainly Itron / Actaris gas volume converters. Communication is done
over TCP/IP.
## Features
The library is currently focused on using Single Address Mode (SAM) of the Corus protocol
to access data. SEVC-D parameters of I-FLAG are not supported.
* Read parameters
* Write parameters
* Read databases (logs), event log not yet implemented
## Usage
- Different firmware versions use different IDs for each parameter, but the
parameter_id_map is always at id `0x5e`, so the mapping should either be known
beforehand or read from the device before reading more values.
- Different firmware versions also have different database record layouts. You will need
to supply a mapping describing what the databases look like for the meters you want to
read. A default mapping is not supplied since it would impose an opinionated
interpretation of some values.
You should create a mapping like `Dict[str, Dict[int, List[DatabaseRecordParameter]]]`,
where `interval` is the database and `52` is the length of the database record. A list of
`DatabaseRecordParameter` in the order they appear in the database record makes it
possible to convert the bytes into Python values.
Ex:
```python
{
"interval": {
52: [
DatabaseRecordParameter(name="record_duration", data_class=data.Byte),
DatabaseRecordParameter(name="status", data_class=data.Byte),
DatabaseRecordParameter(name="end_date", data_class=data.Date),
DatabaseRecordParameter(
name="consumption_interval_unconverted",
data_class=data.Word,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(
name="consumption_interval_converted",
data_class=data.ULong,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(
name="counter_interval_unconverted",
data_class=data.Word,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(
name="counter_interval_converted",
data_class=data.ULong,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(
name="temperature_interval_minimum", data_class=data.Float1
),
DatabaseRecordParameter(
name="temperature_interval_maximum", data_class=data.Float1
),
DatabaseRecordParameter(
name="temperature_interval_average", data_class=data.Float1
),
DatabaseRecordParameter(
name="pressure_interval_minimum", data_class=data.Float2
),
DatabaseRecordParameter(
name="pressure_interval_maximum", data_class=data.Float2
),
DatabaseRecordParameter(
name="pressure_interval_average", data_class=data.Float2
),
DatabaseRecordParameter(
name="flowrate_unconverted_interval_minimum",
data_class=data.Float3,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(
name="flowrate_unconverted_interval_maximum",
data_class=data.Float3,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(
name="flowrate_converted_interval_minimum",
data_class=data.Float3,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(
name="flowrate_converted_interval_maximum",
data_class=data.Float3,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(name="none_data_1", data_class=data.Null4),
DatabaseRecordParameter(
name="flowrate_unconverted_interval_average",
data_class=data.Float3,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(
name="flowrate_converted_interval_average",
data_class=data.Float3,
affected_by_pulse_input=True,
),
DatabaseRecordParameter(name="start_date", data_class=data.Date),
DatabaseRecordParameter(name="none_data_2", data_class=data.Null2),
]
}
}
```
- Good to know: there are several different float formats due to memory constraints in
the protocol and device. All floats are handled as `decimal.Decimal` in Python to avoid
float rounding errors.
### Read parameters:
```python
from iflag import CorusClient
from iflag.data import CorusString, Index
from iflag.parse import IFlagParameter
from decimal import Decimal
client = CorusClient.with_tcp_transport(address=("localhost", 4000))
# Read single value
client.read_parameters([IFlagParameter(id=0x5e, data_class=CorusString)])
>> {0x5e: "FL_b0040"}
# Read multiple values
client.read_parameters(
[
IFlagParameter(id=0x5e, data_class=CorusString),
IFlagParameter(id=48, data_class=Index),
IFlagParameter(id=49, data_class=Index)
]
)
>> {0x5e: "FL_b0040", 48: Decimal("234567.982"), 49: Decimal("222222.982")}
```
### Write parameters
```python
from iflag import CorusClient, TcpTransport
from iflag.data import Date
from iflag.parse import IFlagParameter
from datetime import datetime
transport = TcpTransport(address=('localhost', 4000))
client = CorusClient(transport=transport)
client.write_parameters([(IFlagParameter(id=106, data_class=Date), datetime.now())])
```
### Read database
```python
from iflag import CorusClient
from datetime import datetime, timedelta
client = CorusClient.with_tcp_transport(address=('localhost', 4000), database_layout=MY_DATABASE_LAYOUT)
client.read_database(database='interval', start=datetime.now(), stop=(datetime.now() - timedelta(hours=4)))
```
- When reading databases you will need to know the `input_pulse_weight`. If it is not
set on the client at initiation or on the `read_database` call, the client will read it
from the meter automatically; a sketch of supplying it up front follows.
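The keyword name matches the `input_pulse_weight` parameter above; its exact placement in the constructor signature is an assumption:
```python
from decimal import Decimal

from iflag import CorusClient

client = CorusClient.with_tcp_transport(
    address=("localhost", 4000),
    database_layout=MY_DATABASE_LAYOUT,  # the mapping defined earlier
    input_pulse_weight=Decimal("0.1"),   # illustrative value
)
```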
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/request/AnttechOceanbasePassinfoLogininfoQueryRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AnttechOceanbasePassinfoLogininfoQueryModel import AnttechOceanbasePassinfoLogininfoQueryModel
class AnttechOceanbasePassinfoLogininfoQueryRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AnttechOceanbasePassinfoLogininfoQueryModel):
self._biz_content = value
else:
self._biz_content = AnttechOceanbasePassinfoLogininfoQueryModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'anttech.oceanbase.passinfo.logininfo.query'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
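# --- Usage sketch (not part of the SDK module) ---
# A hedged illustration of composing the request defined above; only methods
# and properties shown in this class are used, and the URL is illustrative:
#
#   model = AnttechOceanbasePassinfoLogininfoQueryModel()
#   request = AnttechOceanbasePassinfoLogininfoQueryRequest(biz_model=model)
#   request.notify_url = "https://example.com/notify"
#   params = request.get_params()
#   # params now holds the method name, version and serialized biz_content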
|
PypiClean
|
/cas_pip-0.1.0.tar.gz/cas_pip-0.1.0/README.rst
|
=======
CAS pip
=======
.. image:: https://img.shields.io/pypi/v/cas_pip.svg
:target: https://pypi.python.org/pypi/cas_pip
.. image:: https://readthedocs.org/projects/cas-pip/badge/?version=latest
:target: https://cas-pip.readthedocs.io/en/latest/?version=latest
:alt: Documentation Status
Community Attestation Service (CAS) integration for pip
* Free software: Apache Software License 2.0
* Documentation: https://cas-pip.readthedocs.io.
Features
----------
* Notarizing requirements.txt
* Authorizing requirements.txt
* Notarizing files
* Authorizing files
* Notarizing hashes
* Authorizing hashes
* API client
* Async API client
* Easy integration with your software
Install it
----------
Installation is very easy::
pip install cas_pip
Usage
----------
.. image:: https://asciinema.org/a/491483.svg
:target: https://asciinema.org/a/491483
|
PypiClean
|
/odoo12_addon_account_asset_management-12.0.3.2.0-py3-none-any.whl/odoo/addons/account_asset_management/models/account_asset_group.py
|
from odoo import api, fields, models
from odoo.osv import expression
class AccountAssetGroup(models.Model):
_name = 'account.asset.group'
_description = 'Asset Group'
_order = 'code, name'
_parent_store = True
name = fields.Char(string='Name', size=64, required=True, index=True)
code = fields.Char(index=True)
parent_path = fields.Char(index=True)
company_id = fields.Many2one(
comodel_name='res.company',
string='Company',
required=True,
default=lambda self: self._default_company_id())
parent_id = fields.Many2one(
comodel_name='account.asset.group',
string='Parent Asset Group',
ondelete='restrict')
child_ids = fields.One2many(
comodel_name='account.asset.group',
inverse_name='parent_id',
string='Child Asset Groups')
@api.model
def _default_company_id(self):
return self.env['res.company']._company_default_get('account.asset')
@api.multi
def name_get(self):
result = []
params = self.env.context.get('params')
list_view = params and params.get('view_type') == 'list'
short_name_len = 16
for rec in self:
if rec.code:
full_name = rec.code + ' ' + rec.name
short_name = rec.code
else:
full_name = rec.name
if len(full_name) > short_name_len:
short_name = full_name[:short_name_len] + '...'
else:
short_name = full_name
if list_view:
name = short_name
else:
name = full_name
result.append((rec.id, name))
return result
@api.model
def _name_search(self, name, args=None, operator='ilike', limit=100,
name_get_uid=None):
args = args or []
domain = []
if name:
domain = [
'|',
('code', '=ilike', name.split(' ')[0] + '%'),
('name', operator, name)
]
if operator in expression.NEGATIVE_TERM_OPERATORS:
domain = ['&', '!'] + domain[1:]
rec_ids = self._search(
expression.AND([domain, args]), limit=limit,
access_rights_uid=name_get_uid)
return self.browse(rec_ids).name_get()
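# --- Usage sketch (not part of the addon module) ---
# A hedged illustration of the name_get / _name_search behaviour above.
# `env` stands for an Odoo 12 environment (e.g. from `odoo shell`) and the
# record values are illustrative:
#
#   group = env['account.asset.group'].create({'name': 'Buildings', 'code': 'B100'})
#   group.name_get()   # -> [(group.id, 'B100 Buildings')] outside list views
#   env['account.asset.group']._name_search('B100')  # matches code prefix or name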
|
PypiClean
|
/boot-synth-1.2.0.tar.gz/boot-synth-1.2.0/synth/projects_master/nginx_router/frontend/react/node_modules/jest-snapshot/build/snapshot_resolver.js
|
'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
exports.buildSnapshotResolver = exports.isSnapshotPath = exports.DOT_EXTENSION = exports.EXTENSION = void 0;
var _path = _interopRequireDefault(require('path'));
var _chalk = _interopRequireDefault(require('chalk'));
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {default: obj};
}
/**
* Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
const EXTENSION = 'snap';
exports.EXTENSION = EXTENSION;
const DOT_EXTENSION = '.' + EXTENSION;
exports.DOT_EXTENSION = DOT_EXTENSION;
const isSnapshotPath = path => path.endsWith(DOT_EXTENSION);
exports.isSnapshotPath = isSnapshotPath;
const cache = new Map();
const buildSnapshotResolver = config => {
const key = config.rootDir;
if (!cache.has(key)) {
cache.set(key, createSnapshotResolver(config.snapshotResolver));
}
return cache.get(key);
};
exports.buildSnapshotResolver = buildSnapshotResolver;
function createSnapshotResolver(snapshotResolverPath) {
return typeof snapshotResolverPath === 'string'
? createCustomSnapshotResolver(snapshotResolverPath)
: createDefaultSnapshotResolver();
}
function createDefaultSnapshotResolver() {
return {
resolveSnapshotPath: testPath =>
_path.default.join(
_path.default.join(_path.default.dirname(testPath), '__snapshots__'),
_path.default.basename(testPath) + DOT_EXTENSION
),
resolveTestPath: snapshotPath =>
_path.default.resolve(
_path.default.dirname(snapshotPath),
'..',
_path.default.basename(snapshotPath, DOT_EXTENSION)
),
testPathForConsistencyCheck: _path.default.posix.join(
'consistency_check',
'__tests__',
'example.test.js'
)
};
}
function createCustomSnapshotResolver(snapshotResolverPath) {
const custom = require(snapshotResolverPath);
const keys = [
['resolveSnapshotPath', 'function'],
['resolveTestPath', 'function'],
['testPathForConsistencyCheck', 'string']
];
keys.forEach(([propName, requiredType]) => {
if (typeof custom[propName] !== requiredType) {
throw new TypeError(mustImplement(propName, requiredType));
}
});
const customResolver = {
resolveSnapshotPath: testPath =>
custom.resolveSnapshotPath(testPath, DOT_EXTENSION),
resolveTestPath: snapshotPath =>
custom.resolveTestPath(snapshotPath, DOT_EXTENSION),
testPathForConsistencyCheck: custom.testPathForConsistencyCheck
};
verifyConsistentTransformations(customResolver);
return customResolver;
}
function mustImplement(propName, requiredType) {
return (
_chalk.default.bold(
`Custom snapshot resolver must implement a \`${propName}\` as a ${requiredType}.`
) +
'\nDocumentation: https://facebook.github.io/jest/docs/en/configuration.html#snapshotResolver'
);
}
function verifyConsistentTransformations(custom) {
const resolvedSnapshotPath = custom.resolveSnapshotPath(
custom.testPathForConsistencyCheck
);
const resolvedTestPath = custom.resolveTestPath(resolvedSnapshotPath);
if (resolvedTestPath !== custom.testPathForConsistencyCheck) {
throw new Error(
_chalk.default.bold(
`Custom snapshot resolver functions must transform paths consistently, i.e. expects resolveTestPath(resolveSnapshotPath('${custom.testPathForConsistencyCheck}')) === ${resolvedTestPath}`
)
);
}
}
|
PypiClean
|
/valer.core-1.3.5rc12.zip/valer.core-1.3.5rc12/bika/lims/content/pricelist.py
|
from AccessControl import ClassSecurityInfo
from DateTime import DateTime
from Products.Archetypes.public import BaseFolder
from Products.Archetypes.public import BooleanField
from Products.Archetypes.public import BooleanWidget
from Products.Archetypes.public import DecimalWidget
from Products.Archetypes.public import FixedPointField
from Products.Archetypes.public import Schema
from Products.Archetypes.public import SelectionWidget
from Products.Archetypes.public import StringField
from Products.Archetypes.public import TextAreaWidget
from Products.Archetypes.public import TextField
from Products.Archetypes.public import registerType
from zope.interface import implements
from bika.lims import bikaMessageFactory as _
from bika.lims.config import PRICELIST_TYPES
from bika.lims.config import PROJECTNAME
from bika.lims.content.bikaschema import BikaSchema
from bika.lims.interfaces import IDeactivable
from bika.lims.interfaces import IPricelist
schema = BikaSchema.copy() + Schema((
StringField(
"Type",
required=1,
vocabulary=PRICELIST_TYPES,
widget=SelectionWidget(
format="select",
label=_("Pricelist for"),
),
),
BooleanField(
"BulkDiscount",
default=False,
widget=SelectionWidget(
label=_("Bulk discount applies"),
),
),
FixedPointField(
"BulkPrice",
widget=DecimalWidget(
label=_("Discount %"),
description=_("Enter discount percentage value"),
),
),
BooleanField(
"Descriptions",
default=False,
widget=BooleanWidget(
label=_("Include descriptions"),
description=_("Select if the descriptions should be included"),
),
),
TextField(
"Remarks",
allowable_content_types=("text/plain",),
widget=TextAreaWidget(
label=_("Remarks"),
)
),
))
Field = schema["title"]
Field.required = 1
Field.widget.visible = True
Field = schema["effectiveDate"]
Field.schemata = "default"
# If no date is selected the item will be published immediately.
Field.required = 0
Field.widget.visible = True
Field = schema["expirationDate"]
Field.schemata = "default"
# If no date is chosen, it will never expire.
Field.required = 0
Field.widget.visible = True
class Pricelist(BaseFolder):
"""Pricelist content
"""
implements(IPricelist, IDeactivable)
security = ClassSecurityInfo()
displayContentsTab = False
schema = schema
_at_rename_after_creation = True
def _renameAfterCreation(self, check_auto_id=False):
from bika.lims.idserver import renameAfterCreation
renameAfterCreation(self)
@security.public
def current_date(self):
""" return current date """
return DateTime()
registerType(Pricelist, PROJECTNAME)
|
PypiClean
|
/adcirc_rom-0.0.0-py3-none-any.whl/adcirc_rom/stacked_model.py
|
import gc
import json
import os
import h5py
import joblib
import numpy as np
import xgboost as xgb
from fire import Fire
from sklearn.decomposition import PCA
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import GroupKFold, train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from tensorflow import keras
from tensorflow.keras.callbacks import (CSVLogger, ModelCheckpoint,
ReduceLROnPlateau)
from tensorflow.keras.losses import MeanSquaredError
from tensorflow.keras.optimizers import Adam
from adcirc_rom.constants import SUPPORTED_MODELS
from adcirc_rom.model import (CorrelationFilter, FeatureImportanceFilter,
extract_features)
class StackedModel:
"""Similar to the one-shot XGBoost regression Model class
The differences are that the sklearn api is used for XGBoost, and the problem
is split into classification and regression steps.
"""
def _get_modeldir(self, modelname):
return f"{self._datadir}/models/{modelname}"
def __init__(
self,
dataset="default",
datadir="data",
include_latlon=False,
exclude_bathy=False,
ordered_split=False,
):
"""Load in the dataset we will work with"""
with h5py.File(f"{datadir}/datasets/{dataset}.hdf5", "r") as ds:
print("Opened file")
feature_values, self._feature_names = extract_features(
ds, include_latlon=include_latlon, exclude_bathy=exclude_bathy
)
print("Loaded features")
print(self._feature_names)
maxele = ds["maxele"][:]
maxele[maxele < 0] = 0
self._storm_inds = ds["storm"][:]
self._maxele = maxele
self._coords = np.column_stack([ds["x"][:], ds["y"][:]])
self._storm_names = ds["storm_names"][:]
self.feature_values = feature_values
self._datadir = datadir
self._dataset = dataset
self._ordered_split = ordered_split
print("Loaded data")
def _split_data(self, split_factor=10, seed=2022):
"""
Split the data into training and testing sets.
Parameters
----------
split_factor : int, optional
Number of splits for cross-validation, by default 10.
seed : int, optional
Seed for random number generator, by default 2022.
Returns
-------
None
The function sets the following attributes: `x_train`, `y_train`,
`x_test`, `y_test`, and `holdout_inds`.
Note
----
If the function has been called before, the split is not re-computed.
"""
if hasattr(self, "x_train"):
return
np.random.seed(seed)
if self._ordered_split:
unique_storms = np.unique(self._storm_inds)
unique_storms = np.sort(unique_storms)
nunique = len(unique_storms)
pivot = unique_storms[int((1 - 1.0 / split_factor) * nunique)]
print(f"Storms > {pivot} will be used for testing")
train_mask = self._storm_inds <= pivot
test_mask = ~train_mask
self.holdout_inds = np.where(test_mask)[0]
self.x_train, self.y_train = (
self.feature_values[train_mask],
self._maxele[train_mask],
)
self.x_test, self.y_test = (
self.feature_values[test_mask],
self._maxele[test_mask],
)
return
dummy_arr = np.empty((len(self._storm_inds), 1))
fold = GroupKFold(n_splits=split_factor)
for train_inds, holdout_inds in fold.split(dummy_arr, groups=self._storm_inds):
self.x_train = self.feature_values[train_inds, :]
self.x_test = self.feature_values[holdout_inds, :]
self.y_train = self._maxele[train_inds]
self.y_test = self._maxele[holdout_inds]
self.holdout_inds = holdout_inds
break
def train(
self,
classifier="nn1",
regressor="nn1",
epochs=100,
preprocess=None,
modelname=None,
pca_components=50,
correlation_threshold=0.9,
):
"""
Trains the stacked model.
Parameters
----------
classifier : str, optional
Classifier model name, by default "nn1".
regressor : str, optional
Regressor model name, by default "nn1".
epochs : int, optional
Number of epochs, by default 100.
preprocess : str or None, optional
Preprocessing method, by default None.
modelname : str or None, optional
Name for the trained model, by default will resolve to:
`stacked_{classifier}_{regressor}_{self._dataset}`
pca_components : int, optional
Number of PCA components to keep, by default 50.
correlation_threshold : float, optional
Correlation threshold for feature selection, by default 0.9.
Returns
-------
res : dict
Dictionary with results of training the model, including the
classification accuracy, the mean error in regression, and the
root mean squared error.
"""
if classifier not in SUPPORTED_MODELS:
raise ValueError(f"Unsupported classifier {classifier}!")
if regressor not in SUPPORTED_MODELS:
raise ValueError(f"Unsupported regressor {regressor}!")
if modelname is None:
modelname = f"stacked_{classifier}_{regressor}_{self._dataset}"
modeldir = self._get_modeldir(modelname)
if not os.path.exists(modeldir):
os.makedirs(modeldir, exist_ok=True)
self._split_data()
x_train, y_train = self.x_train, self.y_train
x_test, y_test = self.x_test, self.y_test
transforms = []
if preprocess == "pca":
transforms.append(("pca", PCA(n_components=pca_components)))
elif preprocess == "importance":
transforms.append(("feature_importance", FeatureImportanceFilter()))
elif preprocess == "correlation":
transforms.append(
("corr", CorrelationFilter(threshold=correlation_threshold))
)
elif preprocess is not None:
raise ValueError(f"Unrecognized preprocess scheme {preprocess}")
transforms.append(("scaler", StandardScaler()))
pipeline = Pipeline(transforms)
y_train_class = np.zeros(len(y_train))
y_train_class[y_train != 0] = 1
x_train_normed = pipeline.fit_transform(x_train, y_train)
x_test_normed = pipeline.transform(x_test)
y_test_class = np.zeros(len(y_test))
y_test_class[y_test != 0] = 1
x_train_normed[~np.isfinite(x_train_normed)] = 0
x_test_normed[~np.isfinite(x_test_normed)] = 0
# save the fitted preprocessing pipeline
preproc_file = modeldir + "/preprocess_joblib"
joblib.dump(pipeline, preproc_file)
num_features = x_train_normed.shape[1]
clf = self._get_model(classifier, num_features=num_features, classifier=True)
reg = self._get_model(regressor, num_features=num_features, classifier=False)
print("Training Classifier")
if classifier.startswith("nn"):
csv_logger = CSVLogger(
modeldir + "/training.log", separator=",", append=False
)
cp = ModelCheckpoint(modeldir + "/classifier", save_best_only=True)
callbacks = [
ReduceLROnPlateau(
monitor="val_loss",
factor=0.2,
patience=2,
min_lr=0.000001,
verbose=1,
),
csv_logger,
cp,
]
optimizer = Adam(learning_rate=0.0001)
clf.compile(
loss="binary_crossentropy", optimizer=optimizer, metrics=["accuracy"]
)
clf.fit(
x_train_normed,
y_train_class,
epochs=epochs // 2,
batch_size=2048,
validation_split=0.2,
callbacks=callbacks,
)
test_stage1_pred = clf.predict(x_test_normed, batch_size=2048)
test_stage1_pred = test_stage1_pred.flatten() > 0.5
elif classifier.startswith("xgb"):
# split the training data so we can do early stopping
x_train_xgb, x_val_xgb, y_train_xgb, y_val_xgb = train_test_split(
x_train_normed, y_train_class, test_size=0.2
)
clf.fit(
x_train_xgb,
y_train_xgb,
eval_set=[(x_val_xgb, y_val_xgb)],
verbose=True,
)
os.makedirs(modeldir + "/classifier", exist_ok=True)
clf.save_model(modeldir + "/classifier/model.xgb")
test_stage1_pred = clf.predict(x_test_normed).astype(bool)
print(confusion_matrix(y_test_class, clf.predict(x_test_normed)))
elif classifier == "dummy":
# don't perform classification
test_stage1_pred = np.ones(len(x_test_normed)).astype(bool)
acc = (test_stage1_pred.astype(int) == y_test_class).mean()
print(f"Classification accuracy on test data {100*acc:.2f}%")
# train the regression model on non-zero values
if classifier == "dummy":
y_filter_index = np.ones(len(y_train)).astype(bool)
else:
y_filter_index = y_train != 0
x_train_filter = x_train_normed[y_filter_index].copy()
y_train_filter = y_train[y_filter_index].copy()
gc.collect()
print("Training regressor")
if regressor.startswith("nn"):
loss = MeanSquaredError(reduction="auto")
optimizer = Adam(learning_rate=0.0001)
reduce_lr = ReduceLROnPlateau(
monitor="val_loss", factor=0.2, patience=5, min_lr=0.00001
)
cp = ModelCheckpoint(modeldir + "/regressor", save_best_only=True)
reg.compile(optimizer=optimizer, loss=loss, metrics=["mae"])
gc.collect()
history = reg.fit(
x_train_filter,
y_train_filter,
batch_size=2048,
epochs=epochs,
validation_split=0.2,
callbacks=[reduce_lr, cp],
)
gc.collect()
# prediction pipeline
# prediction stage 2 is regression
test_pred = np.zeros(x_test.shape[0])
test_pred[test_stage1_pred] = reg.predict(
x_test_normed[test_stage1_pred, :], batch_size=2048
).reshape(-1)
gc.collect()
else:
# split the training data so we can do early stopping
x_train_xgb, x_val_xgb, y_train_xgb, y_val_xgb = train_test_split(
x_train_filter, y_train_filter, test_size=0.2
)
reg.fit(
x_train_xgb,
y_train_xgb,
eval_set=[(x_val_xgb, y_val_xgb)],
verbose=True,
)
os.makedirs(modeldir + "/regressor", exist_ok=True)
reg.save_model(modeldir + "/regressor/model.xgb")
test_pred = np.zeros(x_test.shape[0])
test_pred[test_stage1_pred] = reg.predict(
x_test_normed[test_stage1_pred, :]
)
# Absolute error on predictions
error_test = np.abs(y_test.flatten() - test_pred.flatten())
mae = error_test.mean()
rmse = (error_test**2).mean() ** 0.5
res = {"accuracy": acc, "mae": mae, "rmse": rmse}
print(res)
with open(modeldir + "/results.json", "w") as fp:
json.dump(res, fp)
# Save the predictions for later plotting
with h5py.File(modeldir + "/test_preds.hdf5", "w") as outds:
outds["test_pred"] = test_pred
outds["storm_inds"] = self._storm_inds[self.holdout_inds]
outds["coords"] = self._coords[self.holdout_inds]
outds["maxele"] = y_test
return res
def _get_model(self, name, num_features, classifier=True):
params = SUPPORTED_MODELS[name]
params["classifier"] = classifier
if name.startswith("nn"):
return self._get_nn(num_features=num_features, **params)
elif name.startswith("xgb"):
return self._get_xgb(**params)
def _get_nn(self, num_features, size=1, classifier=True):
inputs = keras.Input(shape=num_features)
initial_width = width = 256
x = keras.layers.Dense(initial_width, activation="relu")(inputs)
for i in range(size):
width *= 2
x = keras.layers.Dense(width, activation="relu")(x)
for i in range(size):
width = width // 2
x = keras.layers.Dense(width, activation="relu")(x)
if classifier:
x = keras.layers.Dense(1, activation="sigmoid")(x)
else:
x = keras.layers.Dense(1, activation="relu")(x)
return keras.Model(inputs, x)
def _get_xgb(self, classifier=True, **kwargs):
if classifier:
return xgb.XGBClassifier(eval_metric="error", **kwargs)
else:
return xgb.XGBRegressor(eval_metric="mae", **kwargs)
def predict(self, modelname, test_only=False):
"""
Generate predictions for the given dataset
If test_only is True, assume this is the original dataset the model was
trained with and regenerate predictions for the test set. Otherwise,
generate named predictions for the entire dataset.
Parameters
----------
modelname : str
Name of the trained model to use for prediction.
test_only : bool, optional
Whether to generate predictions for the test set only, by default
False.
Returns
-------
outname: str
Name of the output file: 'test_preds.hdf5' if test_only is set,
or '{self._dataset}_preds.hdf5' if it is not.
"""
# load model
modeldir = self._get_modeldir(modelname)
# for now we just support neural nets
pipeline = joblib.load(modeldir + "/preprocess_joblib")
if test_only:
self._split_data()
X, y = self.x_test, self.y_test
coords = self._coords[self.holdout_inds]
storm_inds = self._storm_inds[self.holdout_inds]
else:
X, y = self.feature_values, self._maxele
coords = self._coords
storm_inds = self._storm_inds
X = pipeline.transform(X)
if os.path.exists(modeldir + "/classifier/model.xgb"):
classifier = xgb.XGBClassifier()
classifier.load_model(modeldir + "/classifier/model.xgb")
inundation_flag = classifier.predict(X).astype(bool)
elif "dummy" in modeldir:
inundation_flag = np.ones(X.shape[0]).astype(bool)
else:
classifier = keras.models.load_model(modeldir + "/classifier")
inundation_flag = (
classifier.predict(X, batch_size=2048).reshape(-1) > 0.5
).astype(bool)
acc = (inundation_flag == (y != 0)).mean()
print(f"Classification accuracy {100*acc:2f}")
elevations = np.zeros(X.shape[0])
if os.path.exists(modeldir + "/regressor/model.xgb"):
regressor = xgb.XGBRegressor()
regressor.load_model(modeldir + "/regressor/model.xgb")
elevations[inundation_flag] = regressor.predict(X[inundation_flag])
else:
regressor = keras.models.load_model(modeldir + "/regressor")
elevations[inundation_flag] = regressor.predict(
X[inundation_flag], batch_size=2048
).reshape(-1)
mae = np.abs((elevations - y)).mean()
rmse = ((elevations - y) ** 2).mean() ** 0.5
print(f"mae: {mae}, rmse: {rmse}")
outname = "test_preds.hdf5" if test_only else f"{self._dataset}_preds.hdf5"
with h5py.File(modeldir + "/" + outname, "w") as outds:
if test_only:
outds["test_pred"] = elevations
else:
outds["pred"] = elevations
outds["coords"] = coords
outds["storm_inds"] = storm_inds
outds["maxele"] = y
return outname
if __name__ == "__main__":
Fire(StackedModel)
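# --- Usage sketch (not part of the module) ---
# Fire exposes StackedModel on the command line, so the class can be driven
# programmatically or via the CLI. Dataset and model names below are
# illustrative; they must exist in data/datasets and SUPPORTED_MODELS:
#
#   model = StackedModel(dataset="default", datadir="data")
#   res = model.train(classifier="nn1", regressor="nn1", epochs=50)
#   model.predict(modelname="stacked_nn1_nn1_default", test_only=True)
#
# or, equivalently, from a shell:
#
#   python stacked_model.py --dataset=default train --classifier=nn1 --regressor=nn1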
|
PypiClean
|
/flet_core-0.10.1-py3-none-any.whl/flet_core/divider.py
|
from typing import Any, Optional
from flet_core.control import Control, OptionalNumber
from flet_core.ref import Ref
class Divider(Control):
"""
A thin horizontal line, with padding on either side.
In the material design language, this represents a divider.
Example:
```
import flet as ft
def main(page: ft.Page):
page.add(
ft.Column(
[
ft.Container(
bgcolor=ft.colors.AMBER,
alignment=ft.alignment.center,
expand=True,
),
ft.Divider(),
ft.Container(
bgcolor=ft.colors.PINK, alignment=ft.alignment.center, expand=True
),
],
spacing=0,
expand=True,
),
)
ft.app(target=main)
```
-----
Online docs: https://flet.dev/docs/controls/divider
"""
def __init__(
self,
ref: Optional[Ref] = None,
opacity: OptionalNumber = None,
visible: Optional[bool] = None,
data: Any = None,
#
# Specific
#
height: OptionalNumber = None,
thickness: OptionalNumber = None,
color: Optional[str] = None,
):
Control.__init__(
self,
ref=ref,
opacity=opacity,
visible=visible,
data=data,
)
self.height = height
self.thickness = thickness
self.color = color
def _get_control_name(self):
return "divider"
# height
@property
def height(self) -> OptionalNumber:
return self._get_attr("height")
@height.setter
def height(self, value: OptionalNumber):
self._set_attr("height", value)
# thickness
@property
def thickness(self) -> OptionalNumber:
return self._get_attr("thickness")
@thickness.setter
def thickness(self, value: OptionalNumber):
self._set_attr("thickness", value)
# color
@property
def color(self):
return self._get_attr("color")
@color.setter
def color(self, value):
self._set_attr("color", value)
|
PypiClean
|
/openram-1.2.29.tar.gz/openram-1.2.29/compiler/base/geometry.py
|
import math
import copy
import numpy as np
from openram import debug
from openram import tech
from openram import OPTS
from .utils import round_to_grid
from .vector import vector
class geometry:
"""
A specific path, shape, or text geometry. Base class for shared
items.
"""
def __init__(self, lpp=None):
""" By default, everything has no size. """
self.width = 0
self.height = 0
if lpp:
self.lpp = lpp
self.layerNumber = lpp[0]
self.layerPurpose = lpp[1]
def __str__(self):
""" override print function output """
debug.error("__str__ must be overridden by all geometry types.", 1)
def __repr__(self):
""" override print function output """
debug.error("__repr__ must be overridden by all geometry types.", 1)
# def translate_coords(self, coords, mirr, angle, xyShift):
# """Calculate coordinates after flip, rotate, and shift"""
# coordinate = []
# for item in coords:
# x = (item[0]*math.cos(angle)-item[1]*mirr*math.sin(angle)+xyShift[0])
# y = (item[0]*math.sin(angle)+item[1]*mirr*math.cos(angle)+xyShift[1])
# coordinate += [(x, y)]
# return coordinate
def transform_coords(self, coords, offset, mirr, angle):
"""Calculate coordinates after flip, rotate, and shift"""
coordinate = []
for item in coords:
x = item[0] * math.cos(angle) - item[1] * mirr * math.sin(angle) + offset[0]
y = item[0] * math.sin(angle) + item[1] * mirr * math.cos(angle) + offset[1]
coordinate += [[x, y]]
return coordinate
def normalize(self):
""" Re-find the LL and UR points after a transform """
(first, second) = self.boundary
ll = vector(min(first[0], second[0]),
min(first[1], second[1])).snap_to_grid()
ur = vector(max(first[0], second[0]),
max(first[1], second[1])).snap_to_grid()
self.boundary = [ll, ur]
def update_boundary(self):
""" Update the boundary with a new placement. """
self.compute_boundary(self.offset, self.mirror, self.rotate)
def compute_boundary(self, offset=vector(0, 0), mirror="", rotate=0):
"""
Transform with offset, mirror and rotation to get the absolute pin location.
We must then re-find the ll and ur. The master is the cell instance.
"""
if OPTS.netlist_only:
self.boundary = [vector(0, 0), vector(0, 0)]
return
(ll, ur) = [vector(0, 0), vector(self.width, self.height)]
# Mirroring is performed before rotation
if mirror == "MX":
ll = ll.scale(1, -1)
ur = ur.scale(1, -1)
elif mirror == "MY":
ll = ll.scale(-1, 1)
ur = ur.scale(-1, 1)
elif mirror == "XY":
ll = ll.scale(-1, -1)
ur = ur.scale(-1, -1)
elif mirror == "" or mirror == "R0":
pass
else:
debug.error("Invalid mirroring: {}".format(mirror), -1)
if rotate == 0:
pass
elif rotate == 90:
ll = ll.rotate_scale(-1, 1)
ur = ur.rotate_scale(-1, 1)
elif rotate == 180:
ll = ll.scale(-1, -1)
ur = ur.scale(-1, -1)
elif rotate == 270:
ll = ll.rotate_scale(1, -1)
ur = ur.rotate_scale(1, -1)
else:
debug.error("Invalid rotation: {}".format(rotate), -1)
self.boundary = [offset + ll, offset + ur]
self.normalize()
def ll(self):
""" Return the lower left corner """
return self.boundary[0]
def ur(self):
""" Return the upper right corner """
return self.boundary[1]
def lr(self):
""" Return the lower right corner """
return vector(self.boundary[1].x, self.boundary[0].y)
def ul(self):
""" Return the upper left corner """
return vector(self.boundary[0].x, self.boundary[1].y)
def uy(self):
""" Return the upper edge """
return self.boundary[1].y
def by(self):
""" Return the bottom edge """
return self.boundary[0].y
def lx(self):
""" Return the left edge """
return self.boundary[0].x
def rx(self):
""" Return the right edge """
return self.boundary[1].x
def cx(self):
""" Return the center x """
return 0.5 * (self.boundary[0].x + self.boundary[1].x)
def cy(self):
""" Return the center y """
return 0.5 * (self.boundary[0].y + self.boundary[1].y)
def center(self):
""" Return the center coordinate """
return vector(self.cx(), self.cy())
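# --- Usage sketch (not part of the module) ---
# A hedged illustration of the boundary bookkeeping above; it assumes the
# openram tech/OPTS globals are initialised as usual:
#
#   g = geometry()
#   g.width, g.height = 4.0, 2.0
#   g.compute_boundary(offset=vector(1, 1), mirror="MX", rotate=0)
#   g.ll(), g.ur()   # normalized lower-left / upper-right corners
#   g.center()       # midpoint of the placed bounding box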
class instance(geometry):
"""
An instance of a module with a specified location, rotation,
spice pins, and spice nets
"""
def __init__(self, name, mod, offset=[0, 0], mirror="R0", rotate=0):
"""Initializes an instance to represent a module"""
super().__init__()
debug.check(mirror not in ["R90", "R180", "R270"],
"Please use rotation and not mirroring during instantiation.")
self.name = name
self.mod = mod
self.gds = mod.gds
self.rotate = rotate
self.offset = vector(offset).snap_to_grid()
self.mirror = mirror
# track if the instance's spice pin connections have been made
self.connected = False
# deepcopy because this instance needs to
# change attributes in these spice objects
self.spice_pins = copy.deepcopy(self.mod.pins)
self.spice_nets = copy.deepcopy(self.mod.nets)
for pin in self.spice_pins.values():
pin.set_inst(self)
for net in self.spice_nets.values():
net.set_inst(self)
if OPTS.netlist_only:
self.width = 0
self.height = 0
else:
if mirror in ["R90", "R270"] or rotate in [90, 270]:
self.width = round_to_grid(mod.height)
self.height = round_to_grid(mod.width)
else:
self.width = round_to_grid(mod.width)
self.height = round_to_grid(mod.height)
self.compute_boundary(offset, mirror, rotate)
debug.info(4, "creating instance: " + self.name)
def get_blockages(self, lpp, top=False):
""" Retrieve blockages of all modules in this instance.
Apply the transform of the instance placement to give absolute blockages."""
angle = math.radians(float(self.rotate))
mirr = 1
if self.mirror == "R90":
angle += math.radians(90.0)
elif self.mirror == "R180":
angle += math.radians(180.0)
elif self.mirror == "R270":
angle += math.radians(270.0)
elif self.mirror == "MX":
mirr = -1
elif self.mirror == "MY":
mirr = -1
angle += math.radians(180.0)
elif self.mirror == "XY":
mirr = 1
angle += math.radians(180.0)
new_blockages = []
if self.mod.is_library_cell:
# Writes library cell blockages as shapes instead of a large metal blockage
blockages = self.mod.gds.getBlockages(lpp)
for b in blockages:
new_blockages.append(self.transform_coords(b, self.offset, mirr, angle))
else:
blockages = self.mod.get_blockages(lpp)
for b in blockages:
new_blockages.append(self.transform_coords(b, self.offset, mirr, angle))
return new_blockages
def gds_write_file(self, new_layout):
"""Recursively writes all the sub-modules in this instance"""
debug.info(4, "writing instance: " + self.name)
# make sure to write out my module/structure
# (it will only be written the first time though)
self.mod.gds_write_file(self.gds)
# now write an instance of my module/structure
new_layout.addInstance(self.gds,
self.mod.cell_name,
offsetInMicrons=self.offset,
mirror=self.mirror,
rotate=self.rotate)
def place(self, offset, mirror="R0", rotate=0):
""" This updates the placement of an instance. """
# Update the placement of an already added instance
self.offset = vector(offset).snap_to_grid()
self.mirror = mirror
self.rotate = rotate
self.update_boundary()
debug.info(3, "placing instance {}".format(self))
    def get_pin(self, name, index=-1):
        """ Return an absolute pin that is offset and transformed based on
        this instance location. Index will return one of several pins."""
        if index == -1:
            pin = copy.deepcopy(self.mod.get_pin(name))
            pin.transform(self.offset, self.mirror, self.rotate)
            return pin
        else:
            pins = copy.deepcopy(self.mod.get_pins(name))
            for p in pins:
                p.transform(self.offset, self.mirror, self.rotate)
            return pins[index]
def get_num_pins(self, name):
""" Return the number of pins of a given name """
return len(self.mod.get_pins(name))
    def get_pins(self, name):
        """ Return absolute pins that are offset and transformed based on
        this instance location. """
        pins = copy.deepcopy(self.mod.get_pins(name))
        new_pins = []
        for p in pins:
            p.transform(self.offset, self.mirror, self.rotate)
            new_pins.append(p)
        return new_pins
def connect_spice_pins(self, nets_list):
"""
add the connection between instance pins and module nets
to both of their respective objects
nets_list must be the same length as self.spice_pins
"""
if len(nets_list) == 0 and len(self.spice_pins) == 0:
            # this is the only valid case to skip the following debug check
            # because instances with no pins are often connected arbitrarily
self.connected = True
return
debug.check(not self.connected,
"instance {} has already been connected".format(self.name))
debug.check(len(self.spice_pins) == len(nets_list),
"must provide list of nets the same length as pin list\
when connecting an instance")
for pin in self.spice_pins.values():
net = nets_list.pop(0)
pin.set_inst_net(net)
net.connect_pin(pin)
self.connected = True
def get_connections(self):
conns = []
for pin in self.spice_pins.values():
conns.append(pin.inst_net.name)
return conns
def calculate_transform(self, node):
#set up the rotation matrix
angle = math.radians(float(node.rotate))
mRotate = np.array([[math.cos(angle), -math.sin(angle), 0.0],
[math.sin(angle), math.cos(angle), 0.0],
[0.0, 0.0, 1.0]])
#set up translation matrix
translateX = float(node.offset[0])
translateY = float(node.offset[1])
mTranslate = np.array([[1.0, 0.0, translateX],
[0.0, 1.0, translateY],
[0.0, 0.0, 1.0]])
#set up the scale matrix (handles mirror X)
scaleX = 1.0
if(node.mirror == 'MX'):
scaleY = -1.0
else:
scaleY = 1.0
mScale = np.array([[scaleX, 0.0, 0.0],
[0.0, scaleY, 0.0],
[0.0, 0.0, 1.0]])
return (mRotate, mScale, mTranslate)
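    # apply_transform composes the matrices as translate(scale(rotate(p)))
    # on homogeneous points p = [x, y, 1]^T. Worked example: with rotate=90,
    # mirror="R0" and offset=(2, 3), a point (x, y) maps to (-y + 2, x + 3).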
def apply_transform(self, mtransforms, uVector, vVector, origin):
origin = np.dot(mtransforms[0], origin) # rotate
uVector = np.dot(mtransforms[0], uVector) # rotate
vVector = np.dot(mtransforms[0], vVector) # rotate
origin = np.dot(mtransforms[1], origin) # scale
uVector = np.dot(mtransforms[1], uVector) # scale
vVector = np.dot(mtransforms[1], vVector) # scale
        origin = np.dot(mtransforms[2], origin) # translate
return(uVector, vVector, origin)
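    # apply_path_transform composes the placement transforms along an
    # instance hierarchy path, innermost instance first (path.pop(-1)),
    # yielding the basis vectors and origin in top-level coordinates.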
def apply_path_transform(self, path):
uVector = np.array([[1.0], [0.0], [0.0]])
vVector = np.array([[0.0], [1.0], [0.0]])
origin = np.array([[0.0], [0.0], [1.0]])
while(path):
instance = path.pop(-1)
mtransforms = self.calculate_transform(instance)
(uVector, vVector, origin) = self.apply_transform(mtransforms, uVector, vVector, origin)
return (uVector, vVector, origin)
def reverse_transformation_bitcell(self, cell_name):
        path = []  # path currently followed in the bitcell search
        cell_paths = []  # saved paths to bitcells
        origin_offsets = []  # cell to bank offset
        Q_offsets = []  # Q to cell offset
        Q_bar_offsets = []  # Q_bar to cell offset
        bl_offsets = []  # bl to cell offset
        br_offsets = []  # br to cell offset
        bl_meta = []  # bl offset metadata (row, col, name)
        br_meta = []  # br offset metadata (row, col, name)
def walk_subtree(node):
path.append(node)
if node.mod.name == cell_name:
cell_paths.append(copy.copy(path))
# get the row and col names from the path
row = int(path[-1].name.split('_')[-2][1:])
col = int(path[-1].name.split('_')[-1][1:])
cell_bl_meta = []
cell_br_meta = []
normalized_storage_nets = node.mod.get_normalized_storage_nets_offset()
(normalized_bl_offsets, normalized_br_offsets, bl_names, br_names) = node.mod.get_normalized_bitline_offset()
for offset in range(len(normalized_bl_offsets)):
for port in range(len(bl_names)):
cell_bl_meta.append([bl_names[offset], row, col, port])
for offset in range(len(normalized_br_offsets)):
for port in range(len(br_names)):
cell_br_meta.append([br_names[offset], row, col, port])
if normalized_storage_nets == []:
                    debug.error("normalized storage nets should not be empty! Check if the GDS labels Q and Q_bar are correctly set on M1 of the cell", 1)
Q_x = normalized_storage_nets[0][0]
Q_y = normalized_storage_nets[0][1]
Q_bar_x = normalized_storage_nets[1][0]
Q_bar_y = normalized_storage_nets[1][1]
if node.mirror == 'MX':
Q_y = -1 * Q_y
Q_bar_y = -1 * Q_bar_y
for pair in range(len(normalized_bl_offsets)):
normalized_bl_offsets[pair] = (normalized_bl_offsets[pair][0],
-1 * normalized_bl_offsets[pair][1])
for pair in range(len(normalized_br_offsets)):
normalized_br_offsets[pair] = (normalized_br_offsets[pair][0],
-1 * normalized_br_offsets[pair][1])
Q_offsets.append([Q_x, Q_y])
Q_bar_offsets.append([Q_bar_x, Q_bar_y])
bl_offsets.append(normalized_bl_offsets)
br_offsets.append(normalized_br_offsets)
bl_meta.append(cell_bl_meta)
br_meta.append(cell_br_meta)
            elif node.mod.insts:
for instance in node.mod.insts:
walk_subtree(instance)
path.pop(-1)
walk_subtree(self)
for path in cell_paths:
vector_spaces = self.apply_path_transform(path)
origin = vector_spaces[2]
origin_offsets.append([origin[0], origin[1]])
return(origin_offsets, Q_offsets, Q_bar_offsets, bl_offsets, br_offsets, bl_meta, br_meta)
    def __str__(self):
        """ override print function output """
        return self.__repr__()

    def __repr__(self):
        """ override print function output """
        return "( inst: " + self.name + " @" + str(self.offset) + " mod=" + self.mod.cell_name + " " + self.mirror + " R=" + str(self.rotate) + ")"
class path(geometry):
"""Represents a Path"""
def __init__(self, lpp, coordinates, path_width):
"""Initializes a path for the specified layer"""
super().__init__(lpp)
self.name = "path"
        self.coordinates = [vector(c[0], c[1]).snap_to_grid() for c in coordinates]
self.path_width = path_width
# FIXME figure out the width/height. This type of path is not
# supported right now. It might not work in gdsMill.
assert(0)
def gds_write_file(self, new_layout):
"""Writes the path to GDS"""
debug.info(4, "writing path (" + str(self.layerNumber) + "): " + self.coordinates)
new_layout.addPath(layerNumber=self.layerNumber,
purposeNumber=self.layerPurpose,
coordinates=self.coordinates,
width=self.path_width)
def get_blockages(self, layer):
""" Fail since we don't support paths yet. """
assert(0)
    def __str__(self):
        """ override print function output """
        return self.__repr__()

    def __repr__(self):
        """ override print function output """
        return "( path: layer=" + str(self.layerNumber) + " purpose=" + str(self.layerPurpose) + " w=" + str(self.path_width) + " coords=" + str(self.coordinates) + " )"
class label(geometry):
"""Represents a text label"""
def __init__(self, text, lpp, offset, zoom=None):
"""Initializes a text label for specified layer"""
super().__init__(lpp)
self.name = "label"
self.text = text
self.offset = vector(offset).snap_to_grid()
if not zoom:
try:
self.zoom = tech.GDS["zoom"]
            except (KeyError, AttributeError):
self.zoom = None
else:
self.zoom = zoom
self.size = 0
debug.info(4, "creating label " + self.text + " " + str(self.layerNumber) + " " + str(self.offset))
def gds_write_file(self, new_layout):
"""Writes the text label to GDS"""
debug.info(4, "writing label (" + str(self.layerNumber) + "): " + self.text)
new_layout.addText(text=self.text,
layerNumber=self.layerNumber,
purposeNumber=self.layerPurpose,
offsetInMicrons=self.offset,
magnification=self.zoom,
rotate=None)
def get_blockages(self, layer):
""" Returns an empty list since text cannot be blockages. """
return []
def __str__(self):
""" override print function output """
return "label: " + self.text + " layer=" + str(self.layerNumber) + " purpose=" + str(self.layerPurpose)
def __repr__(self):
""" override print function output """
return "( label: " + self.text + " @" + str(self.offset) + " layer=" + str(self.layerNumber) + " purpose=" + str(self.layerPurpose) + " )"
class rectangle(geometry):
"""Represents a rectangular shape"""
def __init__(self, lpp, offset, width, height):
"""Initializes a rectangular shape for specified layer"""
super().__init__(lpp)
self.name = "rect"
self.offset = vector(offset).snap_to_grid()
self.size = vector(width, height).snap_to_grid()
self.width = round_to_grid(self.size.x)
self.height = round_to_grid(self.size.y)
self.compute_boundary(offset, "", 0)
debug.info(4, "creating rectangle (" + str(self.layerNumber) + "): "
+ str(self.width) + "x" + str(self.height) + " @ " + str(self.offset))
def get_blockages(self, layer):
""" Returns a list of one rectangle if it is on this layer"""
if self.layerNumber == layer:
return [[self.offset,
vector(self.offset.x + self.width,
self.offset.y + self.height)]]
else:
return []
def gds_write_file(self, new_layout):
"""Writes the rectangular shape to GDS"""
debug.info(4, "writing rectangle (" + str(self.layerNumber) + "):"
+ str(self.width) + "x" + str(self.height) + " @ " + str(self.offset))
new_layout.addBox(layerNumber=self.layerNumber,
purposeNumber=self.layerPurpose,
offsetInMicrons=self.offset,
width=self.width,
height=self.height,
center=False)
def __str__(self):
""" override print function output """
return self.__repr__()
def __repr__(self):
""" override print function output """
return "( rect: @" + str(self.offset) + " WxH=" + str(self.width) + "x" + str(self.height) + " layer=" + str(self.layerNumber) + " purpose=" + str(self.layerPurpose) + " )"
/retaped_tui-1.0rc11-py3-none-any.whl/retaped_tui/widgets.py
from . import npyscreen
from . import globals, structures
import threading
import requests
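# fetchUsername resolves a user id to a username via the REST API and
# patches the already-rendered message in place; messageBox.renderMessage
# runs it in a background thread so the UI is not blocked while waiting.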
def fetchUsername(userID, index):
userData = requests.get(f'{globals.apiEndpoint}/users/{userID}', headers={
'x-session-token': globals.token}).json()
tmpUser = structures.user()
tmpUser.id = userData['_id']
tmpUser.username = userData['username']
tmpUser.status = userData['status'] if 'status' in userData.keys(
) else False
tmpUser.online = userData['online']
globals.cache['users'].update({userData['_id']: tmpUser})
globals.messageBox.values[index].author = tmpUser.username
globals.messageBox.update()
class ServerMultiLineAction(npyscreen.MultiLineAction):
def __init__(self, screen, values=None, value=None, slow_scroll=False, scroll_exit=False, return_exit=False, select_exit=False, exit_left=False, exit_right=False, widgets_inherit_color=False, always_show_cursor=False, allow_filtering=True, **keywords):
super().__init__(screen, values, value, slow_scroll, scroll_exit, return_exit, select_exit,
exit_left, exit_right, widgets_inherit_color, always_show_cursor, allow_filtering, **keywords)
def actionHighlighted(self, act_on_this, key_press):
globals.currentServer = globals.serverIDList[globals.serverList.values.index(
act_on_this)]
globals.channelList.name = act_on_this
globals.channelList.fetchChannels(
globals.cache['servers'][globals.currentServer])
class ChannelMultiLineAction(npyscreen.MultiLineAction):
def __init__(self, screen, values=None, value=None, slow_scroll=False, scroll_exit=False, return_exit=False, select_exit=False, exit_left=False, exit_right=False, widgets_inherit_color=False, always_show_cursor=False, allow_filtering=True, **keywords):
super().__init__(screen, values, value, slow_scroll, scroll_exit, return_exit, select_exit,
exit_left, exit_right, widgets_inherit_color, always_show_cursor, allow_filtering, **keywords)
def actionHighlighted(self, act_on_this, key_press):
globals.currentChannel = globals.channelIDList[globals.channelList.values.index(
act_on_this)]
globals.messageBox.values = []
globals.messageBox.name = act_on_this
globals.messageBox.update()
messages = globals.messageBox.fetchMessages(globals.currentChannel)
if not messages:
return
for i in messages['users']:
if not i['_id'] in globals.cache['users'].keys():
tmpUser = structures.user()
tmpUser.id = i['_id']
tmpUser.username = i['username']
tmpUser.status = i['status'] if 'status' in i.keys() else False
tmpUser.online = i['online']
globals.cache['users'].update({i['_id']: tmpUser})
for i in reversed(range(len(messages['messages']))):
messageData = messages['messages'][i]
globals.messageBox.renderMessage(messageData)
globals.messageBox.update()
self.editing = False
# globals.messageBox.reset_cursor()
class serverBox(npyscreen.BoxTitle):
def __init__(self, screen, contained_widget_arguments=None, *args, **keywords):
super().__init__(screen, contained_widget_arguments, *args, **keywords)
_contained_widget = ServerMultiLineAction
class channelBox(npyscreen.BoxTitle):
def __init__(self, screen, contained_widget_arguments=None, *args, **keywords):
super().__init__(screen, contained_widget_arguments, *args, **keywords)
_contained_widget = ChannelMultiLineAction
def fetchChannels(self, server: structures.server):
globals.channelList.values = []
globals.channelIDList = []
for i in server.channels:
if i in globals.cache['channels'].keys():
channel = globals.cache['channels'][i]
globals.channelList.values.append(channel.name)
globals.channelIDList.append(channel.id)
globals.channelList.update()
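# multiLineMessages renders each structures.message through display_value(),
# which returns [text, mentioned_flag, author] so that the containing widget
# can highlight messages that mention the local user.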
class multiLineMessages(npyscreen.MultiSelectFixed):
def __init__(self, screen, values=None, value=None, slow_scroll=False, scroll_exit=False, return_exit=False, select_exit=False, exit_left=False, exit_right=False, widgets_inherit_color=False, always_show_cursor=False, allow_filtering=True, **keywords):
self.allow_filtering = False
self.check_cursor_move = True
self._contained_widgets = npyscreen.TitleFixedText
self._contained_widgets.allow_override_begin_entry_at = False
self._contained_widgets.use_two_lines = False
self._contained_widgets.height = 1
self._contained_widget_height = 1
super().__init__(screen, values, value, slow_scroll, scroll_exit, return_exit, select_exit,
exit_left, exit_right, widgets_inherit_color, always_show_cursor, allow_filtering, **keywords)
def display_value(self, vl: structures.message):
output = ''
mentioned = False
if globals.localUser.id in vl.mentions:
mentioned = True
# if vl.author:
# output += f'[{vl.author}] '
output += f'{vl.content}'
return [output, mentioned, vl.author]
def when_cursor_moved(self):
globals.highlightedIndex = self.cursor_line
return super().when_cursor_moved()
def _before_print_lines(self):
if len(self.values)-self.start_display_at < self.height:
self.reset_display_cache()
self.make_contained_widgets()
self.clear()
class messageBox(npyscreen.BoxTitle):
def __init__(self, screen, contained_widget_arguments=None, *args, **keywords):
super().__init__(screen, contained_widget_arguments, *args, **keywords)
_contained_widget = multiLineMessages
def renderMessage(self, messageData):
reply = []
mentions = []
author = ''
if 'replies' in messageData.keys():
for i in messageData['replies']:
if i in globals.cache['messages']:
replyContent = f'> {globals.cache["messages"][i].content}'
else:
                    replyContent = '> Unloaded Message'
reply = structures.message()
reply.content = replyContent
reply.author = ''
reply.id = ''
reply.mentions = []
self.values.append(reply)
if 'mentions' in messageData.keys():
for i in messageData['mentions']:
mentions.append(i)
if 'masquerade' in messageData.keys():
author = messageData['masquerade']['name']
else:
if messageData['author'] in globals.cache['users'].keys():
author = globals.cache['users'][messageData['author']].username
else:
author = messageData['author']
threading.Thread(target=fetchUsername, args=[
messageData['author'], len(globals.messageBox.values)-1]).start()
renderedMessage = structures.message()
renderedMessage.id = messageData['_id']
renderedMessage.author = author
renderedMessage.authorID = messageData['author']
renderedMessage.content = messageData['content'] if 'content' in messageData.keys(
) else None
renderedMessage.replies = reply
renderedMessage.mentions = mentions
globals.cache['messages'].update({renderedMessage.id: renderedMessage})
self.values.append(renderedMessage)
def updateMessage(self, message):
for i in self.values:
if i.id == message['id']:
i.content = message['data']['content']
break
def deleteMessage(self, message):
for i in range(len(self.values)):
if self.values[i].id == message['id']:
self.values.pop(i)
break
def fetchMessages(self, id: str):
try:
r = requests.get(
f'{globals.apiEndpoint}/channels/{id}/messages?include_users=true', headers={'x-session-token': globals.token})
except Exception:
npyscreen.notify('Failed to request messages', title='Error')
return False
return r.json()
# def resize(self):
# self.relx, self.height, self.width = globals.form.max_x//6+1, globals.form.max_y-6, globals.form.max_x-globals.form.max_x//6-2
# self.entry_widget.relx, self.entry_widget.height, self.entry_widget.width = globals.form.max_x//6+2, globals.form.max_y-6, globals.form.max_x-globals.form.max_x//6-2
# self.entry_widget.request_height, self.entry_widget.request_width=True, True
# self.update(clear=True)
class inputTextBox(npyscreen.BoxTitle):
def __init__(self, screen, contained_widget_arguments=None, *args, **keywords):
super().__init__(screen, contained_widget_arguments, *args, **keywords)
_contained_widget = npyscreen.MultiLineEdit
def resize(self):
self.relx, self.width = globals.form.max_x//6 + \
1, globals.form.max_x-globals.form.max_x//6-4
self.entry_widget.relx = globals.form.max_x//6+2
self.update(clear=True)
/marxs-1.2.tar.gz/marxs-1.2/licenses/LICENSE.rst
GNU General Public License
==========================
*Version 3, 29 June 2007*
*Copyright © 2007 Free Software Foundation, Inc* <http://fsf.org>
Everyone is permitted to copy and distribute verbatim copies of this license
document, but changing it is not allowed.
Preamble
--------
The GNU General Public License is a free, copyleft license for software and other
kinds of works.
The licenses for most software and other practical works are designed to take away
your freedom to share and change the works. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change all versions of a
program--to make sure it remains free software for all its users. We, the Free
Software Foundation, use the GNU General Public License for most of our software; it
applies also to any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not price. Our General
Public Licenses are designed to make sure that you have the freedom to distribute
copies of free software (and charge for them if you wish), that you receive source
code or can get it if you want it, that you can change the software or use pieces of
it in new free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you these rights or
asking you to surrender the rights. Therefore, you have certain responsibilities if
you distribute copies of the software, or if you modify it: responsibilities to
respect the freedom of others.
For example, if you distribute copies of such a program, whether gratis or for a fee,
you must pass on to the recipients the same freedoms that you received. You must make
sure that they, too, receive or can get the source code. And you must show them these
terms so they know their rights.
Developers that use the GNU GPL protect your rights with two steps: **(1)** assert
copyright on the software, and **(2)** offer you this License giving you legal permission
to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains that there is
no warranty for this free software. For both users' and authors' sake, the GPL
requires that modified versions be marked as changed, so that their problems will not
be attributed erroneously to authors of previous versions.
Some devices are designed to deny users access to install or run modified versions of
the software inside them, although the manufacturer can do so. This is fundamentally
incompatible with the aim of protecting users' freedom to change the software. The
systematic pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we have designed
this version of the GPL to prohibit the practice for those products. If such problems
arise substantially in other domains, we stand ready to extend this provision to
those domains in future versions of the GPL, as needed to protect the freedom of
users.
Finally, every program is threatened constantly by software patents. States should
not allow patents to restrict development and use of software on general-purpose
computers, but in those that do, we wish to avoid the special danger that patents
applied to a free program could make it effectively proprietary. To prevent this, the
GPL assures that patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and modification follow.
TERMS AND CONDITIONS
--------------------
0. Definitions
~~~~~~~~~~~~~~
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work in
a fashion requiring copyright permission, other than the making of an exact copy. The
resulting work is called a "modified version" of the earlier work or a
work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based on
the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for infringement under
applicable copyright law, except executing it on a computer or modifying a private
copy. Propagation includes copying, distribution (with or without modification),
making available to the public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through a computer
network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices" to the
extent that it includes a convenient and prominently visible feature that **(1)**
displays an appropriate copyright notice, and **(2)** tells the user that there is no
warranty for the work (except to the extent that warranties are provided), that
licensees may convey the work under this License, and how to view a copy of this
License. If the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code
~~~~~~~~~~~~~~
The "source code" for a work means the preferred form of the work for
making modifications to it. "Object code" means any non-source form of a
work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of interfaces
specified for a particular programming language, one that is widely used among
developers working in that language.
The "System Libraries" of an executable work include anything, other than
the work as a whole, that **(a)** is included in the normal form of packaging a Major
Component, but which is not part of that Major Component, and **(b)** serves only to
enable use of the work with that Major Component, or to implement a Standard
Interface for which an implementation is available to the public in source code form.
A "Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system (if any) on which
the executable work runs, or a compiler used to produce the work, or an object code
interpreter used to run it.
The "Corresponding Source" for a work in object code form means all the
source code needed to generate, install, and (for an executable work) run the object
code and to modify the work, including scripts to control those activities. However,
it does not include the work's System Libraries, or general-purpose tools or
generally available free programs which are used unmodified in performing those
activities but which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for the work, and
the source code for shared libraries and dynamically linked subprograms that the work
is specifically designed to require, such as by intimate data communication or
control flow between those subprograms and other parts of the work.
The Corresponding Source need not include anything that users can regenerate
automatically from other parts of the Corresponding Source.
The Corresponding Source for a work in source code form is that same work.
2. Basic Permissions
~~~~~~~~~~~~~~~~~~~~
All rights granted under this License are granted for the term of copyright on the
Program, and are irrevocable provided the stated conditions are met. This License
explicitly affirms your unlimited permission to run the unmodified Program. The
output from running a covered work is covered by this License only if the output,
given its content, constitutes a covered work. This License acknowledges your rights
of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not convey, without
conditions so long as your license otherwise remains in force. You may convey covered
works to others for the sole purpose of having them make modifications exclusively
for you, or provide you with facilities for running those works, provided that you
comply with the terms of this License in conveying all material for which you do not
control copyright. Those thus making or running the covered works for you must do so
exclusively on your behalf, under your direction and control, on terms that prohibit
them from making any copies of your copyrighted material outside their relationship
with you.
Conveying under any other circumstances is permitted solely under the conditions
stated below. Sublicensing is not allowed; section 10 makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
No covered work shall be deemed part of an effective technological measure under any
applicable law fulfilling obligations under article 11 of the WIPO copyright treaty
adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention
of such measures.
When you convey a covered work, you waive any legal power to forbid circumvention of
technological measures to the extent such circumvention is effected by exercising
rights under this License with respect to the covered work, and you disclaim any
intention to limit operation or modification of the work as a means of enforcing,
against the work's users, your or third parties' legal rights to forbid circumvention
of technological measures.
4. Conveying Verbatim Copies
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You may convey verbatim copies of the Program's source code as you receive it, in any
medium, provided that you conspicuously and appropriately publish on each copy an
appropriate copyright notice; keep intact all notices stating that this License and
any non-permissive terms added in accord with section 7 apply to the code; keep
intact all notices of the absence of any warranty; and give all recipients a copy of
this License along with the Program.
You may charge any price or no price for each copy that you convey, and you may offer
support or warranty protection for a fee.
5. Conveying Modified Source Versions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You may convey a work based on the Program, or the modifications to produce it from
the Program, in the form of source code under the terms of section 4, provided that
you also meet all of these conditions:
* **a)** The work must carry prominent notices stating that you modified it, and giving a
relevant date.
* **b)** The work must carry prominent notices stating that it is released under this
License and any conditions added under section 7. This requirement modifies the
requirement in section 4 to "keep intact all notices".
* **c)** You must license the entire work, as a whole, under this License to anyone who
comes into possession of a copy. This License will therefore apply, along with any
applicable section 7 additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no permission to license the
work in any other way, but it does not invalidate such permission if you have
separately received it.
* **d)** If the work has interactive user interfaces, each must display Appropriate Legal
Notices; however, if the Program has interactive interfaces that do not display
Appropriate Legal Notices, your work need not make them do so.
A compilation of a covered work with other separate and independent works, which are
not by their nature extensions of the covered work, and which are not combined with
it such as to form a larger program, in or on a volume of a storage or distribution
medium, is called an "aggregate" if the compilation and its resulting
copyright are not used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work in an aggregate
does not cause this License to apply to the other parts of the aggregate.
6. Conveying Non-Source Forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You may convey a covered work in object code form under the terms of sections 4 and
5, provided that you also convey the machine-readable Corresponding Source under the
terms of this License, in one of these ways:
* **a)** Convey the object code in, or embodied in, a physical product (including a
physical distribution medium), accompanied by the Corresponding Source fixed on a
durable physical medium customarily used for software interchange.
* **b)** Convey the object code in, or embodied in, a physical product (including a
physical distribution medium), accompanied by a written offer, valid for at least
three years and valid for as long as you offer spare parts or customer support for
that product model, to give anyone who possesses the object code either **(1)** a copy of
the Corresponding Source for all the software in the product that is covered by this
License, on a durable physical medium customarily used for software interchange, for
a price no more than your reasonable cost of physically performing this conveying of
source, or **(2)** access to copy the Corresponding Source from a network server at no
charge.
* **c)** Convey individual copies of the object code with a copy of the written offer to
provide the Corresponding Source. This alternative is allowed only occasionally and
noncommercially, and only if you received the object code with such an offer, in
accord with subsection 6b.
* **d)** Convey the object code by offering access from a designated place (gratis or for
a charge), and offer equivalent access to the Corresponding Source in the same way
through the same place at no further charge. You need not require recipients to copy
the Corresponding Source along with the object code. If the place to copy the object
code is a network server, the Corresponding Source may be on a different server
(operated by you or a third party) that supports equivalent copying facilities,
provided you maintain clear directions next to the object code saying where to find
the Corresponding Source. Regardless of what server hosts the Corresponding Source,
you remain obligated to ensure that it is available for as long as needed to satisfy
these requirements.
* **e)** Convey the object code using peer-to-peer transmission, provided you inform
other peers where the object code and Corresponding Source of the work are being
offered to the general public at no charge under subsection 6d.
A separable portion of the object code, whose source code is excluded from the
Corresponding Source as a System Library, need not be included in conveying the
object code work.
A "User Product" is either **(1)** a "consumer product", which
means any tangible personal property which is normally used for personal, family, or
household purposes, or **(2)** anything designed or sold for incorporation into a
dwelling. In determining whether a product is a consumer product, doubtful cases
shall be resolved in favor of coverage. For a particular product received by a
particular user, "normally used" refers to a typical or common use of
that class of product, regardless of the status of the particular user or of the way
in which the particular user actually uses, or expects or is expected to use, the
product. A product is a consumer product regardless of whether the product has
substantial commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install and execute
modified versions of a covered work in that User Product from a modified version of
its Corresponding Source. The information must suffice to ensure that the continued
functioning of the modified object code is in no case prevented or interfered with
solely because modification has been made.
If you convey an object code work under this section in, or with, or specifically for
use in, a User Product, and the conveying occurs as part of a transaction in which
the right of possession and use of the User Product is transferred to the recipient
in perpetuity or for a fixed term (regardless of how the transaction is
characterized), the Corresponding Source conveyed under this section must be
accompanied by the Installation Information. But this requirement does not apply if
neither you nor any third party retains the ability to install modified object code
on the User Product (for example, the work has been installed in ROM).
The requirement to provide Installation Information does not include a requirement to
continue to provide support service, warranty, or updates for a work that has been
modified or installed by the recipient, or for the User Product in which it has been
modified or installed. Access to a network may be denied when the modification itself
materially and adversely affects the operation of the network or violates the rules
and protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided, in accord with
this section must be in a format that is publicly documented (and with an
implementation available to the public in source code form), and must require no
special password or key for unpacking, reading or copying.
7. Additional Terms
~~~~~~~~~~~~~~~~~~~
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions. Additional
permissions that are applicable to the entire Program shall be treated as though they
were included in this License, to the extent that they are valid under applicable
law. If additional permissions apply only to part of the Program, that part may be
used separately under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option remove any
additional permissions from that copy, or from any part of it. (Additional
permissions may be written to require their own removal in certain cases when you
modify the work.) You may place additional permissions on material, added by you to a
covered work, for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you add to a
covered work, you may (if authorized by the copyright holders of that material)
supplement the terms of this License with terms:
* **a)** Disclaiming warranty or limiting liability differently from the terms of
sections 15 and 16 of this License; or
* **b)** Requiring preservation of specified reasonable legal notices or author
attributions in that material or in the Appropriate Legal Notices displayed by works
containing it; or
* **c)** Prohibiting misrepresentation of the origin of that material, or requiring that
modified versions of such material be marked in reasonable ways as different from the
original version; or
* **d)** Limiting the use for publicity purposes of names of licensors or authors of the
material; or
* **e)** Declining to grant rights under trademark law for use of some trade names,
trademarks, or service marks; or
* **f)** Requiring indemnification of licensors and authors of that material by anyone
who conveys the material (or modified versions of it) with contractual assumptions of
liability to the recipient, for any liability that these contractual assumptions
directly impose on those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you received
it, or any part of it, contains a notice stating that it is governed by this License
along with a term that is a further restriction, you may remove that term. If a
license document contains a further restriction but permits relicensing or conveying
under this License, you may add to a covered work material governed by the terms of
that license document, provided that the further restriction does not survive such
relicensing or conveying.
If you add terms to a covered work in accord with this section, you must place, in
the relevant source files, a statement of the additional terms that apply to those
files, or a notice indicating where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the form of a
separately written license, or stated as exceptions; the above requirements apply
either way.
8. Termination
~~~~~~~~~~~~~~
You may not propagate or modify a covered work except as expressly provided under
this License. Any attempt otherwise to propagate or modify it is void, and will
automatically terminate your rights under this License (including any patent licenses
granted under the third paragraph of section 11).
However, if you cease all violation of this License, then your license from a
particular copyright holder is reinstated **(a)** provisionally, unless and until the
copyright holder explicitly and finally terminates your license, and **(b)** permanently,
if the copyright holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is reinstated permanently
if the copyright holder notifies you of the violation by some reasonable means, this
is the first time you have received notice of violation of this License (for any
work) from that copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the licenses of
parties who have received copies or rights from you under this License. If your
rights have been terminated and not permanently reinstated, you do not qualify to
receive new licenses for the same material under section 10.
9. Acceptance Not Required for Having Copies
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You are not required to accept this License in order to receive or run a copy of the
Program. Ancillary propagation of a covered work occurring solely as a consequence of
using peer-to-peer transmission to receive a copy likewise does not require
acceptance. However, nothing other than this License grants you permission to
propagate or modify any covered work. These actions infringe copyright if you do not
accept this License. Therefore, by modifying or propagating a covered work, you
indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Each time you convey a covered work, the recipient automatically receives a license
from the original licensors, to run, modify and propagate that work, subject to this
License. You are not responsible for enforcing compliance by third parties with this
License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an organization, or
merging organizations. If propagation of a covered work results from an entity
transaction, each party to that transaction who receives a copy of the work also
receives whatever licenses to the work the party's predecessor in interest had or
could give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if the predecessor
has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the rights granted or
affirmed under this License. For example, you may not impose a license fee, royalty,
or other charge for exercise of rights granted under this License, and you may not
initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging
that any patent claim is infringed by making, using, selling, offering for sale, or
importing the Program or any portion of it.
11. Patents
~~~~~~~~~~~
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The work thus
licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims owned or
controlled by the contributor, whether already acquired or hereafter acquired, that
would be infringed by some manner, permitted by this License, of making, using, or
selling its contributor version, but do not include claims that would be infringed
only as a consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant patent
sublicenses in a manner consistent with the requirements of this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free patent license
under the contributor's essential patent claims, to make, use, sell, offer for sale,
import and otherwise run, modify and propagate the contents of its contributor
version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent (such as an
express permission to practice a patent or covenant not to sue for patent
infringement). To "grant" such a patent license to a party means to make
such an agreement or commitment not to enforce a patent against the party.
If you convey a covered work, knowingly relying on a patent license, and the
Corresponding Source of the work is not available for anyone to copy, free of charge
and under the terms of this License, through a publicly available network server or
other readily accessible means, then you must either **(1)** cause the Corresponding
Source to be so available, or **(2)** arrange to deprive yourself of the benefit of the
patent license for this particular work, or **(3)** arrange, in a manner consistent with
the requirements of this License, to extend the patent license to downstream
recipients. "Knowingly relying" means you have actual knowledge that, but
for the patent license, your conveying the covered work in a country, or your
recipient's use of the covered work in a country, would infringe one or more
identifiable patents in that country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or arrangement, you
convey, or propagate by procuring conveyance of, a covered work, and grant a patent
license to some of the parties receiving the covered work authorizing them to use,
propagate, modify or convey a specific copy of the covered work, then the patent
license you grant is automatically extended to all recipients of the covered work and
works based on it.
A patent license is "discriminatory" if it does not include within the
scope of its coverage, prohibits the exercise of, or is conditioned on the
non-exercise of one or more of the rights that are specifically granted under this
License. You may not convey a covered work if you are a party to an arrangement with
a third party that is in the business of distributing software, under which you make
payment to the third party based on the extent of your activity of conveying the
work, and under which the third party grants, to any of the parties who would receive
the covered work from you, a discriminatory patent license **(a)** in connection with
copies of the covered work conveyed by you (or copies made from those copies), or **(b)**
primarily for and in connection with specific products or compilations that contain
the covered work, unless you entered into that arrangement, or that patent license
was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting any implied
license or other defenses to infringement that may otherwise be available to you
under applicable patent law.
12. No Surrender of Others' Freedom
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If conditions are imposed on you (whether by court order, agreement or otherwise)
that contradict the conditions of this License, they do not excuse you from the
conditions of this License. If you cannot convey a covered work so as to satisfy
simultaneously your obligations under this License and any other pertinent
obligations, then as a consequence you may not convey it at all. For example, if you
agree to terms that obligate you to collect a royalty for further conveying from
those to whom you convey the Program, the only way you could satisfy both those terms
and this License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Notwithstanding any other provision of this License, you have permission to link or
combine any covered work with a work licensed under version 3 of the GNU Affero
General Public License into a single combined work, and to convey the resulting work.
The terms of this License will continue to apply to the part which is the covered
work, but the special requirements of the GNU Affero General Public License, section
13, concerning interaction through a network will apply to the combination as such.
14. Revised Versions of this License
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The Free Software Foundation may publish revised and/or new versions of the GNU
General Public License from time to time. Such new versions will be similar in spirit
to the present version, but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Program specifies that
a certain numbered version of the GNU General Public License "or any later
version" applies to it, you have the option of following the terms and
conditions either of that numbered version or of any later version published by the
Free Software Foundation. If the Program does not specify a version number of the GNU
General Public License, you may choose any version ever published by the Free
Software Foundation.
If the Program specifies that a proxy can decide which future versions of the GNU
General Public License can be used, that proxy's public statement of acceptance of a
version permanently authorizes you to choose that version for the Program.
Later license versions may give you additional or different permissions. However, no
additional obligations are imposed on any author or copyright holder as a result of
your choosing to follow a later version.
15. Disclaimer of Warranty
~~~~~~~~~~~~~~~~~~~~~~~~~~
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER
EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE
QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability
~~~~~~~~~~~~~~~~~~~~~~~~~~~
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY
COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS
PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE
OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE
WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
17. Interpretation of Sections 15 and 16
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the disclaimer of warranty and limitation of liability provided above cannot be
given local legal effect according to their terms, reviewing courts shall apply local
law that most closely approximates an absolute waiver of all civil liability in
connection with the Program, unless a warranty or assumption of liability accompanies
a copy of the Program in return for a fee.
*END OF TERMS AND CONDITIONS*
How to Apply These Terms to Your New Programs
---------------------------------------------
If you develop a new program, and you want it to be of the greatest possible use to
the public, the best way to achieve this is to make it free software which everyone
can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest to attach them
to the start of each source file to most effectively state the exclusion of warranty;
and each file should have at least the "copyright" line and a pointer to
where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short notice like this
when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type 'show c' for details.
The hypothetical commands `show w` and `show c` should show the appropriate parts of
the General Public License. Of course, your program's commands might be different;
for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school, if any, to
sign a "copyright disclaimer" for the program, if necessary. For more
information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may consider it
more useful to permit linking proprietary applications with the library. If this is
what you want to do, use the GNU Lesser General Public License instead of this
License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
/ANSIColors-balises-1.9.9.public.tar.gz/ANSIColors-balises-1.9.9.public/README
ANSI Colors
===========
A Python script and module to simply use ANSI Colors in a terminal.

----
### Author:
Lilian Besson.
### Language:
Python v2.7+ (but *not* v3).
A Python 3 compatible version is in progress!
This project is now hosted on the [PyPI module repository](<https://pypi.python.org/pypi/ANSIColors-balises> "Pypi !").
Documentation
-------------
The complete documentation of the module is available, see [here on pythonhosted.org](<http://pythonhosted.org/ANSIColors-balises/> "on-line").
**All the details (installation, options, etc) are in the doc**.
Anyway, here are somes infos.
----
Installation
============
The project is just the main script **ANSIColors.py**.
How to install it
-----------------
Download or copy it from this *git* repository, then launch ``python setup.py install``.
More details can be found in the **INSTALL** file.
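Once installed, the module is used with color *tags* ("balises") embedded in strings. Here is a minimal usage sketch; the ``printc`` function and the exact tag names (``<red>``, ``<white>``) are assumptions based on the module's tag-oriented design, so check the on-line documentation for the exact API:

```python
# Hypothetical usage sketch -- see the on-line documentation for the real API.
from ANSIColors import printc

# Color tags are replaced by the matching ANSI escape sequences at print time.
printc("<red>Warning:<white> something needs your attention.")
```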
Dependencies
------------
The project is *entirely written in Python*, version *2.7.3*.
For more details about the **Python** language, see [the official site](<http://www.python.org> "Python power !").
Python 2.7.1 or higher is **required**.
Plateform(s)
------------
The project has been *developed* on *GNU/Linux* (Ubuntu 11.10).
#### Warning (Windows)
It has also been quickly tested on *Windows 7* **with the Cygwin environment** and Python 2.7.
#### Warning (Mac OS X)
It should also work on *Mac OS X*, but it has **not been tested**.
Any suggestions or feedback are welcome!
About the project
=================
This project was part of my work for the MPRI 1-21 **network programming course**.
The MPRI is the **Parisian Master for Research in Computer Science** (*Master Parisien de Recherche en Informatique* in French).
About the doc
=============
The documentation is produced mainly with **Sphinx**, the Python's documentation generator.
I wrote a few scripts to help *Sphinx* to do what I wanted, and to generate a *PyDoc* version of the doc too.
Those scripts now constitute an independent and powerful project.
It is hosted [here on my Google Site](https://sites.google.com/site/naereencorp/liste-des-projets/makepydoc "check this out too ;) !")
Contact me
----------
Feel free to contact me, either with a bitbucket message (my profile is [lbesson](https://bitbucket.org/lbesson/ "here")), or via an email at **lilian DOT besson AT ens-cachan DOT fr**.
License
-------
This project is released under the **GPLv3 license**, for more details, take a look at the LICENSE file in the source.
*Basically, that allow you to use all or part of the project for you own business.*
/swarms-1.5.0.tar.gz/swarms-1.5.0/README.md

<div align="center">
Swarms is a modular framework that enables reliable and useful multi-agent collaboration at scale to automate real-world tasks.
[](https://github.com/kyegomez/swarms/issues) [](https://github.com/kyegomez/swarms/network) [](https://github.com/kyegomez/swarms/stargazers) [](https://github.com/kyegomez/swarms/blob/main/LICENSE)[](https://star-history.com/#kyegomez/swarms)[](https://libraries.io/github/kyegomez/swarms) [](https://pepy.tech/project/swarms)
### Share on Social Media
[](https://twitter.com/intent/tweet?text=Check%20out%20this%20amazing%20AI%20project:%20&url=https%3A%2F%2Fgithub.com%2Fkyegomez%2Fswarms) [](https://www.facebook.com/sharer/sharer.php?u=https%3A%2F%2Fgithub.com%2Fkyegomez%2Fswarms) [](https://www.linkedin.com/shareArticle?mini=true&url=https%3A%2F%2Fgithub.com%2Fkyegomez%2Fswarms&title=&summary=&source=)
[](https://www.reddit.com/submit?url=https%3A%2F%2Fgithub.com%2Fkyegomez%2Fswarms&title=Swarms%20-%20the%20future%20of%20AI) [](https://news.ycombinator.com/submitlink?u=https%3A%2F%2Fgithub.com%2Fkyegomez%2Fswarms&t=Swarms%20-%20the%20future%20of%20AI) [](https://pinterest.com/pin/create/button/?url=https%3A%2F%2Fgithub.com%2Fkyegomez%2Fswarms&media=https%3A%2F%2Fexample.com%2Fimage.jpg&description=Swarms%20-%20the%20future%20of%20AI) [](https://api.whatsapp.com/send?text=Check%20out%20Swarms%20-%20the%20future%20of%20AI%20%23swarms%20%23AI%0A%0Ahttps%3A%2F%2Fgithub.com%2Fkyegomez%2Fswarms)
</div>
## Purpose
At Swarms, we're transforming the landscape of AI from siloed AI agents to a unified 'swarm' of intelligence. Through relentless iteration and the power of collective insight from our 1500+ Agora researchers, we're developing a groundbreaking framework for AI collaboration. Our mission is to catalyze a paradigm shift, advancing Humanity with the power of unified autonomous AI agent swarms.
-----
## Hiring
We're hiring: engineers, researchers, interns, and sales professionals to work on democratizing swarms. Email me with your story at `kye@apac.ai`
----------
## Installation
There are two methods: one is through `git clone`, and the other is `pip install swarms`. Check out the [DOCUMENTATION](DOCS/DOCUMENTATION.md) for more information on the classes.
* Pip install `pip3 install swarms`
* Create new python file and unleash superintelligence
```python
from swarms import Worker
node = Worker(
openai_api_key="",
ai_name="Optimus Prime",
)
task = "What were the winning boston marathon times for the past 5 years (ending in 2022)? Generate a table of the year, name, country of origin, and times."
response = node.run(task)
print(response)
```
---
## Usage
```python
from swarms import HuggingFaceLLM
hugging_face_model = HuggingFaceLLM(model_id="Voicelab/trurl-2-13b")
generated_text = hugging_face_model.generate("In a world where AI")
```
```python
from swarms import Worker
node = Worker(
openai_api_key="",
ai_name="Optimus Prime",
)
task = "What were the winning boston marathon times for the past 5 years (ending in 2022)? Generate a table of the year, name, country of origin, and times."
response = node.run(task)
print(response)
```
---
# Documentation
For documentation, go here: [the docs folder in the root directory](https://swarms.apac.ai)
**NOTE: We need help building the documentation**
-----
# Docker Setup
The Dockerfiles are located in the Docker folder within the `infra` folder; [click here and navigate there in your environment](/infra/Docker)
* Build the Docker image
* You can build the Docker image using the provided Dockerfile. Navigate to the infra/Docker directory where the Dockerfiles are located.
* For the CPU version, use:
```bash
docker build -t swarms-api:latest -f Dockerfile.cpu .
```
* For the GPU version, use:
```bash
docker build -t swarms-api:gpu -f Dockerfile.gpu .
```
### Run the Docker container
After building the Docker image, you can run the Swarms API in a Docker container. Replace your_redis_host and your_redis_port with your actual Redis host and port.
For the CPU version:
```bash
docker run -p 8000:8000 -e REDIS_HOST=your_redis_host -e REDIS_PORT=your_redis_port swarms-api:latest
```
For the GPU version:
```bash
docker run --gpus all -p 8000:8000 -e REDIS_HOST=your_redis_host -e REDIS_PORT=your_redis_port swarms-api:gpu
```
### Access the Swarms API
* The Swarms API will be accessible at http://localhost:8000. You can use tools like curl or Postman to send requests to the API.
Here's an example curl command to send a POST request to the /chat endpoint:
```bash
curl -X POST -H "Content-Type: application/json" -d '{"api_key": "your_openai_api_key", "objective": "your_objective"}' http://localhost:8000/chat
```
Replace your_openai_api_key and your_objective with your actual OpenAI API key and objective.
----
# ✨ Features
* Easy to use Base LLMs: `OpenAI`, `Palm`, `Anthropic`, `HuggingFace`
* Enterprise Grade, Production Ready with robust Error Handling
* Multi-Modality Native with Multi-Modal LLMs as tools
* Infinite Memory Processing: Store infinite sequences of infinite Multi-Modal data, text, images, videos, audio
* Usability: Extreme emphasis on usability; the code is at its theoretical minimum of complexity to use
* Reliability: Outputs that accomplish tasks and activities you wish to execute.
* Fluidity: A seamless all-around experience to build production grade workflows
* Speed: Lower the time to automate tasks by 90%.
* Simplicity: Swarms is extremely simple to use, if not the simplest agent framework of all time
* Powerful: Swarms is capable of everything from building entire software apps to large-scale data analysis and handling chaotic situations
-----
## Contribute
We're always looking for contributors to help us improve and expand this project. If you're interested, please check out our [Contributing Guidelines](DOCS/C0NTRIBUTING.md).
Thank you for being a part of our project!
---
# Roadmap
Please checkout our [Roadmap](DOCS/ROADMAP.md) and consider contributing to make the dream of Swarms real to advance Humanity.
## Optimization Priorities
1. **Reliability**: Increase the reliability of the swarm - obtaining the desired output with a basic and un-detailed input.
2. **Speed**: Reduce the time it takes for the swarm to accomplish tasks by improving the communication layer, critiquing, and self-alignment with meta prompting.
3. **Scalability**: Ensure that the system is asynchronous, concurrent, and self-healing to support scalability.
Our goal is to continuously improve Swarms by following this roadmap, while also being adaptable to new needs and opportunities as they arise.
---
# Bounty Program
Our bounty program is an exciting opportunity for contributors to help us build the future of Swarms. By participating, you can earn rewards while contributing to a project that aims to revolutionize digital activity.
Here's how it works:
1. **Check out our Roadmap**: We've shared our roadmap detailing our short and long-term goals. These are the areas where we're seeking contributions.
2. **Pick a Task**: Choose a task from the roadmap that aligns with your skills and interests. If you're unsure, you can reach out to our team for guidance.
3. **Get to Work**: Once you've chosen a task, start working on it. Remember, quality is key. We're looking for contributions that truly make a difference.
4. **Submit your Contribution**: Once your work is complete, submit it for review. We'll evaluate your contribution based on its quality, relevance, and the value it brings to Swarms.
5. **Earn Rewards**: If your contribution is approved, you'll earn a bounty. The amount of the bounty depends on the complexity of the task, the quality of your work, and the value it brings to Swarms.
---
## The Plan
### Phase 1: Building the Foundation
In the first phase, our focus is on building the basic infrastructure of Swarms. This includes developing key components like the Swarms class, integrating essential tools, and establishing task completion and evaluation logic. We'll also start developing our testing and evaluation framework during this phase. If you're interested in foundational work and have a knack for building robust, scalable systems, this phase is for you.
### Phase 2: Optimizing the System
In the second phase, we'll focus on optimizing Swarms by integrating more advanced features, improving the system's efficiency, and refining our testing and evaluation framework. This phase involves more complex tasks, so if you enjoy tackling challenging problems and contributing to the development of innovative features, this is the phase for you.
### Phase 3: Towards Super-Intelligence
The third phase of our bounty program is the most exciting - this is where we aim to achieve super-intelligence. In this phase, we'll be working on improving the swarm's capabilities, expanding its skills, and fine-tuning the system based on real-world testing and feedback. If you're excited about the future of AI and want to contribute to a project that could potentially transform the digital world, this is the phase for you.
Remember, our roadmap is a guide, and we encourage you to bring your own ideas and creativity to the table. We believe that every contribution, no matter how small, can make a difference. So join us on this exciting journey and help us create the future of Swarms.
<!-- **To participate in our bounty program, visit the [Swarms Bounty Program Page](https://swarms.ai/bounty).** Let's build the future together! -->
---
# EcoSystem
* [The-Compiler, compile natural language into serene, reliable, and secure programs](https://github.com/kyegomez/the-compiler)
* [The Replicator, an autonomous swarm that conducts Multi-Modal AI research by creating new underlying mathematical operations and models](https://github.com/kyegomez/The-Replicator)
* Make a swarm that checks arxiv for papers -> checks if there is a GitHub link -> then implements them and checks them
* [SwarmLogic, where a swarm is your API, database, and backend!](https://github.com/kyegomez/SwarmLogic)
---
# Demos

## Swarm Video Demo {Click for more}
[](https://youtu.be/Br62cDMYXgc)
---
# Contact
For enterprise and production-ready deployments, allow us to learn more about you and your story; [book a call with us here](https://www.apac.ai/Setup-Call)
|
PypiClean
|
/django-phone-login-fix-version-0.0.9.tar.gz/django-phone-login-fix-version-0.0.9/README.md
|
[![build-status-image]][travis]
[![pypi-version]][pypi]
# Django Phone Login
Django-phone-login uses django-sendsms to send SMS messages.
Django Phone Login provides phone number login with no additional passwords to remember.
It's an easy way to grow your customer base, without any hassle.
## Installing Django Phone Login
Django Phone Login was built for Django.
From PyPI, install using pip:
```bash
pip install django-phone-login
```
If you want to install manually:
```bash
git clone [email protected]:wejhink/django-phone-login.git
cd django-phone-login/
pip install -r requirements.txt
python setup.py install
```
## Instructions
```python
INSTALLED_APPS += [
... # Make sure to include the default installed apps here.
'phone_login',
'rest_framework',
'rest_framework.authtoken',
]
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.TokenAuthentication',
)
}
AUTHENTICATION_BACKENDS = [
'phone_login.backends.phone_backend.PhoneBackend',
'django.contrib.auth.backends.ModelBackend'
]
# Make sure you also have the Django Templates backend with APP_DIRS set to True, if you want to use the default OTP template.
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
...
},
]
# Configure the SENDSMS_BACKEND (for django-sendsms integration)
SENDSMS_BACKEND = 'myapp.mysmsbackend.SmsBackend' #(defaults to 'sendsms.backends.console.SmsBackend')
SENDSMS_FROM_NUMBER = "+XXxxxxxxxxxx"
SENDSMS_ACCOUNT_SID = 'ACXXXXXXXXXXXXXX'
SENDSMS_AUTH_TOKEN = 'xxxxxxxx'
```
## Adding to URLs
Add the below to your `urls.py`:
```python
urlpatterns = [
url(r'^phone_login/', include('phone_login.urls', namespace='phone_login'),),
]
```
## Customizable Fields in Settings.
```python
PHONE_LOGIN_ATTEMPTS = 10
PHONE_LOGIN_OTP_LENGTH = 6
PHONE_LOGIN_OTP_HASH_ALGORITHM = 'sha256'
PHONE_LOGIN_DEBUG = True # will include otp in generate response, default is False.
```
# Flow
1. The user enters their `phone_number` and sends a request to generate a `secret_code`.
1. `django-phone-login` sends a `secret_code` as an SMS to the phone number.
1. The user sends the `secret_code` to the server for verification.
1. `django-phone-login` verifies it and sends a `token` in the response using `DRF3` (a sketch of this exchange is shown below).
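The sketch below walks through this flow with the `requests` library; it is illustrative only. The endpoint names `generate/` and `validate/` are assumptions, so check `phone_login/urls.py` for the routes your version actually exposes.

```python
# A minimal, hypothetical sketch of the OTP login flow. The app is assumed
# to be mounted under /phone_login/ as in the urls.py example above; the
# "generate" and "validate" endpoint names are placeholders.
import requests

BASE = "http://localhost:8000/phone_login"
PHONE = "+14155550123"  # illustrative phone number

# Step 1: ask the server to generate and SMS a secret code.
requests.post(f"{BASE}/generate/", data={"phone_number": PHONE})

# Step 2: exchange the received secret code for a DRF auth token.
resp = requests.post(
    f"{BASE}/validate/",
    data={"phone_number": PHONE, "secret_code": "123456"},
)
print(resp.json())  # expected to contain the token on success
```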
## Why use django-phone-login?
+ Phone number login, no password required.
+ Registration through phone number.
+ Mobile based user authentication.
[build-status-image]: https://secure.travis-ci.org/wejhink/django-phone-login.svg?branch=master
[travis]: http://travis-ci.org/wejhink/django-phone-login?branch=master
[pypi-version]: https://img.shields.io/pypi/v/django-phone-login.svg
[pypi]: https://pypi.python.org/pypi/django-phone-login
|
PypiClean
|
/jupyros-0.7.0a0.tar.gz/jupyros-0.7.0a0/js/node_modules/@types/node/ts4.8/stream/promises.d.ts
|
declare module 'stream/promises' {
import { FinishedOptions, PipelineSource, PipelineTransform, PipelineDestination, PipelinePromise, PipelineOptions } from 'node:stream';
function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise<void>;
function pipeline<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>(source: A, destination: B, options?: PipelineOptions): PipelinePromise<B>;
function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>(
source: A,
transform1: T1,
destination: B,
options?: PipelineOptions
): PipelinePromise<B>;
function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>(
source: A,
transform1: T1,
transform2: T2,
destination: B,
options?: PipelineOptions
): PipelinePromise<B>;
function pipeline<
A extends PipelineSource<any>,
T1 extends PipelineTransform<A, any>,
T2 extends PipelineTransform<T1, any>,
T3 extends PipelineTransform<T2, any>,
B extends PipelineDestination<T3, any>
>(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise<B>;
function pipeline<
A extends PipelineSource<any>,
T1 extends PipelineTransform<A, any>,
T2 extends PipelineTransform<T1, any>,
T3 extends PipelineTransform<T2, any>,
T4 extends PipelineTransform<T3, any>,
B extends PipelineDestination<T4, any>
>(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise<B>;
function pipeline(streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>, options?: PipelineOptions): Promise<void>;
function pipeline(
stream1: NodeJS.ReadableStream,
stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | PipelineOptions>
): Promise<void>;
}
declare module 'node:stream/promises' {
export * from 'stream/promises';
}
|
PypiClean
|
/compressed_dictionary-1.2.1.tar.gz/compressed_dictionary-1.2.1/compressed_dictionary/utils/split.py
|
import os
import math
import logging
from tqdm import tqdm
from argparse import ArgumentParser
from compressed_dictionary import CompressedDictionary
logging.getLogger().setLevel(logging.INFO)
def main(args):
assert os.path.isfile(args.input_file), (
f"Input file {args.input_file} does not exist."
)
assert not os.path.isdir(args.output_folder), (
f"Output directory {args.output_folder} does already exist."
)
assert (args.parts is None) != (args.parts_length is None), (
"you must define exactly one of `parts` and `parts-length`"
)
logging.info("Loading input dictionary")
dictionary = CompressedDictionary.load(args.input_file, limit=args.limit)
os.makedirs(args.output_folder)
logging.info("Splitting")
splits_iterator = dictionary.split(
parts=args.parts,
parts_length=args.parts_length,
drop_last=args.drop_last,
reset_keys=args.reset_keys,
shuffle=args.shuffle
)
logging.info("Writing splits to disk")
total = (
args.parts if args.parts is not None else (
math.floor(len(dictionary) / args.parts_length) if args.drop_last else math.ceil(len(dictionary) / args.parts_length)
)
)
for i, split_dict in tqdm(enumerate(splits_iterator), desc="Splitting", total=total):
name = f"{os.path.basename(args.input_file).split('.')[0]}-split-{i}"
split_dict.dump(
os.path.join(args.output_folder, name),
)
logging.info("Done")
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('-i', '--input-file', type=str, required=True, help="Input dictionary to split")
parser.add_argument('-o', '--output-folder', type=str, required=True, help="Output folder in which splits will be put")
parser.add_argument('--parts', type=int, required=False, default=None, help="Number of parts to split the dictionary into")
parser.add_argument('--parts-length', type=int, required=False, default=None, help="Number of key-value pairs per resulting part")
parser.add_argument('--drop-last', action="store_true", help="Drop the last part if it contains fewer than parts-length entries")
parser.add_argument('--reset-keys', action="store_true", help="Reset the keys in each resulting split")
parser.add_argument('--shuffle', action="store_true", help="Shuffle the key-value pairs before splitting")
parser.add_argument('--limit', type=int, default=None, required=False,
help="Read only a limited number of key-value pairs from the input dict")
args = parser.parse_args()
main(args)
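# Example invocations of this script (a sketch; file and folder names are
# placeholders, and exactly one of --parts / --parts-length must be given):
#   python split.py -i dictionary.xz -o splits/ --parts 4 --shuffle
#   python split.py -i dictionary.xz -o splits/ --parts-length 10000 --drop-last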
|
PypiClean
|
/lbrlabs_pulumi_ovh-0.32.0.tar.gz/lbrlabs_pulumi_ovh-0.32.0/lbrlabs_pulumi_ovh/cloudprojectdatabase/opensearch_pattern.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['OpensearchPatternArgs', 'OpensearchPattern']
@pulumi.input_type
class OpensearchPatternArgs:
def __init__(__self__, *,
cluster_id: pulumi.Input[str],
pattern: pulumi.Input[str],
service_name: pulumi.Input[str],
max_index_count: Optional[pulumi.Input[int]] = None):
"""
The set of arguments for constructing a OpensearchPattern resource.
:param pulumi.Input[str] cluster_id: Cluster ID.
:param pulumi.Input[str] pattern: Pattern format.
:param pulumi.Input[str] service_name: The id of the public cloud project. If omitted,
the `OVH_CLOUD_PROJECT_SERVICE` environment variable is used.
:param pulumi.Input[int] max_index_count: Maximum number of indexes for this pattern.
"""
pulumi.set(__self__, "cluster_id", cluster_id)
pulumi.set(__self__, "pattern", pattern)
pulumi.set(__self__, "service_name", service_name)
if max_index_count is not None:
pulumi.set(__self__, "max_index_count", max_index_count)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Input[str]:
"""
Cluster ID.
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: pulumi.Input[str]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter
def pattern(self) -> pulumi.Input[str]:
"""
Pattern format.
"""
return pulumi.get(self, "pattern")
@pattern.setter
def pattern(self, value: pulumi.Input[str]):
pulumi.set(self, "pattern", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Input[str]:
"""
The id of the public cloud project. If omitted,
the `OVH_CLOUD_PROJECT_SERVICE` environment variable is used.
"""
return pulumi.get(self, "service_name")
@service_name.setter
def service_name(self, value: pulumi.Input[str]):
pulumi.set(self, "service_name", value)
@property
@pulumi.getter(name="maxIndexCount")
def max_index_count(self) -> Optional[pulumi.Input[int]]:
"""
Maximum number of indexes for this pattern.
"""
return pulumi.get(self, "max_index_count")
@max_index_count.setter
def max_index_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_index_count", value)
@pulumi.input_type
class _OpensearchPatternState:
def __init__(__self__, *,
cluster_id: Optional[pulumi.Input[str]] = None,
max_index_count: Optional[pulumi.Input[int]] = None,
pattern: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering OpensearchPattern resources.
:param pulumi.Input[str] cluster_id: Cluster ID.
:param pulumi.Input[int] max_index_count: Maximum number of indexes for this pattern.
:param pulumi.Input[str] pattern: Pattern format.
:param pulumi.Input[str] service_name: The id of the public cloud project. If omitted,
the `OVH_CLOUD_PROJECT_SERVICE` environment variable is used.
"""
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if max_index_count is not None:
pulumi.set(__self__, "max_index_count", max_index_count)
if pattern is not None:
pulumi.set(__self__, "pattern", pattern)
if service_name is not None:
pulumi.set(__self__, "service_name", service_name)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
Cluster ID.
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="maxIndexCount")
def max_index_count(self) -> Optional[pulumi.Input[int]]:
"""
Maximum number of indexes for this pattern.
"""
return pulumi.get(self, "max_index_count")
@max_index_count.setter
def max_index_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_index_count", value)
@property
@pulumi.getter
def pattern(self) -> Optional[pulumi.Input[str]]:
"""
Pattern format.
"""
return pulumi.get(self, "pattern")
@pattern.setter
def pattern(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pattern", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> Optional[pulumi.Input[str]]:
"""
The id of the public cloud project. If omitted,
the `OVH_CLOUD_PROJECT_SERVICE` environment variable is used.
"""
return pulumi.get(self, "service_name")
@service_name.setter
def service_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_name", value)
class OpensearchPattern(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
max_index_count: Optional[pulumi.Input[int]] = None,
pattern: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates a pattern for an opensearch cluster associated with a public cloud project.
## Example Usage
```python
import pulumi
import lbrlabs_pulumi_ovh as ovh
opensearch = ovh.CloudProjectDatabase.get_database(service_name="XXX",
engine="opensearch",
id="ZZZ")
pattern = ovh.cloud_project_database.OpensearchPattern("pattern",
service_name=opensearch.service_name,
cluster_id=opensearch.id,
max_index_count=2,
pattern="logs_*")
```
## Import
OVHcloud Managed opensearch cluster patterns can be imported using the `service_name`, `cluster_id` and `id` of the pattern, separated by "/", e.g.:
```sh
$ pulumi import ovh:CloudProjectDatabase/opensearchPattern:OpensearchPattern my_pattern service_name/cluster_id/id
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cluster_id: Cluster ID.
:param pulumi.Input[int] max_index_count: Maximum number of indexes for this pattern.
:param pulumi.Input[str] pattern: Pattern format.
:param pulumi.Input[str] service_name: The id of the public cloud project. If omitted,
the `OVH_CLOUD_PROJECT_SERVICE` environment variable is used.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: OpensearchPatternArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates a pattern for an opensearch cluster associated with a public cloud project.
## Example Usage
```python
import pulumi
import lbrlabs_pulumi_ovh as ovh
opensearch = ovh.CloudProjectDatabase.get_database(service_name="XXX",
engine="opensearch",
id="ZZZ")
pattern = ovh.cloud_project_database.OpensearchPattern("pattern",
service_name=opensearch.service_name,
cluster_id=opensearch.id,
max_index_count=2,
pattern="logs_*")
```
## Import
OVHcloud Managed opensearch cluster patterns can be imported using the `service_name`, `cluster_id` and `id` of the pattern, separated by "/", e.g.:
```sh
$ pulumi import ovh:CloudProjectDatabase/opensearchPattern:OpensearchPattern my_pattern service_name/cluster_id/id
```
:param str resource_name: The name of the resource.
:param OpensearchPatternArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(OpensearchPatternArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
max_index_count: Optional[pulumi.Input[int]] = None,
pattern: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = OpensearchPatternArgs.__new__(OpensearchPatternArgs)
if cluster_id is None and not opts.urn:
raise TypeError("Missing required property 'cluster_id'")
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["max_index_count"] = max_index_count
if pattern is None and not opts.urn:
raise TypeError("Missing required property 'pattern'")
__props__.__dict__["pattern"] = pattern
if service_name is None and not opts.urn:
raise TypeError("Missing required property 'service_name'")
__props__.__dict__["service_name"] = service_name
super(OpensearchPattern, __self__).__init__(
'ovh:CloudProjectDatabase/opensearchPattern:OpensearchPattern',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
max_index_count: Optional[pulumi.Input[int]] = None,
pattern: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None) -> 'OpensearchPattern':
"""
Get an existing OpensearchPattern resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] cluster_id: Cluster ID.
:param pulumi.Input[int] max_index_count: Maximum number of indexes for this pattern.
:param pulumi.Input[str] pattern: Pattern format.
:param pulumi.Input[str] service_name: The id of the public cloud project. If omitted,
the `OVH_CLOUD_PROJECT_SERVICE` environment variable is used.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _OpensearchPatternState.__new__(_OpensearchPatternState)
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["max_index_count"] = max_index_count
__props__.__dict__["pattern"] = pattern
__props__.__dict__["service_name"] = service_name
return OpensearchPattern(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Output[str]:
"""
Cluster ID.
"""
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter(name="maxIndexCount")
def max_index_count(self) -> pulumi.Output[Optional[int]]:
"""
Maximum number of indexes for this pattern.
"""
return pulumi.get(self, "max_index_count")
@property
@pulumi.getter
def pattern(self) -> pulumi.Output[str]:
"""
Pattern format.
"""
return pulumi.get(self, "pattern")
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Output[str]:
"""
The id of the public cloud project. If omitted,
the `OVH_CLOUD_PROJECT_SERVICE` environment variable is used.
"""
return pulumi.get(self, "service_name")
|
PypiClean
|
/pulumi_aws_native-0.75.1a1693503310.tar.gz/pulumi_aws_native-0.75.1a1693503310/pulumi_aws_native/supportapp/account_alias.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['AccountAliasArgs', 'AccountAlias']
@pulumi.input_type
class AccountAliasArgs:
def __init__(__self__, *,
account_alias: pulumi.Input[str]):
"""
The set of arguments for constructing a AccountAlias resource.
:param pulumi.Input[str] account_alias: An account alias associated with a customer's account.
"""
pulumi.set(__self__, "account_alias", account_alias)
@property
@pulumi.getter(name="accountAlias")
def account_alias(self) -> pulumi.Input[str]:
"""
An account alias associated with a customer's account.
"""
return pulumi.get(self, "account_alias")
@account_alias.setter
def account_alias(self, value: pulumi.Input[str]):
pulumi.set(self, "account_alias", value)
class AccountAlias(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_alias: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
An AWS Support App resource that creates, updates, reads, and deletes a customer's account alias.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_alias: An account alias associated with a customer's account.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AccountAliasArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
An AWS Support App resource that creates, updates, reads, and deletes a customer's account alias.
:param str resource_name: The name of the resource.
:param AccountAliasArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AccountAliasArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_alias: Optional[pulumi.Input[str]] = None,
__props__=None):
opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AccountAliasArgs.__new__(AccountAliasArgs)
if account_alias is None and not opts.urn:
raise TypeError("Missing required property 'account_alias'")
__props__.__dict__["account_alias"] = account_alias
__props__.__dict__["account_alias_resource_id"] = None
super(AccountAlias, __self__).__init__(
'aws-native:supportapp:AccountAlias',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'AccountAlias':
"""
Get an existing AccountAlias resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = AccountAliasArgs.__new__(AccountAliasArgs)
__props__.__dict__["account_alias"] = None
__props__.__dict__["account_alias_resource_id"] = None
return AccountAlias(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="accountAlias")
def account_alias(self) -> pulumi.Output[str]:
"""
An account alias associated with a customer's account.
"""
return pulumi.get(self, "account_alias")
@property
@pulumi.getter(name="accountAliasResourceId")
def account_alias_resource_id(self) -> pulumi.Output[str]:
"""
Unique identifier representing an alias tied to an account
"""
return pulumi.get(self, "account_alias_resource_id")
|
PypiClean
|
/django-genes-0.18.tar.gz/django-genes-0.18/genes/utils.py
|
from django.db.models.functions import Coalesce
from genes.models import Gene, CrossRef, CrossRefDB
def translate_genes(id_list=None, from_id=None, to_id=None, organism=None):
"""
Pass a list of identifiers (id_list), the name of the database ('Entrez',
'Symbol', 'Standard name', 'Systematic name' or a loaded crossreference
database) that you wish to translate from, and the name of the database
that you wish to translate to.
"""
ids = set(id_list)
# Initialize set of identifiers not found by this translate_genes method.
not_found = set()
from_ids = None # Get the map of from_ids to the gene pks
if organism is not None:
gene_objects_manager = Gene.objects.filter(
organism__scientific_name=organism)
else:
gene_objects_manager = Gene.objects
if (from_id == 'Entrez'):
int_list = []
for x in ids:
try:
int_list.append(int(x))
except(ValueError):
not_found.add(x)
ids = set(int_list)
from_ids = gene_objects_manager.filter(entrezid__in=ids).values_list(
'entrezid', 'id')
elif (from_id == 'Systematic name'):
from_ids = gene_objects_manager.filter(
systematic_name__in=ids).values_list('systematic_name', 'id')
elif (from_id == 'Standard name'):
from_ids = gene_objects_manager.filter(
standard_name__in=ids).values_list('standard_name', 'id')
elif (from_id == 'Symbol'):
# If standard_name exists, symbol will be standard_name; otherwise
# symbol will be systematic_name.
from_ids = gene_objects_manager.annotate(
symbol=Coalesce('standard_name', 'systematic_name')).filter(
symbol__in=ids).values_list('symbol', 'id')
else: # a crossreference db?
xrdb = CrossRefDB.objects.get(name=from_id)
from_ids = CrossRef.objects.filter(crossrefdb=xrdb).values_list(
'xrid', 'gene__id')
# Dictionary that maps from type ID passed by user to gene__id.
from_id_map = {}
gene_ids = []
for item in from_ids:
from_id_map[item[0]] = item[1]
gene_ids.append(item[1])
# Now let's figure out what we need to go to:
to_ids = None
if (to_id == 'Entrez'):
to_ids = Gene.objects.filter(id__in=gene_ids).values_list(
'id', 'entrezid')
elif (to_id == 'Systematic name'):
to_ids = Gene.objects.filter(id__in=gene_ids).values_list(
'id', 'systematic_name')
elif (to_id == 'Standard name'):
to_ids = Gene.objects.filter(id__in=gene_ids).values_list(
'id', 'standard_name')
elif (to_id == 'Symbol'):
# If standard_name exists, symbol will be standard_name; otherwise
# symbol will be systematic_name.
to_ids = Gene.objects.annotate(
symbol=Coalesce('standard_name', 'systematic_name')).filter(
id__in=gene_ids).values_list('id', 'symbol')
else: # A crossreference db?
xrdb = CrossRefDB.objects.get(name=to_id)
to_ids = CrossRef.objects.filter(crossrefdb=xrdb).values_list(
'gene__id', 'xrid')
to_id_map = {}
for item in to_ids:
if not item[0] in to_id_map:
to_id_map[item[0]] = [item[1], ]
else:
to_id_map[item[0]].append(item[1])
from_to = {}
for item in ids:
try:
gene_id = from_id_map[item]
except KeyError:
not_found.add(item)
continue
to_id = to_id_map[gene_id]
from_to[item] = to_id
from_to['not_found'] = list(not_found)
return from_to
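# Example (a sketch; the identifiers and organism are illustrative):
#   mapping = translate_genes(id_list=['7157', '672'], from_id='Entrez',
#                             to_id='Symbol', organism='Homo sapiens')
#   # Each input ID maps to a list of target identifiers; inputs that could
#   # not be translated are collected under the 'not_found' key.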
|
PypiClean
|
/latimes-appengine-template-0.022.tar.gz/latimes-appengine-template-0.022/appengine_template/google_appengine/lib/yaml/lib/yaml/serializer.py
|
__all__ = ['Serializer', 'SerializerError']
from error import YAMLError
from events import *
from nodes import *
class SerializerError(YAMLError):
pass
class Serializer(object):
ANCHOR_TEMPLATE = u'id%03d'
def __init__(self, encoding=None,
explicit_start=None, explicit_end=None, version=None, tags=None):
self.use_encoding = encoding
self.use_explicit_start = explicit_start
self.use_explicit_end = explicit_end
self.use_version = version
self.use_tags = tags
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
self.closed = None
def open(self):
if self.closed is None:
self.emit(StreamStartEvent(encoding=self.use_encoding))
self.closed = False
elif self.closed:
raise SerializerError("serializer is closed")
else:
raise SerializerError("serializer is already opened")
def close(self):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif not self.closed:
self.emit(StreamEndEvent())
self.closed = True
#def __del__(self):
# self.close()
def serialize(self, node):
if self.closed is None:
raise SerializerError("serializer is not opened")
elif self.closed:
raise SerializerError("serializer is closed")
self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
version=self.use_version, tags=self.use_tags))
self.anchor_node(node)
self.serialize_node(node, None, None)
self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
self.serialized_nodes = {}
self.anchors = {}
self.last_anchor_id = 0
def anchor_node(self, node):
if node in self.anchors:
if self.anchors[node] is None:
self.anchors[node] = self.generate_anchor(node)
else:
self.anchors[node] = None
if isinstance(node, SequenceNode):
for item in node.value:
self.anchor_node(item)
elif isinstance(node, MappingNode):
for key, value in node.value:
self.anchor_node(key)
self.anchor_node(value)
def generate_anchor(self, node):
self.last_anchor_id += 1
return self.ANCHOR_TEMPLATE % self.last_anchor_id
def serialize_node(self, node, parent, index):
alias = self.anchors[node]
if node in self.serialized_nodes:
self.emit(AliasEvent(alias))
else:
self.serialized_nodes[node] = True
self.descend_resolver(parent, index)
if isinstance(node, ScalarNode):
detected_tag = self.resolve(ScalarNode, node.value, (True, False))
default_tag = self.resolve(ScalarNode, node.value, (False, True))
implicit = (node.tag == detected_tag), (node.tag == default_tag)
self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
style=node.style))
elif isinstance(node, SequenceNode):
implicit = (node.tag
== self.resolve(SequenceNode, node.value, True))
self.emit(SequenceStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
index = 0
for item in node.value:
self.serialize_node(item, node, index)
index += 1
self.emit(SequenceEndEvent())
elif isinstance(node, MappingNode):
implicit = (node.tag
== self.resolve(MappingNode, node.value, True))
self.emit(MappingStartEvent(alias, node.tag, implicit,
flow_style=node.flow_style))
for key, value in node.value:
self.serialize_node(key, node, None)
self.serialize_node(value, node, key)
self.emit(MappingEndEvent())
self.ascend_resolver()
|
PypiClean
|
/stochss-compute-1.0.2.tar.gz/stochss-compute-1.0.2/stochss_compute/core/messages.py
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from abc import ABC, abstractmethod
from enum import Enum
from hashlib import md5
from gillespy2 import Model, Results
from tornado.escape import json_encode, json_decode
class SimStatus(Enum):
'''
Status describing a remote simulation.
'''
PENDING = 'The simulation is pending.'
RUNNING = 'The simulation is still running.'
READY = 'Simulation is done and results exist in the cache.'
ERROR = 'The Simulation has encountered an error.'
DOES_NOT_EXIST = 'There is no evidence of this simulation either running or on disk.'
@staticmethod
def from_str(name):
'''
Convert str to Enum.
'''
if name == 'PENDING':
return SimStatus.PENDING
if name == 'RUNNING':
return SimStatus.RUNNING
if name == 'READY':
return SimStatus.READY
if name == 'ERROR':
return SimStatus.ERROR
if name == 'DOES_NOT_EXIST':
return SimStatus.DOES_NOT_EXIST
class Request(ABC):
'''
Base class.
'''
@abstractmethod
def encode(self):
'''
Encode self for http.
'''
@staticmethod
@abstractmethod
def parse(raw_request):
'''
Parse http for python.
'''
class Response(ABC):
'''
Base class.
'''
@abstractmethod
def encode(self):
'''
Encode self for http.
'''
@staticmethod
@abstractmethod
def parse(raw_response):
'''
Parse http for python.
'''
class SimulationRunRequest(Request):
'''
A simulation request.
:param model: A model to run.
:type model: gillespy2.Model
:param kwargs: kwargs for the model.run() call.
:type kwargs: dict[str, Any]
'''
def __init__(self, model, **kwargs):
self.model = model
self.kwargs = kwargs
def encode(self):
'''
JSON-encode model and then encode self to dict.
'''
return {'model': self.model.to_json(),
'kwargs': self.kwargs}
@staticmethod
def parse(raw_request):
'''
Parse HTTP request.
:param raw_request: The request.
:type raw_request: dict[str, str]
:returns: The decoded object.
:rtype: SimulationRunRequest
'''
request_dict = json_decode(raw_request)
model = Model.from_json(request_dict['model'])
kwargs = request_dict['kwargs']
return SimulationRunRequest(model, **kwargs)
def hash(self):
'''
Generate a unique hash of this simulation request.
Does not include number_of_trajectories in this calculation.
:returns: md5 hex digest.
:rtype: str
'''
anon_model_string = self.model.to_anon().to_json(encode_private=False)
popped_kwargs = {kw:self.kwargs[kw] for kw in self.kwargs if kw!='number_of_trajectories'}
kwargs_string = json_encode(popped_kwargs)
request_string = f'{anon_model_string}{kwargs_string}'
_hash = md5(str.encode(request_string)).hexdigest()
return _hash
class SimulationRunResponse(Response):
'''
A response from the server regarding a SimulationRunRequest.
:param status: The status of the simulation.
:type status: SimStatus
:param error_message: Possible error message.
:type error_message: str | None
:param results_id: Hash of the simulation request. Identifies the results.
:type results_id: str | None
:param results: JSON-Encoded gillespy2.Results
:type results: str | None
'''
def __init__(self, status, error_message = None, results_id = None, results = None, task_id = None):
self.status = status
self.error_message = error_message
self.results_id = results_id
self.results = results
self.task_id = task_id
def encode(self):
'''
Encode self to dict.
'''
return {'status': self.status.name,
'error_message': self.error_message or '',
'results_id': self.results_id or '',
'results': self.results or '',
'task_id': self.task_id or '',}
@staticmethod
def parse(raw_response):
'''
Parse HTTP response.
:param raw_response: The response.
:type raw_response: dict[str, str]
:returns: The decoded object.
:rtype: SimulationRunResponse
'''
response_dict = json_decode(raw_response)
status = SimStatus.from_str(response_dict['status'])
results_id = response_dict['results_id']
error_message = response_dict['error_message']
task_id = response_dict['task_id']
if response_dict['results'] != '':
results = Results.from_json(response_dict['results'])
else:
results = None
return SimulationRunResponse(status, error_message, results_id, results, task_id)
class StatusRequest(Request):
'''
A request for simulation status.
:param results_id: Hash of the SimulationRunRequest
:type results_id: str
'''
def __init__(self, results_id):
self.results_id = results_id
def encode(self):
'''
:returns: self.__dict__
:rtype: dict
'''
return self.__dict__
@staticmethod
def parse(raw_request):
'''
Parse HTTP request.
:param raw_request: The request.
:type raw_request: dict[str, str]
:returns: The decoded object.
:rtype: StatusRequest
'''
request_dict = json_decode(raw_request)
return StatusRequest(request_dict['results_id'])
class StatusResponse(Response):
'''
A response from the server about simulation status.
:param status: Status of the simulation
:type status: SimStatus
:param message: Possible error message or otherwise
:type message: str
'''
def __init__(self, status, message = None):
self.status = status
self.message = message
def encode(self):
'''
Encodes self.
:returns: self as dict
:rtype: dict[str, str]
'''
return {'status': self.status.name,
'message': self.message or ''}
@staticmethod
def parse(raw_response):
'''
Parse HTTP response.
:param raw_response: The response.
:type raw_response: dict[str, str]
:returns: The decoded object.
:rtype: StatusResponse
'''
response_dict = json_decode(raw_response)
status = SimStatus.from_str(response_dict['status'])
message = response_dict['message']
if not message:
return StatusResponse(status)
else:
return StatusResponse(status, message)
class ResultsRequest(Request):
'''
Request results from the server.
:param results_id: Hash of the SimulationRunRequest
:type results_id: str
'''
def __init__(self, results_id):
self.results_id = results_id
def encode(self):
'''
:returns: self.__dict__
:rtype: dict
'''
return self.__dict__
@staticmethod
def parse(raw_request):
'''
Parse HTTP request.
:param raw_request: The request.
:type raw_request: dict[str, str]
:returns: The decoded object.
:rtype: ResultsRequest
'''
request_dict = json_decode(raw_request)
return ResultsRequest(request_dict['results_id'])
class ResultsResponse(Response):
'''
A response from the server about the Results.
:param results: The requested Results from the cache. (JSON)
:type results: str
'''
def __init__(self, results = None):
self.results = results
def encode(self):
'''
:returns: self.__dict__
:rtype: dict
'''
return {'results': self.results or ''}
@staticmethod
def parse(raw_response):
'''
Parse HTTP response.
:param raw_response: The response.
:type raw_response: dict[str, str]
:returns: The decoded object.
:rtype: ResultsResponse
'''
response_dict = json_decode(raw_response)
if response_dict['results'] != '':
results = Results.from_json(response_dict['results'])
else:
results = None
return ResultsResponse(results)
class SourceIpRequest(Request):
'''
Restrict server access.
:param cloud_key: Random key generated locally during launch.
:type cloud_key: str
'''
def __init__(self, cloud_key):
self.cloud_key = cloud_key
def encode(self):
'''
:returns: self.__dict__
:rtype: dict
'''
return self.__dict__
@staticmethod
def parse(raw_request):
'''
Parse HTTP request.
:param raw_request: The request.
:type raw_request: dict[str, str]
:returns: The decoded object.
:rtype: SourceIpRequest
'''
request_dict = json_decode(raw_request)
return SourceIpRequest(request_dict['cloud_key'])
class SourceIpResponse(Response):
'''
Response from server containing IP address of the source.
:param source_ip: IP address of the client.
:type source_ip: str
'''
def __init__(self, source_ip):
self.source_ip = source_ip
def encode(self):
'''
:returns: self.__dict__
:rtype: dict
'''
return self.__dict__
@staticmethod
def parse(raw_response):
'''
Parses a http response and returns a python object.
:param raw_response: A raw http SourceIpResponse from the server.
:type raw_response: str
:returns: The decoded object.
:rtype: SourceIpResponse
'''
response_dict = json_decode(raw_response)
return SourceIpResponse(response_dict['source_ip'])
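# Round-trip sketch (assumes a gillespy2 Model instance named `model`):
#   request = SimulationRunRequest(model, number_of_trajectories=10)
#   payload = json_encode(request.encode())       # what travels over HTTP
#   parsed = SimulationRunRequest.parse(payload)  # server-side reconstruction
#   parsed.hash() == request.hash()               # cache key stays stable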
|
PypiClean
|
/surface_tracker-0.0.1.tar.gz/surface_tracker-0.0.1/src/surface_tracker/heatmap.py
|
import enum
import typing as T
import cv2
import numpy as np
from .location import SurfaceLocation
from .surface import SurfaceId
class SurfaceHeatmap:
class ColorFormat(enum.Enum):
RGB = enum.auto()
@property
def channel_count(self) -> int:
if self == SurfaceHeatmap.ColorFormat.RGB:
return 3
raise NotImplementedError()
@staticmethod
def _create_surface_heatmap(
points_in_image_space: T.List[T.Tuple[int, int]], location: SurfaceLocation
):
surface_heatmap = SurfaceHeatmap(surface_uid=location.surface_uid)
surface_heatmap._add_points(
points_in_image_space=points_in_image_space, location=location
)
return surface_heatmap
def __init__(self, surface_uid: SurfaceId):
self.__surface_uid = surface_uid
self.__points_in_surface_space_numpy = []
self._invalidate_cached_computations()
def _add_points(
self,
points_in_image_space: T.List[T.Tuple[int, int]],
location: SurfaceLocation,
):
points_in_image_space_numpy = np.asarray(
points_in_image_space, dtype=np.float32
)
new_points_in_surface_space_numpy = location._map_from_image_to_surface(
points=np.asarray(points_in_image_space_numpy, dtype=np.float32)
)
for p in new_points_in_surface_space_numpy:
self.__points_in_surface_space_numpy.append(p)
def image(
self,
size: T.Tuple[int, int],
color_format: T.Optional["SurfaceHeatmap.ColorFormat"] = None,
) -> np.ndarray:
if color_format is None:
color_format = SurfaceHeatmap.ColorFormat.RGB
elif not isinstance(color_format, SurfaceHeatmap.ColorFormat):
raise ValueError(
f"color_format must be an instance of SurfaceHeatmap.ColorFormat"
)
cache_key = (size, color_format)
heatmap_resolution = 31
heatmap_blur_factor = 0.0
if cache_key not in self.__heatmap_image_by_size_and_color_format:
heatmap_data = np.asarray(self.__points_in_surface_space_numpy)
aspect_ratio = size[1] / size[0]
grid = (
max(1, int(heatmap_resolution * aspect_ratio)),
int(heatmap_resolution),
)
if len(heatmap_data) > 0:
xvals = heatmap_data[:, 0]
yvals = 1.0 - heatmap_data[:, 1]
histogram, *edges = np.histogram2d(
yvals, xvals, bins=grid, range=[[0, 1.0], [0, 1.0]], density=False
)
filter_h = 19 + heatmap_blur_factor * 15
filter_w = filter_h * aspect_ratio
filter_h = int(filter_h) // 2 * 2 + 1
filter_w = int(filter_w) // 2 * 2 + 1
histogram = cv2.GaussianBlur(histogram, (filter_h, filter_w), 0)
histogram_max = histogram.max()
histogram *= (255.0 / histogram_max) if histogram_max else 0.0
histogram = histogram.astype(np.uint8)
else:
histogram = np.zeros(grid, dtype=np.uint8)
histogram = cv2.applyColorMap(histogram, cv2.COLORMAP_JET)
if color_format == SurfaceHeatmap.ColorFormat.RGB:
heatmap = np.ones((*grid, color_format.channel_count), dtype=np.uint8)
heatmap[:, :, 0] = histogram[:, :, 2] # red
heatmap[:, :, 1] = histogram[:, :, 1] # green
heatmap[:, :, 2] = histogram[:, :, 0] # blue
else:
raise ValueError(f'Unsupported color_format: "{color_format}"')
heatmap = cv2.resize(heatmap, dsize=size, interpolation=cv2.INTER_CUBIC)
assert len(heatmap.shape) == 3 # sanity check
assert (heatmap.shape[1], heatmap.shape[0]) == size # sanity check
self.__heatmap_image_by_size_and_color_format[cache_key] = heatmap
return self.__heatmap_image_by_size_and_color_format[cache_key]
def _invalidate_cached_computations(self):
self.__heatmap_image_by_size_and_color_format = {}
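# Usage sketch (assumes a detected SurfaceLocation `location` and a list of
# (x, y) gaze points in image coordinates):
#   heatmap = SurfaceHeatmap._create_surface_heatmap(points, location)
#   rgb = heatmap.image(size=(640, 480))  # returns a (480, 640, 3) uint8 array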
|
PypiClean
|
/fedmsg_fasclient-0.7.tar.gz/fedmsg_fasclient-0.7/fedmsg_fasclient.py
|
import pprint
import subprocess
import fedmsg.consumers
import moksha.hub.reactor
class FasClientConsumer(fedmsg.consumers.FedmsgConsumer):
# Because we are interested in a variety of topics, we tell moksha that
# we're interested in all of them (it doesn't know how to do complicated
# distinctions). But then we'll filter later in our consume() method.
topic = '*'
interesting_topics = [
'org.fedoraproject.prod.fas.role.update',
'org.fedoraproject.prod.fas.group.member.sponsor',
'org.fedoraproject.prod.fas.group.member.remove',
'org.fedoraproject.prod.fas.user.update',
]
config_key = 'fasclient.consumer.enabled'
def __init__(self, hub):
super(FasClientConsumer, self).__init__(hub)
# This is required. It is the number of seconds that we should wait
# until we ultimately act on a pkgdb message.
self.delay = self.hub.config['fasclient.consumer.delay']
# We use this to manage our state
self.queued_messages = []
def consume(self, msg):
if msg['topic'] not in self.interesting_topics:
return
# Strip off moksha's outer envelope, which should always be present
msg = msg.get('body', {})
# Check to see if it's not crazy-malformed
if 'msg' not in msg:
self.log.warning("msg %r, %r is crazy malformed" % (
msg.get('msg_id'), msg.get('topic')))
return
# Only run fasclient if the user changed his/her ssh key in FAS
if msg['topic'] == 'org.fedoraproject.prod.fas.user.update':
fields = msg['msg'].get('fields', [])
if 'ssh_key' not in fields:
self.log.debug("msg %r has no 'ssh_key' in %r" % (
msg.get('msg_id'), fields))
return
else:
self.log.info("%r bears ssh_key change" % msg.get('msg_id'))
# Skip the run when certain groups are updated
if msg['topic'].startswith('org.fedoraproject.prod.fas.group.member.'):
group = msg['msg'].get('group', None)
if group in ['cla_fpca']:
self.log.debug("msg %r group 'cla_fpca'" % msg.get('msg_id'))
return
else:
self.log.info("%r is not for cla_fpca" % msg.get('msg_id'))
self.log.info("Got a message %r" % msg['topic'])
def delayed_consume():
if self.queued_messages:
try:
self.action(self.queued_messages)
finally:
# Empty our list at the end of the day.
self.queued_messages = []
else:
self.log.debug("Woke up, but there were no messages.")
self.queued_messages.append(msg)
moksha.hub.reactor.reactor.callLater(self.delay, delayed_consume)
def action(self, messages):
self.log.debug("Acting on %s" % pprint.pformat(messages))
command = '/usr/bin/sudo -i /usr/bin/ansible-playbook ' \
'/srv/web/infra/ansible/playbooks/run_fasClient.yml'
command = command.split()
self.log.info("Running %r" % command)
process = subprocess.Popen(args=command)
stdout, stderr = process.communicate()
if process.returncode == 0:
self.log.info("%r was successful" % command)
else:
self.log.error("%r exited with %r, stdout: %s, stderr: %s" % (
command, process.returncode, stdout, stderr))
|
PypiClean
|
/gpm_api-0.2.4.tar.gz/gpm_api-0.2.4/gpm_api/visualization/plot.py
|
import inspect
import cartopy
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import numpy as np
import xarray as xr
from matplotlib.collections import PolyCollection
from mpl_toolkits.axes_grid1 import make_axes_locatable
from scipy.ndimage import binary_dilation
### TODO: Add xarray + cartopy (xr_carto) (xr_mpl)
# _plot_cartopy_xr_imshow
# _plot_cartopy_xr_pcolormesh
def is_generator(obj):
return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj)
def _preprocess_figure_args(ax, fig_kwargs={}, subplot_kwargs={}):
if ax is not None:
if len(subplot_kwargs) >= 1:
raise ValueError("Provide `subplot_kwargs`only if `ax`is None")
if len(fig_kwargs) >= 1:
raise ValueError("Provide `fig_kwargs` only if `ax`is None")
# If ax is not specified, specify the figure defaults
# if ax is None:
# Set default figure size and dpi
# fig_kwargs['figsize'] = (12, 10)
# fig_kwargs['dpi'] = 100
def _preprocess_subplot_kwargs(subplot_kwargs):
subplot_kwargs = subplot_kwargs.copy()
if "projection" not in subplot_kwargs:
subplot_kwargs["projection"] = ccrs.PlateCarree()
return subplot_kwargs
def get_extent(da, x="lon", y="lat"):
# TODO: compute corners array to estimate the extent
# - OR increase by 1° in every direction and then wrap between -180, 180, -90, 90
# Get the minimum and maximum longitude and latitude values
lon_min, lon_max = da[x].min(), da[x].max()
lat_min, lat_max = da[y].min(), da[y].max()
extent = (lon_min, lon_max, lat_min, lat_max)
return extent
def get_antimeridian_mask(lons, buffer=True):
"""Get mask of longitude coordinates neighbors crossing the antimeridian."""
# Check vertical edges
row_idx, col_idx = np.where(np.abs(np.diff(lons, axis=0)) > 180)
row_idx_rev, col_idx_rev = np.where(np.abs(np.diff(lons[::-1, :], axis=0)) > 180)
row_idx_rev = lons.shape[0] - row_idx_rev - 1
row_indices = np.append(row_idx, row_idx_rev)
col_indices = np.append(col_idx, col_idx_rev)
# Check horizontal
row_idx, col_idx = np.where(np.abs(np.diff(lons, axis=1)) > 180)
row_idx_rev, col_idx_rev = np.where(np.abs(np.diff(lons[:, ::-1], axis=1)) > 180)
col_idx_rev = lons.shape[1] - col_idx_rev - 1
row_indices = np.append(row_indices, np.append(row_idx, row_idx_rev))
col_indices = np.append(col_indices, np.append(col_idx, col_idx_rev))
# Create mask
mask = np.zeros(lons.shape)
mask[row_indices, col_indices] = 1
# Buffer by 1 in all directions to ensure edges not crossing the antimeridian
mask = binary_dilation(mask)
return mask
def get_masked_cells_polycollection(x, y, arr, mask, plot_kwargs):
from gpm_api.utils.area import _from_corners_to_bounds, _get_lonlat_corners, is_vertex_clockwise
# - Buffer mask by 1 to derive vertices of all masked QuadMesh
mask = binary_dilation(mask)
# - Get index of masked quadmesh
row_mask, col_mask = np.where(mask)
# - Retrieve values of masked cells
array = arr[row_mask, col_mask]
# - Retrieve QuadMesh corners (m+1 x n+1)
x_corners, y_corners = _get_lonlat_corners(x, y)
# - Retrieve QuadMesh bounds (m*n x 4)
x_bounds = _from_corners_to_bounds(x_corners)
y_bounds = _from_corners_to_bounds(y_corners)
# - Retrieve vertices of masked QuadMesh (n_masked, 4, 2)
x_vertices = x_bounds[row_mask, col_mask]
y_vertices = y_bounds[row_mask, col_mask]
vertices = np.stack((x_vertices, y_vertices), axis=2)
# Check that are counterclockwise oriented (check first vertex)
# TODO: this check should be updated to use pyresample.future.spherical
if is_vertex_clockwise(vertices[0, :, :]):
vertices = vertices[:, ::-1, :]
# - Define additional kwargs for PolyCollection
plot_kwargs = plot_kwargs.copy()
if "edgecolors" not in plot_kwargs:
plot_kwargs["edgecolors"] = "face" # 'none'
if "linewidth" not in plot_kwargs:
plot_kwargs["linewidth"] = 0
plot_kwargs["antialiaseds"] = False # to better plotting quality
# - Define PolyCollection
coll = PolyCollection(
verts=vertices,
array=array,
transform=ccrs.Geodetic(),
**plot_kwargs,
)
return coll
def get_valid_pcolormesh_inputs(x, y, data):
"""
Fill non-finite values with neighbour valid coordinates.
pcolormesh does not accept non-finite values in the coordinates.
This function:
- Infill NaN/Inf in lat/x with closest values
- Mask the corresponding pixels in the data that must not be displayed.
"""
# TODO:
# - Combine mask if x, y and data are already masked !
# --> Add in plot_cartopy_pcolormesh !
# - Instead of np.interp, can use nearest neighbors or just 0 to speed up?
from skimage.morphology import dilation, square
# Retrieve mask of invalid coordinates
mask = np.logical_or(~np.isfinite(x), ~np.isfinite(y))
# If no invalid coordinates, return original data
if np.all(~mask):
return x, y, data
mask = dilation(mask, square(10))
data_masked = np.ma.masked_where(mask, data)
x_dummy = x.copy()
x_dummy[mask] = np.interp(np.flatnonzero(mask), np.flatnonzero(~mask), x[~mask])
y_dummy = y.copy()
y_dummy[mask] = np.interp(np.flatnonzero(mask), np.flatnonzero(~mask), y[~mask])
return x_dummy, y_dummy, data_masked
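# Hedged usage sketch (illustrative, not from the original module): a grid
# with one NaN longitude. The NaN is infilled by interpolation so pcolormesh
# accepts the coordinates, while the (dilated) invalid region stays masked in
# the returned data and is therefore not drawn.
def _demo_get_valid_pcolormesh_inputs():
    x = np.tile(np.arange(20.0), (20, 1))
    y = np.tile(np.arange(20.0)[:, None], (1, 20))
    x[0, 0] = np.nan  # a single invalid coordinate
    data = np.ones((20, 20))
    x_ok, y_ok, data_masked = get_valid_pcolormesh_inputs(x, y, data)
    assert np.isfinite(x_ok).all() and np.isfinite(y_ok).all()
    assert data_masked.mask.any()  # the dilated invalid region is masked
    return x_ok, y_ok, data_masked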
####--------------------------------------------------------------------------.
def plot_cartopy_background(ax):
"""Plot cartopy background."""
# - Add coastlines
ax.coastlines()
ax.add_feature(cartopy.feature.LAND, facecolor=[0.9, 0.9, 0.9])
ax.add_feature(cartopy.feature.OCEAN, alpha=0.6)
ax.add_feature(cartopy.feature.BORDERS) # BORDERS also draws provinces, ...
# - Add grid lines
gl = ax.gridlines(
crs=ccrs.PlateCarree(),
draw_labels=True,
linewidth=1,
color="gray",
alpha=0.1,
linestyle="-",
)
gl.top_labels = False # gl.xlabels_top = False
gl.right_labels = False # gl.ylabels_right = False
gl.xlines = True
gl.ylines = True
return ax
def plot_colorbar(p, ax, cbar_kwargs={}, size="5%", pad=0.1):
"""Add a colorbar to a matplotlib/cartopy plot.
p: matplotlib.image.AxesImage
ax: cartopy.mpl.geoaxes.GeoAxesSubplot
"""
cbar_kwargs = cbar_kwargs.copy() # otherwise pop ticklabels outside the function
ticklabels = cbar_kwargs.pop("ticklabels", None)
divider = make_axes_locatable(ax)
cax = divider.new_horizontal(size=size, pad=pad, axes_class=plt.Axes)
p.figure.add_axes(cax)
cbar = plt.colorbar(p, cax=cax, ax=ax, **cbar_kwargs)
if ticklabels is not None:
_ = cbar.ax.set_yticklabels(ticklabels)
return cbar
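# Hedged usage sketch (assumption: any mappable attached to a figure works,
# e.g. the return value of ax.imshow); "label" and "extend" are standard
# matplotlib colorbar kwargs forwarded through cbar_kwargs.
def _demo_plot_colorbar():
    fig, ax = plt.subplots()
    p = ax.imshow(np.random.rand(4, 4))
    return plot_colorbar(p, ax=ax, cbar_kwargs={"label": "value", "extend": "max"})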
####--------------------------------------------------------------------------.
def _plot_cartopy_imshow(
ax,
da,
x,
y,
interpolation="nearest",
add_colorbar=True,
plot_kwargs={},
cbar_kwargs={},
):
"""Plot imshow with cartopy."""
    # - Ensure image with correct dimension order
da = da.transpose(y, x)
arr = da.data.compute()
# - Derive extent
extent = [-180, 180, -90, 90] # TODO: Derive from data !!!!
# TODO: ensure y data is increasing --> origin = "lower"
# TODO: ensure y data is decreasing --> origin = "upper"
# - Add variable field with cartopy
p = ax.imshow(
arr,
transform=ccrs.PlateCarree(),
extent=extent,
origin="lower",
interpolation=interpolation,
**plot_kwargs,
)
# - Set the extent
extent = get_extent(da, x="lon", y="lat")
ax.set_extent(extent)
# - Add colorbar
if add_colorbar:
# --> TODO: set axis proportion in a meaningful way ...
_ = plot_colorbar(p=p, ax=ax, cbar_kwargs=cbar_kwargs)
return p
def _plot_rgb_pcolormesh(x, y, image, ax, **kwargs):
"""Plot xarray RGB DataArray with non uniform-coordinates.
Matplotlib, cartopy and xarray pcolormesh currently does not support RGB(A) arrays.
This is a temporary workaround !
"""
if image.shape[2] not in [3, 4]:
raise ValueError("Expecting RGB or RGB(A) arrays.")
colorTuple = image.reshape((image.shape[0] * image.shape[1], image.shape[2]))
im = ax.pcolormesh(
x,
y,
        image[:, :, 1],  # dummy 2D array; the real colors come from the `color` kwarg
color=colorTuple,
**kwargs,
)
# im.set_array(None)
return im
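# Hedged usage sketch of the workaround above (synthetic data, plain axes):
# the per-cell colors come from the flattened RGB tuples, while the 2D slice
# passed positionally is only a placeholder. Depending on the matplotlib
# version, `im.set_array(None)` (commented out above) may be needed for the
# colors to take effect.
def _demo_plot_rgb_pcolormesh():
    x = np.arange(11.0)  # cell edges: one more than the number of columns
    y = np.arange(6.0)   # cell edges: one more than the number of rows
    rgb = np.random.rand(5, 10, 3)  # (ny, nx, 3) image
    fig, ax = plt.subplots()
    return _plot_rgb_pcolormesh(x, y, rgb, ax=ax)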
def _plot_cartopy_pcolormesh(
ax,
da,
x,
y,
rgb=False,
add_colorbar=True,
plot_kwargs={},
cbar_kwargs={},
):
"""Plot imshow with cartopy.
The function currently does not allow to zoom on regions across the antimeridian.
The function mask scanning pixels which spans across the antimeridian.
"""
# - Get x, y, and array to plot
da = da.compute()
x = da[x].data
y = da[y].data
arr = da.data
# - Ensure arguments
if rgb:
add_colorbar = False
# - Mask cells crossing the antimeridian
mask = get_antimeridian_mask(x, buffer=True)
is_crossing_antimeridian = np.any(mask)
if is_crossing_antimeridian:
arr = np.ma.masked_where(mask, arr)
# Sanitize cmap bad color to avoid cartopy bug
if "cmap" in plot_kwargs:
cmap = plot_kwargs["cmap"]
bad = cmap.get_bad()
bad[3] = 0 # enforce to 0 (transparent)
cmap.set_bad(bad)
plot_kwargs["cmap"] = cmap
# - Add variable field with cartopy
if not rgb:
p = ax.pcolormesh(
x,
y,
arr,
transform=ccrs.PlateCarree(),
**plot_kwargs,
)
# - Add PolyCollection of QuadMesh cells crossing the antimeridian
if is_crossing_antimeridian:
coll = get_masked_cells_polycollection(
x, y, arr.data, mask=mask, plot_kwargs=plot_kwargs
)
p.axes.add_collection(coll)
# - Add RGB
else:
p = _plot_rgb_pcolormesh(x, y, arr, ax=ax, **plot_kwargs)
if is_crossing_antimeridian:
plot_kwargs["facecolors"] = arr.reshape((arr.shape[0] * arr.shape[1], arr.shape[2]))
coll = get_masked_cells_polycollection(
x, y, arr.data, mask=mask, plot_kwargs=plot_kwargs
)
p.axes.add_collection(coll)
# - Set the extent
# --> To be set in projection coordinates of crs !!!
# lon/lat conversion to proj required !
# extent = get_extent(da, x="lon", y="lat")
# ax.set_extent(extent)
# - Add colorbar
if add_colorbar:
# --> TODO: set axis proportion in a meaningful way ...
_ = plot_colorbar(p=p, ax=ax, cbar_kwargs=cbar_kwargs)
return p
def _plot_mpl_imshow(
ax,
da,
x,
y,
interpolation="nearest",
add_colorbar=True,
plot_kwargs={},
cbar_kwargs={},
):
"""Plot imshow with matplotlib."""
    # - Ensure image with correct dimension order
da = da.transpose(y, x)
arr = da.data.compute()
# - Add variable field with matplotlib
p = ax.imshow(
arr,
origin="upper",
interpolation=interpolation,
**plot_kwargs,
)
# - Add colorbar
if add_colorbar:
# --> TODO: set axis proportion in a meaningful way ...
_ = plot_colorbar(p=p, ax=ax, cbar_kwargs=cbar_kwargs)
# - Return mappable
return p
# def _get_colorbar_inset_axes_kwargs(p):
# from mpl_toolkits.axes_grid1.inset_locator import inset_axes
# colorbar_axes = p.colorbar.ax
# # Get the position and size of the colorbar axes in figure coordinates
# bbox = colorbar_axes.get_position()
# # Extract the width and height of the colorbar axes in figure coordinates
# width = bbox.x1 - bbox.x0
# height = bbox.y1 - bbox.y0
# # Get the location of the colorbar axes ('upper', 'lower', 'center', etc.)
# # This information will be used to set the 'loc' parameter of inset_axes
# loc = 'upper right' # Modify this according to your preference
# # Get the transformation of the colorbar axes with respect to the image axes
# # This information will be used to set the 'bbox_transform' parameter of inset_axes
# bbox_transform = colorbar_axes.get_transform()
# # Calculate the coordinates of the colorbar axes relative to the image axes
# x0, y0 = bbox_transform.transform((bbox.x0, bbox.y0))
# x1, y1 = bbox_transform.transform((bbox.x1, bbox.y1))
# bbox_to_anchor = (x0, y0, x1 - x0, y1 - y0)
def set_colorbar_fully_transparent(p):
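    """Make the colorbar fully transparent while keeping its axes space reserved."""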
# Get the position of the colorbar
cbar_pos = p.colorbar.ax.get_position()
cbar_x, cbar_y = cbar_pos.x0, cbar_pos.y0
cbar_width, cbar_height = cbar_pos.width, cbar_pos.height
# Remove the colorbar
p.colorbar.ax.set_visible(False)
    # Reserve the colorbar space with a fully transparent rectangle
fig = plt.gcf()
rect = plt.Rectangle(
(cbar_x, cbar_y),
cbar_width,
cbar_height,
transform=fig.transFigure,
facecolor="none",
edgecolor="none",
)
fig.patches.append(rect)
def _plot_xr_imshow(
ax,
da,
x,
y,
interpolation="nearest",
add_colorbar=True,
plot_kwargs={},
cbar_kwargs={},
xarray_colorbar=True,
visible_colorbar=True,
):
"""Plot imshow with xarray.
The colorbar is added with xarray to enable to display multiple colorbars
when calling this function multiple times on different fields with
different colorbars.
"""
# --> BUG with colorbar: https://github.com/pydata/xarray/issues/7014
ticklabels = cbar_kwargs.pop("ticklabels", None)
if not add_colorbar:
cbar_kwargs = {}
p = da.plot.imshow(
x=x,
y=y,
ax=ax,
interpolation=interpolation,
add_colorbar=add_colorbar,
cbar_kwargs=cbar_kwargs,
**plot_kwargs,
)
plt.title(da.name)
if add_colorbar and ticklabels is not None:
p.colorbar.ax.set_yticklabels(ticklabels)
# Make the colorbar fully transparent with a smart trick ;)
    # - TODO: this still causes issues when plotting 2 colorbars!
if add_colorbar and not visible_colorbar:
set_colorbar_fully_transparent(p)
# Add manually the colorbar
# p = da.plot.imshow(
# x=x,
# y=y,
# ax=ax,
# interpolation=interpolation,
# add_colorbar=False,
# **plot_kwargs,
# )
# plt.title(da.name)
# if add_colorbar:
# _ = plot_colorbar(p=p, ax=ax, cbar_kwargs=cbar_kwargs)
return p
def _plot_xr_pcolormesh(
ax,
da,
x,
y,
add_colorbar=True,
plot_kwargs={},
cbar_kwargs={},
):
"""Plot pcolormesh with xarray."""
ticklabels = cbar_kwargs.pop("ticklabels", None)
if not add_colorbar:
cbar_kwargs = {}
p = da.plot.pcolormesh(
x=x,
y=y,
ax=ax,
add_colorbar=add_colorbar,
cbar_kwargs=cbar_kwargs,
**plot_kwargs,
)
plt.title(da.name)
if add_colorbar and ticklabels is not None:
p.colorbar.ax.set_yticklabels(ticklabels)
return p
####--------------------------------------------------------------------------.
def plot_map(
da,
x="lon",
y="lat",
ax=None,
add_colorbar=True,
add_swath_lines=True, # used only for GPM orbit objects
add_background=True,
rgb=False,
interpolation="nearest", # used only for GPM grid objects
fig_kwargs={},
subplot_kwargs={},
cbar_kwargs={},
**plot_kwargs,
):
from gpm_api.checks import is_grid, is_orbit
from .grid import plot_grid_map
from .orbit import plot_orbit_map
# Plot orbit
if is_orbit(da):
p = plot_orbit_map(
da=da,
x=x,
y=y,
ax=ax,
add_colorbar=add_colorbar,
add_swath_lines=add_swath_lines,
add_background=add_background,
rgb=rgb,
fig_kwargs=fig_kwargs,
subplot_kwargs=subplot_kwargs,
cbar_kwargs=cbar_kwargs,
**plot_kwargs,
)
# Plot grid
elif is_grid(da):
p = plot_grid_map(
da=da,
x=x,
y=y,
ax=ax,
add_colorbar=add_colorbar,
interpolation=interpolation,
add_background=add_background,
fig_kwargs=fig_kwargs,
subplot_kwargs=subplot_kwargs,
cbar_kwargs=cbar_kwargs,
**plot_kwargs,
)
else:
raise ValueError("Can not plot. It's neither a GPM grid, neither a GPM orbit.")
# Return mappable
return p
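# Hedged usage sketch (assumptions: `ds` is a GPM dataset opened elsewhere
# with gpm_api and "surfacePrecipitation" is one of its variables; both names
# are illustrative, not prescribed by this module):
#
#   da = ds["surfacePrecipitation"]
#   p = plot_map(da, add_colorbar=True, cbar_kwargs={"label": "mm/h"})
#   plt.show()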
def plot_image(
da,
x=None,
y=None,
ax=None,
add_colorbar=True,
interpolation="nearest",
fig_kwargs={},
cbar_kwargs={},
**plot_kwargs,
):
# figsize, dpi, subplot_kw only used if ax is None
from gpm_api.checks import is_grid, is_orbit
from gpm_api.visualization.grid import plot_grid_image
from gpm_api.visualization.orbit import plot_orbit_image
# Plot orbit
if is_orbit(da):
p = plot_orbit_image(
da=da,
x=x,
y=y,
ax=ax,
add_colorbar=add_colorbar,
interpolation=interpolation,
fig_kwargs=fig_kwargs,
cbar_kwargs=cbar_kwargs,
**plot_kwargs,
)
# Plot grid
elif is_grid(da):
p = plot_grid_image(
da=da,
x=x,
y=y,
ax=ax,
add_colorbar=add_colorbar,
interpolation=interpolation,
fig_kwargs=fig_kwargs,
cbar_kwargs=cbar_kwargs,
**plot_kwargs,
)
else:
raise ValueError("Can not plot. It's neither a GPM GRID, neither a GPM ORBIT.")
# Return mappable
return p
####--------------------------------------------------------------------------.
def plot_map_mesh(
xr_obj,
x="lon",
y="lat",
ax=None,
edgecolors="k",
linewidth=0.1,
add_background=True,
fig_kwargs={},
subplot_kwargs={},
**plot_kwargs,
):
# Interpolation only for grid objects
# figsize, dpi, subplot_kw only used if ax is None
from gpm_api.checks import is_orbit # is_grid
from .grid import plot_grid_mesh
from .orbit import plot_orbit_mesh
# Plot orbit
if is_orbit(xr_obj):
p = plot_orbit_mesh(
da=xr_obj[y],
ax=ax,
x=x,
y=y,
edgecolors=edgecolors,
linewidth=linewidth,
add_background=add_background,
fig_kwargs=fig_kwargs,
subplot_kwargs=subplot_kwargs,
**plot_kwargs,
)
else: # Plot grid
p = plot_grid_mesh(
xr_obj=xr_obj,
x=x,
y=y,
ax=ax,
edgecolors=edgecolors,
linewidth=linewidth,
add_background=add_background,
fig_kwargs=fig_kwargs,
subplot_kwargs=subplot_kwargs,
**plot_kwargs,
)
# Return mappable
return p
def plot_map_mesh_centroids(
xr_obj,
x="lon",
y="lat",
ax=None,
c="r",
s=1,
add_background=True,
fig_kwargs={},
subplot_kwargs={},
**plot_kwargs,
):
"""Plot GPM orbit granule mesh centroids in a cartographic map."""
# - Check inputs
_preprocess_figure_args(ax=ax, fig_kwargs=fig_kwargs, subplot_kwargs=subplot_kwargs)
# - Initialize figure
if ax is None:
subplot_kwargs = _preprocess_subplot_kwargs(subplot_kwargs)
fig, ax = plt.subplots(subplot_kw=subplot_kwargs, **fig_kwargs)
# - Add cartopy background
if add_background:
ax = plot_cartopy_background(ax)
# Plot centroids
lon = xr_obj[x].data
lat = xr_obj[y].data
p = ax.scatter(lon, lat, transform=ccrs.PlateCarree(), c=c, s=s, **plot_kwargs)
# - Return mappable
return p
####--------------------------------------------------------------------------.
def _plot_labels(
xr_obj,
label_name=None,
max_n_labels=50,
add_colorbar=True,
interpolation="nearest",
cmap="Paired",
fig_kwargs={},
**plot_kwargs,
):
"""Plot labels.
The maximum allowed number of labels to plot is 'max_n_labels'.
"""
from ximage.labels.labels import get_label_indices, redefine_label_array
from ximage.labels.plot_labels import get_label_colorbar_settings
from gpm_api.visualization.plot import plot_image
if isinstance(xr_obj, xr.Dataset):
dataarray = xr_obj[label_name]
else:
if label_name is not None:
dataarray = xr_obj[label_name]
else:
dataarray = xr_obj
dataarray = dataarray.compute()
label_indices = get_label_indices(dataarray)
n_labels = len(label_indices)
if add_colorbar and n_labels > max_n_labels:
        msg = (
            f"The array currently contains {n_labels} labels and 'max_n_labels' "
            f"is set to {max_n_labels}. The colorbar is not displayed!"
        )
print(msg)
add_colorbar = False
# Relabel array from 1 to ... for plotting
dataarray = redefine_label_array(dataarray, label_indices=label_indices)
# Replace 0 with nan
dataarray = dataarray.where(dataarray > 0)
# Define appropriate colormap
    plot_kwargs, cbar_kwargs = get_label_colorbar_settings(label_indices, cmap=cmap)
# Plot image
p = plot_image(
dataarray,
interpolation=interpolation,
add_colorbar=add_colorbar,
cbar_kwargs=cbar_kwargs,
fig_kwargs=fig_kwargs,
**plot_kwargs,
)
return p
def plot_labels(
obj, # Dataset, DataArray or generator
label_name=None,
max_n_labels=50,
add_colorbar=True,
interpolation="nearest",
cmap="Paired",
fig_kwargs={},
**plot_kwargs,
):
if is_generator(obj):
for label_id, xr_obj in obj:
p = _plot_labels(
xr_obj=xr_obj,
label_name=label_name,
max_n_labels=max_n_labels,
add_colorbar=add_colorbar,
interpolation=interpolation,
cmap=cmap,
fig_kwargs=fig_kwargs,
**plot_kwargs,
)
plt.show()
else:
p = _plot_labels(
xr_obj=obj,
label_name=label_name,
max_n_labels=max_n_labels,
add_colorbar=add_colorbar,
interpolation=interpolation,
cmap=cmap,
fig_kwargs=fig_kwargs,
**plot_kwargs,
)
return p
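# Hedged usage sketch (assumption: `labels_da` is a label DataArray produced
# upstream with the ximage package; the name is illustrative):
#
#   plot_labels(labels_da, max_n_labels=30, cmap="Paired")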
def plot_patches(
patch_gen,
variable=None,
add_colorbar=True,
interpolation="nearest",
fig_kwargs={},
cbar_kwargs={},
**plot_kwargs,
):
"""Plot patches."""
from gpm_api.visualization.plot import plot_image
# Plot patches
for label_id, xr_patch in patch_gen:
if isinstance(xr_patch, xr.Dataset):
if variable is None:
raise ValueError("'variable' must be specified when plotting xr.Dataset patches.")
xr_patch = xr_patch[variable]
try:
plot_image(
xr_patch,
interpolation=interpolation,
add_colorbar=add_colorbar,
fig_kwargs=fig_kwargs,
cbar_kwargs=cbar_kwargs,
**plot_kwargs,
)
plt.show()
        except Exception:
            # Skip patches that fail to plot, rather than aborting the loop
            pass
return
|
PypiClean
|
/SCOSpy-0.4.4-py3-none-any.whl/SCOS/SCOS.py
|
from bitstring import BitArray, BitStream
import datetime
accessFlag = {
0: {'code': 0, 'description': "Inserted Packet"},
1: {'code': 1, 'description': "Updated"},
}
simFlag = {
0: {"flag": "00", "description": "Not Simulated Packet"},
1: {"flag": "01", "description": "Simulated Packet"}
}
SpacecraftID = {
816: {
'id': 816,
"Spacecraft": "BepiColombo",
"Band": "X-Band"
},
817: {
'id': 817,
"Spacecraft": "BepiColombo",
"Band": "Ka-Band"
},
}
groundStation = {
21: {'id': 21, 'station': "Kourou"},
22: {'id': 22, 'station': "Perth"},
23: {'id': 23, 'station': "New Norcia"},
24: {'id': 24, 'station': "Cebreros"},
25: {'id': 25, 'station': "Malargue"},
30: {'id': 30, 'station': "Maspalomas"},
97: {'id': 97, 'station': "Usuda"},
98: {'id': 98, 'station': "Uchinoura"},
}
PacketType = {
1: {'id': 1, 'description': "Telemetry Packet"},
2: {'id': 2, 'description': "Telecommand Packet"},
3: {'id': 3, 'description': "Event Packet"},
}
FilingFlag = {
0: {'id': 0, 'description': "Packet not filed in MSC archive"},
1: {'id': 1, 'description': "Packet filed in MSC archive"},
}
DistFlag = {
0: {'id': 0, 'description': "Packet is not to be distributed to the MSC application"},
1: {'id': 1, 'description': "Packet is to be distributed to the MSC application"},
}
TsPolicy = {
0: {'id': 0, 'description': "Packet timestamped with creation time"},
1: {'id': 1, 'description': "Packet timestamped with frame recived time"},
2: {'id': 2, 'description': "Packet timestamped with SCET"},
}
timeQuality = {
0: {'id': 0, 'description': "Good"},
1: {'id': 1, 'description': "Inaccurate"},
2: {'id': 2, 'description': "Bad"},
}
StreamID = {
1: {'id': 1, 'description': "Telecommand Stream"},
1000: {'id': 1000, 'description': "VC0 Real-Time Non-Science or Events (online)"},
1001: {'id': 1001, 'description': "VC1 Playback Non-Science or Events (online)"},
1002: {'id': 1002, 'description': "VC2 Science (online)"},
1003: {'id': 1003, 'description': "VC3 File-Transfer (online)"},
2000: {'id': 2000, 'description': "VC0 Real-Time Non-Science or Events (offline)"},
2001: {'id': 2001, 'description': "VC1 Playback Non-Science or Events (offline)"},
    2002: {'id': 2002, 'description': "VC2 Science (offline)"},
    2003: {'id': 2003, 'description': "VC3 File-Transfer (offline)"},
65535: {'id': 65535, 'description': "Internal non Spacecraft Telemetry"},
}
MissionID = {
816: {'id': 816, 'description': "BepiColombo"},
0: {'id': 0, 'description': "Not Decode"},
}
DataUnitType = {
0: {'id': 0, 'description': "TM Transfer Frame"},
1: {'id': 1, 'description': "TM Source Packet"},
2: {'id': 2, 'description': "Internal MCS TM Packet"},
}
Qualifier = {
'0' : {'id': 0, 'description': "Good"},
'1' : {'id': 1, 'description': "Good"},
'2' : {'id': 2, 'description': "CLCW"},
'10': {'id': 10, 'description': "Bad"},
'11': {'id': 11, 'description': "Bad"},
'12': {'id': 12, 'description': "User Defined Constant"},
'20': {'id': 20, 'description': "Idle"},
'21': {'id': 21, 'description': "Idle"},
'22': {'id': 22, 'description': "Status Consistency Check"},
'32': {'id': 32, 'description': "Dynamic Misc"},
'42': {'id': 42, 'description': "Online MIB changes"},
'52': {'id': 52, 'description': "SPPG"},
'62': {'id': 62, 'description': "SPID Validity"},
'72': {'id': 72, 'description': "TPKT Configuration"},
'82': {'id': 82, 'description': "External Source"},
}
def Time(sec, mlsec):
    return datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=sec) + datetime.timedelta(microseconds=mlsec)
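# Hedged worked example: 1234567890 seconds after the 1970-01-01 epoch is
# 2009-02-13 23:31:30 UTC. Note that, despite its name, `mlsec` is added as
# microseconds by the implementation above:
# >>> Time(1234567890, 123456)
# datetime.datetime(2009, 2, 13, 23, 31, 30, 123456)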
class SCOS:
def __init__(self,data):
self.CPH=SCOS_CPH(data[0:120])
if self.CPH.PType['id'] == 1:
self.TMPH=SCOS_TMPH(data[120:152])
self.Data=data[152:]
elif self.CPH.PType['id'] ==2:
self.TCPH=SCOS_TCPH(data[120:208],self.CPH.SeqCounter)
self.Data=data[208:]
# c = BitArray(hex=hexdata[208:])
# self.SPH=SPHeader(c[:48])
# self.SPH=SPHeader_new(hexdata[208:220])
# self.Data=TelecommandData(hexdata)
else:
print("Not Decoded")
class SCOS_TCPH:
def __init__(self,data,SSC):
# 0: Uplink seconds, 1: Uplink milliseconds, 2: Execution seconds, 3: Execution milliseconds
# 4: Last update seconds, 5: Last Update milliseconds, 6: Request ID, 7: Request Element Index
        # 8: Variable address size, 9: PUS APID, 10: PUS SSC, 11: PUS Service Type, 12: PUS SubService Type
# 13: PUS Ack Flag, 14: Uplink Flag, 15: Source Host, 16: Source Type, 17: Request Detail Fixed Size
dp = BitStream(hex=data).unpack(7*'uint:32,'+4*'uint:16,'+6*'uint:8,'+'uint:16')
self.UplinkTime = Time(*dp[0:2])
self.ExecTime = Time(*dp[2:4])
self.LUTime = Time(*dp[4:6])
self.RequestID = dp[6]
self.ReqElemIdx = dp[7]
self.VarAddSz = dp[8]
self.PUSAPID = dp[9]
self.PUSSSC = dp[10]
self.PID, self.PCAT=BitStream(hex=data[64:68]).unpack(2*'uint:8,')
self.PUSService = dp[11]
self.PUSST= dp[11]
self.PUSSubService = dp[12]
self.PUSSST = dp[12]
self.PUSAck = dp[13]
self.UplinkFlag = dp[14]
self.SourceHost = dp[15]
self.SourceType = dp[16]
self.ReqDetFixedSize = dp[17]
class SCOS_CPH:
def __init__(self,data):
        # 0: cTree, 1: Access Flag, 2: Spare, 3: Sim Flag, 4: Filing Time seconds, 5: Filing Time milliseconds,
        # 6: Creation Time seconds, 7: Creation Time milliseconds, 8: Create ID, 9: Spacecraft ID, 10: Ground Station,
        # 11: Size of the packet, 12: Packet Type, 13: Version, 14-15: Spare, 16: Filing Flag, 17: Distribution Flag,
        # 18: Timestamp policy, 19: Time quality, 20: Stream ID, 21: Sequence Counter, 22: SPID, 23: Retrieval Key 1,
        # 24: Retrieval Key 2, 25: Mission ID, 26: Context ID, 27: Domain ID, 28: DB Release, 29: DB Issue, 30: Spare
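        # The unpack format below consumes 16+8+6+2 + 5*32 + 2*16 + 32 + 2*4
        # + 4*1 + 2*2 + 16 + 2*32 + 8*16 = 480 bits, i.e. exactly the 60-byte
        # (120 hex character) common packet header sliced above.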
dp=BitStream(hex=data[0:120]).unpack('bytes:2,uint:8,uint:6, uint:2,'+5*'uint:32,'+2*'uint:16,'+'uint:32,'+2*'uint:4,'+4*'uint:1,'+2*'uint:2,'+'uint:16,'+2*'uint:32,'+8*'uint:16,')
self.CTree = dp[0]
self.AccessF = accessFlag[dp[1]]
self.SimFlag = simFlag[dp[3]]
self.FilingTime=Time(dp[4],dp[5])
self.CreationTime=Time(dp[6], dp[7])
self.CreateID=dp[8]
if dp[9] in SpacecraftID:
self.SCID=SpacecraftID[dp[9]]
else:
self.SCID = {'id': dp[9],"Spacecraft": "Not in DB","Band": "Not in DB"}
if dp[10] in groundStation:
self.GSID = groundStation[dp[10]]
else:
self.GSID = {'id': dp[10], 'station': "Not in DB"}
self.PSize=dp[11]
self.PType=PacketType[dp[12]]
self.Version=dp[13]
self.FilingFlag=FilingFlag[dp[16]]
self.DistFlag=DistFlag[dp[17]]
self.TSPolicy=TsPolicy[dp[18]]
self.TQ= timeQuality[dp[19]]
self.StreamID = StreamID[dp[20]]
self.SeqCounter=dp[21]
self.SPID=dp[22]
if dp[25] in MissionID:
self.MissionID=MissionID[dp[25]]
else:
self.MissionID={'id': dp[25], "description": "Not in DB"}
self.MIB = f"{dp[28]}.{dp[29]}"
class SCOS_TMPH:
def __init__(self,data):
        # 0: Not Used, 1: TPSD, 2: Unused, 3: Data Unit Type, 4: Qualifier, 5: APID, 6: SSC,
        # 7: PUS Service, 8: PUS SubService
dp= BitStream(hex=data).unpack(2*'uint:32,'+'uint:8,'+2*'uint:4,'+2*'uint:16,'+2*'uint:8,')
self.TPSD=dp[1]
self.RouteID={'DataUnitType' : DataUnitType[dp[3]], 'Qualifier' : Qualifier[str(dp[4])]} #RouteID(data[16:20])
self.PUSAPID=dp[5]
self.PUSSSC=dp[6]
self.PUSService=dp[7]
self.PUSSubService=dp[8]
self.PUSST=dp[7]
self.PUSSST=dp[8]
|
PypiClean
|
/pycrate-0.6.0.tar.gz/pycrate-0.6.0/pycrate_asn1dir/T125.py
|
from pycrate_asn1rt.utils import *
from pycrate_asn1rt.err import *
from pycrate_asn1rt.glob import make_GLOBAL, GLOBAL
from pycrate_asn1rt.dictobj import ASN1Dict
from pycrate_asn1rt.refobj import *
from pycrate_asn1rt.setobj import *
from pycrate_asn1rt.asnobj_basic import *
from pycrate_asn1rt.asnobj_str import *
from pycrate_asn1rt.asnobj_construct import *
from pycrate_asn1rt.asnobj_class import *
from pycrate_asn1rt.asnobj_ext import *
from pycrate_asn1rt.init import init_modules
class MCS_PROTOCOL:
_name_ = u'MCS-PROTOCOL'
_oid_ = []
_obj_ = [
u'ChannelId',
u'StaticChannelId',
u'DynamicChannelId',
u'UserId',
u'PrivateChannelId',
u'AssignedChannelId',
u'TokenId',
u'TokenStatus',
u'DataPriority',
u'Segmentation',
u'DomainParameters',
u'Connect-Initial',
u'Connect-Response',
u'Connect-Additional',
u'Connect-Result',
u'PlumbDomainIndication',
u'ErectDomainRequest',
u'ChannelAttributes',
u'MergeChannelsRequest',
u'MergeChannelsConfirm',
u'PurgeChannelsIndication',
u'TokenAttributes',
u'MergeTokensRequest',
u'MergeTokensConfirm',
u'PurgeTokensIndication',
u'DisconnectProviderUltimatum',
u'RejectMCSPDUUltimatum',
u'AttachUserRequest',
u'AttachUserConfirm',
u'DetachUserRequest',
u'DetachUserIndication',
u'ChannelJoinRequest',
u'ChannelJoinConfirm',
u'ChannelLeaveRequest',
u'ChannelConveneRequest',
u'ChannelConveneConfirm',
u'ChannelDisbandRequest',
u'ChannelDisbandIndication',
u'ChannelAdmitRequest',
u'ChannelAdmitIndication',
u'ChannelExpelRequest',
u'ChannelExpelIndication',
u'SendDataRequest',
u'SendDataIndication',
u'UniformSendDataRequest',
u'UniformSendDataIndication',
u'TokenGrabRequest',
u'TokenGrabConfirm',
u'TokenInhibitRequest',
u'TokenInhibitConfirm',
u'TokenGiveRequest',
u'TokenGiveIndication',
u'TokenGiveResponse',
u'TokenGiveConfirm',
u'TokenPleaseRequest',
u'TokenPleaseIndication',
u'TokenReleaseRequest',
u'TokenReleaseConfirm',
u'TokenTestRequest',
u'TokenTestConfirm',
u'Reason',
u'Result',
u'Diagnostic',
u'ConnectMCSPDU',
u'DomainMCSPDU',
]
_type_ = [
u'ChannelId',
u'StaticChannelId',
u'DynamicChannelId',
u'UserId',
u'PrivateChannelId',
u'AssignedChannelId',
u'TokenId',
u'TokenStatus',
u'DataPriority',
u'Segmentation',
u'DomainParameters',
u'Connect-Initial',
u'Connect-Response',
u'Connect-Additional',
u'Connect-Result',
u'PlumbDomainIndication',
u'ErectDomainRequest',
u'ChannelAttributes',
u'MergeChannelsRequest',
u'MergeChannelsConfirm',
u'PurgeChannelsIndication',
u'TokenAttributes',
u'MergeTokensRequest',
u'MergeTokensConfirm',
u'PurgeTokensIndication',
u'DisconnectProviderUltimatum',
u'RejectMCSPDUUltimatum',
u'AttachUserRequest',
u'AttachUserConfirm',
u'DetachUserRequest',
u'DetachUserIndication',
u'ChannelJoinRequest',
u'ChannelJoinConfirm',
u'ChannelLeaveRequest',
u'ChannelConveneRequest',
u'ChannelConveneConfirm',
u'ChannelDisbandRequest',
u'ChannelDisbandIndication',
u'ChannelAdmitRequest',
u'ChannelAdmitIndication',
u'ChannelExpelRequest',
u'ChannelExpelIndication',
u'SendDataRequest',
u'SendDataIndication',
u'UniformSendDataRequest',
u'UniformSendDataIndication',
u'TokenGrabRequest',
u'TokenGrabConfirm',
u'TokenInhibitRequest',
u'TokenInhibitConfirm',
u'TokenGiveRequest',
u'TokenGiveIndication',
u'TokenGiveResponse',
u'TokenGiveConfirm',
u'TokenPleaseRequest',
u'TokenPleaseIndication',
u'TokenReleaseRequest',
u'TokenReleaseConfirm',
u'TokenTestRequest',
u'TokenTestConfirm',
u'Reason',
u'Result',
u'Diagnostic',
u'ConnectMCSPDU',
u'DomainMCSPDU',
]
_set_ = [
]
_val_ = [
]
_class_ = [
]
_param_ = [
]
#-----< ChannelId >-----#
ChannelId = INT(name=u'ChannelId', mode=MODE_TYPE)
ChannelId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
#-----< StaticChannelId >-----#
StaticChannelId = INT(name=u'StaticChannelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
StaticChannelId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1, ub=1000)], ev=None, er=[])
#-----< DynamicChannelId >-----#
DynamicChannelId = INT(name=u'DynamicChannelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
DynamicChannelId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1001, ub=65535)], ev=None, er=[])
#-----< UserId >-----#
UserId = INT(name=u'UserId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DynamicChannelId')))
#-----< PrivateChannelId >-----#
PrivateChannelId = INT(name=u'PrivateChannelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DynamicChannelId')))
#-----< AssignedChannelId >-----#
AssignedChannelId = INT(name=u'AssignedChannelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DynamicChannelId')))
#-----< TokenId >-----#
TokenId = INT(name=u'TokenId', mode=MODE_TYPE)
TokenId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1, ub=65535)], ev=None, er=[])
#-----< TokenStatus >-----#
TokenStatus = ENUM(name=u'TokenStatus', mode=MODE_TYPE)
TokenStatus._cont = ASN1Dict([(u'notInUse', 0), (u'selfGrabbed', 1), (u'otherGrabbed', 2), (u'selfInhibited', 3), (u'otherInhibited', 4), (u'selfRecipient', 5), (u'selfGiving', 6), (u'otherGiving', 7)])
TokenStatus._ext = None
#-----< DataPriority >-----#
DataPriority = ENUM(name=u'DataPriority', mode=MODE_TYPE)
DataPriority._cont = ASN1Dict([(u'top', 0), (u'high', 1), (u'medium', 2), (u'low', 3)])
DataPriority._ext = None
#-----< Segmentation >-----#
Segmentation = BIT_STR(name=u'Segmentation', mode=MODE_TYPE)
Segmentation._cont = ASN1Dict([(u'begin', 0), (u'end', 1)])
Segmentation._const_sz = ASN1Set(rv=[2], rr=[], ev=None, er=[])
#-----< DomainParameters >-----#
DomainParameters = SEQ(name=u'DomainParameters', mode=MODE_TYPE)
_DomainParameters_maxChannelIds = INT(name=u'maxChannelIds', mode=MODE_TYPE)
_DomainParameters_maxChannelIds._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_DomainParameters_maxUserIds = INT(name=u'maxUserIds', mode=MODE_TYPE)
_DomainParameters_maxUserIds._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_DomainParameters_maxTokenIds = INT(name=u'maxTokenIds', mode=MODE_TYPE)
_DomainParameters_maxTokenIds._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_DomainParameters_numPriorities = INT(name=u'numPriorities', mode=MODE_TYPE)
_DomainParameters_numPriorities._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_DomainParameters_minThroughput = INT(name=u'minThroughput', mode=MODE_TYPE)
_DomainParameters_minThroughput._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_DomainParameters_maxHeight = INT(name=u'maxHeight', mode=MODE_TYPE)
_DomainParameters_maxHeight._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_DomainParameters_maxMCSPDUsize = INT(name=u'maxMCSPDUsize', mode=MODE_TYPE)
_DomainParameters_maxMCSPDUsize._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_DomainParameters_protocolVersion = INT(name=u'protocolVersion', mode=MODE_TYPE)
_DomainParameters_protocolVersion._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
DomainParameters._cont = ASN1Dict([
(u'maxChannelIds', _DomainParameters_maxChannelIds),
(u'maxUserIds', _DomainParameters_maxUserIds),
(u'maxTokenIds', _DomainParameters_maxTokenIds),
(u'numPriorities', _DomainParameters_numPriorities),
(u'minThroughput', _DomainParameters_minThroughput),
(u'maxHeight', _DomainParameters_maxHeight),
(u'maxMCSPDUsize', _DomainParameters_maxMCSPDUsize),
(u'protocolVersion', _DomainParameters_protocolVersion),
])
DomainParameters._ext = None
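# Hedged usage sketch (comment only, so the generated module is unchanged):
# pycrate ASN.1 runtime objects can typically be filled and encoded like
#
#   MCS_PROTOCOL.DomainParameters.set_val({
#       'maxChannelIds': 34, 'maxUserIds': 2, 'maxTokenIds': 0,
#       'numPriorities': 1, 'minThroughput': 0, 'maxHeight': 1,
#       'maxMCSPDUsize': 65535, 'protocolVersion': 2,
#   })
#   ber = MCS_PROTOCOL.DomainParameters.to_ber()
#
# where set_val()/to_ber() come from the pycrate_asn1rt runtime; the field
# values above are illustrative.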
#-----< Connect-Initial >-----#
Connect_Initial = SEQ(name=u'Connect-Initial', mode=MODE_TYPE, tag=(101, TAG_APPLICATION, TAG_IMPLICIT))
_Connect_Initial_callingDomainSelector = OCT_STR(name=u'callingDomainSelector', mode=MODE_TYPE)
_Connect_Initial_calledDomainSelector = OCT_STR(name=u'calledDomainSelector', mode=MODE_TYPE)
_Connect_Initial_upwardFlag = BOOL(name=u'upwardFlag', mode=MODE_TYPE)
_Connect_Initial_targetParameters = SEQ(name=u'targetParameters', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DomainParameters')))
_Connect_Initial_minimumParameters = SEQ(name=u'minimumParameters', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DomainParameters')))
_Connect_Initial_maximumParameters = SEQ(name=u'maximumParameters', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DomainParameters')))
_Connect_Initial_userData = OCT_STR(name=u'userData', mode=MODE_TYPE)
Connect_Initial._cont = ASN1Dict([
(u'callingDomainSelector', _Connect_Initial_callingDomainSelector),
(u'calledDomainSelector', _Connect_Initial_calledDomainSelector),
(u'upwardFlag', _Connect_Initial_upwardFlag),
(u'targetParameters', _Connect_Initial_targetParameters),
(u'minimumParameters', _Connect_Initial_minimumParameters),
(u'maximumParameters', _Connect_Initial_maximumParameters),
(u'userData', _Connect_Initial_userData),
])
Connect_Initial._ext = None
#-----< Connect-Response >-----#
Connect_Response = SEQ(name=u'Connect-Response', mode=MODE_TYPE, tag=(102, TAG_APPLICATION, TAG_IMPLICIT))
_Connect_Response_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_Connect_Response_calledConnectId = INT(name=u'calledConnectId', mode=MODE_TYPE)
_Connect_Response_calledConnectId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_Connect_Response_domainParameters = SEQ(name=u'domainParameters', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DomainParameters')))
_Connect_Response_userData = OCT_STR(name=u'userData', mode=MODE_TYPE)
Connect_Response._cont = ASN1Dict([
(u'result', _Connect_Response_result),
(u'calledConnectId', _Connect_Response_calledConnectId),
(u'domainParameters', _Connect_Response_domainParameters),
(u'userData', _Connect_Response_userData),
])
Connect_Response._ext = None
#-----< Connect-Additional >-----#
Connect_Additional = SEQ(name=u'Connect-Additional', mode=MODE_TYPE, tag=(103, TAG_APPLICATION, TAG_IMPLICIT))
_Connect_Additional_calledConnectId = INT(name=u'calledConnectId', mode=MODE_TYPE)
_Connect_Additional_calledConnectId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_Connect_Additional_dataPriority = ENUM(name=u'dataPriority', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DataPriority')))
Connect_Additional._cont = ASN1Dict([
(u'calledConnectId', _Connect_Additional_calledConnectId),
(u'dataPriority', _Connect_Additional_dataPriority),
])
Connect_Additional._ext = None
#-----< Connect-Result >-----#
Connect_Result = SEQ(name=u'Connect-Result', mode=MODE_TYPE, tag=(104, TAG_APPLICATION, TAG_IMPLICIT))
_Connect_Result_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
Connect_Result._cont = ASN1Dict([
(u'result', _Connect_Result_result),
])
Connect_Result._ext = None
#-----< PlumbDomainIndication >-----#
PlumbDomainIndication = SEQ(name=u'PlumbDomainIndication', mode=MODE_TYPE, tag=(0, TAG_APPLICATION, TAG_IMPLICIT))
_PlumbDomainIndication_heightLimit = INT(name=u'heightLimit', mode=MODE_TYPE)
_PlumbDomainIndication_heightLimit._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
PlumbDomainIndication._cont = ASN1Dict([
(u'heightLimit', _PlumbDomainIndication_heightLimit),
])
PlumbDomainIndication._ext = None
#-----< ErectDomainRequest >-----#
ErectDomainRequest = SEQ(name=u'ErectDomainRequest', mode=MODE_TYPE, tag=(1, TAG_APPLICATION, TAG_IMPLICIT))
_ErectDomainRequest_subHeight = INT(name=u'subHeight', mode=MODE_TYPE)
_ErectDomainRequest_subHeight._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_ErectDomainRequest_subInterval = INT(name=u'subInterval', mode=MODE_TYPE)
_ErectDomainRequest_subInterval._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
ErectDomainRequest._cont = ASN1Dict([
(u'subHeight', _ErectDomainRequest_subHeight),
(u'subInterval', _ErectDomainRequest_subInterval),
])
ErectDomainRequest._ext = None
#-----< ChannelAttributes >-----#
ChannelAttributes = CHOICE(name=u'ChannelAttributes', mode=MODE_TYPE)
_ChannelAttributes_static = SEQ(name=u'static', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_static_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'StaticChannelId')))
_ChannelAttributes_static._cont = ASN1Dict([
(u'channelId', __ChannelAttributes_static_channelId),
])
_ChannelAttributes_static._ext = None
_ChannelAttributes_userId = SEQ(name=u'userId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_userId_joined = BOOL(name=u'joined', mode=MODE_TYPE)
__ChannelAttributes_userId_userId = INT(name=u'userId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelAttributes_userId._cont = ASN1Dict([
(u'joined', __ChannelAttributes_userId_joined),
(u'userId', __ChannelAttributes_userId_userId),
])
_ChannelAttributes_userId._ext = None
_ChannelAttributes_private = SEQ(name=u'private', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_private_joined = BOOL(name=u'joined', mode=MODE_TYPE)
__ChannelAttributes_private_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PrivateChannelId')))
__ChannelAttributes_private_manager = INT(name=u'manager', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
__ChannelAttributes_private_admitted = SET_OF(name=u'admitted', mode=MODE_TYPE)
___ChannelAttributes_private_admitted__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
__ChannelAttributes_private_admitted._cont = ___ChannelAttributes_private_admitted__item_
_ChannelAttributes_private._cont = ASN1Dict([
(u'joined', __ChannelAttributes_private_joined),
(u'channelId', __ChannelAttributes_private_channelId),
(u'manager', __ChannelAttributes_private_manager),
(u'admitted', __ChannelAttributes_private_admitted),
])
_ChannelAttributes_private._ext = None
_ChannelAttributes_assigned = SEQ(name=u'assigned', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_assigned_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'AssignedChannelId')))
_ChannelAttributes_assigned._cont = ASN1Dict([
(u'channelId', __ChannelAttributes_assigned_channelId),
])
_ChannelAttributes_assigned._ext = None
ChannelAttributes._cont = ASN1Dict([
(u'static', _ChannelAttributes_static),
(u'userId', _ChannelAttributes_userId),
(u'private', _ChannelAttributes_private),
(u'assigned', _ChannelAttributes_assigned),
])
ChannelAttributes._ext = None
#-----< MergeChannelsRequest >-----#
MergeChannelsRequest = SEQ(name=u'MergeChannelsRequest', mode=MODE_TYPE, tag=(2, TAG_APPLICATION, TAG_IMPLICIT))
_MergeChannelsRequest_mergeChannels = SET_OF(name=u'mergeChannels', mode=MODE_TYPE)
__MergeChannelsRequest_mergeChannels__item_ = CHOICE(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelAttributes')))
_MergeChannelsRequest_mergeChannels._cont = __MergeChannelsRequest_mergeChannels__item_
_MergeChannelsRequest_purgeChannelIds = SET_OF(name=u'purgeChannelIds', mode=MODE_TYPE)
__MergeChannelsRequest_purgeChannelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_MergeChannelsRequest_purgeChannelIds._cont = __MergeChannelsRequest_purgeChannelIds__item_
MergeChannelsRequest._cont = ASN1Dict([
(u'mergeChannels', _MergeChannelsRequest_mergeChannels),
(u'purgeChannelIds', _MergeChannelsRequest_purgeChannelIds),
])
MergeChannelsRequest._ext = None
#-----< MergeChannelsConfirm >-----#
MergeChannelsConfirm = SEQ(name=u'MergeChannelsConfirm', mode=MODE_TYPE, tag=(3, TAG_APPLICATION, TAG_IMPLICIT))
_MergeChannelsConfirm_mergeChannels = SET_OF(name=u'mergeChannels', mode=MODE_TYPE)
__MergeChannelsConfirm_mergeChannels__item_ = CHOICE(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelAttributes')))
_MergeChannelsConfirm_mergeChannels._cont = __MergeChannelsConfirm_mergeChannels__item_
_MergeChannelsConfirm_purgeChannelIds = SET_OF(name=u'purgeChannelIds', mode=MODE_TYPE)
__MergeChannelsConfirm_purgeChannelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_MergeChannelsConfirm_purgeChannelIds._cont = __MergeChannelsConfirm_purgeChannelIds__item_
MergeChannelsConfirm._cont = ASN1Dict([
(u'mergeChannels', _MergeChannelsConfirm_mergeChannels),
(u'purgeChannelIds', _MergeChannelsConfirm_purgeChannelIds),
])
MergeChannelsConfirm._ext = None
#-----< PurgeChannelsIndication >-----#
PurgeChannelsIndication = SEQ(name=u'PurgeChannelsIndication', mode=MODE_TYPE, tag=(4, TAG_APPLICATION, TAG_IMPLICIT))
_PurgeChannelsIndication_detachUserIds = SET_OF(name=u'detachUserIds', mode=MODE_TYPE)
__PurgeChannelsIndication_detachUserIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_PurgeChannelsIndication_detachUserIds._cont = __PurgeChannelsIndication_detachUserIds__item_
_PurgeChannelsIndication_purgeChannelIds = SET_OF(name=u'purgeChannelIds', mode=MODE_TYPE)
__PurgeChannelsIndication_purgeChannelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_PurgeChannelsIndication_purgeChannelIds._cont = __PurgeChannelsIndication_purgeChannelIds__item_
PurgeChannelsIndication._cont = ASN1Dict([
(u'detachUserIds', _PurgeChannelsIndication_detachUserIds),
(u'purgeChannelIds', _PurgeChannelsIndication_purgeChannelIds),
])
PurgeChannelsIndication._ext = None
#-----< TokenAttributes >-----#
TokenAttributes = CHOICE(name=u'TokenAttributes', mode=MODE_TYPE)
_TokenAttributes_grabbed = SEQ(name=u'grabbed', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_grabbed_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
__TokenAttributes_grabbed_grabber = INT(name=u'grabber', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenAttributes_grabbed._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_grabbed_tokenId),
(u'grabber', __TokenAttributes_grabbed_grabber),
])
_TokenAttributes_grabbed._ext = None
_TokenAttributes_inhibited = SEQ(name=u'inhibited', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_inhibited_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
__TokenAttributes_inhibited_inhibitors = SET_OF(name=u'inhibitors', mode=MODE_TYPE)
___TokenAttributes_inhibited_inhibitors__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
__TokenAttributes_inhibited_inhibitors._cont = ___TokenAttributes_inhibited_inhibitors__item_
_TokenAttributes_inhibited._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_inhibited_tokenId),
(u'inhibitors', __TokenAttributes_inhibited_inhibitors),
])
_TokenAttributes_inhibited._ext = None
_TokenAttributes_giving = SEQ(name=u'giving', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_giving_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
__TokenAttributes_giving_grabber = INT(name=u'grabber', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
__TokenAttributes_giving_recipient = INT(name=u'recipient', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenAttributes_giving._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_giving_tokenId),
(u'grabber', __TokenAttributes_giving_grabber),
(u'recipient', __TokenAttributes_giving_recipient),
])
_TokenAttributes_giving._ext = None
_TokenAttributes_ungivable = SEQ(name=u'ungivable', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_ungivable_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
__TokenAttributes_ungivable_grabber = INT(name=u'grabber', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenAttributes_ungivable._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_ungivable_tokenId),
(u'grabber', __TokenAttributes_ungivable_grabber),
])
_TokenAttributes_ungivable._ext = None
_TokenAttributes_given = SEQ(name=u'given', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_given_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
__TokenAttributes_given_recipient = INT(name=u'recipient', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenAttributes_given._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_given_tokenId),
(u'recipient', __TokenAttributes_given_recipient),
])
_TokenAttributes_given._ext = None
TokenAttributes._cont = ASN1Dict([
(u'grabbed', _TokenAttributes_grabbed),
(u'inhibited', _TokenAttributes_inhibited),
(u'giving', _TokenAttributes_giving),
(u'ungivable', _TokenAttributes_ungivable),
(u'given', _TokenAttributes_given),
])
TokenAttributes._ext = None
#-----< MergeTokensRequest >-----#
MergeTokensRequest = SEQ(name=u'MergeTokensRequest', mode=MODE_TYPE, tag=(5, TAG_APPLICATION, TAG_IMPLICIT))
_MergeTokensRequest_mergeTokens = SET_OF(name=u'mergeTokens', mode=MODE_TYPE)
__MergeTokensRequest_mergeTokens__item_ = CHOICE(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenAttributes')))
_MergeTokensRequest_mergeTokens._cont = __MergeTokensRequest_mergeTokens__item_
_MergeTokensRequest_purgeTokenIds = SET_OF(name=u'purgeTokenIds', mode=MODE_TYPE)
__MergeTokensRequest_purgeTokenIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_MergeTokensRequest_purgeTokenIds._cont = __MergeTokensRequest_purgeTokenIds__item_
MergeTokensRequest._cont = ASN1Dict([
(u'mergeTokens', _MergeTokensRequest_mergeTokens),
(u'purgeTokenIds', _MergeTokensRequest_purgeTokenIds),
])
MergeTokensRequest._ext = None
#-----< MergeTokensConfirm >-----#
MergeTokensConfirm = SEQ(name=u'MergeTokensConfirm', mode=MODE_TYPE, tag=(6, TAG_APPLICATION, TAG_IMPLICIT))
_MergeTokensConfirm_mergeTokens = SET_OF(name=u'mergeTokens', mode=MODE_TYPE)
__MergeTokensConfirm_mergeTokens__item_ = CHOICE(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenAttributes')))
_MergeTokensConfirm_mergeTokens._cont = __MergeTokensConfirm_mergeTokens__item_
_MergeTokensConfirm_purgeTokenIds = SET_OF(name=u'purgeTokenIds', mode=MODE_TYPE)
__MergeTokensConfirm_purgeTokenIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_MergeTokensConfirm_purgeTokenIds._cont = __MergeTokensConfirm_purgeTokenIds__item_
MergeTokensConfirm._cont = ASN1Dict([
(u'mergeTokens', _MergeTokensConfirm_mergeTokens),
(u'purgeTokenIds', _MergeTokensConfirm_purgeTokenIds),
])
MergeTokensConfirm._ext = None
#-----< PurgeTokensIndication >-----#
PurgeTokensIndication = SEQ(name=u'PurgeTokensIndication', mode=MODE_TYPE, tag=(7, TAG_APPLICATION, TAG_IMPLICIT))
_PurgeTokensIndication_purgeTokenIds = SET_OF(name=u'purgeTokenIds', mode=MODE_TYPE)
__PurgeTokensIndication_purgeTokenIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_PurgeTokensIndication_purgeTokenIds._cont = __PurgeTokensIndication_purgeTokenIds__item_
PurgeTokensIndication._cont = ASN1Dict([
(u'purgeTokenIds', _PurgeTokensIndication_purgeTokenIds),
])
PurgeTokensIndication._ext = None
#-----< DisconnectProviderUltimatum >-----#
DisconnectProviderUltimatum = SEQ(name=u'DisconnectProviderUltimatum', mode=MODE_TYPE, tag=(8, TAG_APPLICATION, TAG_IMPLICIT))
_DisconnectProviderUltimatum_reason = ENUM(name=u'reason', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Reason')))
DisconnectProviderUltimatum._cont = ASN1Dict([
(u'reason', _DisconnectProviderUltimatum_reason),
])
DisconnectProviderUltimatum._ext = None
#-----< RejectMCSPDUUltimatum >-----#
RejectMCSPDUUltimatum = SEQ(name=u'RejectMCSPDUUltimatum', mode=MODE_TYPE, tag=(9, TAG_APPLICATION, TAG_IMPLICIT))
_RejectMCSPDUUltimatum_diagnostic = ENUM(name=u'diagnostic', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Diagnostic')))
_RejectMCSPDUUltimatum_initialOctets = OCT_STR(name=u'initialOctets', mode=MODE_TYPE)
RejectMCSPDUUltimatum._cont = ASN1Dict([
(u'diagnostic', _RejectMCSPDUUltimatum_diagnostic),
(u'initialOctets', _RejectMCSPDUUltimatum_initialOctets),
])
RejectMCSPDUUltimatum._ext = None
#-----< AttachUserRequest >-----#
AttachUserRequest = SEQ(name=u'AttachUserRequest', mode=MODE_TYPE, tag=(10, TAG_APPLICATION, TAG_IMPLICIT))
AttachUserRequest._cont = ASN1Dict([
])
AttachUserRequest._ext = None
#-----< AttachUserConfirm >-----#
AttachUserConfirm = SEQ(name=u'AttachUserConfirm', mode=MODE_TYPE, tag=(11, TAG_APPLICATION, TAG_IMPLICIT))
_AttachUserConfirm_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_AttachUserConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')), opt=True)
AttachUserConfirm._cont = ASN1Dict([
(u'result', _AttachUserConfirm_result),
(u'initiator', _AttachUserConfirm_initiator),
])
AttachUserConfirm._ext = None
#-----< DetachUserRequest >-----#
DetachUserRequest = SEQ(name=u'DetachUserRequest', mode=MODE_TYPE, tag=(12, TAG_APPLICATION, TAG_IMPLICIT))
_DetachUserRequest_reason = ENUM(name=u'reason', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Reason')))
_DetachUserRequest_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE)
__DetachUserRequest_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_DetachUserRequest_userIds._cont = __DetachUserRequest_userIds__item_
DetachUserRequest._cont = ASN1Dict([
(u'reason', _DetachUserRequest_reason),
(u'userIds', _DetachUserRequest_userIds),
])
DetachUserRequest._ext = None
#-----< DetachUserIndication >-----#
DetachUserIndication = SEQ(name=u'DetachUserIndication', mode=MODE_TYPE, tag=(13, TAG_APPLICATION, TAG_IMPLICIT))
_DetachUserIndication_reason = ENUM(name=u'reason', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Reason')))
_DetachUserIndication_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE)
__DetachUserIndication_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_DetachUserIndication_userIds._cont = __DetachUserIndication_userIds__item_
DetachUserIndication._cont = ASN1Dict([
(u'reason', _DetachUserIndication_reason),
(u'userIds', _DetachUserIndication_userIds),
])
DetachUserIndication._ext = None
#-----< ChannelJoinRequest >-----#
ChannelJoinRequest = SEQ(name=u'ChannelJoinRequest', mode=MODE_TYPE, tag=(14, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelJoinRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelJoinRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
ChannelJoinRequest._cont = ASN1Dict([
(u'initiator', _ChannelJoinRequest_initiator),
(u'channelId', _ChannelJoinRequest_channelId),
])
ChannelJoinRequest._ext = None
#-----< ChannelJoinConfirm >-----#
ChannelJoinConfirm = SEQ(name=u'ChannelJoinConfirm', mode=MODE_TYPE, tag=(15, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelJoinConfirm_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_ChannelJoinConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelJoinConfirm_requested = INT(name=u'requested', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_ChannelJoinConfirm_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')), opt=True)
ChannelJoinConfirm._cont = ASN1Dict([
(u'result', _ChannelJoinConfirm_result),
(u'initiator', _ChannelJoinConfirm_initiator),
(u'requested', _ChannelJoinConfirm_requested),
(u'channelId', _ChannelJoinConfirm_channelId),
])
ChannelJoinConfirm._ext = None
#-----< ChannelLeaveRequest >-----#
ChannelLeaveRequest = SEQ(name=u'ChannelLeaveRequest', mode=MODE_TYPE, tag=(16, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelLeaveRequest_channelIds = SET_OF(name=u'channelIds', mode=MODE_TYPE)
__ChannelLeaveRequest_channelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_ChannelLeaveRequest_channelIds._cont = __ChannelLeaveRequest_channelIds__item_
ChannelLeaveRequest._cont = ASN1Dict([
(u'channelIds', _ChannelLeaveRequest_channelIds),
])
ChannelLeaveRequest._ext = None
#-----< ChannelConveneRequest >-----#
ChannelConveneRequest = SEQ(name=u'ChannelConveneRequest', mode=MODE_TYPE, tag=(17, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelConveneRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
ChannelConveneRequest._cont = ASN1Dict([
(u'initiator', _ChannelConveneRequest_initiator),
])
ChannelConveneRequest._ext = None
#-----< ChannelConveneConfirm >-----#
ChannelConveneConfirm = SEQ(name=u'ChannelConveneConfirm', mode=MODE_TYPE, tag=(18, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelConveneConfirm_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_ChannelConveneConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelConveneConfirm_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PrivateChannelId')), opt=True)
ChannelConveneConfirm._cont = ASN1Dict([
(u'result', _ChannelConveneConfirm_result),
(u'initiator', _ChannelConveneConfirm_initiator),
(u'channelId', _ChannelConveneConfirm_channelId),
])
ChannelConveneConfirm._ext = None
#-----< ChannelDisbandRequest >-----#
ChannelDisbandRequest = SEQ(name=u'ChannelDisbandRequest', mode=MODE_TYPE, tag=(19, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelDisbandRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelDisbandRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PrivateChannelId')))
ChannelDisbandRequest._cont = ASN1Dict([
(u'initiator', _ChannelDisbandRequest_initiator),
(u'channelId', _ChannelDisbandRequest_channelId),
])
ChannelDisbandRequest._ext = None
#-----< ChannelDisbandIndication >-----#
ChannelDisbandIndication = SEQ(name=u'ChannelDisbandIndication', mode=MODE_TYPE, tag=(20, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelDisbandIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PrivateChannelId')))
ChannelDisbandIndication._cont = ASN1Dict([
(u'channelId', _ChannelDisbandIndication_channelId),
])
ChannelDisbandIndication._ext = None
#-----< ChannelAdmitRequest >-----#
ChannelAdmitRequest = SEQ(name=u'ChannelAdmitRequest', mode=MODE_TYPE, tag=(21, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelAdmitRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelAdmitRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PrivateChannelId')))
_ChannelAdmitRequest_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE)
__ChannelAdmitRequest_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelAdmitRequest_userIds._cont = __ChannelAdmitRequest_userIds__item_
ChannelAdmitRequest._cont = ASN1Dict([
(u'initiator', _ChannelAdmitRequest_initiator),
(u'channelId', _ChannelAdmitRequest_channelId),
(u'userIds', _ChannelAdmitRequest_userIds),
])
ChannelAdmitRequest._ext = None
#-----< ChannelAdmitIndication >-----#
ChannelAdmitIndication = SEQ(name=u'ChannelAdmitIndication', mode=MODE_TYPE, tag=(22, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelAdmitIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelAdmitIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PrivateChannelId')))
_ChannelAdmitIndication_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE)
__ChannelAdmitIndication_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelAdmitIndication_userIds._cont = __ChannelAdmitIndication_userIds__item_
ChannelAdmitIndication._cont = ASN1Dict([
(u'initiator', _ChannelAdmitIndication_initiator),
(u'channelId', _ChannelAdmitIndication_channelId),
(u'userIds', _ChannelAdmitIndication_userIds),
])
ChannelAdmitIndication._ext = None
#-----< ChannelExpelRequest >-----#
ChannelExpelRequest = SEQ(name=u'ChannelExpelRequest', mode=MODE_TYPE, tag=(23, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelExpelRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelExpelRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PrivateChannelId')))
_ChannelExpelRequest_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE)
__ChannelExpelRequest_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelExpelRequest_userIds._cont = __ChannelExpelRequest_userIds__item_
ChannelExpelRequest._cont = ASN1Dict([
(u'initiator', _ChannelExpelRequest_initiator),
(u'channelId', _ChannelExpelRequest_channelId),
(u'userIds', _ChannelExpelRequest_userIds),
])
ChannelExpelRequest._ext = None
#-----< ChannelExpelIndication >-----#
ChannelExpelIndication = SEQ(name=u'ChannelExpelIndication', mode=MODE_TYPE, tag=(24, TAG_APPLICATION, TAG_IMPLICIT))
_ChannelExpelIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PrivateChannelId')))
_ChannelExpelIndication_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE)
__ChannelExpelIndication_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_ChannelExpelIndication_userIds._cont = __ChannelExpelIndication_userIds__item_
ChannelExpelIndication._cont = ASN1Dict([
(u'channelId', _ChannelExpelIndication_channelId),
(u'userIds', _ChannelExpelIndication_userIds),
])
ChannelExpelIndication._ext = None
#-----< SendDataRequest >-----#
SendDataRequest = SEQ(name=u'SendDataRequest', mode=MODE_TYPE, tag=(25, TAG_APPLICATION, TAG_IMPLICIT))
_SendDataRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_SendDataRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_SendDataRequest_dataPriority = ENUM(name=u'dataPriority', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DataPriority')))
_SendDataRequest_segmentation = BIT_STR(name=u'segmentation', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Segmentation')))
_SendDataRequest_userData = OCT_STR(name=u'userData', mode=MODE_TYPE)
SendDataRequest._cont = ASN1Dict([
(u'initiator', _SendDataRequest_initiator),
(u'channelId', _SendDataRequest_channelId),
(u'dataPriority', _SendDataRequest_dataPriority),
(u'segmentation', _SendDataRequest_segmentation),
(u'userData', _SendDataRequest_userData),
])
SendDataRequest._ext = None
#-----< SendDataIndication >-----#
SendDataIndication = SEQ(name=u'SendDataIndication', mode=MODE_TYPE, tag=(26, TAG_APPLICATION, TAG_IMPLICIT))
_SendDataIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_SendDataIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_SendDataIndication_dataPriority = ENUM(name=u'dataPriority', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DataPriority')))
_SendDataIndication_segmentation = BIT_STR(name=u'segmentation', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Segmentation')))
_SendDataIndication_userData = OCT_STR(name=u'userData', mode=MODE_TYPE)
SendDataIndication._cont = ASN1Dict([
(u'initiator', _SendDataIndication_initiator),
(u'channelId', _SendDataIndication_channelId),
(u'dataPriority', _SendDataIndication_dataPriority),
(u'segmentation', _SendDataIndication_segmentation),
(u'userData', _SendDataIndication_userData),
])
SendDataIndication._ext = None
#-----< UniformSendDataRequest >-----#
UniformSendDataRequest = SEQ(name=u'UniformSendDataRequest', mode=MODE_TYPE, tag=(27, TAG_APPLICATION, TAG_IMPLICIT))
_UniformSendDataRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_UniformSendDataRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_UniformSendDataRequest_dataPriority = ENUM(name=u'dataPriority', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DataPriority')))
_UniformSendDataRequest_segmentation = BIT_STR(name=u'segmentation', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Segmentation')))
_UniformSendDataRequest_userData = OCT_STR(name=u'userData', mode=MODE_TYPE)
UniformSendDataRequest._cont = ASN1Dict([
(u'initiator', _UniformSendDataRequest_initiator),
(u'channelId', _UniformSendDataRequest_channelId),
(u'dataPriority', _UniformSendDataRequest_dataPriority),
(u'segmentation', _UniformSendDataRequest_segmentation),
(u'userData', _UniformSendDataRequest_userData),
])
UniformSendDataRequest._ext = None
#-----< UniformSendDataIndication >-----#
UniformSendDataIndication = SEQ(name=u'UniformSendDataIndication', mode=MODE_TYPE, tag=(28, TAG_APPLICATION, TAG_IMPLICIT))
_UniformSendDataIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_UniformSendDataIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelId')))
_UniformSendDataIndication_dataPriority = ENUM(name=u'dataPriority', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DataPriority')))
_UniformSendDataIndication_segmentation = BIT_STR(name=u'segmentation', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Segmentation')))
_UniformSendDataIndication_userData = OCT_STR(name=u'userData', mode=MODE_TYPE)
UniformSendDataIndication._cont = ASN1Dict([
(u'initiator', _UniformSendDataIndication_initiator),
(u'channelId', _UniformSendDataIndication_channelId),
(u'dataPriority', _UniformSendDataIndication_dataPriority),
(u'segmentation', _UniformSendDataIndication_segmentation),
(u'userData', _UniformSendDataIndication_userData),
])
UniformSendDataIndication._ext = None
#-----< TokenGrabRequest >-----#
TokenGrabRequest = SEQ(name=u'TokenGrabRequest', mode=MODE_TYPE, tag=(29, TAG_APPLICATION, TAG_IMPLICIT))
_TokenGrabRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenGrabRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
TokenGrabRequest._cont = ASN1Dict([
(u'initiator', _TokenGrabRequest_initiator),
(u'tokenId', _TokenGrabRequest_tokenId),
])
TokenGrabRequest._ext = None
#-----< TokenGrabConfirm >-----#
TokenGrabConfirm = SEQ(name=u'TokenGrabConfirm', mode=MODE_TYPE, tag=(30, TAG_APPLICATION, TAG_IMPLICIT))
_TokenGrabConfirm_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_TokenGrabConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenGrabConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_TokenGrabConfirm_tokenStatus = ENUM(name=u'tokenStatus', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenStatus')))
TokenGrabConfirm._cont = ASN1Dict([
(u'result', _TokenGrabConfirm_result),
(u'initiator', _TokenGrabConfirm_initiator),
(u'tokenId', _TokenGrabConfirm_tokenId),
(u'tokenStatus', _TokenGrabConfirm_tokenStatus),
])
TokenGrabConfirm._ext = None
#-----< TokenInhibitRequest >-----#
TokenInhibitRequest = SEQ(name=u'TokenInhibitRequest', mode=MODE_TYPE, tag=(31, TAG_APPLICATION, TAG_IMPLICIT))
_TokenInhibitRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenInhibitRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
TokenInhibitRequest._cont = ASN1Dict([
(u'initiator', _TokenInhibitRequest_initiator),
(u'tokenId', _TokenInhibitRequest_tokenId),
])
TokenInhibitRequest._ext = None
#-----< TokenInhibitConfirm >-----#
TokenInhibitConfirm = SEQ(name=u'TokenInhibitConfirm', mode=MODE_TYPE, tag=(32, TAG_APPLICATION, TAG_IMPLICIT))
_TokenInhibitConfirm_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_TokenInhibitConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenInhibitConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_TokenInhibitConfirm_tokenStatus = ENUM(name=u'tokenStatus', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenStatus')))
TokenInhibitConfirm._cont = ASN1Dict([
(u'result', _TokenInhibitConfirm_result),
(u'initiator', _TokenInhibitConfirm_initiator),
(u'tokenId', _TokenInhibitConfirm_tokenId),
(u'tokenStatus', _TokenInhibitConfirm_tokenStatus),
])
TokenInhibitConfirm._ext = None
#-----< TokenGiveRequest >-----#
TokenGiveRequest = SEQ(name=u'TokenGiveRequest', mode=MODE_TYPE, tag=(33, TAG_APPLICATION, TAG_IMPLICIT))
_TokenGiveRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenGiveRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_TokenGiveRequest_recipient = INT(name=u'recipient', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
TokenGiveRequest._cont = ASN1Dict([
(u'initiator', _TokenGiveRequest_initiator),
(u'tokenId', _TokenGiveRequest_tokenId),
(u'recipient', _TokenGiveRequest_recipient),
])
TokenGiveRequest._ext = None
#-----< TokenGiveIndication >-----#
TokenGiveIndication = SEQ(name=u'TokenGiveIndication', mode=MODE_TYPE, tag=(34, TAG_APPLICATION, TAG_IMPLICIT))
_TokenGiveIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenGiveIndication_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_TokenGiveIndication_recipient = INT(name=u'recipient', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
TokenGiveIndication._cont = ASN1Dict([
(u'initiator', _TokenGiveIndication_initiator),
(u'tokenId', _TokenGiveIndication_tokenId),
(u'recipient', _TokenGiveIndication_recipient),
])
TokenGiveIndication._ext = None
#-----< TokenGiveResponse >-----#
TokenGiveResponse = SEQ(name=u'TokenGiveResponse', mode=MODE_TYPE, tag=(35, TAG_APPLICATION, TAG_IMPLICIT))
_TokenGiveResponse_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_TokenGiveResponse_recipient = INT(name=u'recipient', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenGiveResponse_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
TokenGiveResponse._cont = ASN1Dict([
(u'result', _TokenGiveResponse_result),
(u'recipient', _TokenGiveResponse_recipient),
(u'tokenId', _TokenGiveResponse_tokenId),
])
TokenGiveResponse._ext = None
#-----< TokenGiveConfirm >-----#
TokenGiveConfirm = SEQ(name=u'TokenGiveConfirm', mode=MODE_TYPE, tag=(36, TAG_APPLICATION, TAG_IMPLICIT))
_TokenGiveConfirm_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_TokenGiveConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenGiveConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_TokenGiveConfirm_tokenStatus = ENUM(name=u'tokenStatus', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenStatus')))
TokenGiveConfirm._cont = ASN1Dict([
(u'result', _TokenGiveConfirm_result),
(u'initiator', _TokenGiveConfirm_initiator),
(u'tokenId', _TokenGiveConfirm_tokenId),
(u'tokenStatus', _TokenGiveConfirm_tokenStatus),
])
TokenGiveConfirm._ext = None
#-----< TokenPleaseRequest >-----#
TokenPleaseRequest = SEQ(name=u'TokenPleaseRequest', mode=MODE_TYPE, tag=(37, TAG_APPLICATION, TAG_IMPLICIT))
_TokenPleaseRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenPleaseRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
TokenPleaseRequest._cont = ASN1Dict([
(u'initiator', _TokenPleaseRequest_initiator),
(u'tokenId', _TokenPleaseRequest_tokenId),
])
TokenPleaseRequest._ext = None
#-----< TokenPleaseIndication >-----#
TokenPleaseIndication = SEQ(name=u'TokenPleaseIndication', mode=MODE_TYPE, tag=(38, TAG_APPLICATION, TAG_IMPLICIT))
_TokenPleaseIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenPleaseIndication_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
TokenPleaseIndication._cont = ASN1Dict([
(u'initiator', _TokenPleaseIndication_initiator),
(u'tokenId', _TokenPleaseIndication_tokenId),
])
TokenPleaseIndication._ext = None
#-----< TokenReleaseRequest >-----#
TokenReleaseRequest = SEQ(name=u'TokenReleaseRequest', mode=MODE_TYPE, tag=(39, TAG_APPLICATION, TAG_IMPLICIT))
_TokenReleaseRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenReleaseRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
TokenReleaseRequest._cont = ASN1Dict([
(u'initiator', _TokenReleaseRequest_initiator),
(u'tokenId', _TokenReleaseRequest_tokenId),
])
TokenReleaseRequest._ext = None
#-----< TokenReleaseConfirm >-----#
TokenReleaseConfirm = SEQ(name=u'TokenReleaseConfirm', mode=MODE_TYPE, tag=(40, TAG_APPLICATION, TAG_IMPLICIT))
_TokenReleaseConfirm_result = ENUM(name=u'result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Result')))
_TokenReleaseConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenReleaseConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_TokenReleaseConfirm_tokenStatus = ENUM(name=u'tokenStatus', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenStatus')))
TokenReleaseConfirm._cont = ASN1Dict([
(u'result', _TokenReleaseConfirm_result),
(u'initiator', _TokenReleaseConfirm_initiator),
(u'tokenId', _TokenReleaseConfirm_tokenId),
(u'tokenStatus', _TokenReleaseConfirm_tokenStatus),
])
TokenReleaseConfirm._ext = None
#-----< TokenTestRequest >-----#
TokenTestRequest = SEQ(name=u'TokenTestRequest', mode=MODE_TYPE, tag=(41, TAG_APPLICATION, TAG_IMPLICIT))
_TokenTestRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenTestRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
TokenTestRequest._cont = ASN1Dict([
(u'initiator', _TokenTestRequest_initiator),
(u'tokenId', _TokenTestRequest_tokenId),
])
TokenTestRequest._ext = None
#-----< TokenTestConfirm >-----#
TokenTestConfirm = SEQ(name=u'TokenTestConfirm', mode=MODE_TYPE, tag=(42, TAG_APPLICATION, TAG_IMPLICIT))
_TokenTestConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UserId')))
_TokenTestConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenId')))
_TokenTestConfirm_tokenStatus = ENUM(name=u'tokenStatus', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenStatus')))
TokenTestConfirm._cont = ASN1Dict([
(u'initiator', _TokenTestConfirm_initiator),
(u'tokenId', _TokenTestConfirm_tokenId),
(u'tokenStatus', _TokenTestConfirm_tokenStatus),
])
TokenTestConfirm._ext = None
#-----< Reason >-----#
Reason = ENUM(name=u'Reason', mode=MODE_TYPE)
Reason._cont = ASN1Dict([(u'rn-domain-disconnected', 0), (u'rn-provider-initiated', 1), (u'rn-token-purged', 2), (u'rn-user-requested', 3), (u'rn-channel-purged', 4)])
Reason._ext = None
#-----< Result >-----#
Result = ENUM(name=u'Result', mode=MODE_TYPE)
Result._cont = ASN1Dict([(u'rt-successful', 0), (u'rt-domain-merging', 1), (u'rt-domain-not-hierarchical', 2), (u'rt-no-such-channel', 3), (u'rt-no-such-domain', 4), (u'rt-no-such-user', 5), (u'rt-not-admitted', 6), (u'rt-other-user-id', 7), (u'rt-parameters-unacceptable', 8), (u'rt-token-not-available', 9), (u'rt-token-not-possessed', 10), (u'rt-too-many-channels', 11), (u'rt-too-many-tokens', 12), (u'rt-too-many-users', 13), (u'rt-unspecified-failure', 14), (u'rt-user-rejected', 15)])
Result._ext = None
#-----< Diagnostic >-----#
Diagnostic = ENUM(name=u'Diagnostic', mode=MODE_TYPE)
Diagnostic._cont = ASN1Dict([(u'dc-inconsistent-merge', 0), (u'dc-forbidden-PDU-downward', 1), (u'dc-forbidden-PDU-upward', 2), (u'dc-invalid-BER-encoding', 3), (u'dc-invalid-PER-encoding', 4), (u'dc-misrouted-user', 5), (u'dc-unrequested-confirm', 6), (u'dc-wrong-transport-priority', 7), (u'dc-channel-id-conflict', 8), (u'dc-token-id-conflict', 9), (u'dc-not-user-id-channel', 10), (u'dc-too-many-channels', 11), (u'dc-too-many-tokens', 12), (u'dc-too-many-users', 13)])
Diagnostic._ext = None
#-----< ConnectMCSPDU >-----#
ConnectMCSPDU = CHOICE(name=u'ConnectMCSPDU', mode=MODE_TYPE)
_ConnectMCSPDU_connect_initial = SEQ(name=u'connect-initial', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Connect-Initial')))
_ConnectMCSPDU_connect_response = SEQ(name=u'connect-response', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Connect-Response')))
_ConnectMCSPDU_connect_additional = SEQ(name=u'connect-additional', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Connect-Additional')))
_ConnectMCSPDU_connect_result = SEQ(name=u'connect-result', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'Connect-Result')))
ConnectMCSPDU._cont = ASN1Dict([
(u'connect-initial', _ConnectMCSPDU_connect_initial),
(u'connect-response', _ConnectMCSPDU_connect_response),
(u'connect-additional', _ConnectMCSPDU_connect_additional),
(u'connect-result', _ConnectMCSPDU_connect_result),
])
ConnectMCSPDU._ext = None
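#
# Illustrative sketch (editor's note, not compiler output): the Connect PDUs
# of this module are in practice exchanged in BER (e.g. by T.120 / RDP
# stacks). With the pycrate runtime, a CHOICE value is an
# (alternative-name, value) 2-tuple and a SEQUENCE value is a dict keyed by
# component name. The domain selectors and parameter values below are
# hypothetical placeholders, not normative defaults:
#
#   dp = {'maxChannelIds': 34, 'maxUserIds': 2, 'maxTokenIds': 0,
#         'numPriorities': 1, 'minThroughput': 0, 'maxHeight': 1,
#         'maxMCSPDUsize': 65535, 'protocolVersion': 2}
#   ConnectMCSPDU.set_val(('connect-initial', {
#       'callingDomainSelector': b'\x01',
#       'calledDomainSelector': b'\x01',
#       'upwardFlag': True,
#       'targetParameters': dp,
#       'minimumParameters': dp,
#       'maximumParameters': dp,
#       'userData': b''}))
#   buf = ConnectMCSPDU.to_ber()    # BER transfer syntax
#   ConnectMCSPDU.from_ber(buf)     # round-trip decode
#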
#-----< DomainMCSPDU >-----#
DomainMCSPDU = CHOICE(name=u'DomainMCSPDU', mode=MODE_TYPE)
_DomainMCSPDU_plumbDomainIndication = SEQ(name=u'plumbDomainIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PlumbDomainIndication')))
_DomainMCSPDU_erectDomainRequest = SEQ(name=u'erectDomainRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ErectDomainRequest')))
_DomainMCSPDU_mergeChannelsRequest = SEQ(name=u'mergeChannelsRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'MergeChannelsRequest')))
_DomainMCSPDU_mergeChannelsConfirm = SEQ(name=u'mergeChannelsConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'MergeChannelsConfirm')))
_DomainMCSPDU_purgeChannelsIndication = SEQ(name=u'purgeChannelsIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PurgeChannelsIndication')))
_DomainMCSPDU_mergeTokensRequest = SEQ(name=u'mergeTokensRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'MergeTokensRequest')))
_DomainMCSPDU_mergeTokensConfirm = SEQ(name=u'mergeTokensConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'MergeTokensConfirm')))
_DomainMCSPDU_purgeTokensIndication = SEQ(name=u'purgeTokensIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'PurgeTokensIndication')))
_DomainMCSPDU_disconnectProviderUltimatum = SEQ(name=u'disconnectProviderUltimatum', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DisconnectProviderUltimatum')))
_DomainMCSPDU_rejectMCSPDUUltimatum = SEQ(name=u'rejectMCSPDUUltimatum', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'RejectMCSPDUUltimatum')))
_DomainMCSPDU_attachUserRequest = SEQ(name=u'attachUserRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'AttachUserRequest')))
_DomainMCSPDU_attachUserConfirm = SEQ(name=u'attachUserConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'AttachUserConfirm')))
_DomainMCSPDU_detachUserRequest = SEQ(name=u'detachUserRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DetachUserRequest')))
_DomainMCSPDU_detachUserIndication = SEQ(name=u'detachUserIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'DetachUserIndication')))
_DomainMCSPDU_channelJoinRequest = SEQ(name=u'channelJoinRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelJoinRequest')))
_DomainMCSPDU_channelJoinConfirm = SEQ(name=u'channelJoinConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelJoinConfirm')))
_DomainMCSPDU_channelLeaveRequest = SEQ(name=u'channelLeaveRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelLeaveRequest')))
_DomainMCSPDU_channelConveneRequest = SEQ(name=u'channelConveneRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelConveneRequest')))
_DomainMCSPDU_channelConveneConfirm = SEQ(name=u'channelConveneConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelConveneConfirm')))
_DomainMCSPDU_channelDisbandRequest = SEQ(name=u'channelDisbandRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelDisbandRequest')))
_DomainMCSPDU_channelDisbandIndication = SEQ(name=u'channelDisbandIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelDisbandIndication')))
_DomainMCSPDU_channelAdmitRequest = SEQ(name=u'channelAdmitRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelAdmitRequest')))
_DomainMCSPDU_channelAdmitIndication = SEQ(name=u'channelAdmitIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelAdmitIndication')))
_DomainMCSPDU_channelExpelRequest = SEQ(name=u'channelExpelRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelExpelRequest')))
_DomainMCSPDU_channelExpelIndication = SEQ(name=u'channelExpelIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'ChannelExpelIndication')))
_DomainMCSPDU_sendDataRequest = SEQ(name=u'sendDataRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'SendDataRequest')))
_DomainMCSPDU_sendDataIndication = SEQ(name=u'sendDataIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'SendDataIndication')))
_DomainMCSPDU_uniformSendDataRequest = SEQ(name=u'uniformSendDataRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UniformSendDataRequest')))
_DomainMCSPDU_uniformSendDataIndication = SEQ(name=u'uniformSendDataIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'UniformSendDataIndication')))
_DomainMCSPDU_tokenGrabRequest = SEQ(name=u'tokenGrabRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenGrabRequest')))
_DomainMCSPDU_tokenGrabConfirm = SEQ(name=u'tokenGrabConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenGrabConfirm')))
_DomainMCSPDU_tokenInhibitRequest = SEQ(name=u'tokenInhibitRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenInhibitRequest')))
_DomainMCSPDU_tokenInhibitConfirm = SEQ(name=u'tokenInhibitConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenInhibitConfirm')))
_DomainMCSPDU_tokenGiveRequest = SEQ(name=u'tokenGiveRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenGiveRequest')))
_DomainMCSPDU_tokenGiveIndication = SEQ(name=u'tokenGiveIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenGiveIndication')))
_DomainMCSPDU_tokenGiveResponse = SEQ(name=u'tokenGiveResponse', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenGiveResponse')))
_DomainMCSPDU_tokenGiveConfirm = SEQ(name=u'tokenGiveConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenGiveConfirm')))
_DomainMCSPDU_tokenPleaseRequest = SEQ(name=u'tokenPleaseRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenPleaseRequest')))
_DomainMCSPDU_tokenPleaseIndication = SEQ(name=u'tokenPleaseIndication', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenPleaseIndication')))
_DomainMCSPDU_tokenReleaseRequest = SEQ(name=u'tokenReleaseRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenReleaseRequest')))
_DomainMCSPDU_tokenReleaseConfirm = SEQ(name=u'tokenReleaseConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenReleaseConfirm')))
_DomainMCSPDU_tokenTestRequest = SEQ(name=u'tokenTestRequest', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenTestRequest')))
_DomainMCSPDU_tokenTestConfirm = SEQ(name=u'tokenTestConfirm', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL', 'TokenTestConfirm')))
DomainMCSPDU._cont = ASN1Dict([
(u'plumbDomainIndication', _DomainMCSPDU_plumbDomainIndication),
(u'erectDomainRequest', _DomainMCSPDU_erectDomainRequest),
(u'mergeChannelsRequest', _DomainMCSPDU_mergeChannelsRequest),
(u'mergeChannelsConfirm', _DomainMCSPDU_mergeChannelsConfirm),
(u'purgeChannelsIndication', _DomainMCSPDU_purgeChannelsIndication),
(u'mergeTokensRequest', _DomainMCSPDU_mergeTokensRequest),
(u'mergeTokensConfirm', _DomainMCSPDU_mergeTokensConfirm),
(u'purgeTokensIndication', _DomainMCSPDU_purgeTokensIndication),
(u'disconnectProviderUltimatum', _DomainMCSPDU_disconnectProviderUltimatum),
(u'rejectMCSPDUUltimatum', _DomainMCSPDU_rejectMCSPDUUltimatum),
(u'attachUserRequest', _DomainMCSPDU_attachUserRequest),
(u'attachUserConfirm', _DomainMCSPDU_attachUserConfirm),
(u'detachUserRequest', _DomainMCSPDU_detachUserRequest),
(u'detachUserIndication', _DomainMCSPDU_detachUserIndication),
(u'channelJoinRequest', _DomainMCSPDU_channelJoinRequest),
(u'channelJoinConfirm', _DomainMCSPDU_channelJoinConfirm),
(u'channelLeaveRequest', _DomainMCSPDU_channelLeaveRequest),
(u'channelConveneRequest', _DomainMCSPDU_channelConveneRequest),
(u'channelConveneConfirm', _DomainMCSPDU_channelConveneConfirm),
(u'channelDisbandRequest', _DomainMCSPDU_channelDisbandRequest),
(u'channelDisbandIndication', _DomainMCSPDU_channelDisbandIndication),
(u'channelAdmitRequest', _DomainMCSPDU_channelAdmitRequest),
(u'channelAdmitIndication', _DomainMCSPDU_channelAdmitIndication),
(u'channelExpelRequest', _DomainMCSPDU_channelExpelRequest),
(u'channelExpelIndication', _DomainMCSPDU_channelExpelIndication),
(u'sendDataRequest', _DomainMCSPDU_sendDataRequest),
(u'sendDataIndication', _DomainMCSPDU_sendDataIndication),
(u'uniformSendDataRequest', _DomainMCSPDU_uniformSendDataRequest),
(u'uniformSendDataIndication', _DomainMCSPDU_uniformSendDataIndication),
(u'tokenGrabRequest', _DomainMCSPDU_tokenGrabRequest),
(u'tokenGrabConfirm', _DomainMCSPDU_tokenGrabConfirm),
(u'tokenInhibitRequest', _DomainMCSPDU_tokenInhibitRequest),
(u'tokenInhibitConfirm', _DomainMCSPDU_tokenInhibitConfirm),
(u'tokenGiveRequest', _DomainMCSPDU_tokenGiveRequest),
(u'tokenGiveIndication', _DomainMCSPDU_tokenGiveIndication),
(u'tokenGiveResponse', _DomainMCSPDU_tokenGiveResponse),
(u'tokenGiveConfirm', _DomainMCSPDU_tokenGiveConfirm),
(u'tokenPleaseRequest', _DomainMCSPDU_tokenPleaseRequest),
(u'tokenPleaseIndication', _DomainMCSPDU_tokenPleaseIndication),
(u'tokenReleaseRequest', _DomainMCSPDU_tokenReleaseRequest),
(u'tokenReleaseConfirm', _DomainMCSPDU_tokenReleaseConfirm),
(u'tokenTestRequest', _DomainMCSPDU_tokenTestRequest),
(u'tokenTestConfirm', _DomainMCSPDU_tokenTestConfirm),
])
DomainMCSPDU._ext = None
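#
# Illustrative sketch (editor's note, not compiler output): Domain PDUs are
# typically carried in aligned PER. With the pycrate runtime a BIT STRING
# value is a (uint, bit-length) 2-tuple, so a Segmentation with both 'begin'
# and 'end' set is (3, 2), and an ENUMERATED value is given by its
# identifier. The ids and payload below are hypothetical placeholders:
#
#   DomainMCSPDU.set_val(('sendDataRequest', {
#       'initiator': 1001,            # a UserId (dynamic channel range)
#       'channelId': 1003,
#       'dataPriority': 'high',
#       'segmentation': (3, 2),       # begin and end bits both set
#       'userData': b'payload'}))
#   buf = DomainMCSPDU.to_aper()      # aligned PER transfer syntax
#   DomainMCSPDU.from_aper(buf)
#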
_all_ = [
ChannelId,
StaticChannelId,
DynamicChannelId,
UserId,
PrivateChannelId,
AssignedChannelId,
TokenId,
TokenStatus,
DataPriority,
Segmentation,
_DomainParameters_maxChannelIds,
_DomainParameters_maxUserIds,
_DomainParameters_maxTokenIds,
_DomainParameters_numPriorities,
_DomainParameters_minThroughput,
_DomainParameters_maxHeight,
_DomainParameters_maxMCSPDUsize,
_DomainParameters_protocolVersion,
DomainParameters,
_Connect_Initial_callingDomainSelector,
_Connect_Initial_calledDomainSelector,
_Connect_Initial_upwardFlag,
_Connect_Initial_targetParameters,
_Connect_Initial_minimumParameters,
_Connect_Initial_maximumParameters,
_Connect_Initial_userData,
Connect_Initial,
_Connect_Response_result,
_Connect_Response_calledConnectId,
_Connect_Response_domainParameters,
_Connect_Response_userData,
Connect_Response,
_Connect_Additional_calledConnectId,
_Connect_Additional_dataPriority,
Connect_Additional,
_Connect_Result_result,
Connect_Result,
_PlumbDomainIndication_heightLimit,
PlumbDomainIndication,
_ErectDomainRequest_subHeight,
_ErectDomainRequest_subInterval,
ErectDomainRequest,
__ChannelAttributes_static_channelId,
_ChannelAttributes_static,
__ChannelAttributes_userId_joined,
__ChannelAttributes_userId_userId,
_ChannelAttributes_userId,
__ChannelAttributes_private_joined,
__ChannelAttributes_private_channelId,
__ChannelAttributes_private_manager,
___ChannelAttributes_private_admitted__item_,
__ChannelAttributes_private_admitted,
_ChannelAttributes_private,
__ChannelAttributes_assigned_channelId,
_ChannelAttributes_assigned,
ChannelAttributes,
__MergeChannelsRequest_mergeChannels__item_,
_MergeChannelsRequest_mergeChannels,
__MergeChannelsRequest_purgeChannelIds__item_,
_MergeChannelsRequest_purgeChannelIds,
MergeChannelsRequest,
__MergeChannelsConfirm_mergeChannels__item_,
_MergeChannelsConfirm_mergeChannels,
__MergeChannelsConfirm_purgeChannelIds__item_,
_MergeChannelsConfirm_purgeChannelIds,
MergeChannelsConfirm,
__PurgeChannelsIndication_detachUserIds__item_,
_PurgeChannelsIndication_detachUserIds,
__PurgeChannelsIndication_purgeChannelIds__item_,
_PurgeChannelsIndication_purgeChannelIds,
PurgeChannelsIndication,
__TokenAttributes_grabbed_tokenId,
__TokenAttributes_grabbed_grabber,
_TokenAttributes_grabbed,
__TokenAttributes_inhibited_tokenId,
___TokenAttributes_inhibited_inhibitors__item_,
__TokenAttributes_inhibited_inhibitors,
_TokenAttributes_inhibited,
__TokenAttributes_giving_tokenId,
__TokenAttributes_giving_grabber,
__TokenAttributes_giving_recipient,
_TokenAttributes_giving,
__TokenAttributes_ungivable_tokenId,
__TokenAttributes_ungivable_grabber,
_TokenAttributes_ungivable,
__TokenAttributes_given_tokenId,
__TokenAttributes_given_recipient,
_TokenAttributes_given,
TokenAttributes,
__MergeTokensRequest_mergeTokens__item_,
_MergeTokensRequest_mergeTokens,
__MergeTokensRequest_purgeTokenIds__item_,
_MergeTokensRequest_purgeTokenIds,
MergeTokensRequest,
__MergeTokensConfirm_mergeTokens__item_,
_MergeTokensConfirm_mergeTokens,
__MergeTokensConfirm_purgeTokenIds__item_,
_MergeTokensConfirm_purgeTokenIds,
MergeTokensConfirm,
__PurgeTokensIndication_purgeTokenIds__item_,
_PurgeTokensIndication_purgeTokenIds,
PurgeTokensIndication,
_DisconnectProviderUltimatum_reason,
DisconnectProviderUltimatum,
_RejectMCSPDUUltimatum_diagnostic,
_RejectMCSPDUUltimatum_initialOctets,
RejectMCSPDUUltimatum,
AttachUserRequest,
_AttachUserConfirm_result,
_AttachUserConfirm_initiator,
AttachUserConfirm,
_DetachUserRequest_reason,
__DetachUserRequest_userIds__item_,
_DetachUserRequest_userIds,
DetachUserRequest,
_DetachUserIndication_reason,
__DetachUserIndication_userIds__item_,
_DetachUserIndication_userIds,
DetachUserIndication,
_ChannelJoinRequest_initiator,
_ChannelJoinRequest_channelId,
ChannelJoinRequest,
_ChannelJoinConfirm_result,
_ChannelJoinConfirm_initiator,
_ChannelJoinConfirm_requested,
_ChannelJoinConfirm_channelId,
ChannelJoinConfirm,
__ChannelLeaveRequest_channelIds__item_,
_ChannelLeaveRequest_channelIds,
ChannelLeaveRequest,
_ChannelConveneRequest_initiator,
ChannelConveneRequest,
_ChannelConveneConfirm_result,
_ChannelConveneConfirm_initiator,
_ChannelConveneConfirm_channelId,
ChannelConveneConfirm,
_ChannelDisbandRequest_initiator,
_ChannelDisbandRequest_channelId,
ChannelDisbandRequest,
_ChannelDisbandIndication_channelId,
ChannelDisbandIndication,
_ChannelAdmitRequest_initiator,
_ChannelAdmitRequest_channelId,
__ChannelAdmitRequest_userIds__item_,
_ChannelAdmitRequest_userIds,
ChannelAdmitRequest,
_ChannelAdmitIndication_initiator,
_ChannelAdmitIndication_channelId,
__ChannelAdmitIndication_userIds__item_,
_ChannelAdmitIndication_userIds,
ChannelAdmitIndication,
_ChannelExpelRequest_initiator,
_ChannelExpelRequest_channelId,
__ChannelExpelRequest_userIds__item_,
_ChannelExpelRequest_userIds,
ChannelExpelRequest,
_ChannelExpelIndication_channelId,
__ChannelExpelIndication_userIds__item_,
_ChannelExpelIndication_userIds,
ChannelExpelIndication,
_SendDataRequest_initiator,
_SendDataRequest_channelId,
_SendDataRequest_dataPriority,
_SendDataRequest_segmentation,
_SendDataRequest_userData,
SendDataRequest,
_SendDataIndication_initiator,
_SendDataIndication_channelId,
_SendDataIndication_dataPriority,
_SendDataIndication_segmentation,
_SendDataIndication_userData,
SendDataIndication,
_UniformSendDataRequest_initiator,
_UniformSendDataRequest_channelId,
_UniformSendDataRequest_dataPriority,
_UniformSendDataRequest_segmentation,
_UniformSendDataRequest_userData,
UniformSendDataRequest,
_UniformSendDataIndication_initiator,
_UniformSendDataIndication_channelId,
_UniformSendDataIndication_dataPriority,
_UniformSendDataIndication_segmentation,
_UniformSendDataIndication_userData,
UniformSendDataIndication,
_TokenGrabRequest_initiator,
_TokenGrabRequest_tokenId,
TokenGrabRequest,
_TokenGrabConfirm_result,
_TokenGrabConfirm_initiator,
_TokenGrabConfirm_tokenId,
_TokenGrabConfirm_tokenStatus,
TokenGrabConfirm,
_TokenInhibitRequest_initiator,
_TokenInhibitRequest_tokenId,
TokenInhibitRequest,
_TokenInhibitConfirm_result,
_TokenInhibitConfirm_initiator,
_TokenInhibitConfirm_tokenId,
_TokenInhibitConfirm_tokenStatus,
TokenInhibitConfirm,
_TokenGiveRequest_initiator,
_TokenGiveRequest_tokenId,
_TokenGiveRequest_recipient,
TokenGiveRequest,
_TokenGiveIndication_initiator,
_TokenGiveIndication_tokenId,
_TokenGiveIndication_recipient,
TokenGiveIndication,
_TokenGiveResponse_result,
_TokenGiveResponse_recipient,
_TokenGiveResponse_tokenId,
TokenGiveResponse,
_TokenGiveConfirm_result,
_TokenGiveConfirm_initiator,
_TokenGiveConfirm_tokenId,
_TokenGiveConfirm_tokenStatus,
TokenGiveConfirm,
_TokenPleaseRequest_initiator,
_TokenPleaseRequest_tokenId,
TokenPleaseRequest,
_TokenPleaseIndication_initiator,
_TokenPleaseIndication_tokenId,
TokenPleaseIndication,
_TokenReleaseRequest_initiator,
_TokenReleaseRequest_tokenId,
TokenReleaseRequest,
_TokenReleaseConfirm_result,
_TokenReleaseConfirm_initiator,
_TokenReleaseConfirm_tokenId,
_TokenReleaseConfirm_tokenStatus,
TokenReleaseConfirm,
_TokenTestRequest_initiator,
_TokenTestRequest_tokenId,
TokenTestRequest,
_TokenTestConfirm_initiator,
_TokenTestConfirm_tokenId,
_TokenTestConfirm_tokenStatus,
TokenTestConfirm,
Reason,
Result,
Diagnostic,
_ConnectMCSPDU_connect_initial,
_ConnectMCSPDU_connect_response,
_ConnectMCSPDU_connect_additional,
_ConnectMCSPDU_connect_result,
ConnectMCSPDU,
_DomainMCSPDU_plumbDomainIndication,
_DomainMCSPDU_erectDomainRequest,
_DomainMCSPDU_mergeChannelsRequest,
_DomainMCSPDU_mergeChannelsConfirm,
_DomainMCSPDU_purgeChannelsIndication,
_DomainMCSPDU_mergeTokensRequest,
_DomainMCSPDU_mergeTokensConfirm,
_DomainMCSPDU_purgeTokensIndication,
_DomainMCSPDU_disconnectProviderUltimatum,
_DomainMCSPDU_rejectMCSPDUUltimatum,
_DomainMCSPDU_attachUserRequest,
_DomainMCSPDU_attachUserConfirm,
_DomainMCSPDU_detachUserRequest,
_DomainMCSPDU_detachUserIndication,
_DomainMCSPDU_channelJoinRequest,
_DomainMCSPDU_channelJoinConfirm,
_DomainMCSPDU_channelLeaveRequest,
_DomainMCSPDU_channelConveneRequest,
_DomainMCSPDU_channelConveneConfirm,
_DomainMCSPDU_channelDisbandRequest,
_DomainMCSPDU_channelDisbandIndication,
_DomainMCSPDU_channelAdmitRequest,
_DomainMCSPDU_channelAdmitIndication,
_DomainMCSPDU_channelExpelRequest,
_DomainMCSPDU_channelExpelIndication,
_DomainMCSPDU_sendDataRequest,
_DomainMCSPDU_sendDataIndication,
_DomainMCSPDU_uniformSendDataRequest,
_DomainMCSPDU_uniformSendDataIndication,
_DomainMCSPDU_tokenGrabRequest,
_DomainMCSPDU_tokenGrabConfirm,
_DomainMCSPDU_tokenInhibitRequest,
_DomainMCSPDU_tokenInhibitConfirm,
_DomainMCSPDU_tokenGiveRequest,
_DomainMCSPDU_tokenGiveIndication,
_DomainMCSPDU_tokenGiveResponse,
_DomainMCSPDU_tokenGiveConfirm,
_DomainMCSPDU_tokenPleaseRequest,
_DomainMCSPDU_tokenPleaseIndication,
_DomainMCSPDU_tokenReleaseRequest,
_DomainMCSPDU_tokenReleaseConfirm,
_DomainMCSPDU_tokenTestRequest,
_DomainMCSPDU_tokenTestConfirm,
DomainMCSPDU,
]
class MCS_PROTOCOL_3:
_name_ = u'MCS-PROTOCOL-3'
_oid_ = []
_obj_ = [
u'H221NonStandardIdentifier',
u'Key',
u'NonStandardParameter',
u'ChannelId',
u'StaticChannelId',
u'DynamicChannelId',
u'UserId',
u'PrivateChannelId',
u'AssignedChannelId',
u'TokenId',
u'TokenStatus',
u'DataPriority',
u'Segmentation',
u'ExtendedParameters',
u'ExtendedParameterPropose',
u'ExtendedParameterAccept',
u'PlumbDomainIndication',
u'ErectDomainRequest',
u'ChannelAttributes',
u'MergeChannelsRequest',
u'MergeChannelsConfirm',
u'PurgeChannelsIndication',
u'TokenAttributes',
u'MergeTokensRequest',
u'MergeTokensConfirm',
u'PurgeTokensIndication',
u'DisconnectProviderUltimatum',
u'RejectMCSPDUUltimatum',
u'AttachUserRequest',
u'AttachUserConfirm',
u'DetachUserRequest',
u'DetachUserIndication',
u'ChannelJoinRequest',
u'ChannelJoinConfirm',
u'ChannelLeaveRequest',
u'ChannelConveneRequest',
u'ChannelConveneConfirm',
u'ChannelDisbandRequest',
u'ChannelDisbandIndication',
u'ChannelAdmitRequest',
u'ChannelAdmitIndication',
u'ChannelExpelRequest',
u'ChannelExpelIndication',
u'SendDataRequest',
u'SendDataIndication',
u'UniformSendDataRequest',
u'UniformSendDataIndication',
u'TokenGrabRequest',
u'TokenGrabConfirm',
u'TokenInhibitRequest',
u'TokenInhibitConfirm',
u'TokenGiveRequest',
u'TokenGiveIndication',
u'TokenGiveResponse',
u'TokenGiveConfirm',
u'TokenPleaseRequest',
u'TokenPleaseIndication',
u'TokenReleaseRequest',
u'TokenReleaseConfirm',
u'TokenTestRequest',
u'TokenTestConfirm',
u'CapabilityID',
u'CapabilityClass',
u'ParticipationIndicator',
u'RequestCapability',
u'SeqOfRequestCapabilities',
u'IndicationCapability',
u'SeqOfIndicationCapabilities',
u'CapabilitiesNotificationRequest',
u'CapabilitiesNotificationIndication',
u'Reason',
u'Result',
u'Diagnostic',
u'NonStandardPDU',
u'ExtendedParameterMCSPDU',
u'DomainMCSPDU',
]
_type_ = [
u'H221NonStandardIdentifier',
u'Key',
u'NonStandardParameter',
u'ChannelId',
u'StaticChannelId',
u'DynamicChannelId',
u'UserId',
u'PrivateChannelId',
u'AssignedChannelId',
u'TokenId',
u'TokenStatus',
u'DataPriority',
u'Segmentation',
u'ExtendedParameters',
u'ExtendedParameterPropose',
u'ExtendedParameterAccept',
u'PlumbDomainIndication',
u'ErectDomainRequest',
u'ChannelAttributes',
u'MergeChannelsRequest',
u'MergeChannelsConfirm',
u'PurgeChannelsIndication',
u'TokenAttributes',
u'MergeTokensRequest',
u'MergeTokensConfirm',
u'PurgeTokensIndication',
u'DisconnectProviderUltimatum',
u'RejectMCSPDUUltimatum',
u'AttachUserRequest',
u'AttachUserConfirm',
u'DetachUserRequest',
u'DetachUserIndication',
u'ChannelJoinRequest',
u'ChannelJoinConfirm',
u'ChannelLeaveRequest',
u'ChannelConveneRequest',
u'ChannelConveneConfirm',
u'ChannelDisbandRequest',
u'ChannelDisbandIndication',
u'ChannelAdmitRequest',
u'ChannelAdmitIndication',
u'ChannelExpelRequest',
u'ChannelExpelIndication',
u'SendDataRequest',
u'SendDataIndication',
u'UniformSendDataRequest',
u'UniformSendDataIndication',
u'TokenGrabRequest',
u'TokenGrabConfirm',
u'TokenInhibitRequest',
u'TokenInhibitConfirm',
u'TokenGiveRequest',
u'TokenGiveIndication',
u'TokenGiveResponse',
u'TokenGiveConfirm',
u'TokenPleaseRequest',
u'TokenPleaseIndication',
u'TokenReleaseRequest',
u'TokenReleaseConfirm',
u'TokenTestRequest',
u'TokenTestConfirm',
u'CapabilityID',
u'CapabilityClass',
u'ParticipationIndicator',
u'RequestCapability',
u'SeqOfRequestCapabilities',
u'IndicationCapability',
u'SeqOfIndicationCapabilities',
u'CapabilitiesNotificationRequest',
u'CapabilitiesNotificationIndication',
u'Reason',
u'Result',
u'Diagnostic',
u'NonStandardPDU',
u'ExtendedParameterMCSPDU',
u'DomainMCSPDU',
]
_set_ = [
]
_val_ = [
]
_class_ = [
]
_param_ = [
]
#-----< H221NonStandardIdentifier >-----#
H221NonStandardIdentifier = OCT_STR(name=u'H221NonStandardIdentifier', mode=MODE_TYPE)
H221NonStandardIdentifier._const_sz = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=4, ub=255)], ev=None, er=[])
#-----< Key >-----#
Key = CHOICE(name=u'Key', mode=MODE_TYPE)
_Key_object = OID(name=u'object', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Key_h221NonStandard = OCT_STR(name=u'h221NonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'H221NonStandardIdentifier')))
Key._cont = ASN1Dict([
(u'object', _Key_object),
(u'h221NonStandard', _Key_h221NonStandard),
])
Key._ext = None
#-----< NonStandardParameter >-----#
NonStandardParameter = SEQ(name=u'NonStandardParameter', mode=MODE_TYPE)
_NonStandardParameter_key = CHOICE(name=u'key', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Key')))
_NonStandardParameter_data = OCT_STR(name=u'data', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
NonStandardParameter._cont = ASN1Dict([
(u'key', _NonStandardParameter_key),
(u'data', _NonStandardParameter_data),
])
NonStandardParameter._ext = None
#-----< ChannelId >-----#
ChannelId = INT(name=u'ChannelId', mode=MODE_TYPE)
ChannelId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
#-----< StaticChannelId >-----#
StaticChannelId = INT(name=u'StaticChannelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
StaticChannelId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1, ub=1000)], ev=None, er=[])
#-----< DynamicChannelId >-----#
DynamicChannelId = INT(name=u'DynamicChannelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
DynamicChannelId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1001, ub=65535)], ev=None, er=[])
#-----< UserId >-----#
UserId = INT(name=u'UserId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DynamicChannelId')))
#-----< PrivateChannelId >-----#
PrivateChannelId = INT(name=u'PrivateChannelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DynamicChannelId')))
#-----< AssignedChannelId >-----#
AssignedChannelId = INT(name=u'AssignedChannelId', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DynamicChannelId')))
#-----< TokenId >-----#
TokenId = INT(name=u'TokenId', mode=MODE_TYPE)
TokenId._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1, ub=65535)], ev=None, er=[])
#-----< TokenStatus >-----#
TokenStatus = CHOICE(name=u'TokenStatus', mode=MODE_TYPE)
_TokenStatus_notInUse = NULL(name=u'notInUse', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TokenStatus_selfGrabbed = NULL(name=u'selfGrabbed', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TokenStatus_otherGrabbed = NULL(name=u'otherGrabbed', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TokenStatus_selfInhibited = NULL(name=u'selfInhibited', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TokenStatus_otherInhibited = NULL(name=u'otherInhibited', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TokenStatus_selfRecipient = NULL(name=u'selfRecipient', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TokenStatus_selfGiving = NULL(name=u'selfGiving', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TokenStatus_otherGiving = NULL(name=u'otherGiving', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
TokenStatus._cont = ASN1Dict([
(u'notInUse', _TokenStatus_notInUse),
(u'selfGrabbed', _TokenStatus_selfGrabbed),
(u'otherGrabbed', _TokenStatus_otherGrabbed),
(u'selfInhibited', _TokenStatus_selfInhibited),
(u'otherInhibited', _TokenStatus_otherInhibited),
(u'selfRecipient', _TokenStatus_selfRecipient),
(u'selfGiving', _TokenStatus_selfGiving),
(u'otherGiving', _TokenStatus_otherGiving),
])
TokenStatus._ext = []
#-----< DataPriority >-----#
DataPriority = CHOICE(name=u'DataPriority', mode=MODE_TYPE)
_DataPriority_top = NULL(name=u'top', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataPriority_high = NULL(name=u'high', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataPriority_medium = NULL(name=u'medium', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataPriority_low = NULL(name=u'low', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
DataPriority._cont = ASN1Dict([
(u'top', _DataPriority_top),
(u'high', _DataPriority_high),
(u'medium', _DataPriority_medium),
(u'low', _DataPriority_low),
])
DataPriority._ext = []
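#
# Illustrative sketch (editor's note, not compiler output): unlike the
# ENUMERATED DataPriority of MCS-PROTOCOL above, this revision models the
# priorities as a CHOICE of NULL alternatives. With the pycrate runtime a
# NULL value is the integer 0, so selecting the 'high' alternative looks
# like:
#
#   DataPriority.set_val(('high', 0))
#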
#-----< Segmentation >-----#
Segmentation = BIT_STR(name=u'Segmentation', mode=MODE_TYPE)
Segmentation._cont = ASN1Dict([(u'begin', 0), (u'end', 1)])
Segmentation._const_sz = ASN1Set(rv=[2], rr=[], ev=None, er=[])
#-----< ExtendedParameters >-----#
ExtendedParameters = SEQ(name=u'ExtendedParameters', mode=MODE_TYPE)
_ExtendedParameters_unreliableDataSupported = BOOL(name=u'unreliableDataSupported', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_ExtendedParameters_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_ExtendedParameters_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_ExtendedParameters_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ExtendedParameters_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ExtendedParameters_nonStandard._cont = __ExtendedParameters_nonStandard__item_
ExtendedParameters._cont = ASN1Dict([
(u'unreliableDataSupported', _ExtendedParameters_unreliableDataSupported),
(u'domainReferenceID', _ExtendedParameters_domainReferenceID),
(u'nonStandard', _ExtendedParameters_nonStandard),
])
ExtendedParameters._ext = []
#-----< ExtendedParameterPropose >-----#
ExtendedParameterPropose = SEQ(name=u'ExtendedParameterPropose', mode=MODE_TYPE)
_ExtendedParameterPropose_targetExtendedParameters = SEQ(name=u'targetExtendedParameters', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ExtendedParameters')))
_ExtendedParameterPropose_minimumExtendedParameters = SEQ(name=u'minimumExtendedParameters', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ExtendedParameters')))
_ExtendedParameterPropose_maximumExtendedParameters = SEQ(name=u'maximumExtendedParameters', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ExtendedParameters')))
_ExtendedParameterPropose_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ExtendedParameterPropose_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ExtendedParameterPropose_nonStandard._cont = __ExtendedParameterPropose_nonStandard__item_
ExtendedParameterPropose._cont = ASN1Dict([
(u'targetExtendedParameters', _ExtendedParameterPropose_targetExtendedParameters),
(u'minimumExtendedParameters', _ExtendedParameterPropose_minimumExtendedParameters),
(u'maximumExtendedParameters', _ExtendedParameterPropose_maximumExtendedParameters),
(u'nonStandard', _ExtendedParameterPropose_nonStandard),
])
ExtendedParameterPropose._ext = []
#-----< ExtendedParameterAccept >-----#
ExtendedParameterAccept = SEQ(name=u'ExtendedParameterAccept', mode=MODE_TYPE)
_ExtendedParameterAccept_extendedParameters = SEQ(name=u'extendedParameters', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ExtendedParameters')))
_ExtendedParameterAccept_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ExtendedParameterAccept_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ExtendedParameterAccept_nonStandard._cont = __ExtendedParameterAccept_nonStandard__item_
ExtendedParameterAccept._cont = ASN1Dict([
(u'extendedParameters', _ExtendedParameterAccept_extendedParameters),
(u'nonStandard', _ExtendedParameterAccept_nonStandard),
])
ExtendedParameterAccept._ext = []
#-----< PlumbDomainIndication >-----#
PlumbDomainIndication = SEQ(name=u'PlumbDomainIndication', mode=MODE_TYPE)
_PlumbDomainIndication_heightLimit = INT(name=u'heightLimit', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_PlumbDomainIndication_heightLimit._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_PlumbDomainIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__PlumbDomainIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_PlumbDomainIndication_nonStandard._cont = __PlumbDomainIndication_nonStandard__item_
PlumbDomainIndication._cont = ASN1Dict([
(u'heightLimit', _PlumbDomainIndication_heightLimit),
(u'nonStandard', _PlumbDomainIndication_nonStandard),
])
PlumbDomainIndication._ext = []
#-----< ErectDomainRequest >-----#
ErectDomainRequest = SEQ(name=u'ErectDomainRequest', mode=MODE_TYPE)
_ErectDomainRequest_subHeight = INT(name=u'subHeight', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_ErectDomainRequest_subHeight._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_ErectDomainRequest_subInterval = INT(name=u'subInterval', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_ErectDomainRequest_subInterval._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_ErectDomainRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ErectDomainRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ErectDomainRequest_nonStandard._cont = __ErectDomainRequest_nonStandard__item_
ErectDomainRequest._cont = ASN1Dict([
(u'subHeight', _ErectDomainRequest_subHeight),
(u'subInterval', _ErectDomainRequest_subInterval),
(u'nonStandard', _ErectDomainRequest_nonStandard),
])
ErectDomainRequest._ext = []
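#
# Illustrative sketch (editor's note, not compiler output): in this revision
# OPTIONAL components such as 'nonStandard' are simply omitted from the value
# dict when absent, and the extensibility markers (_ext = []) make the types
# extensible, which matters for PER encodings. The heights below are
# hypothetical placeholders:
#
#   ErectDomainRequest.set_val({'subHeight': 0, 'subInterval': 0})
#   buf = ErectDomainRequest.to_aper()
#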
#-----< ChannelAttributes >-----#
ChannelAttributes = CHOICE(name=u'ChannelAttributes', mode=MODE_TYPE)
_ChannelAttributes_static = SEQ(name=u'static', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_static_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'StaticChannelId')))
__ChannelAttributes_static_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___ChannelAttributes_static_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__ChannelAttributes_static_nonStandard._cont = ___ChannelAttributes_static_nonStandard__item_
_ChannelAttributes_static._cont = ASN1Dict([
(u'channelId', __ChannelAttributes_static_channelId),
(u'nonStandard', __ChannelAttributes_static_nonStandard),
])
_ChannelAttributes_static._ext = []
_ChannelAttributes_userId = SEQ(name=u'userId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_userId_joined = BOOL(name=u'joined', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_userId_userId = INT(name=u'userId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__ChannelAttributes_userId_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___ChannelAttributes_userId_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__ChannelAttributes_userId_nonStandard._cont = ___ChannelAttributes_userId_nonStandard__item_
_ChannelAttributes_userId._cont = ASN1Dict([
(u'joined', __ChannelAttributes_userId_joined),
(u'userId', __ChannelAttributes_userId_userId),
(u'nonStandard', __ChannelAttributes_userId_nonStandard),
])
_ChannelAttributes_userId._ext = []
_ChannelAttributes_private = SEQ(name=u'private', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_private_joined = BOOL(name=u'joined', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_private_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PrivateChannelId')))
__ChannelAttributes_private_manager = INT(name=u'manager', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__ChannelAttributes_private_admitted = SET_OF(name=u'admitted', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
___ChannelAttributes_private_admitted__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__ChannelAttributes_private_admitted._cont = ___ChannelAttributes_private_admitted__item_
__ChannelAttributes_private_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___ChannelAttributes_private_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__ChannelAttributes_private_nonStandard._cont = ___ChannelAttributes_private_nonStandard__item_
_ChannelAttributes_private._cont = ASN1Dict([
(u'joined', __ChannelAttributes_private_joined),
(u'channelId', __ChannelAttributes_private_channelId),
(u'manager', __ChannelAttributes_private_manager),
(u'admitted', __ChannelAttributes_private_admitted),
(u'nonStandard', __ChannelAttributes_private_nonStandard),
])
_ChannelAttributes_private._ext = []
_ChannelAttributes_assigned = SEQ(name=u'assigned', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAttributes_assigned_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'AssignedChannelId')))
__ChannelAttributes_assigned_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___ChannelAttributes_assigned_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__ChannelAttributes_assigned_nonStandard._cont = ___ChannelAttributes_assigned_nonStandard__item_
_ChannelAttributes_assigned._cont = ASN1Dict([
(u'channelId', __ChannelAttributes_assigned_channelId),
(u'nonStandard', __ChannelAttributes_assigned_nonStandard),
])
_ChannelAttributes_assigned._ext = []
ChannelAttributes._cont = ASN1Dict([
(u'static', _ChannelAttributes_static),
(u'userId', _ChannelAttributes_userId),
(u'private', _ChannelAttributes_private),
(u'assigned', _ChannelAttributes_assigned),
])
ChannelAttributes._ext = []
#-----< MergeChannelsRequest >-----#
MergeChannelsRequest = SEQ(name=u'MergeChannelsRequest', mode=MODE_TYPE)
_MergeChannelsRequest_mergeChannels = SET_OF(name=u'mergeChannels', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MergeChannelsRequest_mergeChannels__item_ = CHOICE(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelAttributes')))
_MergeChannelsRequest_mergeChannels._cont = __MergeChannelsRequest_mergeChannels__item_
_MergeChannelsRequest_purgeChannelIds = SET_OF(name=u'purgeChannelIds', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MergeChannelsRequest_purgeChannelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_MergeChannelsRequest_purgeChannelIds._cont = __MergeChannelsRequest_purgeChannelIds__item_
_MergeChannelsRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MergeChannelsRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_MergeChannelsRequest_nonStandard._cont = __MergeChannelsRequest_nonStandard__item_
MergeChannelsRequest._cont = ASN1Dict([
(u'mergeChannels', _MergeChannelsRequest_mergeChannels),
(u'purgeChannelIds', _MergeChannelsRequest_purgeChannelIds),
(u'nonStandard', _MergeChannelsRequest_nonStandard),
])
MergeChannelsRequest._ext = []
#-----< MergeChannelsConfirm >-----#
MergeChannelsConfirm = SEQ(name=u'MergeChannelsConfirm', mode=MODE_TYPE)
_MergeChannelsConfirm_mergeChannels = SET_OF(name=u'mergeChannels', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MergeChannelsConfirm_mergeChannels__item_ = CHOICE(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelAttributes')))
_MergeChannelsConfirm_mergeChannels._cont = __MergeChannelsConfirm_mergeChannels__item_
_MergeChannelsConfirm_purgeChannelIds = SET_OF(name=u'purgeChannelIds', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MergeChannelsConfirm_purgeChannelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_MergeChannelsConfirm_purgeChannelIds._cont = __MergeChannelsConfirm_purgeChannelIds__item_
_MergeChannelsConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MergeChannelsConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_MergeChannelsConfirm_nonStandard._cont = __MergeChannelsConfirm_nonStandard__item_
MergeChannelsConfirm._cont = ASN1Dict([
(u'mergeChannels', _MergeChannelsConfirm_mergeChannels),
(u'purgeChannelIds', _MergeChannelsConfirm_purgeChannelIds),
(u'nonStandard', _MergeChannelsConfirm_nonStandard),
])
MergeChannelsConfirm._ext = []
#-----< PurgeChannelsIndication >-----#
PurgeChannelsIndication = SEQ(name=u'PurgeChannelsIndication', mode=MODE_TYPE)
_PurgeChannelsIndication_detachChannelIds = SET_OF(name=u'detachChannelIds', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__PurgeChannelsIndication_detachChannelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_PurgeChannelsIndication_detachChannelIds._cont = __PurgeChannelsIndication_detachChannelIds__item_
_PurgeChannelsIndication_purgeChannelIds = SET_OF(name=u'purgeChannelIds', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__PurgeChannelsIndication_purgeChannelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_PurgeChannelsIndication_purgeChannelIds._cont = __PurgeChannelsIndication_purgeChannelIds__item_
_PurgeChannelsIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__PurgeChannelsIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_PurgeChannelsIndication_nonStandard._cont = __PurgeChannelsIndication_nonStandard__item_
PurgeChannelsIndication._cont = ASN1Dict([
(u'detachChannelIds', _PurgeChannelsIndication_detachChannelIds),
(u'purgeChannelIds', _PurgeChannelsIndication_purgeChannelIds),
(u'nonStandard', _PurgeChannelsIndication_nonStandard),
])
PurgeChannelsIndication._ext = []
#-----< TokenAttributes >-----#
TokenAttributes = CHOICE(name=u'TokenAttributes', mode=MODE_TYPE)
_TokenAttributes_grabbed = SEQ(name=u'grabbed', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_grabbed_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
__TokenAttributes_grabbed_grabber = INT(name=u'grabber', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__TokenAttributes_grabbed_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___TokenAttributes_grabbed_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__TokenAttributes_grabbed_nonStandard._cont = ___TokenAttributes_grabbed_nonStandard__item_
_TokenAttributes_grabbed._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_grabbed_tokenId),
(u'grabber', __TokenAttributes_grabbed_grabber),
(u'nonStandard', __TokenAttributes_grabbed_nonStandard),
])
_TokenAttributes_grabbed._ext = []
_TokenAttributes_inhibited = SEQ(name=u'inhibited', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_inhibited_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
__TokenAttributes_inhibited_inhibitors = SET_OF(name=u'inhibitors', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
___TokenAttributes_inhibited_inhibitors__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__TokenAttributes_inhibited_inhibitors._cont = ___TokenAttributes_inhibited_inhibitors__item_
__TokenAttributes_inhibited_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___TokenAttributes_inhibited_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__TokenAttributes_inhibited_nonStandard._cont = ___TokenAttributes_inhibited_nonStandard__item_
_TokenAttributes_inhibited._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_inhibited_tokenId),
(u'inhibitors', __TokenAttributes_inhibited_inhibitors),
(u'nonStandard', __TokenAttributes_inhibited_nonStandard),
])
_TokenAttributes_inhibited._ext = []
_TokenAttributes_giving = SEQ(name=u'giving', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_giving_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
__TokenAttributes_giving_grabber = INT(name=u'grabber', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__TokenAttributes_giving_recipient = INT(name=u'recipient', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__TokenAttributes_giving_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___TokenAttributes_giving_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__TokenAttributes_giving_nonStandard._cont = ___TokenAttributes_giving_nonStandard__item_
_TokenAttributes_giving._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_giving_tokenId),
(u'grabber', __TokenAttributes_giving_grabber),
(u'recipient', __TokenAttributes_giving_recipient),
(u'nonStandard', __TokenAttributes_giving_nonStandard),
])
_TokenAttributes_giving._ext = []
_TokenAttributes_ungivable = SEQ(name=u'ungivable', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_ungivable_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
__TokenAttributes_ungivable_grabber = INT(name=u'grabber', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__TokenAttributes_ungivable_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___TokenAttributes_ungivable_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__TokenAttributes_ungivable_nonStandard._cont = ___TokenAttributes_ungivable_nonStandard__item_
_TokenAttributes_ungivable._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_ungivable_tokenId),
(u'grabber', __TokenAttributes_ungivable_grabber),
(u'nonStandard', __TokenAttributes_ungivable_nonStandard),
])
_TokenAttributes_ungivable._ext = []
_TokenAttributes_given = SEQ(name=u'given', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__TokenAttributes_given_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
__TokenAttributes_given_recipient = INT(name=u'recipient', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
__TokenAttributes_given_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
___TokenAttributes_given_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
__TokenAttributes_given_nonStandard._cont = ___TokenAttributes_given_nonStandard__item_
_TokenAttributes_given._cont = ASN1Dict([
(u'tokenId', __TokenAttributes_given_tokenId),
(u'recipient', __TokenAttributes_given_recipient),
(u'nonStandard', __TokenAttributes_given_nonStandard),
])
_TokenAttributes_given._ext = []
TokenAttributes._cont = ASN1Dict([
(u'grabbed', _TokenAttributes_grabbed),
(u'inhibited', _TokenAttributes_inhibited),
(u'giving', _TokenAttributes_giving),
(u'ungivable', _TokenAttributes_ungivable),
(u'given', _TokenAttributes_given),
])
TokenAttributes._ext = []
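# Sketch of TokenAttributes values (hand-written): each alternative of the
# CHOICE is itself a SEQUENCE, so the value is a 2-tuple carrying a dict;
# token and user numbers are made up for illustration.
#
#   TokenAttributes.set_val(('grabbed', {'tokenId': 1, 'grabber': 1001}))
#   TokenAttributes.set_val(('inhibited', {'tokenId': 1, 'inhibitors': [1001, 1002]}))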
#-----< MergeTokensRequest >-----#
MergeTokensRequest = SEQ(name=u'MergeTokensRequest', mode=MODE_TYPE)
_MergeTokensRequest_mergeTokens = SET_OF(name=u'mergeTokens', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MergeTokensRequest_mergeTokens__item_ = CHOICE(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenAttributes')))
_MergeTokensRequest_mergeTokens._cont = __MergeTokensRequest_mergeTokens__item_
_MergeTokensRequest_purgeTokenIds = SET_OF(name=u'purgeTokenIds', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MergeTokensRequest_purgeTokenIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_MergeTokensRequest_purgeTokenIds._cont = __MergeTokensRequest_purgeTokenIds__item_
_MergeTokensRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MergeTokensRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_MergeTokensRequest_nonStandard._cont = __MergeTokensRequest_nonStandard__item_
MergeTokensRequest._cont = ASN1Dict([
(u'mergeTokens', _MergeTokensRequest_mergeTokens),
(u'purgeTokenIds', _MergeTokensRequest_purgeTokenIds),
(u'nonStandard', _MergeTokensRequest_nonStandard),
])
MergeTokensRequest._ext = []
#-----< MergeTokensConfirm >-----#
MergeTokensConfirm = SEQ(name=u'MergeTokensConfirm', mode=MODE_TYPE)
_MergeTokensConfirm_mergeTokens = SET_OF(name=u'mergeTokens', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MergeTokensConfirm_mergeTokens__item_ = CHOICE(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenAttributes')))
_MergeTokensConfirm_mergeTokens._cont = __MergeTokensConfirm_mergeTokens__item_
_MergeTokensConfirm_purgeTokenIds = SET_OF(name=u'purgeTokenIds', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MergeTokensConfirm_purgeTokenIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_MergeTokensConfirm_purgeTokenIds._cont = __MergeTokensConfirm_purgeTokenIds__item_
_MergeTokensConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MergeTokensConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_MergeTokensConfirm_nonStandard._cont = __MergeTokensConfirm_nonStandard__item_
MergeTokensConfirm._cont = ASN1Dict([
(u'mergeTokens', _MergeTokensConfirm_mergeTokens),
(u'purgeTokenIds', _MergeTokensConfirm_purgeTokenIds),
(u'nonStandard', _MergeTokensConfirm_nonStandard),
])
MergeTokensConfirm._ext = []
#-----< PurgeTokensIndication >-----#
PurgeTokensIndication = SEQ(name=u'PurgeTokensIndication', mode=MODE_TYPE)
_PurgeTokensIndication_purgeTokenIds = SET_OF(name=u'purgeTokenIds', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__PurgeTokensIndication_purgeTokenIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_PurgeTokensIndication_purgeTokenIds._cont = __PurgeTokensIndication_purgeTokenIds__item_
_PurgeTokensIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__PurgeTokensIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_PurgeTokensIndication_nonStandard._cont = __PurgeTokensIndication_nonStandard__item_
PurgeTokensIndication._cont = ASN1Dict([
(u'purgeTokenIds', _PurgeTokensIndication_purgeTokenIds),
(u'nonStandard', _PurgeTokensIndication_nonStandard),
])
PurgeTokensIndication._ext = []
#-----< DisconnectProviderUltimatum >-----#
DisconnectProviderUltimatum = SEQ(name=u'DisconnectProviderUltimatum', mode=MODE_TYPE)
_DisconnectProviderUltimatum_reason = CHOICE(name=u'reason', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Reason')))
_DisconnectProviderUltimatum_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__DisconnectProviderUltimatum_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_DisconnectProviderUltimatum_nonStandard._cont = __DisconnectProviderUltimatum_nonStandard__item_
DisconnectProviderUltimatum._cont = ASN1Dict([
(u'reason', _DisconnectProviderUltimatum_reason),
(u'nonStandard', _DisconnectProviderUltimatum_nonStandard),
])
DisconnectProviderUltimatum._ext = []
#-----< RejectMCSPDUUltimatum >-----#
RejectMCSPDUUltimatum = SEQ(name=u'RejectMCSPDUUltimatum', mode=MODE_TYPE)
_RejectMCSPDUUltimatum_diagnostic = CHOICE(name=u'diagnostic', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Diagnostic')))
_RejectMCSPDUUltimatum_initialOctets = OCT_STR(name=u'initialOctets', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_RejectMCSPDUUltimatum_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__RejectMCSPDUUltimatum_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_RejectMCSPDUUltimatum_nonStandard._cont = __RejectMCSPDUUltimatum_nonStandard__item_
RejectMCSPDUUltimatum._cont = ASN1Dict([
(u'diagnostic', _RejectMCSPDUUltimatum_diagnostic),
(u'initialOctets', _RejectMCSPDUUltimatum_initialOctets),
(u'nonStandard', _RejectMCSPDUUltimatum_nonStandard),
])
RejectMCSPDUUltimatum._ext = []
#-----< AttachUserRequest >-----#
AttachUserRequest = SEQ(name=u'AttachUserRequest', mode=MODE_TYPE)
_AttachUserRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__AttachUserRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_AttachUserRequest_nonStandard._cont = __AttachUserRequest_nonStandard__item_
AttachUserRequest._cont = ASN1Dict([
(u'nonStandard', _AttachUserRequest_nonStandard),
])
AttachUserRequest._ext = []
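# Sketch (hand-written): AttachUserRequest only carries the OPTIONAL
# nonStandard list, so the empty dict is a complete, valid value.
#
#   AttachUserRequest.set_val({})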
#-----< AttachUserConfirm >-----#
AttachUserConfirm = SEQ(name=u'AttachUserConfirm', mode=MODE_TYPE)
_AttachUserConfirm_result = CHOICE(name=u'result', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Result')))
_AttachUserConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')), opt=True)
_AttachUserConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__AttachUserConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_AttachUserConfirm_nonStandard._cont = __AttachUserConfirm_nonStandard__item_
AttachUserConfirm._cont = ASN1Dict([
(u'result', _AttachUserConfirm_result),
(u'initiator', _AttachUserConfirm_initiator),
(u'nonStandard', _AttachUserConfirm_nonStandard),
])
AttachUserConfirm._ext = []
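# Sketch of an AttachUserConfirm value (hand-written): the result alternative
# name is an assumption taken from the Result CHOICE defined earlier in this
# module, and NULL alternatives carry the value 0 in pycrate; the OPTIONAL
# initiator is present only on a successful attach.
#
#   AttachUserConfirm.set_val({
#       'result': ('rt-successful', 0),   # alternative name assumed
#       'initiator': 1001,
#   })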
#-----< DetachUserRequest >-----#
DetachUserRequest = SEQ(name=u'DetachUserRequest', mode=MODE_TYPE)
_DetachUserRequest_reason = CHOICE(name=u'reason', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Reason')))
_DetachUserRequest_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__DetachUserRequest_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_DetachUserRequest_userIds._cont = __DetachUserRequest_userIds__item_
_DetachUserRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__DetachUserRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_DetachUserRequest_nonStandard._cont = __DetachUserRequest_nonStandard__item_
DetachUserRequest._cont = ASN1Dict([
(u'reason', _DetachUserRequest_reason),
(u'userIds', _DetachUserRequest_userIds),
(u'nonStandard', _DetachUserRequest_nonStandard),
])
DetachUserRequest._ext = []
#-----< DetachUserIndication >-----#
DetachUserIndication = SEQ(name=u'DetachUserIndication', mode=MODE_TYPE)
_DetachUserIndication_reason = CHOICE(name=u'reason', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Reason')))
_DetachUserIndication_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__DetachUserIndication_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_DetachUserIndication_userIds._cont = __DetachUserIndication_userIds__item_
_DetachUserIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__DetachUserIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_DetachUserIndication_nonStandard._cont = __DetachUserIndication_nonStandard__item_
DetachUserIndication._cont = ASN1Dict([
(u'reason', _DetachUserIndication_reason),
(u'userIds', _DetachUserIndication_userIds),
(u'nonStandard', _DetachUserIndication_nonStandard),
])
DetachUserIndication._ext = []
#-----< ChannelJoinRequest >-----#
ChannelJoinRequest = SEQ(name=u'ChannelJoinRequest', mode=MODE_TYPE)
_ChannelJoinRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelJoinRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_ChannelJoinRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelJoinRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelJoinRequest_nonStandard._cont = __ChannelJoinRequest_nonStandard__item_
ChannelJoinRequest._cont = ASN1Dict([
(u'initiator', _ChannelJoinRequest_initiator),
(u'channelId', _ChannelJoinRequest_channelId),
(u'nonStandard', _ChannelJoinRequest_nonStandard),
])
ChannelJoinRequest._ext = []
#-----< ChannelJoinConfirm >-----#
ChannelJoinConfirm = SEQ(name=u'ChannelJoinConfirm', mode=MODE_TYPE)
_ChannelJoinConfirm_result = CHOICE(name=u'result', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Result')))
_ChannelJoinConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelJoinConfirm_requested = INT(name=u'requested', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_ChannelJoinConfirm_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')), opt=True)
_ChannelJoinConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelJoinConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelJoinConfirm_nonStandard._cont = __ChannelJoinConfirm_nonStandard__item_
ChannelJoinConfirm._cont = ASN1Dict([
(u'result', _ChannelJoinConfirm_result),
(u'initiator', _ChannelJoinConfirm_initiator),
(u'requested', _ChannelJoinConfirm_requested),
(u'channelId', _ChannelJoinConfirm_channelId),
(u'nonStandard', _ChannelJoinConfirm_nonStandard),
])
ChannelJoinConfirm._ext = []
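# Sketch of a ChannelJoinConfirm round-trip (hand-written): given the explicit
# context-specific tagging used throughout MCS-PROTOCOL-3, BER is assumed as
# the transfer syntax here; pycrate also exposes to_aper()/to_uper() if PER is
# wanted instead.
#
#   ChannelJoinConfirm.set_val({
#       'result': ('rt-successful', 0),   # alternative name assumed, see above
#       'initiator': 1001,
#       'requested': 1003,
#       'channelId': 1003,
#   })
#   buf = ChannelJoinConfirm.to_ber()
#   ChannelJoinConfirm.from_ber(buf)      # decodes back into the same object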
#-----< ChannelLeaveRequest >-----#
ChannelLeaveRequest = SEQ(name=u'ChannelLeaveRequest', mode=MODE_TYPE)
_ChannelLeaveRequest_channelIds = SET_OF(name=u'channelIds', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelLeaveRequest_channelIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_ChannelLeaveRequest_channelIds._cont = __ChannelLeaveRequest_channelIds__item_
_ChannelLeaveRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelLeaveRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelLeaveRequest_nonStandard._cont = __ChannelLeaveRequest_nonStandard__item_
ChannelLeaveRequest._cont = ASN1Dict([
(u'channelIds', _ChannelLeaveRequest_channelIds),
(u'nonStandard', _ChannelLeaveRequest_nonStandard),
])
ChannelLeaveRequest._ext = []
#-----< ChannelConveneRequest >-----#
ChannelConveneRequest = SEQ(name=u'ChannelConveneRequest', mode=MODE_TYPE)
_ChannelConveneRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelConveneRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelConveneRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelConveneRequest_nonStandard._cont = __ChannelConveneRequest_nonStandard__item_
ChannelConveneRequest._cont = ASN1Dict([
(u'initiator', _ChannelConveneRequest_initiator),
(u'nonStandard', _ChannelConveneRequest_nonStandard),
])
ChannelConveneRequest._ext = []
#-----< ChannelConveneConfirm >-----#
ChannelConveneConfirm = SEQ(name=u'ChannelConveneConfirm', mode=MODE_TYPE)
_ChannelConveneConfirm_result = CHOICE(name=u'result', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Result')))
_ChannelConveneConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelConveneConfirm_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PrivateChannelId')), opt=True)
_ChannelConveneConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelConveneConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelConveneConfirm_nonStandard._cont = __ChannelConveneConfirm_nonStandard__item_
ChannelConveneConfirm._cont = ASN1Dict([
(u'result', _ChannelConveneConfirm_result),
(u'initiator', _ChannelConveneConfirm_initiator),
(u'channelId', _ChannelConveneConfirm_channelId),
(u'nonStandard', _ChannelConveneConfirm_nonStandard),
])
ChannelConveneConfirm._ext = []
#-----< ChannelDisbandRequest >-----#
ChannelDisbandRequest = SEQ(name=u'ChannelDisbandRequest', mode=MODE_TYPE)
_ChannelDisbandRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelDisbandRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PrivateChannelId')))
_ChannelDisbandRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelDisbandRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelDisbandRequest_nonStandard._cont = __ChannelDisbandRequest_nonStandard__item_
ChannelDisbandRequest._cont = ASN1Dict([
(u'initiator', _ChannelDisbandRequest_initiator),
(u'channelId', _ChannelDisbandRequest_channelId),
(u'nonStandard', _ChannelDisbandRequest_nonStandard),
])
ChannelDisbandRequest._ext = []
#-----< ChannelDisbandIndication >-----#
ChannelDisbandIndication = SEQ(name=u'ChannelDisbandIndication', mode=MODE_TYPE)
_ChannelDisbandIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PrivateChannelId')))
_ChannelDisbandIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelDisbandIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelDisbandIndication_nonStandard._cont = __ChannelDisbandIndication_nonStandard__item_
ChannelDisbandIndication._cont = ASN1Dict([
(u'channelId', _ChannelDisbandIndication_channelId),
(u'nonStandard', _ChannelDisbandIndication_nonStandard),
])
ChannelDisbandIndication._ext = []
#-----< ChannelAdmitRequest >-----#
ChannelAdmitRequest = SEQ(name=u'ChannelAdmitRequest', mode=MODE_TYPE)
_ChannelAdmitRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelAdmitRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PrivateChannelId')))
_ChannelAdmitRequest_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAdmitRequest_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelAdmitRequest_userIds._cont = __ChannelAdmitRequest_userIds__item_
_ChannelAdmitRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelAdmitRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelAdmitRequest_nonStandard._cont = __ChannelAdmitRequest_nonStandard__item_
ChannelAdmitRequest._cont = ASN1Dict([
(u'initiator', _ChannelAdmitRequest_initiator),
(u'channelId', _ChannelAdmitRequest_channelId),
(u'userIds', _ChannelAdmitRequest_userIds),
(u'nonStandard', _ChannelAdmitRequest_nonStandard),
])
ChannelAdmitRequest._ext = []
#-----< ChannelAdmitIndication >-----#
ChannelAdmitIndication = SEQ(name=u'ChannelAdmitIndication', mode=MODE_TYPE)
_ChannelAdmitIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelAdmitIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PrivateChannelId')))
_ChannelAdmitIndication_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelAdmitIndication_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelAdmitIndication_userIds._cont = __ChannelAdmitIndication_userIds__item_
_ChannelAdmitIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelAdmitIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelAdmitIndication_nonStandard._cont = __ChannelAdmitIndication_nonStandard__item_
ChannelAdmitIndication._cont = ASN1Dict([
(u'initiator', _ChannelAdmitIndication_initiator),
(u'channelId', _ChannelAdmitIndication_channelId),
(u'userIds', _ChannelAdmitIndication_userIds),
(u'nonStandard', _ChannelAdmitIndication_nonStandard),
])
ChannelAdmitIndication._ext = []
#-----< ChannelExpelRequest >-----#
ChannelExpelRequest = SEQ(name=u'ChannelExpelRequest', mode=MODE_TYPE)
_ChannelExpelRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelExpelRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PrivateChannelId')))
_ChannelExpelRequest_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelExpelRequest_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelExpelRequest_userIds._cont = __ChannelExpelRequest_userIds__item_
_ChannelExpelRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelExpelRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelExpelRequest_nonStandard._cont = __ChannelExpelRequest_nonStandard__item_
ChannelExpelRequest._cont = ASN1Dict([
(u'initiator', _ChannelExpelRequest_initiator),
(u'channelId', _ChannelExpelRequest_channelId),
(u'userIds', _ChannelExpelRequest_userIds),
(u'nonStandard', _ChannelExpelRequest_nonStandard),
])
ChannelExpelRequest._ext = []
#-----< ChannelExpelIndication >-----#
ChannelExpelIndication = SEQ(name=u'ChannelExpelIndication', mode=MODE_TYPE)
_ChannelExpelIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PrivateChannelId')))
_ChannelExpelIndication_userIds = SET_OF(name=u'userIds', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__ChannelExpelIndication_userIds__item_ = INT(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_ChannelExpelIndication_userIds._cont = __ChannelExpelIndication_userIds__item_
_ChannelExpelIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__ChannelExpelIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_ChannelExpelIndication_nonStandard._cont = __ChannelExpelIndication_nonStandard__item_
ChannelExpelIndication._cont = ASN1Dict([
(u'channelId', _ChannelExpelIndication_channelId),
(u'userIds', _ChannelExpelIndication_userIds),
(u'nonStandard', _ChannelExpelIndication_nonStandard),
])
ChannelExpelIndication._ext = []
#-----< SendDataRequest >-----#
SendDataRequest = SEQ(name=u'SendDataRequest', mode=MODE_TYPE)
_SendDataRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_SendDataRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_SendDataRequest_reliability = BOOL(name=u'reliability', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_SendDataRequest_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_SendDataRequest_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_SendDataRequest_dataPriority = CHOICE(name=u'dataPriority', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DataPriority')))
_SendDataRequest_segmentation = BIT_STR(name=u'segmentation', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Segmentation')))
_SendDataRequest_userData = OCT_STR(name=u'userData', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_SendDataRequest_totalDataSize = INT(name=u'totalDataSize', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_SendDataRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__SendDataRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_SendDataRequest_nonStandard._cont = __SendDataRequest_nonStandard__item_
SendDataRequest._cont = ASN1Dict([
(u'initiator', _SendDataRequest_initiator),
(u'channelId', _SendDataRequest_channelId),
(u'reliability', _SendDataRequest_reliability),
(u'domainReferenceID', _SendDataRequest_domainReferenceID),
(u'dataPriority', _SendDataRequest_dataPriority),
(u'segmentation', _SendDataRequest_segmentation),
(u'userData', _SendDataRequest_userData),
(u'totalDataSize', _SendDataRequest_totalDataSize),
(u'nonStandard', _SendDataRequest_nonStandard),
])
SendDataRequest._ext = []
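# Sketch of a SendDataRequest value (hand-written): userData is an OCTET
# STRING and takes bytes, segmentation is a BIT STRING and takes a
# (uint value, bit length) 2-tuple, and the dataPriority alternative name is
# an assumption taken from the DataPriority CHOICE defined earlier in this
# module. The OPTIONAL domainReferenceID, totalDataSize and nonStandard
# components are simply omitted.
#
#   SendDataRequest.set_val({
#       'initiator': 1001,
#       'channelId': 1003,
#       'reliability': True,
#       'dataPriority': ('high', 0),   # alternative name assumed; NULL value is 0
#       'segmentation': (0b11, 2),     # both bits set: begin and end of a segment
#       'userData': b'\x01\x02\x03',
#   })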
#-----< SendDataIndication >-----#
SendDataIndication = SEQ(name=u'SendDataIndication', mode=MODE_TYPE)
_SendDataIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_SendDataIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_SendDataIndication_reliability = BOOL(name=u'reliability', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_SendDataIndication_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_SendDataIndication_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_SendDataIndication_dataPriority = CHOICE(name=u'dataPriority', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DataPriority')))
_SendDataIndication_segmentation = BIT_STR(name=u'segmentation', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Segmentation')))
_SendDataIndication_userData = OCT_STR(name=u'userData', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_SendDataIndication_totalDataSize = INT(name=u'totalDataSize', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_SendDataIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__SendDataIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_SendDataIndication_nonStandard._cont = __SendDataIndication_nonStandard__item_
SendDataIndication._cont = ASN1Dict([
(u'initiator', _SendDataIndication_initiator),
(u'channelId', _SendDataIndication_channelId),
(u'reliability', _SendDataIndication_reliability),
(u'domainReferenceID', _SendDataIndication_domainReferenceID),
(u'dataPriority', _SendDataIndication_dataPriority),
(u'segmentation', _SendDataIndication_segmentation),
(u'userData', _SendDataIndication_userData),
(u'totalDataSize', _SendDataIndication_totalDataSize),
(u'nonStandard', _SendDataIndication_nonStandard),
])
SendDataIndication._ext = []
#-----< UniformSendDataRequest >-----#
UniformSendDataRequest = SEQ(name=u'UniformSendDataRequest', mode=MODE_TYPE)
_UniformSendDataRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_UniformSendDataRequest_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_UniformSendDataRequest_reliability = BOOL(name=u'reliability', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_UniformSendDataRequest_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_UniformSendDataRequest_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_UniformSendDataRequest_dataPriority = CHOICE(name=u'dataPriority', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DataPriority')))
_UniformSendDataRequest_segmentation = BIT_STR(name=u'segmentation', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Segmentation')))
_UniformSendDataRequest_userData = OCT_STR(name=u'userData', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_UniformSendDataRequest_totalDataSize = INT(name=u'totalDataSize', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_UniformSendDataRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__UniformSendDataRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_UniformSendDataRequest_nonStandard._cont = __UniformSendDataRequest_nonStandard__item_
UniformSendDataRequest._cont = ASN1Dict([
(u'initiator', _UniformSendDataRequest_initiator),
(u'channelId', _UniformSendDataRequest_channelId),
(u'reliability', _UniformSendDataRequest_reliability),
(u'domainReferenceID', _UniformSendDataRequest_domainReferenceID),
(u'dataPriority', _UniformSendDataRequest_dataPriority),
(u'segmentation', _UniformSendDataRequest_segmentation),
(u'userData', _UniformSendDataRequest_userData),
(u'totalDataSize', _UniformSendDataRequest_totalDataSize),
(u'nonStandard', _UniformSendDataRequest_nonStandard),
])
UniformSendDataRequest._ext = []
#-----< UniformSendDataIndication >-----#
UniformSendDataIndication = SEQ(name=u'UniformSendDataIndication', mode=MODE_TYPE)
_UniformSendDataIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_UniformSendDataIndication_channelId = INT(name=u'channelId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelId')))
_UniformSendDataIndication_reliability = BOOL(name=u'reliability', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_UniformSendDataIndication_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_UniformSendDataIndication_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_UniformSendDataIndication_dataPriority = CHOICE(name=u'dataPriority', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DataPriority')))
_UniformSendDataIndication_segmentation = BIT_STR(name=u'segmentation', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Segmentation')))
_UniformSendDataIndication_userData = OCT_STR(name=u'userData', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_UniformSendDataIndication_totalDataSize = INT(name=u'totalDataSize', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_UniformSendDataIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__UniformSendDataIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_UniformSendDataIndication_nonStandard._cont = __UniformSendDataIndication_nonStandard__item_
UniformSendDataIndication._cont = ASN1Dict([
(u'initiator', _UniformSendDataIndication_initiator),
(u'channelId', _UniformSendDataIndication_channelId),
(u'reliability', _UniformSendDataIndication_reliability),
(u'domainReferenceID', _UniformSendDataIndication_domainReferenceID),
(u'dataPriority', _UniformSendDataIndication_dataPriority),
(u'segmentation', _UniformSendDataIndication_segmentation),
(u'userData', _UniformSendDataIndication_userData),
(u'totalDataSize', _UniformSendDataIndication_totalDataSize),
(u'nonStandard', _UniformSendDataIndication_nonStandard),
])
UniformSendDataIndication._ext = []
#-----< TokenGrabRequest >-----#
TokenGrabRequest = SEQ(name=u'TokenGrabRequest', mode=MODE_TYPE)
_TokenGrabRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenGrabRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenGrabRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenGrabRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenGrabRequest_nonStandard._cont = __TokenGrabRequest_nonStandard__item_
TokenGrabRequest._cont = ASN1Dict([
(u'initiator', _TokenGrabRequest_initiator),
(u'tokenId', _TokenGrabRequest_tokenId),
(u'nonStandard', _TokenGrabRequest_nonStandard),
])
TokenGrabRequest._ext = []
#-----< TokenGrabConfirm >-----#
TokenGrabConfirm = SEQ(name=u'TokenGrabConfirm', mode=MODE_TYPE)
_TokenGrabConfirm_result = CHOICE(name=u'result', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Result')))
_TokenGrabConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenGrabConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenGrabConfirm_tokenStatus = CHOICE(name=u'tokenStatus', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenStatus')))
_TokenGrabConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenGrabConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenGrabConfirm_nonStandard._cont = __TokenGrabConfirm_nonStandard__item_
TokenGrabConfirm._cont = ASN1Dict([
(u'result', _TokenGrabConfirm_result),
(u'initiator', _TokenGrabConfirm_initiator),
(u'tokenId', _TokenGrabConfirm_tokenId),
(u'tokenStatus', _TokenGrabConfirm_tokenStatus),
(u'nonStandard', _TokenGrabConfirm_nonStandard),
])
TokenGrabConfirm._ext = []
#-----< TokenInhibitRequest >-----#
TokenInhibitRequest = SEQ(name=u'TokenInhibitRequest', mode=MODE_TYPE)
_TokenInhibitRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenInhibitRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenInhibitRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenInhibitRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenInhibitRequest_nonStandard._cont = __TokenInhibitRequest_nonStandard__item_
TokenInhibitRequest._cont = ASN1Dict([
(u'initiator', _TokenInhibitRequest_initiator),
(u'tokenId', _TokenInhibitRequest_tokenId),
(u'nonStandard', _TokenInhibitRequest_nonStandard),
])
TokenInhibitRequest._ext = []
#-----< TokenInhibitConfirm >-----#
TokenInhibitConfirm = SEQ(name=u'TokenInhibitConfirm', mode=MODE_TYPE)
_TokenInhibitConfirm_result = CHOICE(name=u'result', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Result')))
_TokenInhibitConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenInhibitConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenInhibitConfirm_tokenStatus = CHOICE(name=u'tokenStatus', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenStatus')))
_TokenInhibitConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenInhibitConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenInhibitConfirm_nonStandard._cont = __TokenInhibitConfirm_nonStandard__item_
TokenInhibitConfirm._cont = ASN1Dict([
(u'result', _TokenInhibitConfirm_result),
(u'initiator', _TokenInhibitConfirm_initiator),
(u'tokenId', _TokenInhibitConfirm_tokenId),
(u'tokenStatus', _TokenInhibitConfirm_tokenStatus),
(u'nonStandard', _TokenInhibitConfirm_nonStandard),
])
TokenInhibitConfirm._ext = []
#-----< TokenGiveRequest >-----#
TokenGiveRequest = SEQ(name=u'TokenGiveRequest', mode=MODE_TYPE)
_TokenGiveRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenGiveRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenGiveRequest_recipient = INT(name=u'recipient', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenGiveRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenGiveRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenGiveRequest_nonStandard._cont = __TokenGiveRequest_nonStandard__item_
TokenGiveRequest._cont = ASN1Dict([
(u'initiator', _TokenGiveRequest_initiator),
(u'tokenId', _TokenGiveRequest_tokenId),
(u'recipient', _TokenGiveRequest_recipient),
(u'nonStandard', _TokenGiveRequest_nonStandard),
])
TokenGiveRequest._ext = []
#-----< TokenGiveIndication >-----#
TokenGiveIndication = SEQ(name=u'TokenGiveIndication', mode=MODE_TYPE)
_TokenGiveIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenGiveIndication_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenGiveIndication_recipient = INT(name=u'recipient', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenGiveIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenGiveIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenGiveIndication_nonStandard._cont = __TokenGiveIndication_nonStandard__item_
TokenGiveIndication._cont = ASN1Dict([
(u'initiator', _TokenGiveIndication_initiator),
(u'tokenId', _TokenGiveIndication_tokenId),
(u'recipient', _TokenGiveIndication_recipient),
(u'nonStandard', _TokenGiveIndication_nonStandard),
])
TokenGiveIndication._ext = []
#-----< TokenGiveResponse >-----#
TokenGiveResponse = SEQ(name=u'TokenGiveResponse', mode=MODE_TYPE)
_TokenGiveResponse_result = CHOICE(name=u'result', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Result')))
_TokenGiveResponse_recipient = INT(name=u'recipient', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenGiveResponse_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenGiveResponse_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenGiveResponse_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenGiveResponse_nonStandard._cont = __TokenGiveResponse_nonStandard__item_
TokenGiveResponse._cont = ASN1Dict([
(u'result', _TokenGiveResponse_result),
(u'recipient', _TokenGiveResponse_recipient),
(u'tokenId', _TokenGiveResponse_tokenId),
(u'nonStandard', _TokenGiveResponse_nonStandard),
])
TokenGiveResponse._ext = []
#-----< TokenGiveConfirm >-----#
TokenGiveConfirm = SEQ(name=u'TokenGiveConfirm', mode=MODE_TYPE)
_TokenGiveConfirm_result = CHOICE(name=u'result', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Result')))
_TokenGiveConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenGiveConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenGiveConfirm_tokenStatus = CHOICE(name=u'tokenStatus', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenStatus')))
_TokenGiveConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenGiveConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenGiveConfirm_nonStandard._cont = __TokenGiveConfirm_nonStandard__item_
TokenGiveConfirm._cont = ASN1Dict([
(u'result', _TokenGiveConfirm_result),
(u'initiator', _TokenGiveConfirm_initiator),
(u'tokenId', _TokenGiveConfirm_tokenId),
(u'tokenStatus', _TokenGiveConfirm_tokenStatus),
(u'nonStandard', _TokenGiveConfirm_nonStandard),
])
TokenGiveConfirm._ext = []
#-----< TokenPleaseRequest >-----#
TokenPleaseRequest = SEQ(name=u'TokenPleaseRequest', mode=MODE_TYPE)
_TokenPleaseRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenPleaseRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenPleaseRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenPleaseRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenPleaseRequest_nonStandard._cont = __TokenPleaseRequest_nonStandard__item_
TokenPleaseRequest._cont = ASN1Dict([
(u'initiator', _TokenPleaseRequest_initiator),
(u'tokenId', _TokenPleaseRequest_tokenId),
(u'nonStandard', _TokenPleaseRequest_nonStandard),
])
TokenPleaseRequest._ext = []
#-----< TokenPleaseIndication >-----#
TokenPleaseIndication = SEQ(name=u'TokenPleaseIndication', mode=MODE_TYPE)
_TokenPleaseIndication_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenPleaseIndication_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenPleaseIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenPleaseIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenPleaseIndication_nonStandard._cont = __TokenPleaseIndication_nonStandard__item_
TokenPleaseIndication._cont = ASN1Dict([
(u'initiator', _TokenPleaseIndication_initiator),
(u'tokenId', _TokenPleaseIndication_tokenId),
(u'nonStandard', _TokenPleaseIndication_nonStandard),
])
TokenPleaseIndication._ext = []
#-----< TokenReleaseRequest >-----#
TokenReleaseRequest = SEQ(name=u'TokenReleaseRequest', mode=MODE_TYPE)
_TokenReleaseRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenReleaseRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenReleaseRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenReleaseRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenReleaseRequest_nonStandard._cont = __TokenReleaseRequest_nonStandard__item_
TokenReleaseRequest._cont = ASN1Dict([
(u'initiator', _TokenReleaseRequest_initiator),
(u'tokenId', _TokenReleaseRequest_tokenId),
(u'nonStandard', _TokenReleaseRequest_nonStandard),
])
TokenReleaseRequest._ext = []
#-----< TokenReleaseConfirm >-----#
TokenReleaseConfirm = SEQ(name=u'TokenReleaseConfirm', mode=MODE_TYPE)
_TokenReleaseConfirm_result = CHOICE(name=u'result', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Result')))
_TokenReleaseConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenReleaseConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenReleaseConfirm_tokenStatus = CHOICE(name=u'tokenStatus', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenStatus')))
_TokenReleaseConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenReleaseConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenReleaseConfirm_nonStandard._cont = __TokenReleaseConfirm_nonStandard__item_
TokenReleaseConfirm._cont = ASN1Dict([
(u'result', _TokenReleaseConfirm_result),
(u'initiator', _TokenReleaseConfirm_initiator),
(u'tokenId', _TokenReleaseConfirm_tokenId),
(u'tokenStatus', _TokenReleaseConfirm_tokenStatus),
(u'nonStandard', _TokenReleaseConfirm_nonStandard),
])
TokenReleaseConfirm._ext = []
#-----< TokenTestRequest >-----#
TokenTestRequest = SEQ(name=u'TokenTestRequest', mode=MODE_TYPE)
_TokenTestRequest_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenTestRequest_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenTestRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenTestRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenTestRequest_nonStandard._cont = __TokenTestRequest_nonStandard__item_
TokenTestRequest._cont = ASN1Dict([
(u'initiator', _TokenTestRequest_initiator),
(u'tokenId', _TokenTestRequest_tokenId),
(u'nonStandard', _TokenTestRequest_nonStandard),
])
TokenTestRequest._ext = []
#-----< TokenTestConfirm >-----#
TokenTestConfirm = SEQ(name=u'TokenTestConfirm', mode=MODE_TYPE)
_TokenTestConfirm_initiator = INT(name=u'initiator', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UserId')))
_TokenTestConfirm_tokenId = INT(name=u'tokenId', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenId')))
_TokenTestConfirm_tokenStatus = CHOICE(name=u'tokenStatus', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenStatus')))
_TokenTestConfirm_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TokenTestConfirm_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_TokenTestConfirm_nonStandard._cont = __TokenTestConfirm_nonStandard__item_
TokenTestConfirm._cont = ASN1Dict([
(u'initiator', _TokenTestConfirm_initiator),
(u'tokenId', _TokenTestConfirm_tokenId),
(u'tokenStatus', _TokenTestConfirm_tokenStatus),
(u'nonStandard', _TokenTestConfirm_nonStandard),
])
TokenTestConfirm._ext = []
#-----< CapabilityID >-----#
CapabilityID = CHOICE(name=u'CapabilityID', mode=MODE_TYPE)
_CapabilityID_standardID = INT(name=u'standardID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_CapabilityID_standardID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_CapabilityID_nonstandardID = CHOICE(name=u'nonstandardID', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'Key')))
CapabilityID._cont = ASN1Dict([
(u'standardID', _CapabilityID_standardID),
(u'nonstandardID', _CapabilityID_nonstandardID),
])
CapabilityID._ext = None
#-----< CapabilityClass >-----#
CapabilityClass = CHOICE(name=u'CapabilityClass', mode=MODE_TYPE)
_CapabilityClass_null = NULL(name=u'null', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_CapabilityClass_unsignedMin = INT(name=u'unsignedMin', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_CapabilityClass_unsignedMin._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
_CapabilityClass_unsignedMax = INT(name=u'unsignedMax', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_CapabilityClass_unsignedMax._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=None)], ev=None, er=[])
CapabilityClass._cont = ASN1Dict([
(u'null', _CapabilityClass_null),
(u'unsignedMin', _CapabilityClass_unsignedMin),
(u'unsignedMax', _CapabilityClass_unsignedMax),
])
CapabilityClass._ext = None
#-----< ParticipationIndicator >-----#
ParticipationIndicator = CHOICE(name=u'ParticipationIndicator', mode=MODE_TYPE)
_ParticipationIndicator_global_ = NULL(name=u'global', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_ParticipationIndicator_partial = INT(name=u'partial', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_ParticipationIndicator_partial._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1, ub=2)], ev=None, er=[])
ParticipationIndicator._cont = ASN1Dict([
(u'global', _ParticipationIndicator_global_),
(u'partial', _ParticipationIndicator_partial),
])
ParticipationIndicator._ext = None
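# CapabilityID, CapabilityClass and ParticipationIndicator are the three
# building blocks of the capability-negotiation PDUs below: RequestCapability
# aggregates one of each, IndicationCapability reuses the first two. As
# CHOICEs, their values are (alternative-name, value) pairs, e.g.
# ('standardID', n) with 0 <= n <= 65535, or ('nonstandardID', key) for a
# vendor-defined Key.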
#-----< RequestCapability >-----#
RequestCapability = SEQ(name=u'RequestCapability', mode=MODE_TYPE)
_RequestCapability_capabilityID = CHOICE(name=u'capabilityID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'CapabilityID')))
_RequestCapability_capabilityClass = CHOICE(name=u'capabilityClass', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'CapabilityClass')))
_RequestCapability_participationIndicator = CHOICE(name=u'participationIndicator', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ParticipationIndicator')))
_RequestCapability_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__RequestCapability_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_RequestCapability_nonStandard._cont = __RequestCapability_nonStandard__item_
RequestCapability._cont = ASN1Dict([
(u'capabilityID', _RequestCapability_capabilityID),
(u'capabilityClass', _RequestCapability_capabilityClass),
(u'participationIndicator', _RequestCapability_participationIndicator),
(u'nonStandard', _RequestCapability_nonStandard),
])
RequestCapability._ext = []
#-----< SeqOfRequestCapabilities >-----#
SeqOfRequestCapabilities = SEQ_OF(name=u'SeqOfRequestCapabilities', mode=MODE_TYPE)
_SeqOfRequestCapabilities__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'RequestCapability')))
SeqOfRequestCapabilities._cont = _SeqOfRequestCapabilities__item_
#-----< IndicationCapability >-----#
IndicationCapability = SEQ(name=u'IndicationCapability', mode=MODE_TYPE)
_IndicationCapability_capabilityID = CHOICE(name=u'capabilityID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'CapabilityID')))
_IndicationCapability_capabilityClass = CHOICE(name=u'capabilityClass', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'CapabilityClass')))
_IndicationCapability_summitProviderSupported = BOOL(name=u'summitProviderSupported', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_IndicationCapability_intermediateNodeSupported = BOOL(name=u'intermediateNodeSupported', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_IndicationCapability_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__IndicationCapability_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_IndicationCapability_nonStandard._cont = __IndicationCapability_nonStandard__item_
IndicationCapability._cont = ASN1Dict([
(u'capabilityID', _IndicationCapability_capabilityID),
(u'capabilityClass', _IndicationCapability_capabilityClass),
(u'summitProviderSupported', _IndicationCapability_summitProviderSupported),
(u'intermediateNodeSupported', _IndicationCapability_intermediateNodeSupported),
(u'nonStandard', _IndicationCapability_nonStandard),
])
IndicationCapability._ext = []
#-----< SeqOfIndicationCapabilities >-----#
SeqOfIndicationCapabilities = SEQ_OF(name=u'SeqOfIndicationCapabilities', mode=MODE_TYPE)
_SeqOfIndicationCapabilities__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'IndicationCapability')))
SeqOfIndicationCapabilities._cont = _SeqOfIndicationCapabilities__item_
#-----< CapabilitiesNotificationRequest >-----#
CapabilitiesNotificationRequest = SEQ(name=u'CapabilitiesNotificationRequest', mode=MODE_TYPE)
_CapabilitiesNotificationRequest_v2NodePresent = BOOL(name=u'v2NodePresent', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_CapabilitiesNotificationRequest_addList = SEQ_OF(name=u'addList', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'SeqOfRequestCapabilities')), opt=True)
_CapabilitiesNotificationRequest_removeList = SEQ_OF(name=u'removeList', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'SeqOfRequestCapabilities')), opt=True)
_CapabilitiesNotificationRequest_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__CapabilitiesNotificationRequest_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_CapabilitiesNotificationRequest_nonStandard._cont = __CapabilitiesNotificationRequest_nonStandard__item_
CapabilitiesNotificationRequest._cont = ASN1Dict([
(u'v2NodePresent', _CapabilitiesNotificationRequest_v2NodePresent),
(u'addList', _CapabilitiesNotificationRequest_addList),
(u'removeList', _CapabilitiesNotificationRequest_removeList),
(u'nonStandard', _CapabilitiesNotificationRequest_nonStandard),
])
CapabilitiesNotificationRequest._ext = []
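# Illustrative sketch only, assuming this module was emitted by the pycrate
# ASN.1 compiler (SEQUENCE values are dicts, CHOICE values are (name, value)
# pairs, NULL values are 0, and objects expose set_val()/to_aper()). A
# CapabilitiesNotificationRequest advertising one standard capability:
#
#   cap = {
#       'capabilityID': ('standardID', 1),        # hypothetical capability no.
#       'capabilityClass': ('unsignedMin', 0),
#       'participationIndicator': ('global', 0),  # NULL alternative -> 0
#   }
#   CapabilitiesNotificationRequest.set_val({
#       'v2NodePresent': False,
#       'addList': [cap],            # optional fields may simply be omitted
#   })
#   buf = CapabilitiesNotificationRequest.to_aper()   # aligned-PER encoding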
#-----< CapabilitiesNotificationIndication >-----#
CapabilitiesNotificationIndication = SEQ(name=u'CapabilitiesNotificationIndication', mode=MODE_TYPE)
_CapabilitiesNotificationIndication_v2NodePresent = BOOL(name=u'v2NodePresent', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_CapabilitiesNotificationIndication_addList = SEQ_OF(name=u'addList', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'SeqOfIndicationCapabilities')), opt=True)
_CapabilitiesNotificationIndication_removeList = SEQ_OF(name=u'removeList', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'SeqOfIndicationCapabilities')), opt=True)
_CapabilitiesNotificationIndication_nonStandard = SEQ_OF(name=u'nonStandard', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__CapabilitiesNotificationIndication_nonStandard__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
_CapabilitiesNotificationIndication_nonStandard._cont = __CapabilitiesNotificationIndication_nonStandard__item_
CapabilitiesNotificationIndication._cont = ASN1Dict([
(u'v2NodePresent', _CapabilitiesNotificationIndication_v2NodePresent),
(u'addList', _CapabilitiesNotificationIndication_addList),
(u'removeList', _CapabilitiesNotificationIndication_removeList),
(u'nonStandard', _CapabilitiesNotificationIndication_nonStandard),
])
CapabilitiesNotificationIndication._ext = []
#-----< Reason >-----#
Reason = CHOICE(name=u'Reason', mode=MODE_TYPE)
_Reason_rn_domain_disconnected = NULL(name=u'rn-domain-disconnected', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_rn_provider_initiated = NULL(name=u'rn-provider-initiated', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_rn_token_purged = NULL(name=u'rn-token-purged', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_rn_user_requested = NULL(name=u'rn-user-requested', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_rn_channel_purged = NULL(name=u'rn-channel-purged', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
Reason._cont = ASN1Dict([
(u'rn-domain-disconnected', _Reason_rn_domain_disconnected),
(u'rn-provider-initiated', _Reason_rn_provider_initiated),
(u'rn-token-purged', _Reason_rn_token_purged),
(u'rn-user-requested', _Reason_rn_user_requested),
(u'rn-channel-purged', _Reason_rn_channel_purged),
])
Reason._ext = []
#-----< Result >-----#
Result = CHOICE(name=u'Result', mode=MODE_TYPE)
_Result_rt_successful = NULL(name=u'rt-successful', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_domain_merging = NULL(name=u'rt-domain-merging', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_domain_not_hierarchical = NULL(name=u'rt-domain-not-hierarchical', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_no_such_channel = NULL(name=u'rt-no-such-channel', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_no_such_domain = NULL(name=u'rt-no-such-domain', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_no_such_user = NULL(name=u'rt-no-such-user', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_not_admitted = NULL(name=u'rt-not-admitted', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_other_user_id = NULL(name=u'rt-other-user-id', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_parameters_unacceptable = NULL(name=u'rt-parameters-unacceptable', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_token_not_available = NULL(name=u'rt-token-not-available', mode=MODE_TYPE, tag=(9, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_token_not_possessed = NULL(name=u'rt-token-not-possessed', mode=MODE_TYPE, tag=(10, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_too_many_channels = NULL(name=u'rt-too-many-channels', mode=MODE_TYPE, tag=(11, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_too_many_tokens = NULL(name=u'rt-too-many-tokens', mode=MODE_TYPE, tag=(12, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_too_many_users = NULL(name=u'rt-too-many-users', mode=MODE_TYPE, tag=(13, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_unspecified_failure = NULL(name=u'rt-unspecified-failure', mode=MODE_TYPE, tag=(14, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Result_rt_user_rejected = NULL(name=u'rt-user-rejected', mode=MODE_TYPE, tag=(15, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
Result._cont = ASN1Dict([
(u'rt-successful', _Result_rt_successful),
(u'rt-domain-merging', _Result_rt_domain_merging),
(u'rt-domain-not-hierarchical', _Result_rt_domain_not_hierarchical),
(u'rt-no-such-channel', _Result_rt_no_such_channel),
(u'rt-no-such-domain', _Result_rt_no_such_domain),
(u'rt-no-such-user', _Result_rt_no_such_user),
(u'rt-not-admitted', _Result_rt_not_admitted),
(u'rt-other-user-id', _Result_rt_other_user_id),
(u'rt-parameters-unacceptable', _Result_rt_parameters_unacceptable),
(u'rt-token-not-available', _Result_rt_token_not_available),
(u'rt-token-not-possessed', _Result_rt_token_not_possessed),
(u'rt-too-many-channels', _Result_rt_too_many_channels),
(u'rt-too-many-tokens', _Result_rt_too_many_tokens),
(u'rt-too-many-users', _Result_rt_too_many_users),
(u'rt-unspecified-failure', _Result_rt_unspecified_failure),
(u'rt-user-rejected', _Result_rt_user_rejected),
])
Result._ext = []
#-----< Diagnostic >-----#
Diagnostic = CHOICE(name=u'Diagnostic', mode=MODE_TYPE)
_Diagnostic_dc_inconsistent_merge = NULL(name=u'dc-inconsistent-merge', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_forbidden_PDU_downward = NULL(name=u'dc-forbidden-PDU-downward', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_forbidden_PDU_upward = NULL(name=u'dc-forbidden-PDU-upward', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_invalid_BER_encoding = NULL(name=u'dc-invalid-BER-encoding', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_invalid_PER_encoding = NULL(name=u'dc-invalid-PER-encoding', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_misrouted_user = NULL(name=u'dc-misrouted-user', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_unrequested_confirm = NULL(name=u'dc-unrequested-confirm', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_wrong_transport_priority = NULL(name=u'dc-wrong-transport-priority', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_channel_id_conflict = NULL(name=u'dc-channel-id-conflict', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_token_id_conflict = NULL(name=u'dc-token-id-conflict', mode=MODE_TYPE, tag=(9, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_not_user_id_channel = NULL(name=u'dc-not-user-id-channel', mode=MODE_TYPE, tag=(10, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_too_many_channels = NULL(name=u'dc-too-many-channels', mode=MODE_TYPE, tag=(11, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_too_many_tokens = NULL(name=u'dc-too-many-tokens', mode=MODE_TYPE, tag=(12, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Diagnostic_dc_too_many_users = NULL(name=u'dc-too-many-users', mode=MODE_TYPE, tag=(13, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
Diagnostic._cont = ASN1Dict([
(u'dc-inconsistent-merge', _Diagnostic_dc_inconsistent_merge),
(u'dc-forbidden-PDU-downward', _Diagnostic_dc_forbidden_PDU_downward),
(u'dc-forbidden-PDU-upward', _Diagnostic_dc_forbidden_PDU_upward),
(u'dc-invalid-BER-encoding', _Diagnostic_dc_invalid_BER_encoding),
(u'dc-invalid-PER-encoding', _Diagnostic_dc_invalid_PER_encoding),
(u'dc-misrouted-user', _Diagnostic_dc_misrouted_user),
(u'dc-unrequested-confirm', _Diagnostic_dc_unrequested_confirm),
(u'dc-wrong-transport-priority', _Diagnostic_dc_wrong_transport_priority),
(u'dc-channel-id-conflict', _Diagnostic_dc_channel_id_conflict),
(u'dc-token-id-conflict', _Diagnostic_dc_token_id_conflict),
(u'dc-not-user-id-channel', _Diagnostic_dc_not_user_id_channel),
(u'dc-too-many-channels', _Diagnostic_dc_too_many_channels),
(u'dc-too-many-tokens', _Diagnostic_dc_too_many_tokens),
(u'dc-too-many-users', _Diagnostic_dc_too_many_users),
])
Diagnostic._ext = []
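# Reason, Result and Diagnostic are CHOICEs whose alternatives are all NULL:
# the selected alternative name alone carries the information (enumeration by
# choice index). A value is therefore a pair such as ('rt-successful', 0) or
# ('dc-invalid-PER-encoding', 0).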
#-----< NonStandardPDU >-----#
NonStandardPDU = SEQ(name=u'NonStandardPDU', mode=MODE_TYPE)
_NonStandardPDU_data = SEQ(name=u'data', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardParameter')))
NonStandardPDU._cont = ASN1Dict([
(u'data', _NonStandardPDU_data),
])
NonStandardPDU._ext = []
#-----< ExtendedParameterMCSPDU >-----#
ExtendedParameterMCSPDU = CHOICE(name=u'ExtendedParameterMCSPDU', mode=MODE_TYPE)
_ExtendedParameterMCSPDU_extendedParameterPropose = SEQ(name=u'extendedParameterPropose', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ExtendedParameterPropose')))
_ExtendedParameterMCSPDU_extendedParameterAccept = SEQ(name=u'extendedParameterAccept', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ExtendedParameterAccept')))
_ExtendedParameterMCSPDU_nonStandard = SEQ(name=u'nonStandard', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardPDU')))
ExtendedParameterMCSPDU._cont = ASN1Dict([
(u'extendedParameterPropose', _ExtendedParameterMCSPDU_extendedParameterPropose),
(u'extendedParameterAccept', _ExtendedParameterMCSPDU_extendedParameterAccept),
(u'nonStandard', _ExtendedParameterMCSPDU_nonStandard),
])
ExtendedParameterMCSPDU._ext = []
#-----< DomainMCSPDU >-----#
DomainMCSPDU = CHOICE(name=u'DomainMCSPDU', mode=MODE_TYPE)
_DomainMCSPDU_plumbDomainIndication = SEQ(name=u'plumbDomainIndication', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PlumbDomainIndication')))
_DomainMCSPDU_erectDomainRequest = SEQ(name=u'erectDomainRequest', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ErectDomainRequest')))
_DomainMCSPDU_mergeChannelsRequest = SEQ(name=u'mergeChannelsRequest', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'MergeChannelsRequest')))
_DomainMCSPDU_mergeChannelsConfirm = SEQ(name=u'mergeChannelsConfirm', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'MergeChannelsConfirm')))
_DomainMCSPDU_purgeChannelsIndication = SEQ(name=u'purgeChannelsIndication', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PurgeChannelsIndication')))
_DomainMCSPDU_mergeTokensRequest = SEQ(name=u'mergeTokensRequest', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'MergeTokensRequest')))
_DomainMCSPDU_mergeTokensConfirm = SEQ(name=u'mergeTokensConfirm', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'MergeTokensConfirm')))
_DomainMCSPDU_purgeTokensIndication = SEQ(name=u'purgeTokensIndication', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'PurgeTokensIndication')))
_DomainMCSPDU_disconnectProviderUltimatum = SEQ(name=u'disconnectProviderUltimatum', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DisconnectProviderUltimatum')))
_DomainMCSPDU_rejectMCSPDUUltimatum = SEQ(name=u'rejectMCSPDUUltimatum', mode=MODE_TYPE, tag=(9, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'RejectMCSPDUUltimatum')))
_DomainMCSPDU_attachUserRequest = SEQ(name=u'attachUserRequest', mode=MODE_TYPE, tag=(10, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'AttachUserRequest')))
_DomainMCSPDU_attachUserConfirm = SEQ(name=u'attachUserConfirm', mode=MODE_TYPE, tag=(11, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'AttachUserConfirm')))
_DomainMCSPDU_detachUserRequest = SEQ(name=u'detachUserRequest', mode=MODE_TYPE, tag=(12, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DetachUserRequest')))
_DomainMCSPDU_detachUserIndication = SEQ(name=u'detachUserIndication', mode=MODE_TYPE, tag=(13, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'DetachUserIndication')))
_DomainMCSPDU_channelJoinRequest = SEQ(name=u'channelJoinRequest', mode=MODE_TYPE, tag=(14, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelJoinRequest')))
_DomainMCSPDU_channelJoinConfirm = SEQ(name=u'channelJoinConfirm', mode=MODE_TYPE, tag=(15, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelJoinConfirm')))
_DomainMCSPDU_channelLeaveRequest = SEQ(name=u'channelLeaveRequest', mode=MODE_TYPE, tag=(16, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelLeaveRequest')))
_DomainMCSPDU_channelConveneRequest = SEQ(name=u'channelConveneRequest', mode=MODE_TYPE, tag=(17, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelConveneRequest')))
_DomainMCSPDU_channelConveneConfirm = SEQ(name=u'channelConveneConfirm', mode=MODE_TYPE, tag=(18, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelConveneConfirm')))
_DomainMCSPDU_channelDisbandRequest = SEQ(name=u'channelDisbandRequest', mode=MODE_TYPE, tag=(19, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelDisbandRequest')))
_DomainMCSPDU_channelDisbandIndication = SEQ(name=u'channelDisbandIndication', mode=MODE_TYPE, tag=(20, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelDisbandIndication')))
_DomainMCSPDU_channelAdmitRequest = SEQ(name=u'channelAdmitRequest', mode=MODE_TYPE, tag=(21, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelAdmitRequest')))
_DomainMCSPDU_channelAdmitIndication = SEQ(name=u'channelAdmitIndication', mode=MODE_TYPE, tag=(22, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelAdmitIndication')))
_DomainMCSPDU_channelExpelRequest = SEQ(name=u'channelExpelRequest', mode=MODE_TYPE, tag=(23, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelExpelRequest')))
_DomainMCSPDU_channelExpelIndication = SEQ(name=u'channelExpelIndication', mode=MODE_TYPE, tag=(24, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'ChannelExpelIndication')))
_DomainMCSPDU_sendDataRequest = SEQ(name=u'sendDataRequest', mode=MODE_TYPE, tag=(25, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'SendDataRequest')))
_DomainMCSPDU_sendDataIndication = SEQ(name=u'sendDataIndication', mode=MODE_TYPE, tag=(26, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'SendDataIndication')))
_DomainMCSPDU_uniformSendDataRequest = SEQ(name=u'uniformSendDataRequest', mode=MODE_TYPE, tag=(27, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UniformSendDataRequest')))
_DomainMCSPDU_uniformSendDataIndication = SEQ(name=u'uniformSendDataIndication', mode=MODE_TYPE, tag=(28, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'UniformSendDataIndication')))
_DomainMCSPDU_tokenGrabRequest = SEQ(name=u'tokenGrabRequest', mode=MODE_TYPE, tag=(29, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenGrabRequest')))
_DomainMCSPDU_tokenGrabConfirm = SEQ(name=u'tokenGrabConfirm', mode=MODE_TYPE, tag=(30, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenGrabConfirm')))
_DomainMCSPDU_tokenInhibitRequest = SEQ(name=u'tokenInhibitRequest', mode=MODE_TYPE, tag=(31, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenInhibitRequest')))
_DomainMCSPDU_tokenInhibitConfirm = SEQ(name=u'tokenInhibitConfirm', mode=MODE_TYPE, tag=(32, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenInhibitConfirm')))
_DomainMCSPDU_tokenGiveRequest = SEQ(name=u'tokenGiveRequest', mode=MODE_TYPE, tag=(33, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenGiveRequest')))
_DomainMCSPDU_tokenGiveIndication = SEQ(name=u'tokenGiveIndication', mode=MODE_TYPE, tag=(34, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenGiveIndication')))
_DomainMCSPDU_tokenGiveResponse = SEQ(name=u'tokenGiveResponse', mode=MODE_TYPE, tag=(35, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenGiveResponse')))
_DomainMCSPDU_tokenGiveConfirm = SEQ(name=u'tokenGiveConfirm', mode=MODE_TYPE, tag=(36, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenGiveConfirm')))
_DomainMCSPDU_tokenPleaseRequest = SEQ(name=u'tokenPleaseRequest', mode=MODE_TYPE, tag=(37, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenPleaseRequest')))
_DomainMCSPDU_tokenPleaseIndication = SEQ(name=u'tokenPleaseIndication', mode=MODE_TYPE, tag=(38, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenPleaseIndication')))
_DomainMCSPDU_tokenReleaseRequest = SEQ(name=u'tokenReleaseRequest', mode=MODE_TYPE, tag=(39, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenReleaseRequest')))
_DomainMCSPDU_tokenReleaseConfirm = SEQ(name=u'tokenReleaseConfirm', mode=MODE_TYPE, tag=(40, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenReleaseConfirm')))
_DomainMCSPDU_tokenTestRequest = SEQ(name=u'tokenTestRequest', mode=MODE_TYPE, tag=(41, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenTestRequest')))
_DomainMCSPDU_tokenTestConfirm = SEQ(name=u'tokenTestConfirm', mode=MODE_TYPE, tag=(42, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'TokenTestConfirm')))
_DomainMCSPDU_nonStandard = SEQ(name=u'nonStandard', mode=MODE_TYPE, tag=(43, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MCS-PROTOCOL-3', 'NonStandardPDU')))
DomainMCSPDU._cont = ASN1Dict([
(u'plumbDomainIndication', _DomainMCSPDU_plumbDomainIndication),
(u'erectDomainRequest', _DomainMCSPDU_erectDomainRequest),
(u'mergeChannelsRequest', _DomainMCSPDU_mergeChannelsRequest),
(u'mergeChannelsConfirm', _DomainMCSPDU_mergeChannelsConfirm),
(u'purgeChannelsIndication', _DomainMCSPDU_purgeChannelsIndication),
(u'mergeTokensRequest', _DomainMCSPDU_mergeTokensRequest),
(u'mergeTokensConfirm', _DomainMCSPDU_mergeTokensConfirm),
(u'purgeTokensIndication', _DomainMCSPDU_purgeTokensIndication),
(u'disconnectProviderUltimatum', _DomainMCSPDU_disconnectProviderUltimatum),
(u'rejectMCSPDUUltimatum', _DomainMCSPDU_rejectMCSPDUUltimatum),
(u'attachUserRequest', _DomainMCSPDU_attachUserRequest),
(u'attachUserConfirm', _DomainMCSPDU_attachUserConfirm),
(u'detachUserRequest', _DomainMCSPDU_detachUserRequest),
(u'detachUserIndication', _DomainMCSPDU_detachUserIndication),
(u'channelJoinRequest', _DomainMCSPDU_channelJoinRequest),
(u'channelJoinConfirm', _DomainMCSPDU_channelJoinConfirm),
(u'channelLeaveRequest', _DomainMCSPDU_channelLeaveRequest),
(u'channelConveneRequest', _DomainMCSPDU_channelConveneRequest),
(u'channelConveneConfirm', _DomainMCSPDU_channelConveneConfirm),
(u'channelDisbandRequest', _DomainMCSPDU_channelDisbandRequest),
(u'channelDisbandIndication', _DomainMCSPDU_channelDisbandIndication),
(u'channelAdmitRequest', _DomainMCSPDU_channelAdmitRequest),
(u'channelAdmitIndication', _DomainMCSPDU_channelAdmitIndication),
(u'channelExpelRequest', _DomainMCSPDU_channelExpelRequest),
(u'channelExpelIndication', _DomainMCSPDU_channelExpelIndication),
(u'sendDataRequest', _DomainMCSPDU_sendDataRequest),
(u'sendDataIndication', _DomainMCSPDU_sendDataIndication),
(u'uniformSendDataRequest', _DomainMCSPDU_uniformSendDataRequest),
(u'uniformSendDataIndication', _DomainMCSPDU_uniformSendDataIndication),
(u'tokenGrabRequest', _DomainMCSPDU_tokenGrabRequest),
(u'tokenGrabConfirm', _DomainMCSPDU_tokenGrabConfirm),
(u'tokenInhibitRequest', _DomainMCSPDU_tokenInhibitRequest),
(u'tokenInhibitConfirm', _DomainMCSPDU_tokenInhibitConfirm),
(u'tokenGiveRequest', _DomainMCSPDU_tokenGiveRequest),
(u'tokenGiveIndication', _DomainMCSPDU_tokenGiveIndication),
(u'tokenGiveResponse', _DomainMCSPDU_tokenGiveResponse),
(u'tokenGiveConfirm', _DomainMCSPDU_tokenGiveConfirm),
(u'tokenPleaseRequest', _DomainMCSPDU_tokenPleaseRequest),
(u'tokenPleaseIndication', _DomainMCSPDU_tokenPleaseIndication),
(u'tokenReleaseRequest', _DomainMCSPDU_tokenReleaseRequest),
(u'tokenReleaseConfirm', _DomainMCSPDU_tokenReleaseConfirm),
(u'tokenTestRequest', _DomainMCSPDU_tokenTestRequest),
(u'tokenTestConfirm', _DomainMCSPDU_tokenTestConfirm),
(u'nonStandard', _DomainMCSPDU_nonStandard),
])
DomainMCSPDU._ext = []
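# DomainMCSPDU is the top-level CHOICE multiplexing all 44 domain PDUs of this
# module. Illustrative round trip, under the same pycrate-runtime assumption
# as above (from_aper()/to_aper()/get_val() on the module-level object):
#
#   DomainMCSPDU.set_val(('tokenPleaseRequest',
#                         {'initiator': 1001, 'tokenId': 1}))
#   buf = DomainMCSPDU.to_aper()         # encode (aligned PER)
#   DomainMCSPDU.from_aper(buf)          # decode back into the same object
#   name, val = DomainMCSPDU.get_val()   # ('tokenPleaseRequest', {...})
#
# These runtime objects keep their last decoded/assigned value as internal
# state, so concurrent encode/decode on the same object needs external
# coordination.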
_all_ = [
H221NonStandardIdentifier,
_Key_object,
_Key_h221NonStandard,
Key,
_NonStandardParameter_key,
_NonStandardParameter_data,
NonStandardParameter,
ChannelId,
StaticChannelId,
DynamicChannelId,
UserId,
PrivateChannelId,
AssignedChannelId,
TokenId,
_TokenStatus_notInUse,
_TokenStatus_selfGrabbed,
_TokenStatus_otherGrabbed,
_TokenStatus_selfInhibited,
_TokenStatus_otherInhibited,
_TokenStatus_selfRecipient,
_TokenStatus_selfGiving,
_TokenStatus_otherGiving,
TokenStatus,
_DataPriority_top,
_DataPriority_high,
_DataPriority_medium,
_DataPriority_low,
DataPriority,
Segmentation,
_ExtendedParameters_unreliableDataSupported,
_ExtendedParameters_domainReferenceID,
__ExtendedParameters_nonStandard__item_,
_ExtendedParameters_nonStandard,
ExtendedParameters,
_ExtendedParameterPropose_targetExtendedParameters,
_ExtendedParameterPropose_minimumExtendedParameters,
_ExtendedParameterPropose_maximumExtendedParameters,
__ExtendedParameterPropose_nonStandard__item_,
_ExtendedParameterPropose_nonStandard,
ExtendedParameterPropose,
_ExtendedParameterAccept_extendedParameters,
__ExtendedParameterAccept_nonStandard__item_,
_ExtendedParameterAccept_nonStandard,
ExtendedParameterAccept,
_PlumbDomainIndication_heightLimit,
__PlumbDomainIndication_nonStandard__item_,
_PlumbDomainIndication_nonStandard,
PlumbDomainIndication,
_ErectDomainRequest_subHeight,
_ErectDomainRequest_subInterval,
__ErectDomainRequest_nonStandard__item_,
_ErectDomainRequest_nonStandard,
ErectDomainRequest,
__ChannelAttributes_static_channelId,
___ChannelAttributes_static_nonStandard__item_,
__ChannelAttributes_static_nonStandard,
_ChannelAttributes_static,
__ChannelAttributes_userId_joined,
__ChannelAttributes_userId_userId,
___ChannelAttributes_userId_nonStandard__item_,
__ChannelAttributes_userId_nonStandard,
_ChannelAttributes_userId,
__ChannelAttributes_private_joined,
__ChannelAttributes_private_channelId,
__ChannelAttributes_private_manager,
___ChannelAttributes_private_admitted__item_,
__ChannelAttributes_private_admitted,
___ChannelAttributes_private_nonStandard__item_,
__ChannelAttributes_private_nonStandard,
_ChannelAttributes_private,
__ChannelAttributes_assigned_channelId,
___ChannelAttributes_assigned_nonStandard__item_,
__ChannelAttributes_assigned_nonStandard,
_ChannelAttributes_assigned,
ChannelAttributes,
__MergeChannelsRequest_mergeChannels__item_,
_MergeChannelsRequest_mergeChannels,
__MergeChannelsRequest_purgeChannelIds__item_,
_MergeChannelsRequest_purgeChannelIds,
__MergeChannelsRequest_nonStandard__item_,
_MergeChannelsRequest_nonStandard,
MergeChannelsRequest,
__MergeChannelsConfirm_mergeChannels__item_,
_MergeChannelsConfirm_mergeChannels,
__MergeChannelsConfirm_purgeChannelIds__item_,
_MergeChannelsConfirm_purgeChannelIds,
__MergeChannelsConfirm_nonStandard__item_,
_MergeChannelsConfirm_nonStandard,
MergeChannelsConfirm,
__PurgeChannelsIndication_detachChannelIds__item_,
_PurgeChannelsIndication_detachChannelIds,
__PurgeChannelsIndication_purgeChannelIds__item_,
_PurgeChannelsIndication_purgeChannelIds,
__PurgeChannelsIndication_nonStandard__item_,
_PurgeChannelsIndication_nonStandard,
PurgeChannelsIndication,
__TokenAttributes_grabbed_tokenId,
__TokenAttributes_grabbed_grabber,
___TokenAttributes_grabbed_nonStandard__item_,
__TokenAttributes_grabbed_nonStandard,
_TokenAttributes_grabbed,
__TokenAttributes_inhibited_tokenId,
___TokenAttributes_inhibited_inhibitors__item_,
__TokenAttributes_inhibited_inhibitors,
___TokenAttributes_inhibited_nonStandard__item_,
__TokenAttributes_inhibited_nonStandard,
_TokenAttributes_inhibited,
__TokenAttributes_giving_tokenId,
__TokenAttributes_giving_grabber,
__TokenAttributes_giving_recipient,
___TokenAttributes_giving_nonStandard__item_,
__TokenAttributes_giving_nonStandard,
_TokenAttributes_giving,
__TokenAttributes_ungivable_tokenId,
__TokenAttributes_ungivable_grabber,
___TokenAttributes_ungivable_nonStandard__item_,
__TokenAttributes_ungivable_nonStandard,
_TokenAttributes_ungivable,
__TokenAttributes_given_tokenId,
__TokenAttributes_given_recipient,
___TokenAttributes_given_nonStandard__item_,
__TokenAttributes_given_nonStandard,
_TokenAttributes_given,
TokenAttributes,
__MergeTokensRequest_mergeTokens__item_,
_MergeTokensRequest_mergeTokens,
__MergeTokensRequest_purgeTokenIds__item_,
_MergeTokensRequest_purgeTokenIds,
__MergeTokensRequest_nonStandard__item_,
_MergeTokensRequest_nonStandard,
MergeTokensRequest,
__MergeTokensConfirm_mergeTokens__item_,
_MergeTokensConfirm_mergeTokens,
__MergeTokensConfirm_purgeTokenIds__item_,
_MergeTokensConfirm_purgeTokenIds,
__MergeTokensConfirm_nonStandard__item_,
_MergeTokensConfirm_nonStandard,
MergeTokensConfirm,
__PurgeTokensIndication_purgeTokenIds__item_,
_PurgeTokensIndication_purgeTokenIds,
__PurgeTokensIndication_nonStandard__item_,
_PurgeTokensIndication_nonStandard,
PurgeTokensIndication,
_DisconnectProviderUltimatum_reason,
__DisconnectProviderUltimatum_nonStandard__item_,
_DisconnectProviderUltimatum_nonStandard,
DisconnectProviderUltimatum,
_RejectMCSPDUUltimatum_diagnostic,
_RejectMCSPDUUltimatum_initialOctets,
__RejectMCSPDUUltimatum_nonStandard__item_,
_RejectMCSPDUUltimatum_nonStandard,
RejectMCSPDUUltimatum,
__AttachUserRequest_nonStandard__item_,
_AttachUserRequest_nonStandard,
AttachUserRequest,
_AttachUserConfirm_result,
_AttachUserConfirm_initiator,
__AttachUserConfirm_nonStandard__item_,
_AttachUserConfirm_nonStandard,
AttachUserConfirm,
_DetachUserRequest_reason,
__DetachUserRequest_userIds__item_,
_DetachUserRequest_userIds,
__DetachUserRequest_nonStandard__item_,
_DetachUserRequest_nonStandard,
DetachUserRequest,
_DetachUserIndication_reason,
__DetachUserIndication_userIds__item_,
_DetachUserIndication_userIds,
__DetachUserIndication_nonStandard__item_,
_DetachUserIndication_nonStandard,
DetachUserIndication,
_ChannelJoinRequest_initiator,
_ChannelJoinRequest_channelId,
__ChannelJoinRequest_nonStandard__item_,
_ChannelJoinRequest_nonStandard,
ChannelJoinRequest,
_ChannelJoinConfirm_result,
_ChannelJoinConfirm_initiator,
_ChannelJoinConfirm_requested,
_ChannelJoinConfirm_channelId,
__ChannelJoinConfirm_nonStandard__item_,
_ChannelJoinConfirm_nonStandard,
ChannelJoinConfirm,
__ChannelLeaveRequest_channelIds__item_,
_ChannelLeaveRequest_channelIds,
__ChannelLeaveRequest_nonStandard__item_,
_ChannelLeaveRequest_nonStandard,
ChannelLeaveRequest,
_ChannelConveneRequest_initiator,
__ChannelConveneRequest_nonStandard__item_,
_ChannelConveneRequest_nonStandard,
ChannelConveneRequest,
_ChannelConveneConfirm_result,
_ChannelConveneConfirm_initiator,
_ChannelConveneConfirm_channelId,
__ChannelConveneConfirm_nonStandard__item_,
_ChannelConveneConfirm_nonStandard,
ChannelConveneConfirm,
_ChannelDisbandRequest_initiator,
_ChannelDisbandRequest_channelId,
__ChannelDisbandRequest_nonStandard__item_,
_ChannelDisbandRequest_nonStandard,
ChannelDisbandRequest,
_ChannelDisbandIndication_channelId,
__ChannelDisbandIndication_nonStandard__item_,
_ChannelDisbandIndication_nonStandard,
ChannelDisbandIndication,
_ChannelAdmitRequest_initiator,
_ChannelAdmitRequest_channelId,
__ChannelAdmitRequest_userIds__item_,
_ChannelAdmitRequest_userIds,
__ChannelAdmitRequest_nonStandard__item_,
_ChannelAdmitRequest_nonStandard,
ChannelAdmitRequest,
_ChannelAdmitIndication_initiator,
_ChannelAdmitIndication_channelId,
__ChannelAdmitIndication_userIds__item_,
_ChannelAdmitIndication_userIds,
__ChannelAdmitIndication_nonStandard__item_,
_ChannelAdmitIndication_nonStandard,
ChannelAdmitIndication,
_ChannelExpelRequest_initiator,
_ChannelExpelRequest_channelId,
__ChannelExpelRequest_userIds__item_,
_ChannelExpelRequest_userIds,
__ChannelExpelRequest_nonStandard__item_,
_ChannelExpelRequest_nonStandard,
ChannelExpelRequest,
_ChannelExpelIndication_channelId,
__ChannelExpelIndication_userIds__item_,
_ChannelExpelIndication_userIds,
__ChannelExpelIndication_nonStandard__item_,
_ChannelExpelIndication_nonStandard,
ChannelExpelIndication,
_SendDataRequest_initiator,
_SendDataRequest_channelId,
_SendDataRequest_reliability,
_SendDataRequest_domainReferenceID,
_SendDataRequest_dataPriority,
_SendDataRequest_segmentation,
_SendDataRequest_userData,
_SendDataRequest_totalDataSize,
__SendDataRequest_nonStandard__item_,
_SendDataRequest_nonStandard,
SendDataRequest,
_SendDataIndication_initiator,
_SendDataIndication_channelId,
_SendDataIndication_reliability,
_SendDataIndication_domainReferenceID,
_SendDataIndication_dataPriority,
_SendDataIndication_segmentation,
_SendDataIndication_userData,
_SendDataIndication_totalDataSize,
__SendDataIndication_nonStandard__item_,
_SendDataIndication_nonStandard,
SendDataIndication,
_UniformSendDataRequest_initiator,
_UniformSendDataRequest_channelId,
_UniformSendDataRequest_reliability,
_UniformSendDataRequest_domainReferenceID,
_UniformSendDataRequest_dataPriority,
_UniformSendDataRequest_segmentation,
_UniformSendDataRequest_userData,
_UniformSendDataRequest_totalDataSize,
__UniformSendDataRequest_nonStandard__item_,
_UniformSendDataRequest_nonStandard,
UniformSendDataRequest,
_UniformSendDataIndication_initiator,
_UniformSendDataIndication_channelId,
_UniformSendDataIndication_reliability,
_UniformSendDataIndication_domainReferenceID,
_UniformSendDataIndication_dataPriority,
_UniformSendDataIndication_segmentation,
_UniformSendDataIndication_userData,
_UniformSendDataIndication_totalDataSize,
__UniformSendDataIndication_nonStandard__item_,
_UniformSendDataIndication_nonStandard,
UniformSendDataIndication,
_TokenGrabRequest_initiator,
_TokenGrabRequest_tokenId,
__TokenGrabRequest_nonStandard__item_,
_TokenGrabRequest_nonStandard,
TokenGrabRequest,
_TokenGrabConfirm_result,
_TokenGrabConfirm_initiator,
_TokenGrabConfirm_tokenId,
_TokenGrabConfirm_tokenStatus,
__TokenGrabConfirm_nonStandard__item_,
_TokenGrabConfirm_nonStandard,
TokenGrabConfirm,
_TokenInhibitRequest_initiator,
_TokenInhibitRequest_tokenId,
__TokenInhibitRequest_nonStandard__item_,
_TokenInhibitRequest_nonStandard,
TokenInhibitRequest,
_TokenInhibitConfirm_result,
_TokenInhibitConfirm_initiator,
_TokenInhibitConfirm_tokenId,
_TokenInhibitConfirm_tokenStatus,
__TokenInhibitConfirm_nonStandard__item_,
_TokenInhibitConfirm_nonStandard,
TokenInhibitConfirm,
_TokenGiveRequest_initiator,
_TokenGiveRequest_tokenId,
_TokenGiveRequest_recipient,
__TokenGiveRequest_nonStandard__item_,
_TokenGiveRequest_nonStandard,
TokenGiveRequest,
_TokenGiveIndication_initiator,
_TokenGiveIndication_tokenId,
_TokenGiveIndication_recipient,
__TokenGiveIndication_nonStandard__item_,
_TokenGiveIndication_nonStandard,
TokenGiveIndication,
_TokenGiveResponse_result,
_TokenGiveResponse_recipient,
_TokenGiveResponse_tokenId,
__TokenGiveResponse_nonStandard__item_,
_TokenGiveResponse_nonStandard,
TokenGiveResponse,
_TokenGiveConfirm_result,
_TokenGiveConfirm_initiator,
_TokenGiveConfirm_tokenId,
_TokenGiveConfirm_tokenStatus,
__TokenGiveConfirm_nonStandard__item_,
_TokenGiveConfirm_nonStandard,
TokenGiveConfirm,
_TokenPleaseRequest_initiator,
_TokenPleaseRequest_tokenId,
__TokenPleaseRequest_nonStandard__item_,
_TokenPleaseRequest_nonStandard,
TokenPleaseRequest,
_TokenPleaseIndication_initiator,
_TokenPleaseIndication_tokenId,
__TokenPleaseIndication_nonStandard__item_,
_TokenPleaseIndication_nonStandard,
TokenPleaseIndication,
_TokenReleaseRequest_initiator,
_TokenReleaseRequest_tokenId,
__TokenReleaseRequest_nonStandard__item_,
_TokenReleaseRequest_nonStandard,
TokenReleaseRequest,
_TokenReleaseConfirm_result,
_TokenReleaseConfirm_initiator,
_TokenReleaseConfirm_tokenId,
_TokenReleaseConfirm_tokenStatus,
__TokenReleaseConfirm_nonStandard__item_,
_TokenReleaseConfirm_nonStandard,
TokenReleaseConfirm,
_TokenTestRequest_initiator,
_TokenTestRequest_tokenId,
__TokenTestRequest_nonStandard__item_,
_TokenTestRequest_nonStandard,
TokenTestRequest,
_TokenTestConfirm_initiator,
_TokenTestConfirm_tokenId,
_TokenTestConfirm_tokenStatus,
__TokenTestConfirm_nonStandard__item_,
_TokenTestConfirm_nonStandard,
TokenTestConfirm,
_CapabilityID_standardID,
_CapabilityID_nonstandardID,
CapabilityID,
_CapabilityClass_null,
_CapabilityClass_unsignedMin,
_CapabilityClass_unsignedMax,
CapabilityClass,
_ParticipationIndicator_global_,
_ParticipationIndicator_partial,
ParticipationIndicator,
_RequestCapability_capabilityID,
_RequestCapability_capabilityClass,
_RequestCapability_participationIndicator,
__RequestCapability_nonStandard__item_,
_RequestCapability_nonStandard,
RequestCapability,
_SeqOfRequestCapabilities__item_,
SeqOfRequestCapabilities,
_IndicationCapability_capabilityID,
_IndicationCapability_capabilityClass,
_IndicationCapability_summitProviderSupported,
_IndicationCapability_intermediateNodeSupported,
__IndicationCapability_nonStandard__item_,
_IndicationCapability_nonStandard,
IndicationCapability,
_SeqOfIndicationCapabilities__item_,
SeqOfIndicationCapabilities,
_CapabilitiesNotificationRequest_v2NodePresent,
_CapabilitiesNotificationRequest_addList,
_CapabilitiesNotificationRequest_removeList,
__CapabilitiesNotificationRequest_nonStandard__item_,
_CapabilitiesNotificationRequest_nonStandard,
CapabilitiesNotificationRequest,
_CapabilitiesNotificationIndication_v2NodePresent,
_CapabilitiesNotificationIndication_addList,
_CapabilitiesNotificationIndication_removeList,
__CapabilitiesNotificationIndication_nonStandard__item_,
_CapabilitiesNotificationIndication_nonStandard,
CapabilitiesNotificationIndication,
_Reason_rn_domain_disconnected,
_Reason_rn_provider_initiated,
_Reason_rn_token_purged,
_Reason_rn_user_requested,
_Reason_rn_channel_purged,
Reason,
_Result_rt_successful,
_Result_rt_domain_merging,
_Result_rt_domain_not_hierarchical,
_Result_rt_no_such_channel,
_Result_rt_no_such_domain,
_Result_rt_no_such_user,
_Result_rt_not_admitted,
_Result_rt_other_user_id,
_Result_rt_parameters_unacceptable,
_Result_rt_token_not_available,
_Result_rt_token_not_possessed,
_Result_rt_too_many_channels,
_Result_rt_too_many_tokens,
_Result_rt_too_many_users,
_Result_rt_unspecified_failure,
_Result_rt_user_rejected,
Result,
_Diagnostic_dc_inconsistent_merge,
_Diagnostic_dc_forbidden_PDU_downward,
_Diagnostic_dc_forbidden_PDU_upward,
_Diagnostic_dc_invalid_BER_encoding,
_Diagnostic_dc_invalid_PER_encoding,
_Diagnostic_dc_misrouted_user,
_Diagnostic_dc_unrequested_confirm,
_Diagnostic_dc_wrong_transport_priority,
_Diagnostic_dc_channel_id_conflict,
_Diagnostic_dc_token_id_conflict,
_Diagnostic_dc_not_user_id_channel,
_Diagnostic_dc_too_many_channels,
_Diagnostic_dc_too_many_tokens,
_Diagnostic_dc_too_many_users,
Diagnostic,
_NonStandardPDU_data,
NonStandardPDU,
_ExtendedParameterMCSPDU_extendedParameterPropose,
_ExtendedParameterMCSPDU_extendedParameterAccept,
_ExtendedParameterMCSPDU_nonStandard,
ExtendedParameterMCSPDU,
_DomainMCSPDU_plumbDomainIndication,
_DomainMCSPDU_erectDomainRequest,
_DomainMCSPDU_mergeChannelsRequest,
_DomainMCSPDU_mergeChannelsConfirm,
_DomainMCSPDU_purgeChannelsIndication,
_DomainMCSPDU_mergeTokensRequest,
_DomainMCSPDU_mergeTokensConfirm,
_DomainMCSPDU_purgeTokensIndication,
_DomainMCSPDU_disconnectProviderUltimatum,
_DomainMCSPDU_rejectMCSPDUUltimatum,
_DomainMCSPDU_attachUserRequest,
_DomainMCSPDU_attachUserConfirm,
_DomainMCSPDU_detachUserRequest,
_DomainMCSPDU_detachUserIndication,
_DomainMCSPDU_channelJoinRequest,
_DomainMCSPDU_channelJoinConfirm,
_DomainMCSPDU_channelLeaveRequest,
_DomainMCSPDU_channelConveneRequest,
_DomainMCSPDU_channelConveneConfirm,
_DomainMCSPDU_channelDisbandRequest,
_DomainMCSPDU_channelDisbandIndication,
_DomainMCSPDU_channelAdmitRequest,
_DomainMCSPDU_channelAdmitIndication,
_DomainMCSPDU_channelExpelRequest,
_DomainMCSPDU_channelExpelIndication,
_DomainMCSPDU_sendDataRequest,
_DomainMCSPDU_sendDataIndication,
_DomainMCSPDU_uniformSendDataRequest,
_DomainMCSPDU_uniformSendDataIndication,
_DomainMCSPDU_tokenGrabRequest,
_DomainMCSPDU_tokenGrabConfirm,
_DomainMCSPDU_tokenInhibitRequest,
_DomainMCSPDU_tokenInhibitConfirm,
_DomainMCSPDU_tokenGiveRequest,
_DomainMCSPDU_tokenGiveIndication,
_DomainMCSPDU_tokenGiveResponse,
_DomainMCSPDU_tokenGiveConfirm,
_DomainMCSPDU_tokenPleaseRequest,
_DomainMCSPDU_tokenPleaseIndication,
_DomainMCSPDU_tokenReleaseRequest,
_DomainMCSPDU_tokenReleaseConfirm,
_DomainMCSPDU_tokenTestRequest,
_DomainMCSPDU_tokenTestConfirm,
_DomainMCSPDU_nonStandard,
DomainMCSPDU,
]
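# _all_ enumerates every ASN.1 object defined above, inner (underscore-
# prefixed) components included; the generating toolchain presumably walks
# this list at import time to resolve the ASN1RefType cross-references.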
class MAP_PROTOCOL:
_name_ = u'MAP-PROTOCOL'
_oid_ = []
_obj_ = [
u'H221NonStandardIdentifier',
u'Key',
u'NonStandardParameter',
u'NonStandardPDU',
u'VersionNumber',
u'Priority',
u'PriorityRange',
u'MAPConnectRequestPDU',
u'MAPConnectConfirmPDU',
u'Reason',
u'MAPDisconnectRequestPDU',
u'MAPDisconnectConfirmPDU',
u'TransportProtocolID',
u'TransportProtocolType',
u'NetworkAddress',
u'PayloadSize',
u'TransportProtocolRequestEntry',
u'MAPArbitrateProtocolsRequestPDU',
u'TransportProtocolConfirmEntry',
u'MAPArbitrateProtocolsConfirmPDU',
u'SenderID',
u'ChannelID',
u'ReliabilityLevel',
u'DataType',
u'DataFlowIdentifier',
u'SequenceNumber',
u'DataDescriptor',
u'DataFrameEntry',
u'MAPDataPDU',
u'Metachannel',
u'MAPAddGroupRequestPDU',
u'MAPRemoveGroupRequestPDU',
u'MAPDisableUnicastRequestPDU',
u'MAPEnableUnicastRequestPDU',
u'MAPEnableUnicastConfirmPDU',
u'MAPDisableMulticastRequestPDU',
u'MAPDisableMulticastConfirmPDU',
u'MAPEnableMulticastRequestPDU',
u'MAPSequenceNumberPDU',
u'MAPPDU',
]
_type_ = [
u'H221NonStandardIdentifier',
u'Key',
u'NonStandardParameter',
u'NonStandardPDU',
u'VersionNumber',
u'Priority',
u'PriorityRange',
u'MAPConnectRequestPDU',
u'MAPConnectConfirmPDU',
u'Reason',
u'MAPDisconnectRequestPDU',
u'MAPDisconnectConfirmPDU',
u'TransportProtocolID',
u'TransportProtocolType',
u'NetworkAddress',
u'PayloadSize',
u'TransportProtocolRequestEntry',
u'MAPArbitrateProtocolsRequestPDU',
u'TransportProtocolConfirmEntry',
u'MAPArbitrateProtocolsConfirmPDU',
u'SenderID',
u'ChannelID',
u'ReliabilityLevel',
u'DataType',
u'DataFlowIdentifier',
u'SequenceNumber',
u'DataDescriptor',
u'DataFrameEntry',
u'MAPDataPDU',
u'Metachannel',
u'MAPAddGroupRequestPDU',
u'MAPRemoveGroupRequestPDU',
u'MAPDisableUnicastRequestPDU',
u'MAPEnableUnicastRequestPDU',
u'MAPEnableUnicastConfirmPDU',
u'MAPDisableMulticastRequestPDU',
u'MAPDisableMulticastConfirmPDU',
u'MAPEnableMulticastRequestPDU',
u'MAPSequenceNumberPDU',
u'MAPPDU',
]
_set_ = [
]
_val_ = [
]
_class_ = [
]
_param_ = [
]
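# The metadata lists above follow the generator's per-module convention:
# _obj_ names every assignment in the source ASN.1 module, _type_ the subset
# that are type assignments, and _set_/_val_/_class_/_param_ the (here empty)
# value-set, value, information-object-class and parameterized-type
# assignments.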
#-----< H221NonStandardIdentifier >-----#
H221NonStandardIdentifier = OCT_STR(name=u'H221NonStandardIdentifier', mode=MODE_TYPE)
H221NonStandardIdentifier._const_sz = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=4, ub=255)], ev=None, er=[])
#-----< Key >-----#
Key = CHOICE(name=u'Key', mode=MODE_TYPE)
_Key_object = OID(name=u'object', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Key_h221NonStandard = OCT_STR(name=u'h221NonStandard', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'H221NonStandardIdentifier')))
Key._cont = ASN1Dict([
(u'object', _Key_object),
(u'h221NonStandard', _Key_h221NonStandard),
])
Key._ext = None
#-----< NonStandardParameter >-----#
NonStandardParameter = SEQ(name=u'NonStandardParameter', mode=MODE_TYPE)
_NonStandardParameter_key = CHOICE(name=u'key', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Key')))
_NonStandardParameter_data = OCT_STR(name=u'data', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
NonStandardParameter._cont = ASN1Dict([
(u'key', _NonStandardParameter_key),
(u'data', _NonStandardParameter_data),
])
NonStandardParameter._ext = None
#-----< NonStandardPDU >-----#
NonStandardPDU = SEQ(name=u'NonStandardPDU', mode=MODE_TYPE)
_NonStandardPDU_data = SEQ(name=u'data', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
NonStandardPDU._cont = ASN1Dict([
(u'data', _NonStandardPDU_data),
])
NonStandardPDU._ext = []
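# MAP-PROTOCOL re-declares H221NonStandardIdentifier, Key,
# NonStandardParameter and NonStandardPDU with the same shape as the
# MCS-PROTOCOL-3 versions above; the only difference is that their
# ASN1RefType references resolve within this module's namespace.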
#-----< VersionNumber >-----#
VersionNumber = SEQ(name=u'VersionNumber', mode=MODE_TYPE)
_VersionNumber_majorVersionNumber = INT(name=u'majorVersionNumber', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_VersionNumber_majorVersionNumber._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_VersionNumber_minorVersionNumber = INT(name=u'minorVersionNumber', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_VersionNumber_minorVersionNumber._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_VersionNumber_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__VersionNumber_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_VersionNumber_nonStandardParameters._cont = __VersionNumber_nonStandardParameters__item_
VersionNumber._cont = ASN1Dict([
(u'majorVersionNumber', _VersionNumber_majorVersionNumber),
(u'minorVersionNumber', _VersionNumber_minorVersionNumber),
(u'nonStandardParameters', _VersionNumber_nonStandardParameters),
])
VersionNumber._ext = []
#-----< Priority >-----#
Priority = INT(name=u'Priority', mode=MODE_TYPE)
Priority._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=15)], ev=None, er=[])
#-----< PriorityRange >-----#
PriorityRange = SEQ(name=u'PriorityRange', mode=MODE_TYPE)
_PriorityRange_highPriority = INT(name=u'highPriority', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Priority')))
_PriorityRange_lowPriority = INT(name=u'lowPriority', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Priority')))
_PriorityRange_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__PriorityRange_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_PriorityRange_nonStandardParameters._cont = __PriorityRange_nonStandardParameters__item_
PriorityRange._cont = ASN1Dict([
(u'highPriority', _PriorityRange_highPriority),
(u'lowPriority', _PriorityRange_lowPriority),
(u'nonStandardParameters', _PriorityRange_nonStandardParameters),
])
PriorityRange._ext = []
#-----< MAPConnectRequestPDU >-----#
MAPConnectRequestPDU = SEQ(name=u'MAPConnectRequestPDU', mode=MODE_TYPE)
_MAPConnectRequestPDU_versionNumber = SEQ(name=u'versionNumber', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'VersionNumber')))
_MAPConnectRequestPDU_connectionMAPSAP = INT(name=u'connectionMAPSAP', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_MAPConnectRequestPDU_connectionMAPSAP._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_MAPConnectRequestPDU_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_MAPConnectRequestPDU_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_MAPConnectRequestPDU_priorityRange = SEQ(name=u'priorityRange', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'PriorityRange')))
_MAPConnectRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPConnectRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPConnectRequestPDU_nonStandardParameters._cont = __MAPConnectRequestPDU_nonStandardParameters__item_
MAPConnectRequestPDU._cont = ASN1Dict([
(u'versionNumber', _MAPConnectRequestPDU_versionNumber),
(u'connectionMAPSAP', _MAPConnectRequestPDU_connectionMAPSAP),
(u'domainReferenceID', _MAPConnectRequestPDU_domainReferenceID),
(u'priorityRange', _MAPConnectRequestPDU_priorityRange),
(u'nonStandardParameters', _MAPConnectRequestPDU_nonStandardParameters),
])
MAPConnectRequestPDU._ext = []
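# Illustrative sketch only, same pycrate-style value conventions assumed
# above. A minimal MAPConnectRequestPDU value (IDs are hypothetical):
#
#   MAPConnectRequestPDU.set_val({
#       'versionNumber': {'majorVersionNumber': 1, 'minorVersionNumber': 0},
#       'connectionMAPSAP': 0,
#       'domainReferenceID': 1,
#       'priorityRange': {'highPriority': 0, 'lowPriority': 15},
#   })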
#-----< MAPConnectConfirmPDU >-----#
MAPConnectConfirmPDU = SEQ(name=u'MAPConnectConfirmPDU', mode=MODE_TYPE)
_MAPConnectConfirmPDU_versionNumber = SEQ(name=u'versionNumber', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'VersionNumber')))
_MAPConnectConfirmPDU_connectionMAPSAP = INT(name=u'connectionMAPSAP', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_MAPConnectConfirmPDU_connectionMAPSAP._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_MAPConnectConfirmPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPConnectConfirmPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPConnectConfirmPDU_nonStandardParameters._cont = __MAPConnectConfirmPDU_nonStandardParameters__item_
MAPConnectConfirmPDU._cont = ASN1Dict([
(u'versionNumber', _MAPConnectConfirmPDU_versionNumber),
(u'connectionMAPSAP', _MAPConnectConfirmPDU_connectionMAPSAP),
(u'nonStandardParameters', _MAPConnectConfirmPDU_nonStandardParameters),
])
MAPConnectConfirmPDU._ext = []
#-----< Reason >-----#
Reason = CHOICE(name=u'Reason', mode=MODE_TYPE)
_Reason_providerInitiated = NULL(name=u'providerInitiated', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_userRejected = NULL(name=u'userRejected', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_userInitiated = NULL(name=u'userInitiated', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_invalidMAPSAP = NULL(name=u'invalidMAPSAP', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_invalidDomainReferenceID = NULL(name=u'invalidDomainReferenceID', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_unicastTransition = NULL(name=u'unicastTransition', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_unspecifiedFailure = NULL(name=u'unspecifiedFailure', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_Reason_nonStandardReason = SEQ(name=u'nonStandardReason', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
Reason._cont = ASN1Dict([
(u'providerInitiated', _Reason_providerInitiated),
(u'userRejected', _Reason_userRejected),
(u'userInitiated', _Reason_userInitiated),
(u'invalidMAPSAP', _Reason_invalidMAPSAP),
(u'invalidDomainReferenceID', _Reason_invalidDomainReferenceID),
(u'unicastTransition', _Reason_unicastTransition),
(u'unspecifiedFailure', _Reason_unspecifiedFailure),
(u'nonStandardReason', _Reason_nonStandardReason),
])
Reason._ext = []
#-----< MAPDisconnectRequestPDU >-----#
MAPDisconnectRequestPDU = SEQ(name=u'MAPDisconnectRequestPDU', mode=MODE_TYPE)
_MAPDisconnectRequestPDU_reason = CHOICE(name=u'reason', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Reason')))
_MAPDisconnectRequestPDU_confirmRequired = BOOL(name=u'confirmRequired', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_MAPDisconnectRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPDisconnectRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPDisconnectRequestPDU_nonStandardParameters._cont = __MAPDisconnectRequestPDU_nonStandardParameters__item_
MAPDisconnectRequestPDU._cont = ASN1Dict([
(u'reason', _MAPDisconnectRequestPDU_reason),
(u'confirmRequired', _MAPDisconnectRequestPDU_confirmRequired),
(u'nonStandardParameters', _MAPDisconnectRequestPDU_nonStandardParameters),
])
MAPDisconnectRequestPDU._ext = []
#-----< MAPDisconnectConfirmPDU >-----#
MAPDisconnectConfirmPDU = SEQ(name=u'MAPDisconnectConfirmPDU', mode=MODE_TYPE)
_MAPDisconnectConfirmPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPDisconnectConfirmPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPDisconnectConfirmPDU_nonStandardParameters._cont = __MAPDisconnectConfirmPDU_nonStandardParameters__item_
MAPDisconnectConfirmPDU._cont = ASN1Dict([
(u'nonStandardParameters', _MAPDisconnectConfirmPDU_nonStandardParameters),
])
MAPDisconnectConfirmPDU._ext = []
#-----< TransportProtocolID >-----#
TransportProtocolID = CHOICE(name=u'TransportProtocolID', mode=MODE_TYPE)
_TransportProtocolID_objectProtocolID = OID(name=u'objectProtocolID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TransportProtocolID_h221NonStandardProtocolID = OCT_STR(name=u'h221NonStandardProtocolID', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'H221NonStandardIdentifier')))
_TransportProtocolID_snapProtocolID = OCT_STR(name=u'snapProtocolID', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TransportProtocolID_snapProtocolID._const_sz = ASN1Set(rv=[5], rr=[], ev=None, er=[])
_TransportProtocolID_nonStandardProtocolID = SEQ(name=u'nonStandardProtocolID', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
TransportProtocolID._cont = ASN1Dict([
(u'objectProtocolID', _TransportProtocolID_objectProtocolID),
(u'h221NonStandardProtocolID', _TransportProtocolID_h221NonStandardProtocolID),
(u'snapProtocolID', _TransportProtocolID_snapProtocolID),
(u'nonStandardProtocolID', _TransportProtocolID_nonStandardProtocolID),
])
TransportProtocolID._ext = []
#-----< TransportProtocolType >-----#
TransportProtocolType = CHOICE(name=u'TransportProtocolType', mode=MODE_TYPE)
_TransportProtocolType_reliableUnicast = NULL(name=u'reliableUnicast', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TransportProtocolType_unreliableUnicast = NULL(name=u'unreliableUnicast', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TransportProtocolType_reliableMulticast = NULL(name=u'reliableMulticast', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TransportProtocolType_unreliableMulticast = NULL(name=u'unreliableMulticast', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TransportProtocolType_nonStandardProtocolType = SEQ(name=u'nonStandardProtocolType', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
TransportProtocolType._cont = ASN1Dict([
(u'reliableUnicast', _TransportProtocolType_reliableUnicast),
(u'unreliableUnicast', _TransportProtocolType_unreliableUnicast),
(u'reliableMulticast', _TransportProtocolType_reliableMulticast),
(u'unreliableMulticast', _TransportProtocolType_unreliableMulticast),
(u'nonStandardProtocolType', _TransportProtocolType_nonStandardProtocolType),
])
TransportProtocolType._ext = []
#-----< NetworkAddress >-----#
NetworkAddress = SEQ(name=u'NetworkAddress', mode=MODE_TYPE)
_NetworkAddress_nsapAddress = OCT_STR(name=u'nsapAddress', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_NetworkAddress_nsapAddress._const_sz = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1, ub=20)], ev=None, er=[])
_NetworkAddress_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__NetworkAddress_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_NetworkAddress_nonStandardParameters._cont = __NetworkAddress_nonStandardParameters__item_
NetworkAddress._cont = ASN1Dict([
(u'nsapAddress', _NetworkAddress_nsapAddress),
(u'nonStandardParameters', _NetworkAddress_nonStandardParameters),
])
NetworkAddress._ext = []
#-----< PayloadSize >-----#
PayloadSize = INT(name=u'PayloadSize', mode=MODE_TYPE)
PayloadSize._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=128, ub=65535)], ev=None, er=[])
#-----< TransportProtocolRequestEntry >-----#
TransportProtocolRequestEntry = SEQ(name=u'TransportProtocolRequestEntry', mode=MODE_TYPE)
_TransportProtocolRequestEntry_transportProtocolID = CHOICE(name=u'transportProtocolID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'TransportProtocolID')))
_TransportProtocolRequestEntry_transportProtocolType = CHOICE(name=u'transportProtocolType', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'TransportProtocolType')))
_TransportProtocolRequestEntry_networkAddress = SEQ(name=u'networkAddress', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NetworkAddress')))
_TransportProtocolRequestEntry_maximumPayloadFixedFlag = BOOL(name=u'maximumPayloadFixedFlag', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_TransportProtocolRequestEntry_maximumPayloadSize = INT(name=u'maximumPayloadSize', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'PayloadSize')))
_TransportProtocolRequestEntry_preferenceWeighting = INT(name=u'preferenceWeighting', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_TransportProtocolRequestEntry_preferenceWeighting._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_TransportProtocolRequestEntry_nodeCount = INT(name=u'nodeCount', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_TransportProtocolRequestEntry_nodeCount._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_TransportProtocolRequestEntry_numberOfConnections = INT(name=u'numberOfConnections', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_TransportProtocolRequestEntry_numberOfConnections._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_TransportProtocolRequestEntry_configurationData = OCT_STR(name=u'configurationData', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_TransportProtocolRequestEntry_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(9, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TransportProtocolRequestEntry_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_TransportProtocolRequestEntry_nonStandardParameters._cont = __TransportProtocolRequestEntry_nonStandardParameters__item_
TransportProtocolRequestEntry._cont = ASN1Dict([
(u'transportProtocolID', _TransportProtocolRequestEntry_transportProtocolID),
(u'transportProtocolType', _TransportProtocolRequestEntry_transportProtocolType),
(u'networkAddress', _TransportProtocolRequestEntry_networkAddress),
(u'maximumPayloadFixedFlag', _TransportProtocolRequestEntry_maximumPayloadFixedFlag),
(u'maximumPayloadSize', _TransportProtocolRequestEntry_maximumPayloadSize),
(u'preferenceWeighting', _TransportProtocolRequestEntry_preferenceWeighting),
(u'nodeCount', _TransportProtocolRequestEntry_nodeCount),
(u'numberOfConnections', _TransportProtocolRequestEntry_numberOfConnections),
(u'configurationData', _TransportProtocolRequestEntry_configurationData),
(u'nonStandardParameters', _TransportProtocolRequestEntry_nonStandardParameters),
])
TransportProtocolRequestEntry._ext = []
#-----< MAPArbitrateProtocolsRequestPDU >-----#
MAPArbitrateProtocolsRequestPDU = SEQ(name=u'MAPArbitrateProtocolsRequestPDU', mode=MODE_TYPE)
_MAPArbitrateProtocolsRequestPDU_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_MAPArbitrateProtocolsRequestPDU_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_MAPArbitrateProtocolsRequestPDU_moreToComeFlag = BOOL(name=u'moreToComeFlag', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_MAPArbitrateProtocolsRequestPDU_transportProtocolMenu = SEQ_OF(name=u'transportProtocolMenu', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MAPArbitrateProtocolsRequestPDU_transportProtocolMenu__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'TransportProtocolRequestEntry')))
_MAPArbitrateProtocolsRequestPDU_transportProtocolMenu._cont = __MAPArbitrateProtocolsRequestPDU_transportProtocolMenu__item_
_MAPArbitrateProtocolsRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPArbitrateProtocolsRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPArbitrateProtocolsRequestPDU_nonStandardParameters._cont = __MAPArbitrateProtocolsRequestPDU_nonStandardParameters__item_
MAPArbitrateProtocolsRequestPDU._cont = ASN1Dict([
(u'domainReferenceID', _MAPArbitrateProtocolsRequestPDU_domainReferenceID),
(u'moreToComeFlag', _MAPArbitrateProtocolsRequestPDU_moreToComeFlag),
(u'transportProtocolMenu', _MAPArbitrateProtocolsRequestPDU_transportProtocolMenu),
(u'nonStandardParameters', _MAPArbitrateProtocolsRequestPDU_nonStandardParameters),
])
MAPArbitrateProtocolsRequestPDU._ext = []
#-----< TransportProtocolConfirmEntry >-----#
TransportProtocolConfirmEntry = SEQ(name=u'TransportProtocolConfirmEntry', mode=MODE_TYPE)
_TransportProtocolConfirmEntry_transportProtocolID = CHOICE(name=u'transportProtocolID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'TransportProtocolID')))
_TransportProtocolConfirmEntry_transportProtocolType = CHOICE(name=u'transportProtocolType', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'TransportProtocolType')))
_TransportProtocolConfirmEntry_networkAddress = SEQ(name=u'networkAddress', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NetworkAddress')))
_TransportProtocolConfirmEntry_maximumPayloadSize = INT(name=u'maximumPayloadSize', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'PayloadSize')))
_TransportProtocolConfirmEntry_preferenceWeighting = INT(name=u'preferenceWeighting', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_TransportProtocolConfirmEntry_preferenceWeighting._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_TransportProtocolConfirmEntry_nodeCount = INT(name=u'nodeCount', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_TransportProtocolConfirmEntry_nodeCount._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_TransportProtocolConfirmEntry_numberOfConnections = INT(name=u'numberOfConnections', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_TransportProtocolConfirmEntry_numberOfConnections._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_TransportProtocolConfirmEntry_configurationData = OCT_STR(name=u'configurationData', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_TransportProtocolConfirmEntry_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__TransportProtocolConfirmEntry_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_TransportProtocolConfirmEntry_nonStandardParameters._cont = __TransportProtocolConfirmEntry_nonStandardParameters__item_
TransportProtocolConfirmEntry._cont = ASN1Dict([
(u'transportProtocolID', _TransportProtocolConfirmEntry_transportProtocolID),
(u'transportProtocolType', _TransportProtocolConfirmEntry_transportProtocolType),
(u'networkAddress', _TransportProtocolConfirmEntry_networkAddress),
(u'maximumPayloadSize', _TransportProtocolConfirmEntry_maximumPayloadSize),
(u'preferenceWeighting', _TransportProtocolConfirmEntry_preferenceWeighting),
(u'nodeCount', _TransportProtocolConfirmEntry_nodeCount),
(u'numberOfConnections', _TransportProtocolConfirmEntry_numberOfConnections),
(u'configurationData', _TransportProtocolConfirmEntry_configurationData),
(u'nonStandardParameters', _TransportProtocolConfirmEntry_nonStandardParameters),
])
TransportProtocolConfirmEntry._ext = []
#-----< MAPArbitrateProtocolsConfirmPDU >-----#
MAPArbitrateProtocolsConfirmPDU = SEQ(name=u'MAPArbitrateProtocolsConfirmPDU', mode=MODE_TYPE)
_MAPArbitrateProtocolsConfirmPDU_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_MAPArbitrateProtocolsConfirmPDU_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_MAPArbitrateProtocolsConfirmPDU_moreToComeFlag = BOOL(name=u'moreToComeFlag', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_MAPArbitrateProtocolsConfirmPDU_transportProtocolMenu = SEQ_OF(name=u'transportProtocolMenu', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MAPArbitrateProtocolsConfirmPDU_transportProtocolMenu__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'TransportProtocolConfirmEntry')))
_MAPArbitrateProtocolsConfirmPDU_transportProtocolMenu._cont = __MAPArbitrateProtocolsConfirmPDU_transportProtocolMenu__item_
_MAPArbitrateProtocolsConfirmPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPArbitrateProtocolsConfirmPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPArbitrateProtocolsConfirmPDU_nonStandardParameters._cont = __MAPArbitrateProtocolsConfirmPDU_nonStandardParameters__item_
MAPArbitrateProtocolsConfirmPDU._cont = ASN1Dict([
(u'domainReferenceID', _MAPArbitrateProtocolsConfirmPDU_domainReferenceID),
(u'moreToComeFlag', _MAPArbitrateProtocolsConfirmPDU_moreToComeFlag),
(u'transportProtocolMenu', _MAPArbitrateProtocolsConfirmPDU_transportProtocolMenu),
(u'nonStandardParameters', _MAPArbitrateProtocolsConfirmPDU_nonStandardParameters),
])
MAPArbitrateProtocolsConfirmPDU._ext = []
#-----< SenderID >-----#
SenderID = INT(name=u'SenderID', mode=MODE_TYPE)
SenderID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1001, ub=65535)], ev=None, er=[])
#-----< ChannelID >-----#
ChannelID = INT(name=u'ChannelID', mode=MODE_TYPE)
ChannelID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=1, ub=65535)], ev=None, er=[])
#-----< ReliabilityLevel >-----#
ReliabilityLevel = CHOICE(name=u'ReliabilityLevel', mode=MODE_TYPE)
_ReliabilityLevel_reliable = NULL(name=u'reliable', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_ReliabilityLevel_unreliable = NULL(name=u'unreliable', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_ReliabilityLevel_nonStandardReliabilityLevel = SEQ(name=u'nonStandardReliabilityLevel', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
ReliabilityLevel._cont = ASN1Dict([
(u'reliable', _ReliabilityLevel_reliable),
(u'unreliable', _ReliabilityLevel_unreliable),
(u'nonStandardReliabilityLevel', _ReliabilityLevel_nonStandardReliabilityLevel),
])
ReliabilityLevel._ext = []
#-----< DataType >-----#
DataType = CHOICE(name=u'DataType', mode=MODE_TYPE)
_DataType_nonuniform = NULL(name=u'nonuniform', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataType_uniform = NULL(name=u'uniform', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataType_proxy = NULL(name=u'proxy', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataType_nonStandardDataType = SEQ(name=u'nonStandardDataType', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
DataType._cont = ASN1Dict([
(u'nonuniform', _DataType_nonuniform),
(u'uniform', _DataType_uniform),
(u'proxy', _DataType_proxy),
(u'nonStandardDataType', _DataType_nonStandardDataType),
])
DataType._ext = []
#-----< DataFlowIdentifier >-----#
DataFlowIdentifier = SEQ(name=u'DataFlowIdentifier', mode=MODE_TYPE)
_DataFlowIdentifier_senderID = INT(name=u'senderID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'SenderID')), opt=True)
_DataFlowIdentifier_metachannelID = SEQ(name=u'metachannelID', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__DataFlowIdentifier_metachannelID_channelID = INT(name=u'channelID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'ChannelID')))
__DataFlowIdentifier_metachannelID_reliabilityLevel = CHOICE(name=u'reliabilityLevel', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'ReliabilityLevel')))
__DataFlowIdentifier_metachannelID_priority = INT(name=u'priority', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Priority')))
_DataFlowIdentifier_metachannelID._cont = ASN1Dict([
(u'channelID', __DataFlowIdentifier_metachannelID_channelID),
(u'reliabilityLevel', __DataFlowIdentifier_metachannelID_reliabilityLevel),
(u'priority', __DataFlowIdentifier_metachannelID_priority),
])
_DataFlowIdentifier_metachannelID._ext = []
_DataFlowIdentifier_dataType = CHOICE(name=u'dataType', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'DataType')), opt=True)
_DataFlowIdentifier_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__DataFlowIdentifier_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_DataFlowIdentifier_nonStandardParameters._cont = __DataFlowIdentifier_nonStandardParameters__item_
DataFlowIdentifier._cont = ASN1Dict([
(u'senderID', _DataFlowIdentifier_senderID),
(u'metachannelID', _DataFlowIdentifier_metachannelID),
(u'dataType', _DataFlowIdentifier_dataType),
(u'nonStandardParameters', _DataFlowIdentifier_nonStandardParameters),
])
DataFlowIdentifier._ext = []
#-----< SequenceNumber >-----#
SequenceNumber = INT(name=u'SequenceNumber', mode=MODE_TYPE)
SequenceNumber._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
#-----< DataDescriptor >-----#
DataDescriptor = SEQ(name=u'DataDescriptor', mode=MODE_TYPE)
_DataDescriptor_unicastForwardFlag = BOOL(name=u'unicastForwardFlag', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataDescriptor_startingSequenceNumber = INT(name=u'startingSequenceNumber', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'SequenceNumber')))
_DataDescriptor_dataFlowIdentifier = SEQ(name=u'dataFlowIdentifier', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'DataFlowIdentifier')), opt=True)
_DataDescriptor_domainReferenceID = INT(name=u'domainReferenceID', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
_DataDescriptor_domainReferenceID._const_val = ASN1Set(rv=[], rr=[ASN1RangeInt(lb=0, ub=65535)], ev=None, er=[])
_DataDescriptor_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__DataDescriptor_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_DataDescriptor_nonStandardParameters._cont = __DataDescriptor_nonStandardParameters__item_
DataDescriptor._cont = ASN1Dict([
(u'unicastForwardFlag', _DataDescriptor_unicastForwardFlag),
(u'startingSequenceNumber', _DataDescriptor_startingSequenceNumber),
(u'dataFlowIdentifier', _DataDescriptor_dataFlowIdentifier),
(u'domainReferenceID', _DataDescriptor_domainReferenceID),
(u'nonStandardParameters', _DataDescriptor_nonStandardParameters),
])
DataDescriptor._ext = []
#-----< DataFrameEntry >-----#
DataFrameEntry = SEQ(name=u'DataFrameEntry', mode=MODE_TYPE)
_DataFrameEntry_firstSegmentFlag = BOOL(name=u'firstSegmentFlag', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataFrameEntry_lastSegmentFlag = BOOL(name=u'lastSegmentFlag', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataFrameEntry_userData = OCT_STR(name=u'userData', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
_DataFrameEntry_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__DataFrameEntry_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_DataFrameEntry_nonStandardParameters._cont = __DataFrameEntry_nonStandardParameters__item_
DataFrameEntry._cont = ASN1Dict([
(u'firstSegmentFlag', _DataFrameEntry_firstSegmentFlag),
(u'lastSegmentFlag', _DataFrameEntry_lastSegmentFlag),
(u'userData', _DataFrameEntry_userData),
(u'nonStandardParameters', _DataFrameEntry_nonStandardParameters),
])
DataFrameEntry._ext = []
#-----< MAPDataPDU >-----#
MAPDataPDU = SEQ(name=u'MAPDataPDU', mode=MODE_TYPE)
_MAPDataPDU_dataDescriptor = SEQ(name=u'dataDescriptor', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'DataDescriptor')), opt=True)
_MAPDataPDU_dataFrameArray = SEQ_OF(name=u'dataFrameArray', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT))
__MAPDataPDU_dataFrameArray__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'DataFrameEntry')))
_MAPDataPDU_dataFrameArray._cont = __MAPDataPDU_dataFrameArray__item_
_MAPDataPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPDataPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPDataPDU_nonStandardParameters._cont = __MAPDataPDU_nonStandardParameters__item_
MAPDataPDU._cont = ASN1Dict([
(u'dataDescriptor', _MAPDataPDU_dataDescriptor),
(u'dataFrameArray', _MAPDataPDU_dataFrameArray),
(u'nonStandardParameters', _MAPDataPDU_nonStandardParameters),
])
MAPDataPDU._ext = []
#-----< Metachannel >-----#
Metachannel = SEQ(name=u'Metachannel', mode=MODE_TYPE)
_Metachannel_channelID = INT(name=u'channelID', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'ChannelID')))
_Metachannel_reliabilityLevel = CHOICE(name=u'reliabilityLevel', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'ReliabilityLevel')))
_Metachannel_priorityRange = SEQ(name=u'priorityRange', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'PriorityRange')))
_Metachannel_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__Metachannel_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_Metachannel_nonStandardParameters._cont = __Metachannel_nonStandardParameters__item_
Metachannel._cont = ASN1Dict([
(u'channelID', _Metachannel_channelID),
(u'reliabilityLevel', _Metachannel_reliabilityLevel),
(u'priorityRange', _Metachannel_priorityRange),
(u'nonStandardParameters', _Metachannel_nonStandardParameters),
])
Metachannel._ext = []
#-----< MAPAddGroupRequestPDU >-----#
MAPAddGroupRequestPDU = SEQ(name=u'MAPAddGroupRequestPDU', mode=MODE_TYPE)
_MAPAddGroupRequestPDU_metachannel = SEQ(name=u'metachannel', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Metachannel')))
_MAPAddGroupRequestPDU_transportProtocolID = CHOICE(name=u'transportProtocolID', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_EXPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'TransportProtocolID')))
_MAPAddGroupRequestPDU_multicastGroupAddress = SEQ(name=u'multicastGroupAddress', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NetworkAddress')))
_MAPAddGroupRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPAddGroupRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPAddGroupRequestPDU_nonStandardParameters._cont = __MAPAddGroupRequestPDU_nonStandardParameters__item_
MAPAddGroupRequestPDU._cont = ASN1Dict([
(u'metachannel', _MAPAddGroupRequestPDU_metachannel),
(u'transportProtocolID', _MAPAddGroupRequestPDU_transportProtocolID),
(u'multicastGroupAddress', _MAPAddGroupRequestPDU_multicastGroupAddress),
(u'nonStandardParameters', _MAPAddGroupRequestPDU_nonStandardParameters),
])
MAPAddGroupRequestPDU._ext = []
#-----< MAPRemoveGroupRequestPDU >-----#
MAPRemoveGroupRequestPDU = SEQ(name=u'MAPRemoveGroupRequestPDU', mode=MODE_TYPE)
_MAPRemoveGroupRequestPDU_metachannel = SEQ(name=u'metachannel', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Metachannel')))
_MAPRemoveGroupRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPRemoveGroupRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPRemoveGroupRequestPDU_nonStandardParameters._cont = __MAPRemoveGroupRequestPDU_nonStandardParameters__item_
MAPRemoveGroupRequestPDU._cont = ASN1Dict([
(u'metachannel', _MAPRemoveGroupRequestPDU_metachannel),
(u'nonStandardParameters', _MAPRemoveGroupRequestPDU_nonStandardParameters),
])
MAPRemoveGroupRequestPDU._ext = []
#-----< MAPDisableUnicastRequestPDU >-----#
MAPDisableUnicastRequestPDU = SEQ(name=u'MAPDisableUnicastRequestPDU', mode=MODE_TYPE)
_MAPDisableUnicastRequestPDU_dataFlowIdentifier = SEQ(name=u'dataFlowIdentifier', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'DataFlowIdentifier')))
_MAPDisableUnicastRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPDisableUnicastRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPDisableUnicastRequestPDU_nonStandardParameters._cont = __MAPDisableUnicastRequestPDU_nonStandardParameters__item_
MAPDisableUnicastRequestPDU._cont = ASN1Dict([
(u'dataFlowIdentifier', _MAPDisableUnicastRequestPDU_dataFlowIdentifier),
(u'nonStandardParameters', _MAPDisableUnicastRequestPDU_nonStandardParameters),
])
MAPDisableUnicastRequestPDU._ext = []
#-----< MAPEnableUnicastRequestPDU >-----#
MAPEnableUnicastRequestPDU = SEQ(name=u'MAPEnableUnicastRequestPDU', mode=MODE_TYPE)
_MAPEnableUnicastRequestPDU_dataFlowIdentifier = SEQ(name=u'dataFlowIdentifier', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'DataFlowIdentifier')))
_MAPEnableUnicastRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPEnableUnicastRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPEnableUnicastRequestPDU_nonStandardParameters._cont = __MAPEnableUnicastRequestPDU_nonStandardParameters__item_
MAPEnableUnicastRequestPDU._cont = ASN1Dict([
(u'dataFlowIdentifier', _MAPEnableUnicastRequestPDU_dataFlowIdentifier),
(u'nonStandardParameters', _MAPEnableUnicastRequestPDU_nonStandardParameters),
])
MAPEnableUnicastRequestPDU._ext = []
#-----< MAPEnableUnicastConfirmPDU >-----#
MAPEnableUnicastConfirmPDU = SEQ(name=u'MAPEnableUnicastConfirmPDU', mode=MODE_TYPE)
_MAPEnableUnicastConfirmPDU_dataFlowIdentifier = SEQ(name=u'dataFlowIdentifier', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'DataFlowIdentifier')))
_MAPEnableUnicastConfirmPDU_sequenceNumber = INT(name=u'sequenceNumber', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'SequenceNumber')))
_MAPEnableUnicastConfirmPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPEnableUnicastConfirmPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPEnableUnicastConfirmPDU_nonStandardParameters._cont = __MAPEnableUnicastConfirmPDU_nonStandardParameters__item_
MAPEnableUnicastConfirmPDU._cont = ASN1Dict([
(u'dataFlowIdentifier', _MAPEnableUnicastConfirmPDU_dataFlowIdentifier),
(u'sequenceNumber', _MAPEnableUnicastConfirmPDU_sequenceNumber),
(u'nonStandardParameters', _MAPEnableUnicastConfirmPDU_nonStandardParameters),
])
MAPEnableUnicastConfirmPDU._ext = []
#-----< MAPDisableMulticastRequestPDU >-----#
MAPDisableMulticastRequestPDU = SEQ(name=u'MAPDisableMulticastRequestPDU', mode=MODE_TYPE)
_MAPDisableMulticastRequestPDU_metachannel = SEQ(name=u'metachannel', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Metachannel')))
_MAPDisableMulticastRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPDisableMulticastRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPDisableMulticastRequestPDU_nonStandardParameters._cont = __MAPDisableMulticastRequestPDU_nonStandardParameters__item_
MAPDisableMulticastRequestPDU._cont = ASN1Dict([
(u'metachannel', _MAPDisableMulticastRequestPDU_metachannel),
(u'nonStandardParameters', _MAPDisableMulticastRequestPDU_nonStandardParameters),
])
MAPDisableMulticastRequestPDU._ext = []
#-----< MAPDisableMulticastConfirmPDU >-----#
MAPDisableMulticastConfirmPDU = SEQ(name=u'MAPDisableMulticastConfirmPDU', mode=MODE_TYPE)
_MAPDisableMulticastConfirmPDU_metachannel = SEQ(name=u'metachannel', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Metachannel')))
_MAPDisableMulticastConfirmPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPDisableMulticastConfirmPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPDisableMulticastConfirmPDU_nonStandardParameters._cont = __MAPDisableMulticastConfirmPDU_nonStandardParameters__item_
MAPDisableMulticastConfirmPDU._cont = ASN1Dict([
(u'metachannel', _MAPDisableMulticastConfirmPDU_metachannel),
(u'nonStandardParameters', _MAPDisableMulticastConfirmPDU_nonStandardParameters),
])
MAPDisableMulticastConfirmPDU._ext = []
#-----< MAPEnableMulticastRequestPDU >-----#
MAPEnableMulticastRequestPDU = SEQ(name=u'MAPEnableMulticastRequestPDU', mode=MODE_TYPE)
_MAPEnableMulticastRequestPDU_metachannel = SEQ(name=u'metachannel', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'Metachannel')))
_MAPEnableMulticastRequestPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPEnableMulticastRequestPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPEnableMulticastRequestPDU_nonStandardParameters._cont = __MAPEnableMulticastRequestPDU_nonStandardParameters__item_
MAPEnableMulticastRequestPDU._cont = ASN1Dict([
(u'metachannel', _MAPEnableMulticastRequestPDU_metachannel),
(u'nonStandardParameters', _MAPEnableMulticastRequestPDU_nonStandardParameters),
])
MAPEnableMulticastRequestPDU._ext = []
#-----< MAPSequenceNumberPDU >-----#
MAPSequenceNumberPDU = SEQ(name=u'MAPSequenceNumberPDU', mode=MODE_TYPE)
_MAPSequenceNumberPDU_dataFlowIdentifier = SEQ(name=u'dataFlowIdentifier', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'DataFlowIdentifier')))
_MAPSequenceNumberPDU_sequenceNumber = INT(name=u'sequenceNumber', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'SequenceNumber')))
_MAPSequenceNumberPDU_nonStandardParameters = SEQ_OF(name=u'nonStandardParameters', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), opt=True)
__MAPSequenceNumberPDU_nonStandardParameters__item_ = SEQ(name='_item_', mode=MODE_TYPE, typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardParameter')))
_MAPSequenceNumberPDU_nonStandardParameters._cont = __MAPSequenceNumberPDU_nonStandardParameters__item_
MAPSequenceNumberPDU._cont = ASN1Dict([
(u'dataFlowIdentifier', _MAPSequenceNumberPDU_dataFlowIdentifier),
(u'sequenceNumber', _MAPSequenceNumberPDU_sequenceNumber),
(u'nonStandardParameters', _MAPSequenceNumberPDU_nonStandardParameters),
])
MAPSequenceNumberPDU._ext = []
#-----< MAPPDU >-----#
MAPPDU = CHOICE(name=u'MAPPDU', mode=MODE_TYPE)
_MAPPDU_mapConnectRequest = SEQ(name=u'mapConnectRequest', mode=MODE_TYPE, tag=(0, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPConnectRequestPDU')))
_MAPPDU_mapConnectConfirm = SEQ(name=u'mapConnectConfirm', mode=MODE_TYPE, tag=(1, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPConnectConfirmPDU')))
_MAPPDU_mapDisconnectRequest = SEQ(name=u'mapDisconnectRequest', mode=MODE_TYPE, tag=(2, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPDisconnectRequestPDU')))
_MAPPDU_mapDisconnectConfirm = SEQ(name=u'mapDisconnectConfirm', mode=MODE_TYPE, tag=(3, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPDisconnectConfirmPDU')))
_MAPPDU_mapArbitrateProtocolsRequest = SEQ(name=u'mapArbitrateProtocolsRequest', mode=MODE_TYPE, tag=(4, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPArbitrateProtocolsRequestPDU')))
_MAPPDU_mapArbitrateProtocolsConfirm = SEQ(name=u'mapArbitrateProtocolsConfirm', mode=MODE_TYPE, tag=(5, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPArbitrateProtocolsConfirmPDU')))
_MAPPDU_mapData = SEQ(name=u'mapData', mode=MODE_TYPE, tag=(6, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPDataPDU')))
_MAPPDU_mapAddGroupRequest = SEQ(name=u'mapAddGroupRequest', mode=MODE_TYPE, tag=(7, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPAddGroupRequestPDU')))
_MAPPDU_mapRemoveGroupRequest = SEQ(name=u'mapRemoveGroupRequest', mode=MODE_TYPE, tag=(8, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPRemoveGroupRequestPDU')))
_MAPPDU_mapDisableUnicastRequest = SEQ(name=u'mapDisableUnicastRequest', mode=MODE_TYPE, tag=(9, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPDisableUnicastRequestPDU')))
_MAPPDU_mapEnableUnicastRequest = SEQ(name=u'mapEnableUnicastRequest', mode=MODE_TYPE, tag=(10, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPEnableUnicastRequestPDU')))
_MAPPDU_mapEnableUnicastConfirm = SEQ(name=u'mapEnableUnicastConfirm', mode=MODE_TYPE, tag=(11, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPEnableUnicastConfirmPDU')))
_MAPPDU_mapDisableMulticastRequest = SEQ(name=u'mapDisableMulticastRequest', mode=MODE_TYPE, tag=(12, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPDisableMulticastRequestPDU')))
_MAPPDU_mapDisableMulticastConfirm = SEQ(name=u'mapDisableMulticastConfirm', mode=MODE_TYPE, tag=(13, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPDisableMulticastConfirmPDU')))
_MAPPDU_mapEnableMulticastRequest = SEQ(name=u'mapEnableMulticastRequest', mode=MODE_TYPE, tag=(14, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPEnableMulticastRequestPDU')))
_MAPPDU_mapSequenceNumber = SEQ(name=u'mapSequenceNumber', mode=MODE_TYPE, tag=(15, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'MAPSequenceNumberPDU')))
_MAPPDU_nonStandardPDU = SEQ(name=u'nonStandardPDU', mode=MODE_TYPE, tag=(16, TAG_CONTEXT_SPEC, TAG_IMPLICIT), typeref=ASN1RefType(('MAP-PROTOCOL', 'NonStandardPDU')))
MAPPDU._cont = ASN1Dict([
(u'mapConnectRequest', _MAPPDU_mapConnectRequest),
(u'mapConnectConfirm', _MAPPDU_mapConnectConfirm),
(u'mapDisconnectRequest', _MAPPDU_mapDisconnectRequest),
(u'mapDisconnectConfirm', _MAPPDU_mapDisconnectConfirm),
(u'mapArbitrateProtocolsRequest', _MAPPDU_mapArbitrateProtocolsRequest),
(u'mapArbitrateProtocolsConfirm', _MAPPDU_mapArbitrateProtocolsConfirm),
(u'mapData', _MAPPDU_mapData),
(u'mapAddGroupRequest', _MAPPDU_mapAddGroupRequest),
(u'mapRemoveGroupRequest', _MAPPDU_mapRemoveGroupRequest),
(u'mapDisableUnicastRequest', _MAPPDU_mapDisableUnicastRequest),
(u'mapEnableUnicastRequest', _MAPPDU_mapEnableUnicastRequest),
(u'mapEnableUnicastConfirm', _MAPPDU_mapEnableUnicastConfirm),
(u'mapDisableMulticastRequest', _MAPPDU_mapDisableMulticastRequest),
(u'mapDisableMulticastConfirm', _MAPPDU_mapDisableMulticastConfirm),
(u'mapEnableMulticastRequest', _MAPPDU_mapEnableMulticastRequest),
(u'mapSequenceNumber', _MAPPDU_mapSequenceNumber),
(u'nonStandardPDU', _MAPPDU_nonStandardPDU),
])
MAPPDU._ext = []
_all_ = [
H221NonStandardIdentifier,
_Key_object,
_Key_h221NonStandard,
Key,
_NonStandardParameter_key,
_NonStandardParameter_data,
NonStandardParameter,
_NonStandardPDU_data,
NonStandardPDU,
_VersionNumber_majorVersionNumber,
_VersionNumber_minorVersionNumber,
__VersionNumber_nonStandardParameters__item_,
_VersionNumber_nonStandardParameters,
VersionNumber,
Priority,
_PriorityRange_highPriority,
_PriorityRange_lowPriority,
__PriorityRange_nonStandardParameters__item_,
_PriorityRange_nonStandardParameters,
PriorityRange,
_MAPConnectRequestPDU_versionNumber,
_MAPConnectRequestPDU_connectionMAPSAP,
_MAPConnectRequestPDU_domainReferenceID,
_MAPConnectRequestPDU_priorityRange,
__MAPConnectRequestPDU_nonStandardParameters__item_,
_MAPConnectRequestPDU_nonStandardParameters,
MAPConnectRequestPDU,
_MAPConnectConfirmPDU_versionNumber,
_MAPConnectConfirmPDU_connectionMAPSAP,
__MAPConnectConfirmPDU_nonStandardParameters__item_,
_MAPConnectConfirmPDU_nonStandardParameters,
MAPConnectConfirmPDU,
_Reason_providerInitiated,
_Reason_userRejected,
_Reason_userInitiated,
_Reason_invalidMAPSAP,
_Reason_invalidDomainReferenceID,
_Reason_unicastTransition,
_Reason_unspecifiedFailure,
_Reason_nonStandardReason,
Reason,
_MAPDisconnectRequestPDU_reason,
_MAPDisconnectRequestPDU_confirmRequired,
__MAPDisconnectRequestPDU_nonStandardParameters__item_,
_MAPDisconnectRequestPDU_nonStandardParameters,
MAPDisconnectRequestPDU,
__MAPDisconnectConfirmPDU_nonStandardParameters__item_,
_MAPDisconnectConfirmPDU_nonStandardParameters,
MAPDisconnectConfirmPDU,
_TransportProtocolID_objectProtocolID,
_TransportProtocolID_h221NonStandardProtocolID,
_TransportProtocolID_snapProtocolID,
_TransportProtocolID_nonStandardProtocolID,
TransportProtocolID,
_TransportProtocolType_reliableUnicast,
_TransportProtocolType_unreliableUnicast,
_TransportProtocolType_reliableMulticast,
_TransportProtocolType_unreliableMulticast,
_TransportProtocolType_nonStandardProtocolType,
TransportProtocolType,
_NetworkAddress_nsapAddress,
__NetworkAddress_nonStandardParameters__item_,
_NetworkAddress_nonStandardParameters,
NetworkAddress,
PayloadSize,
_TransportProtocolRequestEntry_transportProtocolID,
_TransportProtocolRequestEntry_transportProtocolType,
_TransportProtocolRequestEntry_networkAddress,
_TransportProtocolRequestEntry_maximumPayloadFixedFlag,
_TransportProtocolRequestEntry_maximumPayloadSize,
_TransportProtocolRequestEntry_preferenceWeighting,
_TransportProtocolRequestEntry_nodeCount,
_TransportProtocolRequestEntry_numberOfConnections,
_TransportProtocolRequestEntry_configurationData,
__TransportProtocolRequestEntry_nonStandardParameters__item_,
_TransportProtocolRequestEntry_nonStandardParameters,
TransportProtocolRequestEntry,
_MAPArbitrateProtocolsRequestPDU_domainReferenceID,
_MAPArbitrateProtocolsRequestPDU_moreToComeFlag,
__MAPArbitrateProtocolsRequestPDU_transportProtocolMenu__item_,
_MAPArbitrateProtocolsRequestPDU_transportProtocolMenu,
__MAPArbitrateProtocolsRequestPDU_nonStandardParameters__item_,
_MAPArbitrateProtocolsRequestPDU_nonStandardParameters,
MAPArbitrateProtocolsRequestPDU,
_TransportProtocolConfirmEntry_transportProtocolID,
_TransportProtocolConfirmEntry_transportProtocolType,
_TransportProtocolConfirmEntry_networkAddress,
_TransportProtocolConfirmEntry_maximumPayloadSize,
_TransportProtocolConfirmEntry_preferenceWeighting,
_TransportProtocolConfirmEntry_nodeCount,
_TransportProtocolConfirmEntry_numberOfConnections,
_TransportProtocolConfirmEntry_configurationData,
__TransportProtocolConfirmEntry_nonStandardParameters__item_,
_TransportProtocolConfirmEntry_nonStandardParameters,
TransportProtocolConfirmEntry,
_MAPArbitrateProtocolsConfirmPDU_domainReferenceID,
_MAPArbitrateProtocolsConfirmPDU_moreToComeFlag,
__MAPArbitrateProtocolsConfirmPDU_transportProtocolMenu__item_,
_MAPArbitrateProtocolsConfirmPDU_transportProtocolMenu,
__MAPArbitrateProtocolsConfirmPDU_nonStandardParameters__item_,
_MAPArbitrateProtocolsConfirmPDU_nonStandardParameters,
MAPArbitrateProtocolsConfirmPDU,
SenderID,
ChannelID,
_ReliabilityLevel_reliable,
_ReliabilityLevel_unreliable,
_ReliabilityLevel_nonStandardReliabilityLevel,
ReliabilityLevel,
_DataType_nonuniform,
_DataType_uniform,
_DataType_proxy,
_DataType_nonStandardDataType,
DataType,
_DataFlowIdentifier_senderID,
__DataFlowIdentifier_metachannelID_channelID,
__DataFlowIdentifier_metachannelID_reliabilityLevel,
__DataFlowIdentifier_metachannelID_priority,
_DataFlowIdentifier_metachannelID,
_DataFlowIdentifier_dataType,
__DataFlowIdentifier_nonStandardParameters__item_,
_DataFlowIdentifier_nonStandardParameters,
DataFlowIdentifier,
SequenceNumber,
_DataDescriptor_unicastForwardFlag,
_DataDescriptor_startingSequenceNumber,
_DataDescriptor_dataFlowIdentifier,
_DataDescriptor_domainReferenceID,
__DataDescriptor_nonStandardParameters__item_,
_DataDescriptor_nonStandardParameters,
DataDescriptor,
_DataFrameEntry_firstSegmentFlag,
_DataFrameEntry_lastSegmentFlag,
_DataFrameEntry_userData,
__DataFrameEntry_nonStandardParameters__item_,
_DataFrameEntry_nonStandardParameters,
DataFrameEntry,
_MAPDataPDU_dataDescriptor,
__MAPDataPDU_dataFrameArray__item_,
_MAPDataPDU_dataFrameArray,
__MAPDataPDU_nonStandardParameters__item_,
_MAPDataPDU_nonStandardParameters,
MAPDataPDU,
_Metachannel_channelID,
_Metachannel_reliabilityLevel,
_Metachannel_priorityRange,
__Metachannel_nonStandardParameters__item_,
_Metachannel_nonStandardParameters,
Metachannel,
_MAPAddGroupRequestPDU_metachannel,
_MAPAddGroupRequestPDU_transportProtocolID,
_MAPAddGroupRequestPDU_multicastGroupAddress,
__MAPAddGroupRequestPDU_nonStandardParameters__item_,
_MAPAddGroupRequestPDU_nonStandardParameters,
MAPAddGroupRequestPDU,
_MAPRemoveGroupRequestPDU_metachannel,
__MAPRemoveGroupRequestPDU_nonStandardParameters__item_,
_MAPRemoveGroupRequestPDU_nonStandardParameters,
MAPRemoveGroupRequestPDU,
_MAPDisableUnicastRequestPDU_dataFlowIdentifier,
__MAPDisableUnicastRequestPDU_nonStandardParameters__item_,
_MAPDisableUnicastRequestPDU_nonStandardParameters,
MAPDisableUnicastRequestPDU,
_MAPEnableUnicastRequestPDU_dataFlowIdentifier,
__MAPEnableUnicastRequestPDU_nonStandardParameters__item_,
_MAPEnableUnicastRequestPDU_nonStandardParameters,
MAPEnableUnicastRequestPDU,
_MAPEnableUnicastConfirmPDU_dataFlowIdentifier,
_MAPEnableUnicastConfirmPDU_sequenceNumber,
__MAPEnableUnicastConfirmPDU_nonStandardParameters__item_,
_MAPEnableUnicastConfirmPDU_nonStandardParameters,
MAPEnableUnicastConfirmPDU,
_MAPDisableMulticastRequestPDU_metachannel,
__MAPDisableMulticastRequestPDU_nonStandardParameters__item_,
_MAPDisableMulticastRequestPDU_nonStandardParameters,
MAPDisableMulticastRequestPDU,
_MAPDisableMulticastConfirmPDU_metachannel,
__MAPDisableMulticastConfirmPDU_nonStandardParameters__item_,
_MAPDisableMulticastConfirmPDU_nonStandardParameters,
MAPDisableMulticastConfirmPDU,
_MAPEnableMulticastRequestPDU_metachannel,
__MAPEnableMulticastRequestPDU_nonStandardParameters__item_,
_MAPEnableMulticastRequestPDU_nonStandardParameters,
MAPEnableMulticastRequestPDU,
_MAPSequenceNumberPDU_dataFlowIdentifier,
_MAPSequenceNumberPDU_sequenceNumber,
__MAPSequenceNumberPDU_nonStandardParameters__item_,
_MAPSequenceNumberPDU_nonStandardParameters,
MAPSequenceNumberPDU,
_MAPPDU_mapConnectRequest,
_MAPPDU_mapConnectConfirm,
_MAPPDU_mapDisconnectRequest,
_MAPPDU_mapDisconnectConfirm,
_MAPPDU_mapArbitrateProtocolsRequest,
_MAPPDU_mapArbitrateProtocolsConfirm,
_MAPPDU_mapData,
_MAPPDU_mapAddGroupRequest,
_MAPPDU_mapRemoveGroupRequest,
_MAPPDU_mapDisableUnicastRequest,
_MAPPDU_mapEnableUnicastRequest,
_MAPPDU_mapEnableUnicastConfirm,
_MAPPDU_mapDisableMulticastRequest,
_MAPPDU_mapDisableMulticastConfirm,
_MAPPDU_mapEnableMulticastRequest,
_MAPPDU_mapSequenceNumber,
_MAPPDU_nonStandardPDU,
MAPPDU,
]
init_modules(MCS_PROTOCOL, MCS_PROTOCOL_3, MAP_PROTOCOL)
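# Illustrative usage sketch (not part of the generated module): encoding and
# decoding a MAPPDU with the standard pycrate ASN1Obj methods set_val(),
# to_ber() and from_ber(). The field values below are invented for the example:
#
#     MAPPDU.set_val(('mapDisconnectRequest', {
#         'reason': ('userInitiated', 0),
#         'confirmRequired': False,
#     }))
#     buf = MAPPDU.to_ber()     # BER-encode the selected alternative
#     MAPPDU.from_ber(buf)      # decode it back into the same object
#     assert MAPPDU.get_val()[0] == 'mapDisconnectRequest'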
/cubrid-python-9.3.0.0002.tar.gz/RB-9.3.0/CUBRIDdb/cursors.py
import sys
import types
from CUBRIDdb import FIELD_TYPE
class BaseCursor(object):
"""
A base for Cursor classes. Useful attributes:
description::
A tuple of DB API 7-tuples describing the columns in
the last executed query; see PEP-249 for details.
arraysize::
default number of rows fetchmany() will fetch
"""
def __init__(self, conn):
self.con = conn.connection
self._cs = conn.connection.cursor()
self.arraysize = 1
self.rowcount = -1
self.description = None
self.charset = conn.charset
self._cs._set_charset_name(conn.charset)
def __del__(self):
# Only close if the cursor was not already closed explicitly;
# close() raises on an already-closed cursor.
if self._cs is not None:
self.close()
def __check_state(self):
if self._cs is None:
raise Exception("The cursor has been closed. No operation is allowed any more.")
def close(self):
"""Close the cursor, and no further queries will be possible."""
self.__check_state()
self._cs.close()
self._cs = None
def _bind_params(self, args,set_type=None):
self.__check_state()
if type(args) not in (tuple, list):
args = [args,]
args = list(args)
for i in range(len(args)):
if args[i] is None:
pass
elif isinstance(args[i], bool):
if args[i] == True:
args[i] = '1'
else:
args[i] = '0'
elif isinstance(args[i], tuple):
args[i] = args[i]
else:
# Python 3.x doesn't support the unicode keyword.
try:
mytest = unicode
except NameError:
if isinstance(args[i], str):
pass
elif isinstance(args[i], bytes):
args[i] = args[i].decode(self.charset)
else:
args[i] = str(args[i])
else:
if isinstance(args[i], unicode):
args[i] = args[i].encode(self.charset)
else:
args[i] = str(args[i])
if type(args[i]) != tuple:
self._cs.bind_param(i+1, args[i])
else:
if set_type is None:
data_type = int(FIELD_TYPE.CHAR)
else:
if type(set_type) != tuple:
set_type = [set_type,]
data_type = set_type[i]
s = self.con.set()
s.imports(args[i], data_type)
self._cs.bind_set(i+1, s)
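# Sketch: binding a CUBRID collection parameter (table and column names are
# hypothetical). A tuple argument is routed through bind_set() above, with the
# element type taken from execute()'s optional set_type parameter
# (FIELD_TYPE.CHAR by default):
#
#     cur.execute('INSERT INTO t (tags) VALUES (?)',
#                 (('a', 'b', 'c'),))   # set_type defaults to FIELD_TYPE.CHAR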
def execute(self, query, args=None,set_type=None):
"""
Execute a query.
query -- string, query to execute on server
args -- optional sequence or mapping, parameters to use with query.
Returns long integer rows affected, if any
"""
self.__check_state()
self._cs.prepare(query)
if args is not None:
self._bind_params(args,set_type)
r = self._cs.execute()
self.rowcount = self._cs.rowcount
self.description = self._cs.description
return r
def executemany(self, query, args):
"""
Execute a multi-row query.
query -- string, query to execute on server
args -- Sequence of sequences or mappings, parameters to use with query
Returns long integer rows affected, if any.
This method improves performance on multiple-row INSERT and REPLACE.
Otherwise it is equivalent to looping over args with execute().
"""
self.__check_state()
for p in args:
self.execute(query, *(p,))
def _fetch_row(self):
self.__check_state()
return self._cs.fetch_row(self._fetch_type)
def fetchone(self):
"""
Fetch the next row of a query result set, returning a single sequence, or None when no more data is available.
"""
self.__check_state()
return self._fetch_row()
def _fetch_many(self, size):
self.__check_state()
rlist = []
i = 0
while size < 0 or i < size:
r = self.fetchone()
if not r:
break
rlist.append(r)
i = i+1
return rlist
def fetchmany(self, size=None):
"""
Fetch the next set of rows of a query result, returning a sequence of sequences (e.g. a list of tuples). An empty sequence is returned when no more rows are available.
The number of rows to fetch per call is specified by the parameter. If it is not given, the cursor's arraysize determines the number of rows to be fetched.
The method should try to fetch as many rows as indicated by the size parameter. If this is not possible due to the specified number of rows not being available, fewer rows may be returned.
"""
self.__check_state()
if size is None:
size = self.arraysize
if size <= 0:
return []
return self._fetch_many(size)
def fetchall(self):
"""
Fetch all (remaining) rows of a query result, returning them as a sequence of sequences (e.g. a list of tuples).
Note that the cursor's arraysize attribute can affect the performance of this operation.
"""
self.__check_state()
return self._fetch_many(-1)
def setinputsizes(self, *args):
"""Does nothing, required by DB API."""
pass
def setoutputsize(self, *args):
"""Does nothing, required by DB API (PEP-249)."""
pass
def nextset(self):
"""Advance to the next result set.
Returns None if there are no more result sets."""
pass
def callproc(self, procname, args=()):
"""
Execute stored procedure procname with args
procname -- string, name of procedure to execute on server
args -- Sequence of parameters to use with procedure
Returns the original args.
"""
pass
def __iter__(self):
"""
Iteration over the result set which calls self.fetchone()
and returns the next row.
"""
self.__check_state()
return self # iter(self.fetchone, None)
def next(self):
"""
Return the next row from the currently executing SQL statement using the same semantics as fetchone().
A StopIteration exception is raised when the result set is exhausted for Python versions 2.2 and later.
"""
self.__check_state()
return self.__next__()
def __next__(self):
self.__check_state()
row = self.fetchone()
if row is None:
raise StopIteration
return row
class CursorTupleRowsMixIn(object):
_fetch_type = 0
class CursorDictTupleMixIn(object):
_fetch_type = 1
class Cursor(CursorTupleRowsMixIn, BaseCursor):
'''
This is the standard Cursor class that returns rows as tuples
and stores the result set in the client.
'''
class DictCursor(CursorDictTupleMixIn, BaseCursor):
'''
This is a Cursor class that returns rows as dictionaries and
stores the result set in the client.
'''
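# Minimal usage sketch (connection URL, user and table are hypothetical; the
# CUBRIDdb package above is assumed to provide a PEP-249 style connect()):
#
#     import CUBRIDdb
#     con = CUBRIDdb.connect('CUBRID:localhost:33000:demodb:::', 'public', '')
#     cur = con.cursor()
#     cur.execute('SELECT id, name FROM athlete WHERE id > ?', (10,))
#     for row in cur:        # iteration calls fetchone() via __next__()
#         print(row)
#     cur.close()
#     con.close()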
/epospy-3.0.2.tar.gz/epospy-3.0.2/EPOS/plot/occurrence.py
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.colorbar as clrbar
import matplotlib.colors
from matplotlib.cm import get_cmap
import numpy as np
from . import helpers, parametric
from EPOS.population import periodradius
clrs= ['r','g','b','m'] # in epos.prep
#fmt_symbol= {'ls':'', 'marker':'o', 'mew':2, 'ms':8,'alpha':0.6}
def all(epos, color=None, alpha_fac=None):
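'''Generate every occurrence-rate figure available on this EPOS object.'''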
assert epos.Observation
if hasattr(epos, 'occurrence'):
if 'planet' in epos.occurrence:
colored(epos)
if 'model' in epos.occurrence:
model(epos, color=color)
if alpha_fac is not None:
model(epos, color=color, alpha_fac=alpha_fac)
#if Fade:
model(epos, color=color, Gradient=True)
if 'poly' in epos.occurrence:
colored(epos, Poly=True)
if 'model' in epos.occurrence:
model(epos, color=color, alpha_fac=alpha_fac, Poly=True)
if 'labels' in epos.occurrence['poly']:
# only callable with models right now
poly_only(epos)
if 'bin' in epos.occurrence:
colored(epos, Bins=True)
if 'model' in epos.occurrence:
model(epos, color=color, alpha_fac=alpha_fac, Bins=True)
if 'eta0' in epos.occurrence['bin']:
integrated(epos)
if 'eta' in epos.occurrence['bin']:
integrated(epos, MCMC=True)
integrated(epos, MCMC=True,Planets=True)
if 'xzoom' in epos.occurrence:
if epos.Parametric:
parametric.oneD(epos, Occ=True)
if not epos.MonteCarlo and epos.Msini:
parametric.oneD_y(epos, Occ=True, Convert=True)
if hasattr(epos, 'chain'):
parametric.oneD(epos, Occ=True, MCMC=True)
if epos.Msini:
parametric.oneD_y(epos, Occ=True, MCMC=True, Convert=True)
else:
print ('\nNo occurrence to plot, did you run EPOS.occurrence.all()? \n')
def colored(epos, Bins=False, Poly=False, NB=False):
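'''Scatter the observed planets colored by survey completeness, optionally overlaying occurrence bins or polygons.'''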
f, (ax, axb) = plt.subplots(1,2, gridspec_kw = {'width_ratios':[20, 1]})
f.subplots_adjust(wspace=0)
name= 'Survey Completeness'
if epos.name in ['dr25_F','dr25_G','dr25_K','dr25_M','dr25_GK']: name+= ' ('+epos.name[5:]+')'
ax.set_title(name)
helpers.set_axes(ax, epos, Trim=True)
#helpers.set_axes(ax, epos, Trim=hasattr(epos, 'xtrim'))
#ax.plot(epos.obs_xvar, epos.obs_yvar, ls='', marker='.', mew=0, ms=5.0, color='k')
''' color scale? '''
cmap='viridis' # alternatives: magma, plasma, inferno, spring, cool
vmin, vmax= -4, 0
ticks=np.linspace(vmin, vmax, (vmax-vmin)+1)
clrs, norm= helpers.color_array(np.log10(epos.occurrence['planet']['completeness']),
vmin=vmin,vmax=vmax, cmap=get_cmap(cmap))
ax.scatter(epos.obs_xvar, epos.obs_yvar, color=clrs, s=4)
# colorbar?
cb1 = clrbar.ColorbarBase(axb, cmap=get_cmap(cmap), norm=norm, ticks=ticks,
orientation='vertical') # horizontal
axb.set_yticklabels(100*10.**ticks)
axb.tick_params(axis='y', direction='out')
''' bins?'''
if Bins:
occbin= epos.occurrence['bin']
for k, (xbin, ybin, n, inbin, occ) in enumerate(
zip(occbin['x'],occbin['y'],occbin['n'],occbin['i'], occbin['occ'])
):
clr= clrs[k%4]
# colored dots
#ax.plot(epos.obs_xvar[inbin], epos.obs_yvar[inbin],
# ls='', marker='.', mew=0, ms=5.0, color=clr, zorder=1)
# box
ax.add_patch(patches.Rectangle( (xbin[0],ybin[0]),
xbin[1]-xbin[0], ybin[1]-ybin[0],
fill=False, zorder=2, ls='-', color='k') )
xnudge=1.01
ynudge=1.02
size=16 if 'textsize' not in epos.plotpars else epos.plotpars['textsize']
# 12 fit in box, 16 default
ax.text(xbin[0]*xnudge,ybin[1]/ynudge,'{:.1%}'.format(occ), va='top',
size=size)
ax.text(xbin[0]*xnudge,ybin[1]/ynudge,'\n$\\pm${:.1f}'.format(
occbin['err'][k]*100), va='top', size=size)
ax.text(xbin[1]/xnudge,ybin[0]*ynudge,'n={}'.format(n), ha='right',
size=size)
helpers.save(plt, epos.plotdir+'occurrence/bins', NB=NB)
elif Poly:
occpoly= epos.occurrence['poly']
for k, (xc, yc, coords, n, inbin, occ, err) in enumerate(
zip(occpoly['xc'],occpoly['yc'],occpoly['coords'],
occpoly['n'],occpoly['i'], occpoly['occ'], occpoly['err'])
):
# box
ax.add_patch(matplotlib.patches.Polygon(coords,
fill=False, zorder=2, ls='-', color='k') )
size=16 if 'textsize' not in epos.plotpars else epos.plotpars['textsize']
# 12 fit in box, 16 default
ax.text(xc,yc,'{:.1%}\n$\\pm${:.1f}'.format(occ, err*100), ha='center', va='center',
size=size)
#ax.text(xbin[1]/xnudge,ybin[0]*ynudge,'n={}'.format(n), ha='right',
# size=size)
helpers.save(plt, epos.plotdir+'occurrence/poly', NB=NB)
else:
helpers.save(plt, epos.plotdir+'occurrence/colored', NB=NB)
def integrated(epos, MCMC=False, Planets=False, NB=False):
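'''Plot the 2D occurrence-rate map with the integrated rate per bin overlaid.'''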
f, (ax, axb) = plt.subplots(1,2, gridspec_kw = {'width_ratios':[20, 1]})
f.subplots_adjust(wspace=0)
sy= 'M' if (epos.MassRadius or epos.RV) else 'R'
ax.set_title('Occurrence'+ (' (dln'+sy+' dlnP)' if MCMC else ' (Initial Guess)'))
helpers.set_axes(ax, epos, Trim=True, In=epos.MassRadius)
''' color scale? '''
cmap='jet' # cool, spring
vmin, vmax= -5, 0
ticks=np.linspace(vmin, vmax, (vmax-vmin)+1)
levels=np.linspace(vmin, vmax, 256)
''' 2D pdf '''
pps, pdf, _, _= periodradius(epos, Init=not MCMC)
pdflog= np.log10(pdf) # in %
cs= ax.contourf(epos.X_in, epos.Y_in, pdflog, cmap=cmap, levels=levels)
cbar= f.colorbar(cs, cax=axb, ticks=ticks)
axb.set_yticklabels(100*10.**ticks)
axb.tick_params(axis='y', direction='out')
axb.set_title('%')
''' integrated occurrence per bin'''
occbin= epos.occurrence['bin']
key = 'eta' if MCMC else 'eta0'
for k, (xbin, ybin, n, inbin, occ) in enumerate(
zip(occbin['x'],occbin['y in'],occbin['n'],occbin['i'], occbin[key])
):
clr= clrs[k%4]
# colored dots
#ax.plot(epos.obs_xvar[inbin], epos.obs_yvar[inbin],
# ls='', marker='.', mew=0, ms=5.0, color=clr, zorder=1)
# box
ax.add_patch(patches.Rectangle( (xbin[0],ybin[0]),
xbin[1]-xbin[0], ybin[1]-ybin[0],
fill=False, zorder=2, ls='-', color='k') )
xnudge=1.01
ynudge=1.02
size=16 if 'textsize' not in epos.plotpars else epos.plotpars['textsize']
# 12 fit in box, 16 default
ax.text(xbin[0]*xnudge,ybin[1]/ynudge,'{:.1%}'.format(occ), va='top',size=size)
if MCMC:
ax.text(xbin[0]*xnudge,ybin[1]/ynudge,'\n +{:.1%}\n -{:.1%}'.format(
occbin['eta+'][k],occbin['eta-'][k]
), va='top',size=size)
''' overplot planets '''
if Planets:
ax.plot(epos.obs_xvar, epos.obs_yvar,
ls='', marker='.', mew=0, ms=5, alpha=1, color='k')
fname= 'posterior' if MCMC else 'integrated'
if Planets: fname+= '.planets'
helpers.save(plt, epos.plotdir+'occurrence/'+fname, NB=NB)
def model(epos, color='C0', alpha_fac=None, Bins=False, Poly=False, Gradient=False):
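'''Plot the model planet population, optionally shaded by completeness, with occurrence bins or polygons overlaid.'''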
f, ax = plt.subplots()
name= '{}, $\\eta={:.2g}$'.format(epos.name, epos.occurrence['model']['eta'])
ax.set_title(name)
helpers.set_axes(ax, epos, Trim=True)
# set transparency / color gradient
if Gradient:
suffix= '.gradient'
weights= epos.occurrence['model']['completeness']
cmin, cmax= 0.001, 0.1
weights= np.maximum(np.minimum(weights,cmax), cmin)
cmap='copper_r'
#ticks=np.linspace(vmin, vmax, (vmax-vmin)+1)
clrs, norm= helpers.color_array(np.log10(weights),
vmin=np.log10(cmin),vmax=np.log10(cmax), cmap=cmap)
ax.scatter(epos.pfm['P'], epos.pfm['R'],
marker='o', s=13, lw=0, color=clrs,zorder=0)
# colorbar?
# cb1 = clrbar.ColorbarBase(axb, cmap=cmap, norm=norm, ticks=ticks,
# orientation='vertical') # horizontal
# axb.set_yticklabels(100*10.**ticks)
# axb.tick_params(axis='y', direction='out')
elif alpha_fac is not None:
suffix= '.alpha'
weights= epos.occurrence['model']['completeness']*alpha_fac #*epos.nstars
alpha= np.maximum(np.minimum(weights,1.), 0.0) # 0.2?
if True:
# color issues with to_rgba_array
clr_rgba = np.empty((len(alpha), 4), float)
for i, a in enumerate(alpha):
clr_rgba[i] = matplotlib.colors.to_rgba(color, a)
else:
clr= np.full_like(weights,color,dtype=str)
clr_rgba= matplotlib.colors.to_rgba_array(clr) # alpha
#print clr_rgba[0,:]
clr_rgba[:,3]= alpha
ax.scatter(epos.pfm['P'], epos.pfm['R'],
marker='o', s=13, lw=0, color=clr_rgba,zorder=0)
else:
suffix=''
clr= matplotlib.colors.to_rgba(color)
ax.plot(epos.pfm['P'], epos.pfm['R'], ls='', marker='o', mew=0, ms=4,
color=clr, zorder=0)
''' bins'''
if Bins:
occbin= epos.occurrence['model']['bin']
for k, (xbin, ybin, n, inbin, occ) in enumerate(
zip(occbin['x'],occbin['y'],occbin['n'],occbin['i'], occbin['occ'])
):
# box
ax.add_patch(patches.Rectangle( (xbin[0],ybin[0]),
xbin[1]-xbin[0], ybin[1]-ybin[0],
fill=False, zorder=2, ls='-', color='k') )
xnudge=1.01
ynudge=1.02
size=16 if 'textsize' not in epos.plotpars else epos.plotpars['textsize']
# 12 fit in box, 16 default
ax.text(xbin[0]*xnudge,ybin[1]/ynudge,'{:.1%}'.format(occ), va='top',
size=size)
ax.text(xbin[0]*xnudge,ybin[1]/ynudge,'\n$\\pm${:.1f}'.format(
occbin['err'][k]*100), va='top', size=size)
ax.text(xbin[1]/xnudge,ybin[0]*ynudge,'n={}'.format(n), ha='right',
size=size)
helpers.save(plt, epos.plotdir+'occurrence/model_bins'+suffix)
elif Poly:
occpoly= epos.occurrence['model']['poly']
for k, (xc, yc, coords, n, inbin, occ, err) in enumerate(
zip(occpoly['xc'],occpoly['yc'],occpoly['coords'],
occpoly['n'],occpoly['i'], occpoly['occ'], occpoly['err'])
):
# box
ax.add_patch(matplotlib.patches.Polygon(coords,
fill=False, zorder=2, ls='-', color='k') )
            size = 16 if 'textsize' not in epos.plotpars else epos.plotpars['textsize']
            # 12 fits in the box; 16 is the default
            ax.text(xc, yc, '{:.1%}\n$\\pm${:.1%}'.format(occ, err), ha='center', va='center',
                size=size)
helpers.save(plt, epos.plotdir+'occurrence/model_poly'+suffix)
else:
helpers.save(plt, epos.plotdir+'occurrence/model'+suffix)
def poly_only(epos):
f, ax = plt.subplots()
ax.set_title('Planet Classes')
helpers.set_axes(ax, epos, Trim=True)
# coordinates are from model routine
occpoly= epos.occurrence['model']['poly']
for k, (xc, yc, coords, label) in enumerate(
zip(occpoly['xc'],occpoly['yc'],occpoly['coords'],
epos.occurrence['poly']['labels'])
):
# box
ax.add_patch(matplotlib.patches.Polygon(coords,
fill=False, zorder=2, ls='-', color='k') )
        size = 16 if 'textsize' not in epos.plotpars else epos.plotpars['textsize']
        # 12 fits in the box; 16 is the default
ax.text(xc,yc,label, ha='center', va='center',
size=size)
helpers.save(plt, epos.plotdir+'occurrence/poly_only')
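# A minimal usage sketch (hypothetical): `epos` is assumed to be an initialized
# EPOS object whose model occurrence rates have already been computed.
def _example_model_plots(epos):
    model(epos, Bins=True)   # per-bin occurrence rates drawn as boxes
    model(epos, Poly=True)   # polygonal planet classes instead of boxes
    poly_only(epos)          # class outlines without occurrence numbers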
|
PypiClean
|
/gnosis_neon_safe_eth_py-5.4.3-py3-none-any.whl/gnosis/eth/utils.py
|
import warnings
from secrets import token_bytes
from typing import Tuple, Union
import eth_abi
from eth._utils.address import generate_contract_address
from eth_keys import keys
from eth_typing import AnyAddress, ChecksumAddress, HexStr
from eth_utils import to_normalized_address
from hexbytes import HexBytes
from sha3 import keccak_256
def fast_keccak(value: bytes) -> bytes:
"""
Calculates ethereum keccak256 using fast library `pysha3`
:param value:
:return: Keccak256 used by ethereum as `bytes`
"""
return keccak_256(value).digest()
def fast_keccak_hex(value: bytes) -> HexStr:
"""
    Same as `fast_keccak`, but slightly faster: it calls `hexdigest()`
    directly instead of `digest()` followed by `hex()`
    :param value:
    :return: Keccak256 used by ethereum as a hex string (not 0x prefixed)
"""
return HexStr(keccak_256(value).hexdigest())
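# Usage sketch (illustrative only): the keccak256 of the empty byte string is a
# well-known constant, and both helpers agree on it.
def _example_fast_keccak() -> None:
    assert fast_keccak(b"").hex() == fast_keccak_hex(b"")
    assert fast_keccak_hex(b"") == (
        "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"
    )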
def _build_checksum_address(
norm_address: HexStr, address_hash: HexStr
) -> ChecksumAddress:
"""
https://github.com/ethereum/EIPs/blob/master/EIPS/eip-55.md
:param norm_address: address in lowercase (not 0x prefixed)
:param address_hash: keccak256 of `norm_address` (not 0x prefixed)
:return:
"""
return ChecksumAddress(
"0x"
+ (
"".join(
(
norm_address[i].upper()
if int(address_hash[i], 16) > 7
else norm_address[i]
)
for i in range(0, 40)
)
)
)
def fast_to_checksum_address(value: Union[AnyAddress, str, bytes]) -> ChecksumAddress:
"""
    Converts to checksum_address. Uses the faster `pysha3` instead of `eth_utils` for the keccak256 calculation
:param value:
:return:
"""
norm_address = to_normalized_address(value)[2:]
address_hash = fast_keccak_hex(norm_address.encode())
return _build_checksum_address(norm_address, address_hash)
def fast_bytes_to_checksum_address(value: bytes) -> ChecksumAddress:
"""
    Converts to checksum_address. Uses the faster `pysha3` instead of `eth_utils` for the keccak256 calculation.
    As the input is already in bytes, some checks and conversions can be skipped, providing a speedup of ~50%
:param value:
:return:
"""
if len(value) != 20:
        raise ValueError(
            "Cannot convert %s to a checksum address, 20 bytes were expected"
            % value.hex()
        )
norm_address = bytes(value).hex()
address_hash = fast_keccak_hex(norm_address.encode())
return _build_checksum_address(norm_address, address_hash)
def fast_is_checksum_address(value: Union[AnyAddress, str, bytes]) -> bool:
"""
Fast version to check if an address is a checksum_address
:param value:
:return: `True` if checksummed, `False` otherwise
"""
if not isinstance(value, str) or len(value) != 42 or not value.startswith("0x"):
return False
try:
return fast_to_checksum_address(value) == value
except ValueError:
return False
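# Usage sketch (illustrative only), using an address from the EIP-55 test vectors.
def _example_checksum_address() -> None:
    address = fast_to_checksum_address("0x5aaeb6053f3e94c9b9a09f33669435e7ef1beaed")
    assert address == "0x5aAeb6053F3E94C9b9A09f33669435E7Ef1BeAed"
    assert fast_is_checksum_address(address)
    assert not fast_is_checksum_address(address.lower())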
def get_eth_address_with_key() -> Tuple[str, bytes]:
private_key = keys.PrivateKey(token_bytes(32))
address = private_key.public_key.to_checksum_address()
return address, private_key.to_bytes()
def get_eth_address_with_invalid_checksum() -> str:
address, _ = get_eth_address_with_key()
return "0x" + "".join(
[c.lower() if c.isupper() else c.upper() for c in address[2:]]
)
def decode_string_or_bytes32(data: bytes) -> str:
try:
return eth_abi.decode(["string"], data)[0]
except OverflowError:
name = eth_abi.decode(["bytes32"], data)[0]
end_position = name.find(b"\x00")
if end_position == -1:
return name.decode()
else:
return name[:end_position].decode()
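# Usage sketch (illustrative only); assumes eth-abi v4's `encode`, the
# counterpart of the `decode` call used above.
def _example_decode_string_or_bytes32() -> None:
    data = eth_abi.encode(["string"], ["Gnosis"])
    assert decode_string_or_bytes32(data) == "Gnosis"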
def remove_swarm_metadata(code: bytes) -> bytes:
"""
Remove swarm metadata from Solidity bytecode
:param code:
:return: Code without metadata
"""
swarm = b"\xa1\x65bzzr0"
position = code.rfind(swarm)
if position == -1:
raise ValueError("Swarm metadata not found in code %s" % code.hex())
return code[:position]
def compare_byte_code(code_1: bytes, code_2: bytes) -> bool:
"""
Compare code, removing swarm metadata if necessary
:param code_1:
:param code_2:
:return: True if same code, False otherwise
"""
if code_1 == code_2:
return True
else:
codes = []
for code in (code_1, code_2):
try:
codes.append(remove_swarm_metadata(code))
except ValueError:
codes.append(code)
return codes[0] == codes[1]
def mk_contract_address(address: Union[str, bytes], nonce: int) -> ChecksumAddress:
"""
Generate expected contract address when using EVM CREATE
:param address:
:param nonce:
:return:
"""
return fast_to_checksum_address(generate_contract_address(HexBytes(address), nonce))
def mk_contract_address_2(
from_: Union[str, bytes], salt: Union[str, bytes], init_code: Union[str, bytes]
) -> ChecksumAddress:
"""
Generate expected contract address when using EVM CREATE2.
:param from_: The address which is creating this new address (need to be 20 bytes)
:param salt: A salt (32 bytes)
:param init_code: A init code of the contract being created
:return: Address of the new contract
"""
from_ = HexBytes(from_)
salt = HexBytes(salt)
init_code = HexBytes(init_code)
assert len(from_) == 20, f"Address {from_.hex()} is not valid. Must be 20 bytes"
assert len(salt) == 32, f"Salt {salt.hex()} is not valid. Must be 32 bytes"
assert len(init_code) > 0, f"Init code {init_code.hex()} is not valid"
init_code_hash = fast_keccak(init_code)
contract_address = fast_keccak(HexBytes("ff") + from_ + salt + init_code_hash)
return fast_bytes_to_checksum_address(contract_address[12:])
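# Usage sketch (illustrative only), using the first test vector from EIP-1014:
# deployer 0x0, zero salt, init code `0x00`.
def _example_mk_contract_address_2() -> None:
    address = mk_contract_address_2(b"\x00" * 20, b"\x00" * 32, b"\x00")
    assert address == "0x4D1A2e2bB4F88F0250f26Ffff098B0b30B26BF38"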
def generate_address_2(
from_: Union[str, bytes], salt: Union[str, bytes], init_code: Union[str, bytes]
) -> ChecksumAddress:
"""
.. deprecated:: use mk_contract_address_2
:param from_:
:param salt:
:param init_code:
:return:
"""
warnings.warn(
"`generate_address_2` is deprecated, use `mk_contract_address_2`",
DeprecationWarning,
)
return mk_contract_address_2(from_, salt, init_code)
|
PypiClean
|
/ais_dom-2023.7.2-py3-none-any.whl/homeassistant/components/incomfort/sensor.py
|
from __future__ import annotations
from dataclasses import dataclass
from typing import Any
from homeassistant.components.sensor import (
DOMAIN as SENSOR_DOMAIN,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.const import UnitOfPressure, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import slugify
from . import DOMAIN, IncomfortChild
INCOMFORT_HEATER_TEMP = "CV Temp"
INCOMFORT_PRESSURE = "CV Pressure"
INCOMFORT_TAP_TEMP = "Tap Temp"
@dataclass
class IncomfortSensorEntityDescription(SensorEntityDescription):
"""Describes Incomfort sensor entity."""
extra_key: str | None = None
# IncomfortSensor does not support UNDEFINED or None,
# restrict the type to str
name: str = ""
SENSOR_TYPES: tuple[IncomfortSensorEntityDescription, ...] = (
IncomfortSensorEntityDescription(
key="pressure",
name=INCOMFORT_PRESSURE,
device_class=SensorDeviceClass.PRESSURE,
native_unit_of_measurement=UnitOfPressure.BAR,
),
IncomfortSensorEntityDescription(
key="heater_temp",
name=INCOMFORT_HEATER_TEMP,
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
extra_key="is_pumping",
),
IncomfortSensorEntityDescription(
key="tap_temp",
name=INCOMFORT_TAP_TEMP,
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
extra_key="is_tapping",
),
)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up an InComfort/InTouch sensor device."""
if discovery_info is None:
return
client = hass.data[DOMAIN]["client"]
heaters = hass.data[DOMAIN]["heaters"]
entities = [
IncomfortSensor(client, heater, description)
for heater in heaters
for description in SENSOR_TYPES
]
async_add_entities(entities)
class IncomfortSensor(IncomfortChild, SensorEntity):
"""Representation of an InComfort/InTouch sensor device."""
entity_description: IncomfortSensorEntityDescription
def __init__(
self, client, heater, description: IncomfortSensorEntityDescription
) -> None:
"""Initialize the sensor."""
super().__init__()
self.entity_description = description
self._client = client
self._heater = heater
self._unique_id = f"{heater.serial_no}_{slugify(description.name)}"
self.entity_id = f"{SENSOR_DOMAIN}.{DOMAIN}_{slugify(description.name)}"
self._name = f"Boiler {description.name}"
@property
def native_value(self) -> str | None:
"""Return the state of the sensor."""
return self._heater.status[self.entity_description.key]
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the device state attributes."""
if (extra_key := self.entity_description.extra_key) is None:
return None
return {extra_key: self._heater.status[extra_key]}
|
PypiClean
|
/Probability-Surrogate-Learning-1.1.tar.gz/Probability-Surrogate-Learning-1.1/LICENSE.md
|
MIT License
Copyright (c) 2023 Wenlin Li
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
|
PypiClean
|
/django_bpp-1.0.9-py3-none-any.whl/django_bpp/staticroot/lodash.mergewith/index.js
|
/** Used as the size to enable large array optimizations. */
var LARGE_ARRAY_SIZE = 200;
/** Used to stand-in for `undefined` hash values. */
var HASH_UNDEFINED = '__lodash_hash_undefined__';
/** Used as references for various `Number` constants. */
var MAX_SAFE_INTEGER = 9007199254740991;
/** `Object#toString` result references. */
var argsTag = '[object Arguments]',
arrayTag = '[object Array]',
boolTag = '[object Boolean]',
dateTag = '[object Date]',
errorTag = '[object Error]',
funcTag = '[object Function]',
genTag = '[object GeneratorFunction]',
mapTag = '[object Map]',
numberTag = '[object Number]',
objectTag = '[object Object]',
promiseTag = '[object Promise]',
regexpTag = '[object RegExp]',
setTag = '[object Set]',
stringTag = '[object String]',
symbolTag = '[object Symbol]',
weakMapTag = '[object WeakMap]';
var arrayBufferTag = '[object ArrayBuffer]',
dataViewTag = '[object DataView]',
float32Tag = '[object Float32Array]',
float64Tag = '[object Float64Array]',
int8Tag = '[object Int8Array]',
int16Tag = '[object Int16Array]',
int32Tag = '[object Int32Array]',
uint8Tag = '[object Uint8Array]',
uint8ClampedTag = '[object Uint8ClampedArray]',
uint16Tag = '[object Uint16Array]',
uint32Tag = '[object Uint32Array]';
/**
* Used to match `RegExp`
* [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
*/
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
/** Used to match `RegExp` flags from their coerced string values. */
var reFlags = /\w*$/;
/** Used to detect host constructors (Safari). */
var reIsHostCtor = /^\[object .+?Constructor\]$/;
/** Used to detect unsigned integer values. */
var reIsUint = /^(?:0|[1-9]\d*)$/;
/** Used to identify `toStringTag` values of typed arrays. */
var typedArrayTags = {};
typedArrayTags[float32Tag] = typedArrayTags[float64Tag] =
typedArrayTags[int8Tag] = typedArrayTags[int16Tag] =
typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] =
typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] =
typedArrayTags[uint32Tag] = true;
typedArrayTags[argsTag] = typedArrayTags[arrayTag] =
typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] =
typedArrayTags[dataViewTag] = typedArrayTags[dateTag] =
typedArrayTags[errorTag] = typedArrayTags[funcTag] =
typedArrayTags[mapTag] = typedArrayTags[numberTag] =
typedArrayTags[objectTag] = typedArrayTags[regexpTag] =
typedArrayTags[setTag] = typedArrayTags[stringTag] =
typedArrayTags[weakMapTag] = false;
/** Used to identify `toStringTag` values supported by `_.clone`. */
var cloneableTags = {};
cloneableTags[argsTag] = cloneableTags[arrayTag] =
cloneableTags[arrayBufferTag] = cloneableTags[dataViewTag] =
cloneableTags[boolTag] = cloneableTags[dateTag] =
cloneableTags[float32Tag] = cloneableTags[float64Tag] =
cloneableTags[int8Tag] = cloneableTags[int16Tag] =
cloneableTags[int32Tag] = cloneableTags[mapTag] =
cloneableTags[numberTag] = cloneableTags[objectTag] =
cloneableTags[regexpTag] = cloneableTags[setTag] =
cloneableTags[stringTag] = cloneableTags[symbolTag] =
cloneableTags[uint8Tag] = cloneableTags[uint8ClampedTag] =
cloneableTags[uint16Tag] = cloneableTags[uint32Tag] = true;
cloneableTags[errorTag] = cloneableTags[funcTag] =
cloneableTags[weakMapTag] = false;
/** Detect free variable `global` from Node.js. */
var freeGlobal = typeof global == 'object' && global && global.Object === Object && global;
/** Detect free variable `self`. */
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
/** Used as a reference to the global object. */
var root = freeGlobal || freeSelf || Function('return this')();
/** Detect free variable `exports`. */
var freeExports = typeof exports == 'object' && exports && !exports.nodeType && exports;
/** Detect free variable `module`. */
var freeModule = freeExports && typeof module == 'object' && module && !module.nodeType && module;
/** Detect the popular CommonJS extension `module.exports`. */
var moduleExports = freeModule && freeModule.exports === freeExports;
/** Detect free variable `process` from Node.js. */
var freeProcess = moduleExports && freeGlobal.process;
/** Used to access faster Node.js helpers. */
var nodeUtil = (function() {
try {
return freeProcess && freeProcess.binding('util');
} catch (e) {}
}());
/* Node.js helper references. */
var nodeIsTypedArray = nodeUtil && nodeUtil.isTypedArray;
/**
* Adds the key-value `pair` to `map`.
*
* @private
* @param {Object} map The map to modify.
* @param {Array} pair The key-value pair to add.
* @returns {Object} Returns `map`.
*/
function addMapEntry(map, pair) {
// Don't return `map.set` because it's not chainable in IE 11.
map.set(pair[0], pair[1]);
return map;
}
/**
* Adds `value` to `set`.
*
* @private
* @param {Object} set The set to modify.
* @param {*} value The value to add.
* @returns {Object} Returns `set`.
*/
function addSetEntry(set, value) {
// Don't return `set.add` because it's not chainable in IE 11.
set.add(value);
return set;
}
/**
* A faster alternative to `Function#apply`, this function invokes `func`
* with the `this` binding of `thisArg` and the arguments of `args`.
*
* @private
* @param {Function} func The function to invoke.
* @param {*} thisArg The `this` binding of `func`.
* @param {Array} args The arguments to invoke `func` with.
* @returns {*} Returns the result of `func`.
*/
function apply(func, thisArg, args) {
switch (args.length) {
case 0: return func.call(thisArg);
case 1: return func.call(thisArg, args[0]);
case 2: return func.call(thisArg, args[0], args[1]);
case 3: return func.call(thisArg, args[0], args[1], args[2]);
}
return func.apply(thisArg, args);
}
/**
* A specialized version of `_.forEach` for arrays without support for
* iteratee shorthands.
*
* @private
* @param {Array} [array] The array to iterate over.
* @param {Function} iteratee The function invoked per iteration.
* @returns {Array} Returns `array`.
*/
function arrayEach(array, iteratee) {
var index = -1,
length = array ? array.length : 0;
while (++index < length) {
if (iteratee(array[index], index, array) === false) {
break;
}
}
return array;
}
/**
* Appends the elements of `values` to `array`.
*
* @private
* @param {Array} array The array to modify.
* @param {Array} values The values to append.
* @returns {Array} Returns `array`.
*/
function arrayPush(array, values) {
var index = -1,
length = values.length,
offset = array.length;
while (++index < length) {
array[offset + index] = values[index];
}
return array;
}
/**
* A specialized version of `_.reduce` for arrays without support for
* iteratee shorthands.
*
* @private
* @param {Array} [array] The array to iterate over.
* @param {Function} iteratee The function invoked per iteration.
* @param {*} [accumulator] The initial value.
* @param {boolean} [initAccum] Specify using the first element of `array` as
* the initial value.
* @returns {*} Returns the accumulated value.
*/
function arrayReduce(array, iteratee, accumulator, initAccum) {
var index = -1,
length = array ? array.length : 0;
if (initAccum && length) {
accumulator = array[++index];
}
while (++index < length) {
accumulator = iteratee(accumulator, array[index], index, array);
}
return accumulator;
}
/**
* The base implementation of `_.times` without support for iteratee shorthands
* or max array length checks.
*
* @private
* @param {number} n The number of times to invoke `iteratee`.
* @param {Function} iteratee The function invoked per iteration.
* @returns {Array} Returns the array of results.
*/
function baseTimes(n, iteratee) {
var index = -1,
result = Array(n);
while (++index < n) {
result[index] = iteratee(index);
}
return result;
}
/**
* The base implementation of `_.unary` without support for storing metadata.
*
* @private
* @param {Function} func The function to cap arguments for.
* @returns {Function} Returns the new capped function.
*/
function baseUnary(func) {
return function(value) {
return func(value);
};
}
/**
* Gets the value at `key` of `object`.
*
* @private
* @param {Object} [object] The object to query.
* @param {string} key The key of the property to get.
* @returns {*} Returns the property value.
*/
function getValue(object, key) {
return object == null ? undefined : object[key];
}
/**
* Checks if `value` is a host object in IE < 9.
*
* @private
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a host object, else `false`.
*/
function isHostObject(value) {
// Many host objects are `Object` objects that can coerce to strings
// despite having improperly defined `toString` methods.
var result = false;
if (value != null && typeof value.toString != 'function') {
try {
result = !!(value + '');
} catch (e) {}
}
return result;
}
/**
* Converts `map` to its key-value pairs.
*
* @private
* @param {Object} map The map to convert.
* @returns {Array} Returns the key-value pairs.
*/
function mapToArray(map) {
var index = -1,
result = Array(map.size);
map.forEach(function(value, key) {
result[++index] = [key, value];
});
return result;
}
/**
* Creates a unary function that invokes `func` with its argument transformed.
*
* @private
* @param {Function} func The function to wrap.
* @param {Function} transform The argument transform.
* @returns {Function} Returns the new function.
*/
function overArg(func, transform) {
return function(arg) {
return func(transform(arg));
};
}
/**
* Converts `set` to an array of its values.
*
* @private
* @param {Object} set The set to convert.
* @returns {Array} Returns the values.
*/
function setToArray(set) {
var index = -1,
result = Array(set.size);
set.forEach(function(value) {
result[++index] = value;
});
return result;
}
/** Used for built-in method references. */
var arrayProto = Array.prototype,
funcProto = Function.prototype,
objectProto = Object.prototype;
/** Used to detect overreaching core-js shims. */
var coreJsData = root['__core-js_shared__'];
/** Used to detect methods masquerading as native. */
var maskSrcKey = (function() {
var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
return uid ? ('Symbol(src)_1.' + uid) : '';
}());
/** Used to resolve the decompiled source of functions. */
var funcToString = funcProto.toString;
/** Used to check objects for own properties. */
var hasOwnProperty = objectProto.hasOwnProperty;
/** Used to infer the `Object` constructor. */
var objectCtorString = funcToString.call(Object);
/**
* Used to resolve the
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
* of values.
*/
var objectToString = objectProto.toString;
/** Used to detect if a method is native. */
var reIsNative = RegExp('^' +
funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&')
.replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
);
/** Built-in value references. */
var Buffer = moduleExports ? root.Buffer : undefined,
Symbol = root.Symbol,
Uint8Array = root.Uint8Array,
getPrototype = overArg(Object.getPrototypeOf, Object),
objectCreate = Object.create,
propertyIsEnumerable = objectProto.propertyIsEnumerable,
splice = arrayProto.splice;
/* Built-in method references for those with the same name as other `lodash` methods. */
var nativeGetSymbols = Object.getOwnPropertySymbols,
nativeIsBuffer = Buffer ? Buffer.isBuffer : undefined,
nativeKeys = overArg(Object.keys, Object),
nativeMax = Math.max;
/* Built-in method references that are verified to be native. */
var DataView = getNative(root, 'DataView'),
Map = getNative(root, 'Map'),
Promise = getNative(root, 'Promise'),
Set = getNative(root, 'Set'),
WeakMap = getNative(root, 'WeakMap'),
nativeCreate = getNative(Object, 'create');
/** Used to detect maps, sets, and weakmaps. */
var dataViewCtorString = toSource(DataView),
mapCtorString = toSource(Map),
promiseCtorString = toSource(Promise),
setCtorString = toSource(Set),
weakMapCtorString = toSource(WeakMap);
/** Used to convert symbols to primitives and strings. */
var symbolProto = Symbol ? Symbol.prototype : undefined,
symbolValueOf = symbolProto ? symbolProto.valueOf : undefined;
/**
* Creates a hash object.
*
* @private
* @constructor
* @param {Array} [entries] The key-value pairs to cache.
*/
function Hash(entries) {
var index = -1,
length = entries ? entries.length : 0;
this.clear();
while (++index < length) {
var entry = entries[index];
this.set(entry[0], entry[1]);
}
}
/**
* Removes all key-value entries from the hash.
*
* @private
* @name clear
* @memberOf Hash
*/
function hashClear() {
this.__data__ = nativeCreate ? nativeCreate(null) : {};
}
/**
* Removes `key` and its value from the hash.
*
* @private
* @name delete
* @memberOf Hash
 * @param {string} key The key of the value to remove.
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
*/
function hashDelete(key) {
return this.has(key) && delete this.__data__[key];
}
/**
* Gets the hash value for `key`.
*
* @private
* @name get
* @memberOf Hash
* @param {string} key The key of the value to get.
* @returns {*} Returns the entry value.
*/
function hashGet(key) {
var data = this.__data__;
if (nativeCreate) {
var result = data[key];
return result === HASH_UNDEFINED ? undefined : result;
}
return hasOwnProperty.call(data, key) ? data[key] : undefined;
}
/**
* Checks if a hash value for `key` exists.
*
* @private
* @name has
* @memberOf Hash
* @param {string} key The key of the entry to check.
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
*/
function hashHas(key) {
var data = this.__data__;
return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key);
}
/**
* Sets the hash `key` to `value`.
*
* @private
* @name set
* @memberOf Hash
* @param {string} key The key of the value to set.
* @param {*} value The value to set.
* @returns {Object} Returns the hash instance.
*/
function hashSet(key, value) {
var data = this.__data__;
data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
return this;
}
// Add methods to `Hash`.
Hash.prototype.clear = hashClear;
Hash.prototype['delete'] = hashDelete;
Hash.prototype.get = hashGet;
Hash.prototype.has = hashHas;
Hash.prototype.set = hashSet;
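/**
 * Usage sketch (internal API, for illustration only): `Hash` behaves like a
 * minimal string-keyed map backed by a plain object.
 *
 * @example
 *
 * var hash = new Hash([['a', 1]]);
 * hash.set('b', 2).get('b');
 * // => 2
 * hash.has('c');
 * // => false
 */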
/**
 * Creates a list cache object.
*
* @private
* @constructor
* @param {Array} [entries] The key-value pairs to cache.
*/
function ListCache(entries) {
var index = -1,
length = entries ? entries.length : 0;
this.clear();
while (++index < length) {
var entry = entries[index];
this.set(entry[0], entry[1]);
}
}
/**
* Removes all key-value entries from the list cache.
*
* @private
* @name clear
* @memberOf ListCache
*/
function listCacheClear() {
this.__data__ = [];
}
/**
* Removes `key` and its value from the list cache.
*
* @private
* @name delete
* @memberOf ListCache
* @param {string} key The key of the value to remove.
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
*/
function listCacheDelete(key) {
var data = this.__data__,
index = assocIndexOf(data, key);
if (index < 0) {
return false;
}
var lastIndex = data.length - 1;
if (index == lastIndex) {
data.pop();
} else {
splice.call(data, index, 1);
}
return true;
}
/**
* Gets the list cache value for `key`.
*
* @private
* @name get
* @memberOf ListCache
* @param {string} key The key of the value to get.
* @returns {*} Returns the entry value.
*/
function listCacheGet(key) {
var data = this.__data__,
index = assocIndexOf(data, key);
return index < 0 ? undefined : data[index][1];
}
/**
* Checks if a list cache value for `key` exists.
*
* @private
* @name has
* @memberOf ListCache
* @param {string} key The key of the entry to check.
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
*/
function listCacheHas(key) {
return assocIndexOf(this.__data__, key) > -1;
}
/**
* Sets the list cache `key` to `value`.
*
* @private
* @name set
* @memberOf ListCache
* @param {string} key The key of the value to set.
* @param {*} value The value to set.
* @returns {Object} Returns the list cache instance.
*/
function listCacheSet(key, value) {
var data = this.__data__,
index = assocIndexOf(data, key);
if (index < 0) {
data.push([key, value]);
} else {
data[index][1] = value;
}
return this;
}
// Add methods to `ListCache`.
ListCache.prototype.clear = listCacheClear;
ListCache.prototype['delete'] = listCacheDelete;
ListCache.prototype.get = listCacheGet;
ListCache.prototype.has = listCacheHas;
ListCache.prototype.set = listCacheSet;
/**
* Creates a map cache object to store key-value pairs.
*
* @private
* @constructor
* @param {Array} [entries] The key-value pairs to cache.
*/
function MapCache(entries) {
var index = -1,
length = entries ? entries.length : 0;
this.clear();
while (++index < length) {
var entry = entries[index];
this.set(entry[0], entry[1]);
}
}
/**
* Removes all key-value entries from the map.
*
* @private
* @name clear
* @memberOf MapCache
*/
function mapCacheClear() {
this.__data__ = {
'hash': new Hash,
'map': new (Map || ListCache),
'string': new Hash
};
}
/**
* Removes `key` and its value from the map.
*
* @private
* @name delete
* @memberOf MapCache
* @param {string} key The key of the value to remove.
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
*/
function mapCacheDelete(key) {
return getMapData(this, key)['delete'](key);
}
/**
* Gets the map value for `key`.
*
* @private
* @name get
* @memberOf MapCache
* @param {string} key The key of the value to get.
* @returns {*} Returns the entry value.
*/
function mapCacheGet(key) {
return getMapData(this, key).get(key);
}
/**
* Checks if a map value for `key` exists.
*
* @private
* @name has
* @memberOf MapCache
* @param {string} key The key of the entry to check.
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
*/
function mapCacheHas(key) {
return getMapData(this, key).has(key);
}
/**
* Sets the map `key` to `value`.
*
* @private
* @name set
* @memberOf MapCache
* @param {string} key The key of the value to set.
* @param {*} value The value to set.
* @returns {Object} Returns the map cache instance.
*/
function mapCacheSet(key, value) {
getMapData(this, key).set(key, value);
return this;
}
// Add methods to `MapCache`.
MapCache.prototype.clear = mapCacheClear;
MapCache.prototype['delete'] = mapCacheDelete;
MapCache.prototype.get = mapCacheGet;
MapCache.prototype.has = mapCacheHas;
MapCache.prototype.set = mapCacheSet;
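/**
 * Usage sketch (internal API, for illustration only): `MapCache` routes each
 * key to a `Hash` for strings/numbers/symbols/booleans and to a `Map` (or a
 * `ListCache` fallback) for everything else.
 *
 * @example
 *
 * var cache = new MapCache([['a', 1]]);
 * cache.set('b', 2).get('a');
 * // => 1
 * cache['delete']('b');
 * // => true
 */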
/**
* Creates a stack cache object to store key-value pairs.
*
* @private
* @constructor
* @param {Array} [entries] The key-value pairs to cache.
*/
function Stack(entries) {
this.__data__ = new ListCache(entries);
}
/**
* Removes all key-value entries from the stack.
*
* @private
* @name clear
* @memberOf Stack
*/
function stackClear() {
this.__data__ = new ListCache;
}
/**
* Removes `key` and its value from the stack.
*
* @private
* @name delete
* @memberOf Stack
* @param {string} key The key of the value to remove.
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
*/
function stackDelete(key) {
return this.__data__['delete'](key);
}
/**
* Gets the stack value for `key`.
*
* @private
* @name get
* @memberOf Stack
* @param {string} key The key of the value to get.
* @returns {*} Returns the entry value.
*/
function stackGet(key) {
return this.__data__.get(key);
}
/**
* Checks if a stack value for `key` exists.
*
* @private
* @name has
* @memberOf Stack
* @param {string} key The key of the entry to check.
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
*/
function stackHas(key) {
return this.__data__.has(key);
}
/**
* Sets the stack `key` to `value`.
*
* @private
* @name set
* @memberOf Stack
* @param {string} key The key of the value to set.
* @param {*} value The value to set.
* @returns {Object} Returns the stack cache instance.
*/
function stackSet(key, value) {
var cache = this.__data__;
if (cache instanceof ListCache) {
var pairs = cache.__data__;
if (!Map || (pairs.length < LARGE_ARRAY_SIZE - 1)) {
pairs.push([key, value]);
return this;
}
cache = this.__data__ = new MapCache(pairs);
}
cache.set(key, value);
return this;
}
// Add methods to `Stack`.
Stack.prototype.clear = stackClear;
Stack.prototype['delete'] = stackDelete;
Stack.prototype.get = stackGet;
Stack.prototype.has = stackHas;
Stack.prototype.set = stackSet;
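/**
 * Usage sketch (internal API, for illustration only): `Stack` starts out as a
 * `ListCache` and silently upgrades itself to a `MapCache` once it grows large
 * (when a native `Map` is available).
 *
 * @example
 *
 * var stack = new Stack([['a', 1]]);
 * stack.set('b', 2).get('a');
 * // => 1
 * stack.has('b');
 * // => true
 */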
/**
* Creates an array of the enumerable property names of the array-like `value`.
*
* @private
* @param {*} value The value to query.
* @param {boolean} inherited Specify returning inherited property names.
* @returns {Array} Returns the array of property names.
*/
function arrayLikeKeys(value, inherited) {
// Safari 8.1 makes `arguments.callee` enumerable in strict mode.
// Safari 9 makes `arguments.length` enumerable in strict mode.
var result = (isArray(value) || isArguments(value))
? baseTimes(value.length, String)
: [];
var length = result.length,
skipIndexes = !!length;
for (var key in value) {
if ((inherited || hasOwnProperty.call(value, key)) &&
!(skipIndexes && (key == 'length' || isIndex(key, length)))) {
result.push(key);
}
}
return result;
}
/**
* This function is like `assignValue` except that it doesn't assign
* `undefined` values.
*
* @private
* @param {Object} object The object to modify.
* @param {string} key The key of the property to assign.
* @param {*} value The value to assign.
*/
function assignMergeValue(object, key, value) {
if ((value !== undefined && !eq(object[key], value)) ||
(typeof key == 'number' && value === undefined && !(key in object))) {
object[key] = value;
}
}
/**
* Assigns `value` to `key` of `object` if the existing value is not equivalent
* using [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
* for equality comparisons.
*
* @private
* @param {Object} object The object to modify.
* @param {string} key The key of the property to assign.
* @param {*} value The value to assign.
*/
function assignValue(object, key, value) {
var objValue = object[key];
if (!(hasOwnProperty.call(object, key) && eq(objValue, value)) ||
(value === undefined && !(key in object))) {
object[key] = value;
}
}
/**
* Gets the index at which the `key` is found in `array` of key-value pairs.
*
* @private
* @param {Array} array The array to inspect.
* @param {*} key The key to search for.
* @returns {number} Returns the index of the matched value, else `-1`.
*/
function assocIndexOf(array, key) {
var length = array.length;
while (length--) {
if (eq(array[length][0], key)) {
return length;
}
}
return -1;
}
/**
* The base implementation of `_.assign` without support for multiple sources
* or `customizer` functions.
*
* @private
* @param {Object} object The destination object.
* @param {Object} source The source object.
* @returns {Object} Returns `object`.
*/
function baseAssign(object, source) {
return object && copyObject(source, keys(source), object);
}
/**
* The base implementation of `_.clone` and `_.cloneDeep` which tracks
* traversed objects.
*
* @private
* @param {*} value The value to clone.
* @param {boolean} [isDeep] Specify a deep clone.
* @param {boolean} [isFull] Specify a clone including symbols.
* @param {Function} [customizer] The function to customize cloning.
* @param {string} [key] The key of `value`.
* @param {Object} [object] The parent object of `value`.
* @param {Object} [stack] Tracks traversed objects and their clone counterparts.
* @returns {*} Returns the cloned value.
*/
function baseClone(value, isDeep, isFull, customizer, key, object, stack) {
var result;
if (customizer) {
result = object ? customizer(value, key, object, stack) : customizer(value);
}
if (result !== undefined) {
return result;
}
if (!isObject(value)) {
return value;
}
var isArr = isArray(value);
if (isArr) {
result = initCloneArray(value);
if (!isDeep) {
return copyArray(value, result);
}
} else {
var tag = getTag(value),
isFunc = tag == funcTag || tag == genTag;
if (isBuffer(value)) {
return cloneBuffer(value, isDeep);
}
if (tag == objectTag || tag == argsTag || (isFunc && !object)) {
if (isHostObject(value)) {
return object ? value : {};
}
result = initCloneObject(isFunc ? {} : value);
if (!isDeep) {
return copySymbols(value, baseAssign(result, value));
}
} else {
if (!cloneableTags[tag]) {
return object ? value : {};
}
result = initCloneByTag(value, tag, baseClone, isDeep);
}
}
// Check for circular references and return its corresponding clone.
stack || (stack = new Stack);
var stacked = stack.get(value);
if (stacked) {
return stacked;
}
stack.set(value, result);
if (!isArr) {
var props = isFull ? getAllKeys(value) : keys(value);
}
arrayEach(props || value, function(subValue, key) {
if (props) {
key = subValue;
subValue = value[key];
}
// Recursively populate clone (susceptible to call stack limits).
assignValue(result, key, baseClone(subValue, isDeep, isFull, customizer, key, value, stack));
});
return result;
}
/**
* The base implementation of `_.create` without support for assigning
* properties to the created object.
*
* @private
 * @param {Object} proto The object to inherit from.
* @returns {Object} Returns the new object.
*/
function baseCreate(proto) {
return isObject(proto) ? objectCreate(proto) : {};
}
/**
* The base implementation of `getAllKeys` and `getAllKeysIn` which uses
* `keysFunc` and `symbolsFunc` to get the enumerable property names and
* symbols of `object`.
*
* @private
* @param {Object} object The object to query.
* @param {Function} keysFunc The function to get the keys of `object`.
* @param {Function} symbolsFunc The function to get the symbols of `object`.
* @returns {Array} Returns the array of property names and symbols.
*/
function baseGetAllKeys(object, keysFunc, symbolsFunc) {
var result = keysFunc(object);
return isArray(object) ? result : arrayPush(result, symbolsFunc(object));
}
/**
* The base implementation of `getTag`.
*
* @private
* @param {*} value The value to query.
* @returns {string} Returns the `toStringTag`.
*/
function baseGetTag(value) {
return objectToString.call(value);
}
/**
* The base implementation of `_.isNative` without bad shim checks.
*
* @private
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a native function,
* else `false`.
*/
function baseIsNative(value) {
if (!isObject(value) || isMasked(value)) {
return false;
}
var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
return pattern.test(toSource(value));
}
/**
* The base implementation of `_.isTypedArray` without Node.js optimizations.
*
* @private
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a typed array, else `false`.
*/
function baseIsTypedArray(value) {
return isObjectLike(value) &&
isLength(value.length) && !!typedArrayTags[objectToString.call(value)];
}
/**
* The base implementation of `_.keys` which doesn't treat sparse arrays as dense.
*
* @private
* @param {Object} object The object to query.
* @returns {Array} Returns the array of property names.
*/
function baseKeys(object) {
if (!isPrototype(object)) {
return nativeKeys(object);
}
var result = [];
for (var key in Object(object)) {
if (hasOwnProperty.call(object, key) && key != 'constructor') {
result.push(key);
}
}
return result;
}
/**
* The base implementation of `_.keysIn` which doesn't treat sparse arrays as dense.
*
* @private
* @param {Object} object The object to query.
* @returns {Array} Returns the array of property names.
*/
function baseKeysIn(object) {
if (!isObject(object)) {
return nativeKeysIn(object);
}
var isProto = isPrototype(object),
result = [];
for (var key in object) {
if (!(key == 'constructor' && (isProto || !hasOwnProperty.call(object, key)))) {
result.push(key);
}
}
return result;
}
/**
* The base implementation of `_.merge` without support for multiple sources.
*
* @private
* @param {Object} object The destination object.
* @param {Object} source The source object.
* @param {number} srcIndex The index of `source`.
* @param {Function} [customizer] The function to customize merged values.
* @param {Object} [stack] Tracks traversed source values and their merged
* counterparts.
*/
function baseMerge(object, source, srcIndex, customizer, stack) {
if (object === source) {
return;
}
if (!(isArray(source) || isTypedArray(source))) {
var props = baseKeysIn(source);
}
arrayEach(props || source, function(srcValue, key) {
if (props) {
key = srcValue;
srcValue = source[key];
}
if (isObject(srcValue)) {
stack || (stack = new Stack);
baseMergeDeep(object, source, key, srcIndex, baseMerge, customizer, stack);
}
else {
var newValue = customizer
? customizer(object[key], srcValue, (key + ''), object, source, stack)
: undefined;
if (newValue === undefined) {
newValue = srcValue;
}
assignMergeValue(object, key, newValue);
}
});
}
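/**
 * Behavior sketch (for illustration only): `baseMerge` recursively combines
 * plain objects and arrays from `source` into `object`, mirroring `_.merge`.
 *
 * @example
 *
 * var object = { 'a': [{ 'b': 2 }] };
 * baseMerge(object, { 'a': [{ 'c': 3 }] }, 0);
 * // object => { 'a': [{ 'b': 2, 'c': 3 }] }
 */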
/**
* A specialized version of `baseMerge` for arrays and objects which performs
* deep merges and tracks traversed objects enabling objects with circular
* references to be merged.
*
* @private
* @param {Object} object The destination object.
* @param {Object} source The source object.
* @param {string} key The key of the value to merge.
* @param {number} srcIndex The index of `source`.
* @param {Function} mergeFunc The function to merge values.
* @param {Function} [customizer] The function to customize assigned values.
* @param {Object} [stack] Tracks traversed source values and their merged
* counterparts.
*/
function baseMergeDeep(object, source, key, srcIndex, mergeFunc, customizer, stack) {
var objValue = object[key],
srcValue = source[key],
stacked = stack.get(srcValue);
if (stacked) {
assignMergeValue(object, key, stacked);
return;
}
var newValue = customizer
? customizer(objValue, srcValue, (key + ''), object, source, stack)
: undefined;
var isCommon = newValue === undefined;
if (isCommon) {
newValue = srcValue;
if (isArray(srcValue) || isTypedArray(srcValue)) {
if (isArray(objValue)) {
newValue = objValue;
}
else if (isArrayLikeObject(objValue)) {
newValue = copyArray(objValue);
}
else {
isCommon = false;
newValue = baseClone(srcValue, true);
}
}
else if (isPlainObject(srcValue) || isArguments(srcValue)) {
if (isArguments(objValue)) {
newValue = toPlainObject(objValue);
}
else if (!isObject(objValue) || (srcIndex && isFunction(objValue))) {
isCommon = false;
newValue = baseClone(srcValue, true);
}
else {
newValue = objValue;
}
}
else {
isCommon = false;
}
}
if (isCommon) {
// Recursively merge objects and arrays (susceptible to call stack limits).
stack.set(srcValue, newValue);
mergeFunc(newValue, srcValue, srcIndex, customizer, stack);
stack['delete'](srcValue);
}
assignMergeValue(object, key, newValue);
}
/**
* The base implementation of `_.rest` which doesn't validate or coerce arguments.
*
* @private
* @param {Function} func The function to apply a rest parameter to.
* @param {number} [start=func.length-1] The start position of the rest parameter.
* @returns {Function} Returns the new function.
*/
function baseRest(func, start) {
start = nativeMax(start === undefined ? (func.length - 1) : start, 0);
return function() {
var args = arguments,
index = -1,
length = nativeMax(args.length - start, 0),
array = Array(length);
while (++index < length) {
array[index] = args[start + index];
}
index = -1;
var otherArgs = Array(start + 1);
while (++index < start) {
otherArgs[index] = args[index];
}
otherArgs[start] = array;
return apply(func, this, otherArgs);
};
}
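/**
 * Usage sketch (internal API, for illustration only): `baseRest` gathers the
 * trailing arguments into an array, like a rest parameter.
 *
 * @example
 *
 * var collect = baseRest(function(first, rest) {
 *   return [first, rest];
 * });
 * collect(1, 2, 3);
 * // => [1, [2, 3]]
 */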
/**
* Creates a clone of `buffer`.
*
* @private
* @param {Buffer} buffer The buffer to clone.
* @param {boolean} [isDeep] Specify a deep clone.
* @returns {Buffer} Returns the cloned buffer.
*/
function cloneBuffer(buffer, isDeep) {
if (isDeep) {
return buffer.slice();
}
var result = new buffer.constructor(buffer.length);
buffer.copy(result);
return result;
}
/**
* Creates a clone of `arrayBuffer`.
*
* @private
* @param {ArrayBuffer} arrayBuffer The array buffer to clone.
* @returns {ArrayBuffer} Returns the cloned array buffer.
*/
function cloneArrayBuffer(arrayBuffer) {
var result = new arrayBuffer.constructor(arrayBuffer.byteLength);
new Uint8Array(result).set(new Uint8Array(arrayBuffer));
return result;
}
/**
* Creates a clone of `dataView`.
*
* @private
* @param {Object} dataView The data view to clone.
* @param {boolean} [isDeep] Specify a deep clone.
* @returns {Object} Returns the cloned data view.
*/
function cloneDataView(dataView, isDeep) {
var buffer = isDeep ? cloneArrayBuffer(dataView.buffer) : dataView.buffer;
return new dataView.constructor(buffer, dataView.byteOffset, dataView.byteLength);
}
/**
* Creates a clone of `map`.
*
* @private
* @param {Object} map The map to clone.
* @param {Function} cloneFunc The function to clone values.
* @param {boolean} [isDeep] Specify a deep clone.
* @returns {Object} Returns the cloned map.
*/
function cloneMap(map, isDeep, cloneFunc) {
var array = isDeep ? cloneFunc(mapToArray(map), true) : mapToArray(map);
return arrayReduce(array, addMapEntry, new map.constructor);
}
/**
* Creates a clone of `regexp`.
*
* @private
* @param {Object} regexp The regexp to clone.
* @returns {Object} Returns the cloned regexp.
*/
function cloneRegExp(regexp) {
var result = new regexp.constructor(regexp.source, reFlags.exec(regexp));
result.lastIndex = regexp.lastIndex;
return result;
}
/**
* Creates a clone of `set`.
*
* @private
* @param {Object} set The set to clone.
* @param {Function} cloneFunc The function to clone values.
* @param {boolean} [isDeep] Specify a deep clone.
* @returns {Object} Returns the cloned set.
*/
function cloneSet(set, isDeep, cloneFunc) {
var array = isDeep ? cloneFunc(setToArray(set), true) : setToArray(set);
return arrayReduce(array, addSetEntry, new set.constructor);
}
/**
* Creates a clone of the `symbol` object.
*
* @private
* @param {Object} symbol The symbol object to clone.
* @returns {Object} Returns the cloned symbol object.
*/
function cloneSymbol(symbol) {
return symbolValueOf ? Object(symbolValueOf.call(symbol)) : {};
}
/**
* Creates a clone of `typedArray`.
*
* @private
* @param {Object} typedArray The typed array to clone.
* @param {boolean} [isDeep] Specify a deep clone.
* @returns {Object} Returns the cloned typed array.
*/
function cloneTypedArray(typedArray, isDeep) {
var buffer = isDeep ? cloneArrayBuffer(typedArray.buffer) : typedArray.buffer;
return new typedArray.constructor(buffer, typedArray.byteOffset, typedArray.length);
}
/**
* Copies the values of `source` to `array`.
*
* @private
* @param {Array} source The array to copy values from.
* @param {Array} [array=[]] The array to copy values to.
* @returns {Array} Returns `array`.
*/
function copyArray(source, array) {
var index = -1,
length = source.length;
array || (array = Array(length));
while (++index < length) {
array[index] = source[index];
}
return array;
}
/**
* Copies properties of `source` to `object`.
*
* @private
* @param {Object} source The object to copy properties from.
* @param {Array} props The property identifiers to copy.
* @param {Object} [object={}] The object to copy properties to.
* @param {Function} [customizer] The function to customize copied values.
* @returns {Object} Returns `object`.
*/
function copyObject(source, props, object, customizer) {
object || (object = {});
var index = -1,
length = props.length;
while (++index < length) {
var key = props[index];
var newValue = customizer
? customizer(object[key], source[key], key, object, source)
: undefined;
assignValue(object, key, newValue === undefined ? source[key] : newValue);
}
return object;
}
/**
* Copies own symbol properties of `source` to `object`.
*
* @private
* @param {Object} source The object to copy symbols from.
* @param {Object} [object={}] The object to copy symbols to.
* @returns {Object} Returns `object`.
*/
function copySymbols(source, object) {
return copyObject(source, getSymbols(source), object);
}
/**
* Creates a function like `_.assign`.
*
* @private
* @param {Function} assigner The function to assign values.
* @returns {Function} Returns the new assigner function.
*/
function createAssigner(assigner) {
return baseRest(function(object, sources) {
var index = -1,
length = sources.length,
customizer = length > 1 ? sources[length - 1] : undefined,
guard = length > 2 ? sources[2] : undefined;
customizer = (assigner.length > 3 && typeof customizer == 'function')
? (length--, customizer)
: undefined;
if (guard && isIterateeCall(sources[0], sources[1], guard)) {
customizer = length < 3 ? undefined : customizer;
length = 1;
}
object = Object(object);
while (++index < length) {
var source = sources[index];
if (source) {
assigner(object, source, index, customizer);
}
}
return object;
});
}
/**
* Creates an array of own enumerable property names and symbols of `object`.
*
* @private
* @param {Object} object The object to query.
* @returns {Array} Returns the array of property names and symbols.
*/
function getAllKeys(object) {
return baseGetAllKeys(object, keys, getSymbols);
}
/**
* Gets the data for `map`.
*
* @private
* @param {Object} map The map to query.
* @param {string} key The reference key.
* @returns {*} Returns the map data.
*/
function getMapData(map, key) {
var data = map.__data__;
return isKeyable(key)
? data[typeof key == 'string' ? 'string' : 'hash']
: data.map;
}
/**
* Gets the native function at `key` of `object`.
*
* @private
* @param {Object} object The object to query.
* @param {string} key The key of the method to get.
* @returns {*} Returns the function if it's native, else `undefined`.
*/
function getNative(object, key) {
var value = getValue(object, key);
return baseIsNative(value) ? value : undefined;
}
/**
* Creates an array of the own enumerable symbol properties of `object`.
*
* @private
* @param {Object} object The object to query.
* @returns {Array} Returns the array of symbols.
*/
var getSymbols = nativeGetSymbols ? overArg(nativeGetSymbols, Object) : stubArray;
/**
* Gets the `toStringTag` of `value`.
*
* @private
* @param {*} value The value to query.
* @returns {string} Returns the `toStringTag`.
*/
var getTag = baseGetTag;
// Fallback for data views, maps, sets, and weak maps in IE 11,
// for data views in Edge < 14, and promises in Node.js.
if ((DataView && getTag(new DataView(new ArrayBuffer(1))) != dataViewTag) ||
(Map && getTag(new Map) != mapTag) ||
(Promise && getTag(Promise.resolve()) != promiseTag) ||
(Set && getTag(new Set) != setTag) ||
(WeakMap && getTag(new WeakMap) != weakMapTag)) {
getTag = function(value) {
var result = objectToString.call(value),
Ctor = result == objectTag ? value.constructor : undefined,
ctorString = Ctor ? toSource(Ctor) : undefined;
if (ctorString) {
switch (ctorString) {
case dataViewCtorString: return dataViewTag;
case mapCtorString: return mapTag;
case promiseCtorString: return promiseTag;
case setCtorString: return setTag;
case weakMapCtorString: return weakMapTag;
}
}
return result;
};
}
/**
* Initializes an array clone.
*
* @private
* @param {Array} array The array to clone.
* @returns {Array} Returns the initialized clone.
*/
function initCloneArray(array) {
var length = array.length,
result = array.constructor(length);
// Add properties assigned by `RegExp#exec`.
if (length && typeof array[0] == 'string' && hasOwnProperty.call(array, 'index')) {
result.index = array.index;
result.input = array.input;
}
return result;
}
/**
* Initializes an object clone.
*
* @private
* @param {Object} object The object to clone.
* @returns {Object} Returns the initialized clone.
*/
function initCloneObject(object) {
return (typeof object.constructor == 'function' && !isPrototype(object))
? baseCreate(getPrototype(object))
: {};
}
/**
* Initializes an object clone based on its `toStringTag`.
*
* **Note:** This function only supports cloning values with tags of
* `Boolean`, `Date`, `Error`, `Number`, `RegExp`, or `String`.
*
* @private
* @param {Object} object The object to clone.
* @param {string} tag The `toStringTag` of the object to clone.
* @param {Function} cloneFunc The function to clone values.
* @param {boolean} [isDeep] Specify a deep clone.
* @returns {Object} Returns the initialized clone.
*/
function initCloneByTag(object, tag, cloneFunc, isDeep) {
var Ctor = object.constructor;
switch (tag) {
case arrayBufferTag:
return cloneArrayBuffer(object);
case boolTag:
case dateTag:
return new Ctor(+object);
case dataViewTag:
return cloneDataView(object, isDeep);
case float32Tag: case float64Tag:
case int8Tag: case int16Tag: case int32Tag:
case uint8Tag: case uint8ClampedTag: case uint16Tag: case uint32Tag:
return cloneTypedArray(object, isDeep);
case mapTag:
return cloneMap(object, isDeep, cloneFunc);
case numberTag:
case stringTag:
return new Ctor(object);
case regexpTag:
return cloneRegExp(object);
case setTag:
return cloneSet(object, isDeep, cloneFunc);
case symbolTag:
return cloneSymbol(object);
}
}
/**
* Checks if `value` is a valid array-like index.
*
* @private
* @param {*} value The value to check.
* @param {number} [length=MAX_SAFE_INTEGER] The upper bounds of a valid index.
* @returns {boolean} Returns `true` if `value` is a valid index, else `false`.
*/
function isIndex(value, length) {
length = length == null ? MAX_SAFE_INTEGER : length;
return !!length &&
(typeof value == 'number' || reIsUint.test(value)) &&
(value > -1 && value % 1 == 0 && value < length);
}
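/**
 * Usage sketch (internal API, for illustration only).
 *
 * @example
 *
 * isIndex('2', 3);
 * // => true
 * isIndex(3, 3);
 * // => false (indexes must be less than `length`)
 */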
/**
* Checks if the given arguments are from an iteratee call.
*
* @private
* @param {*} value The potential iteratee value argument.
* @param {*} index The potential iteratee index or key argument.
* @param {*} object The potential iteratee object argument.
* @returns {boolean} Returns `true` if the arguments are from an iteratee call,
* else `false`.
*/
function isIterateeCall(value, index, object) {
if (!isObject(object)) {
return false;
}
var type = typeof index;
if (type == 'number'
? (isArrayLike(object) && isIndex(index, object.length))
: (type == 'string' && index in object)
) {
return eq(object[index], value);
}
return false;
}
/**
* Checks if `value` is suitable for use as unique object key.
*
* @private
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is suitable, else `false`.
*/
function isKeyable(value) {
var type = typeof value;
return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
? (value !== '__proto__')
: (value === null);
}
/**
* Checks if `func` has its source masked.
*
* @private
* @param {Function} func The function to check.
* @returns {boolean} Returns `true` if `func` is masked, else `false`.
*/
function isMasked(func) {
return !!maskSrcKey && (maskSrcKey in func);
}
/**
* Checks if `value` is likely a prototype object.
*
* @private
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a prototype, else `false`.
*/
function isPrototype(value) {
var Ctor = value && value.constructor,
proto = (typeof Ctor == 'function' && Ctor.prototype) || objectProto;
return value === proto;
}
/**
* This function is like
* [`Object.keys`](http://ecma-international.org/ecma-262/7.0/#sec-object.keys)
* except that it includes inherited enumerable properties.
*
* @private
* @param {Object} object The object to query.
* @returns {Array} Returns the array of property names.
*/
function nativeKeysIn(object) {
var result = [];
if (object != null) {
for (var key in Object(object)) {
result.push(key);
}
}
return result;
}
/**
* Converts `func` to its source code.
*
* @private
* @param {Function} func The function to process.
* @returns {string} Returns the source code.
*/
function toSource(func) {
if (func != null) {
try {
return funcToString.call(func);
} catch (e) {}
try {
return (func + '');
} catch (e) {}
}
return '';
}
/**
* Performs a
* [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
* comparison between two values to determine if they are equivalent.
*
* @static
* @memberOf _
* @since 4.0.0
* @category Lang
* @param {*} value The value to compare.
* @param {*} other The other value to compare.
* @returns {boolean} Returns `true` if the values are equivalent, else `false`.
* @example
*
* var object = { 'a': 1 };
* var other = { 'a': 1 };
*
* _.eq(object, object);
* // => true
*
* _.eq(object, other);
* // => false
*
* _.eq('a', 'a');
* // => true
*
* _.eq('a', Object('a'));
* // => false
*
* _.eq(NaN, NaN);
* // => true
*/
function eq(value, other) {
return value === other || (value !== value && other !== other);
}
/**
* Checks if `value` is likely an `arguments` object.
*
* @static
* @memberOf _
* @since 0.1.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is an `arguments` object,
* else `false`.
* @example
*
* _.isArguments(function() { return arguments; }());
* // => true
*
* _.isArguments([1, 2, 3]);
* // => false
*/
function isArguments(value) {
// Safari 8.1 makes `arguments.callee` enumerable in strict mode.
return isArrayLikeObject(value) && hasOwnProperty.call(value, 'callee') &&
(!propertyIsEnumerable.call(value, 'callee') || objectToString.call(value) == argsTag);
}
/**
* Checks if `value` is classified as an `Array` object.
*
* @static
* @memberOf _
* @since 0.1.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is an array, else `false`.
* @example
*
* _.isArray([1, 2, 3]);
* // => true
*
* _.isArray(document.body.children);
* // => false
*
* _.isArray('abc');
* // => false
*
* _.isArray(_.noop);
* // => false
*/
var isArray = Array.isArray;
/**
* Checks if `value` is array-like. A value is considered array-like if it's
* not a function and has a `value.length` that's an integer greater than or
* equal to `0` and less than or equal to `Number.MAX_SAFE_INTEGER`.
*
* @static
* @memberOf _
* @since 4.0.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is array-like, else `false`.
* @example
*
* _.isArrayLike([1, 2, 3]);
* // => true
*
* _.isArrayLike(document.body.children);
* // => true
*
* _.isArrayLike('abc');
* // => true
*
* _.isArrayLike(_.noop);
* // => false
*/
function isArrayLike(value) {
return value != null && isLength(value.length) && !isFunction(value);
}
/**
* This method is like `_.isArrayLike` except that it also checks if `value`
* is an object.
*
* @static
* @memberOf _
* @since 4.0.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is an array-like object,
* else `false`.
* @example
*
* _.isArrayLikeObject([1, 2, 3]);
* // => true
*
* _.isArrayLikeObject(document.body.children);
* // => true
*
* _.isArrayLikeObject('abc');
* // => false
*
* _.isArrayLikeObject(_.noop);
* // => false
*/
function isArrayLikeObject(value) {
return isObjectLike(value) && isArrayLike(value);
}
/**
* Checks if `value` is a buffer.
*
* @static
* @memberOf _
* @since 4.3.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a buffer, else `false`.
* @example
*
* _.isBuffer(new Buffer(2));
* // => true
*
* _.isBuffer(new Uint8Array(2));
* // => false
*/
var isBuffer = nativeIsBuffer || stubFalse;
/**
* Checks if `value` is classified as a `Function` object.
*
* @static
* @memberOf _
* @since 0.1.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a function, else `false`.
* @example
*
* _.isFunction(_);
* // => true
*
* _.isFunction(/abc/);
* // => false
*/
function isFunction(value) {
// The use of `Object#toString` avoids issues with the `typeof` operator
// in Safari 8-9 which returns 'object' for typed array and other constructors.
var tag = isObject(value) ? objectToString.call(value) : '';
return tag == funcTag || tag == genTag;
}
/**
* Checks if `value` is a valid array-like length.
*
* **Note:** This method is loosely based on
* [`ToLength`](http://ecma-international.org/ecma-262/7.0/#sec-tolength).
*
* @static
* @memberOf _
* @since 4.0.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a valid length, else `false`.
* @example
*
* _.isLength(3);
* // => true
*
* _.isLength(Number.MIN_VALUE);
* // => false
*
* _.isLength(Infinity);
* // => false
*
* _.isLength('3');
* // => false
*/
function isLength(value) {
return typeof value == 'number' &&
value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER;
}
/**
* Checks if `value` is the
* [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
* of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
*
* @static
* @memberOf _
* @since 0.1.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is an object, else `false`.
* @example
*
* _.isObject({});
* // => true
*
* _.isObject([1, 2, 3]);
* // => true
*
* _.isObject(_.noop);
* // => true
*
* _.isObject(null);
* // => false
*/
function isObject(value) {
var type = typeof value;
return !!value && (type == 'object' || type == 'function');
}
/**
* Checks if `value` is object-like. A value is object-like if it's not `null`
* and has a `typeof` result of "object".
*
* @static
* @memberOf _
* @since 4.0.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
* @example
*
* _.isObjectLike({});
* // => true
*
* _.isObjectLike([1, 2, 3]);
* // => true
*
* _.isObjectLike(_.noop);
* // => false
*
* _.isObjectLike(null);
* // => false
*/
function isObjectLike(value) {
return !!value && typeof value == 'object';
}
/**
* Checks if `value` is a plain object, that is, an object created by the
* `Object` constructor or one with a `[[Prototype]]` of `null`.
*
* @static
* @memberOf _
* @since 0.8.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a plain object, else `false`.
* @example
*
* function Foo() {
* this.a = 1;
* }
*
* _.isPlainObject(new Foo);
* // => false
*
* _.isPlainObject([1, 2, 3]);
* // => false
*
* _.isPlainObject({ 'x': 0, 'y': 0 });
* // => true
*
* _.isPlainObject(Object.create(null));
* // => true
*/
function isPlainObject(value) {
if (!isObjectLike(value) ||
objectToString.call(value) != objectTag || isHostObject(value)) {
return false;
}
var proto = getPrototype(value);
if (proto === null) {
return true;
}
var Ctor = hasOwnProperty.call(proto, 'constructor') && proto.constructor;
return (typeof Ctor == 'function' &&
Ctor instanceof Ctor && funcToString.call(Ctor) == objectCtorString);
}
/**
* Checks if `value` is classified as a typed array.
*
* @static
* @memberOf _
* @since 3.0.0
* @category Lang
* @param {*} value The value to check.
* @returns {boolean} Returns `true` if `value` is a typed array, else `false`.
* @example
*
* _.isTypedArray(new Uint8Array);
* // => true
*
* _.isTypedArray([]);
* // => false
*/
var isTypedArray = nodeIsTypedArray ? baseUnary(nodeIsTypedArray) : baseIsTypedArray;
/**
* Converts `value` to a plain object flattening inherited enumerable string
* keyed properties of `value` to own properties of the plain object.
*
* @static
* @memberOf _
* @since 3.0.0
* @category Lang
* @param {*} value The value to convert.
* @returns {Object} Returns the converted plain object.
* @example
*
* function Foo() {
* this.b = 2;
* }
*
* Foo.prototype.c = 3;
*
* _.assign({ 'a': 1 }, new Foo);
* // => { 'a': 1, 'b': 2 }
*
* _.assign({ 'a': 1 }, _.toPlainObject(new Foo));
* // => { 'a': 1, 'b': 2, 'c': 3 }
*/
function toPlainObject(value) {
return copyObject(value, keysIn(value));
}
/**
* Creates an array of the own enumerable property names of `object`.
*
* **Note:** Non-object values are coerced to objects. See the
* [ES spec](http://ecma-international.org/ecma-262/7.0/#sec-object.keys)
* for more details.
*
* @static
* @since 0.1.0
* @memberOf _
* @category Object
* @param {Object} object The object to query.
* @returns {Array} Returns the array of property names.
* @example
*
* function Foo() {
* this.a = 1;
* this.b = 2;
* }
*
* Foo.prototype.c = 3;
*
* _.keys(new Foo);
* // => ['a', 'b'] (iteration order is not guaranteed)
*
* _.keys('hi');
* // => ['0', '1']
*/
function keys(object) {
return isArrayLike(object) ? arrayLikeKeys(object) : baseKeys(object);
}
/**
* Creates an array of the own and inherited enumerable property names of `object`.
*
* **Note:** Non-object values are coerced to objects.
*
* @static
* @memberOf _
* @since 3.0.0
* @category Object
* @param {Object} object The object to query.
* @returns {Array} Returns the array of property names.
* @example
*
* function Foo() {
* this.a = 1;
* this.b = 2;
* }
*
* Foo.prototype.c = 3;
*
* _.keysIn(new Foo);
* // => ['a', 'b', 'c'] (iteration order is not guaranteed)
*/
function keysIn(object) {
return isArrayLike(object) ? arrayLikeKeys(object, true) : baseKeysIn(object);
}
/**
* This method is like `_.merge` except that it accepts `customizer` which
* is invoked to produce the merged values of the destination and source
* properties. If `customizer` returns `undefined`, merging is handled by the
 * method instead. The `customizer` is invoked with six arguments:
* (objValue, srcValue, key, object, source, stack).
*
* **Note:** This method mutates `object`.
*
* @static
* @memberOf _
* @since 4.0.0
* @category Object
* @param {Object} object The destination object.
* @param {...Object} sources The source objects.
* @param {Function} customizer The function to customize assigned values.
* @returns {Object} Returns `object`.
* @example
*
* function customizer(objValue, srcValue) {
* if (_.isArray(objValue)) {
* return objValue.concat(srcValue);
* }
* }
*
* var object = { 'a': [1], 'b': [2] };
* var other = { 'a': [3], 'b': [4] };
*
* _.mergeWith(object, other, customizer);
* // => { 'a': [1, 3], 'b': [2, 4] }
*/
var mergeWith = createAssigner(function(object, source, srcIndex, customizer) {
baseMerge(object, source, srcIndex, customizer);
});
/**
* This method returns a new empty array.
*
* @static
* @memberOf _
* @since 4.13.0
* @category Util
* @returns {Array} Returns the new empty array.
* @example
*
* var arrays = _.times(2, _.stubArray);
*
* console.log(arrays);
* // => [[], []]
*
* console.log(arrays[0] === arrays[1]);
* // => false
*/
function stubArray() {
return [];
}
/**
* This method returns `false`.
*
* @static
* @memberOf _
* @since 4.13.0
* @category Util
* @returns {boolean} Returns `false`.
* @example
*
* _.times(2, _.stubFalse);
* // => [false, false]
*/
function stubFalse() {
return false;
}
module.exports = mergeWith;
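// Illustrative usage (a sketch, assuming this file is consumed as the
// standalone `lodash.mergewith` package):
//   var mergeWith = require('lodash.mergewith');
//   var target = { 'a': [1] };
//   mergeWith(target, { 'a': [2] }, function(objValue, srcValue) {
//     if (Array.isArray(objValue)) {
//       return objValue.concat(srcValue);
//     }
//   });
//   // target is now { 'a': [1, 2] }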
/torchact-0.2.2.tar.gz/torchact-0.2.2/README.md
# torchact
<div align="center">
TorchAct, a collection of activation functions for PyTorch.
---
|  [](https://github.com/kaintels/torchact/actions/workflows/ci.yml) [](https://codecov.io/gh/kaintels/torchact) [](https://torchact.readthedocs.io/) |
|:---:|
|    [](https://badge.fury.io/py/torchact) [](https://pepy.tech/project/torchact) |
|   [](https://github.com/psf/black) |
</div>
## Quick Start
```python
import torch
import torch.nn as nn
from torchact import ReLU
model = nn.Sequential(
    nn.Linear(5, 3),
    ReLU(),
    nn.Linear(3, 1)
)
dummy = torch.rand(1, 5)
print(model(dummy))
```
## Installation
```shell
pip install torchact
```
## How to Contribute
Thanks for your contribution!
There are several steps for contributing.
0. Fork this repo (you can work on the dev branch).
1. Install the dependencies listed in `requirements.txt`.
2. Write your code in the `torchact` folder (a minimal module sketch is shown at the end of this section).
3. Add your module to `__init__.py` (`__version__` cannot be changed; it will be decided later.)
For example:
```python
from .your_module import Your_Module
__all__ = ("ReLU", "SinLU", "Softmax", "Your_Module")
```
4. If you want to add a test case, write one.
For example:
```python
# `__all__` comes from torchact's `__init__.py`; `str_to_class` is the test
# helper that resolves an activation name to its class.
def test_has_attr():
    for activation_name in __all__:
        if activation_name == "Softmax":
            assert hasattr(str_to_class(activation_name)(), "dim")
        else:
            pass
```
5. Run the black formatter: `black .`
6. Send a PR. Code testing happens automatically. (PyPI is upgraded by the admin himself.)
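For reference, a minimal activation module might look like the sketch below. This is illustrative only; `StepReLU` is a made-up name, not an actual torchact module:
```python
import torch
import torch.nn as nn


class StepReLU(nn.Module):
    """Illustrative activation: ReLU output rounded up to whole steps."""

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Zero out negatives, then round the positive part up to integer steps.
        return torch.ceil(torch.clamp(x, min=0.0))
```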
/cdktf-cdktf-provider-google-9.0.1.tar.gz/cdktf-cdktf-provider-google-9.0.1/src/cdktf_cdktf_provider_google/compute_region_backend_service/__init__.py
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import cdktf as _cdktf_9a9027ec
import constructs as _constructs_77d1e7e8
class ComputeRegionBackendService(
_cdktf_9a9027ec.TerraformResource,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendService",
):
'''Represents a {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service google_compute_region_backend_service}.'''
def __init__(
self,
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
name: builtins.str,
affinity_cookie_ttl_sec: typing.Optional[jsii.Number] = None,
backend: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["ComputeRegionBackendServiceBackend", typing.Dict[builtins.str, typing.Any]]]]] = None,
cdn_policy: typing.Optional[typing.Union["ComputeRegionBackendServiceCdnPolicy", typing.Dict[builtins.str, typing.Any]]] = None,
circuit_breakers: typing.Optional[typing.Union["ComputeRegionBackendServiceCircuitBreakers", typing.Dict[builtins.str, typing.Any]]] = None,
connection_draining_timeout_sec: typing.Optional[jsii.Number] = None,
consistent_hash: typing.Optional[typing.Union["ComputeRegionBackendServiceConsistentHash", typing.Dict[builtins.str, typing.Any]]] = None,
description: typing.Optional[builtins.str] = None,
enable_cdn: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
failover_policy: typing.Optional[typing.Union["ComputeRegionBackendServiceFailoverPolicy", typing.Dict[builtins.str, typing.Any]]] = None,
health_checks: typing.Optional[typing.Sequence[builtins.str]] = None,
iap: typing.Optional[typing.Union["ComputeRegionBackendServiceIap", typing.Dict[builtins.str, typing.Any]]] = None,
id: typing.Optional[builtins.str] = None,
load_balancing_scheme: typing.Optional[builtins.str] = None,
locality_lb_policy: typing.Optional[builtins.str] = None,
log_config: typing.Optional[typing.Union["ComputeRegionBackendServiceLogConfig", typing.Dict[builtins.str, typing.Any]]] = None,
network: typing.Optional[builtins.str] = None,
outlier_detection: typing.Optional[typing.Union["ComputeRegionBackendServiceOutlierDetection", typing.Dict[builtins.str, typing.Any]]] = None,
port_name: typing.Optional[builtins.str] = None,
project: typing.Optional[builtins.str] = None,
protocol: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
session_affinity: typing.Optional[builtins.str] = None,
timeouts: typing.Optional[typing.Union["ComputeRegionBackendServiceTimeouts", typing.Dict[builtins.str, typing.Any]]] = None,
timeout_sec: typing.Optional[jsii.Number] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
'''Create a new {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service google_compute_region_backend_service} Resource.
:param scope: The scope in which to define this construct.
:param id_: The scoped construct ID. Must be unique amongst siblings in the same scope
:param name: Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression '`a-z <%5B-a-z0-9%5D*%5Ba-z0-9%5D>`_?' which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#name ComputeRegionBackendService#name}
:param affinity_cookie_ttl_sec: Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE. If set to 0, the cookie is non-persistent and lasts only until the end of the browser session (or equivalent). The maximum allowed value for TTL is one day. When the load balancing scheme is INTERNAL, this field is not used. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#affinity_cookie_ttl_sec ComputeRegionBackendService#affinity_cookie_ttl_sec}
:param backend: backend block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#backend ComputeRegionBackendService#backend}
:param cdn_policy: cdn_policy block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cdn_policy ComputeRegionBackendService#cdn_policy}
:param circuit_breakers: circuit_breakers block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#circuit_breakers ComputeRegionBackendService#circuit_breakers}
:param connection_draining_timeout_sec: Time for which instance will be drained (not accept new connections, but still work to finish started). Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#connection_draining_timeout_sec ComputeRegionBackendService#connection_draining_timeout_sec}
:param consistent_hash: consistent_hash block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consistent_hash ComputeRegionBackendService#consistent_hash}
:param description: An optional description of this resource. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#description ComputeRegionBackendService#description}
:param enable_cdn: If true, enable Cloud CDN for this RegionBackendService. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enable_cdn ComputeRegionBackendService#enable_cdn}
:param failover_policy: failover_policy block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#failover_policy ComputeRegionBackendService#failover_policy}
:param health_checks: The set of URLs to HealthCheck resources for health checking this RegionBackendService. Currently at most one health check can be specified. A health check must be specified unless the backend service uses an internet or serverless NEG as a backend. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#health_checks ComputeRegionBackendService#health_checks}
:param iap: iap block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#iap ComputeRegionBackendService#iap}
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#id ComputeRegionBackendService#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param load_balancing_scheme: Indicates what kind of load balancing this regional backend service will be used for. A backend service created for one type of load balancing cannot be used with the other(s). For more information, refer to `Choosing a load balancer <https://cloud.google.com/load-balancing/docs/backend-service>`_. Default value: "INTERNAL" Possible values: ["EXTERNAL", "EXTERNAL_MANAGED", "INTERNAL", "INTERNAL_MANAGED"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#load_balancing_scheme ComputeRegionBackendService#load_balancing_scheme}
:param locality_lb_policy: The load balancing algorithm used within the scope of the locality. The possible values are:. 'ROUND_ROBIN': This is a simple policy in which each healthy backend is selected in round robin order. 'LEAST_REQUEST': An O(1) algorithm which selects two random healthy hosts and picks the host which has fewer active requests. 'RING_HASH': The ring/modulo hash load balancer implements consistent hashing to backends. The algorithm has the property that the addition/removal of a host from a set of N hosts only affects 1/N of the requests. 'RANDOM': The load balancer selects a random healthy host. 'ORIGINAL_DESTINATION': Backend host is selected based on the client connection metadata, i.e., connections are opened to the same address as the destination address of the incoming connection before the connection was redirected to the load balancer. 'MAGLEV': used as a drop in replacement for the ring hash load balancer. Maglev is not as stable as ring hash but has faster table lookup build times and host selection times. For more information about Maglev, refer to https://ai.google/research/pubs/pub44824 'WEIGHTED_MAGLEV': Per-instance weighted Load Balancing via health check reported weights. If set, the Backend Service must configure a non legacy HTTP-based Health Check, and health check replies are expected to contain non-standard HTTP response header field X-Load-Balancing-Endpoint-Weight to specify the per-instance weights. If set, Load Balancing is weight based on the per-instance weights reported in the last processed health check replies, as long as every instance either reported a valid weight or had UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains equal-weight. This field is applicable to either: A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, and loadBalancingScheme set to INTERNAL_MANAGED. A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. A regional backend service with loadBalancingScheme set to EXTERNAL (External Network Load Balancing). Only MAGLEV and WEIGHTED_MAGLEV values are possible for External Network Load Balancing. The default is MAGLEV. If session_affinity is not NONE, and this field is not set to MAGLEV, WEIGHTED_MAGLEV, or RING_HASH, session affinity settings will not take effect. Only ROUND_ROBIN and RING_HASH are supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validate_for_proxyless field set to true. Possible values: ["ROUND_ROBIN", "LEAST_REQUEST", "RING_HASH", "RANDOM", "ORIGINAL_DESTINATION", "MAGLEV", "WEIGHTED_MAGLEV"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#locality_lb_policy ComputeRegionBackendService#locality_lb_policy}
:param log_config: log_config block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#log_config ComputeRegionBackendService#log_config}
:param network: The URL of the network to which this backend service belongs. This field can only be specified when the load balancing scheme is set to INTERNAL. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#network ComputeRegionBackendService#network}
:param outlier_detection: outlier_detection block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#outlier_detection ComputeRegionBackendService#outlier_detection}
:param port_name: A named port on a backend instance group representing the port for communication to the backend VMs in that group. Required when the loadBalancingScheme is EXTERNAL, EXTERNAL_MANAGED, INTERNAL_MANAGED, or INTERNAL_SELF_MANAGED and the backends are instance groups. The named port must be defined on each backend instance group. This parameter has no meaning if the backends are NEGs. API sets a default of "http" if not given. Must be omitted when the loadBalancingScheme is INTERNAL (Internal TCP/UDP Load Balancing). Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#port_name ComputeRegionBackendService#port_name}
:param project: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#project ComputeRegionBackendService#project}.
:param protocol: The protocol this RegionBackendService uses to communicate with backends. The default is HTTP. **NOTE**: HTTP2 is only valid for beta HTTP/2 load balancer types and may result in errors if used with the GA API. Possible values: ["HTTP", "HTTPS", "HTTP2", "SSL", "TCP", "UDP", "GRPC", "UNSPECIFIED"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#protocol ComputeRegionBackendService#protocol}
:param region: The Region in which the created backend service should reside. If it is not provided, the provider region is used. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#region ComputeRegionBackendService#region}
:param session_affinity: Type of session affinity to use. The default is NONE. Session affinity is not applicable if the protocol is UDP. Possible values: ["NONE", "CLIENT_IP", "CLIENT_IP_PORT_PROTO", "CLIENT_IP_PROTO", "GENERATED_COOKIE", "HEADER_FIELD", "HTTP_COOKIE", "CLIENT_IP_NO_DESTINATION"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#session_affinity ComputeRegionBackendService#session_affinity}
:param timeouts: timeouts block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#timeouts ComputeRegionBackendService#timeouts}
:param timeout_sec: How many seconds to wait for the backend before considering it a failed request. Default is 30 seconds. Valid range is [1, 86400]. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#timeout_sec ComputeRegionBackendService#timeout_sec}
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__7a7d193a662f1e9eb4abcaa96ce0135d0dd9f195f8f24db270c1567f4623d748)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id_", value=id_, expected_type=type_hints["id_"])
config = ComputeRegionBackendServiceConfig(
name=name,
affinity_cookie_ttl_sec=affinity_cookie_ttl_sec,
backend=backend,
cdn_policy=cdn_policy,
circuit_breakers=circuit_breakers,
connection_draining_timeout_sec=connection_draining_timeout_sec,
consistent_hash=consistent_hash,
description=description,
enable_cdn=enable_cdn,
failover_policy=failover_policy,
health_checks=health_checks,
iap=iap,
id=id,
load_balancing_scheme=load_balancing_scheme,
locality_lb_policy=locality_lb_policy,
log_config=log_config,
network=network,
outlier_detection=outlier_detection,
port_name=port_name,
project=project,
protocol=protocol,
region=region,
session_affinity=session_affinity,
timeouts=timeouts,
timeout_sec=timeout_sec,
connection=connection,
count=count,
depends_on=depends_on,
for_each=for_each,
lifecycle=lifecycle,
provider=provider,
provisioners=provisioners,
)
jsii.create(self.__class__, self, [scope, id_, config])
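# Illustrative usage (a minimal sketch, not part of the generated bindings;
# assumes an existing cdktf TerraformStack named `stack` and a valid health
# check self link):
#
#   ComputeRegionBackendService(
#       stack, "backend",
#       name="my-backend-service",
#       region="us-central1",
#       protocol="TCP",
#       health_checks=["<health-check-self-link>"],
#   )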
@jsii.member(jsii_name="putBackend")
def put_backend(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["ComputeRegionBackendServiceBackend", typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__705381bd496efb783ebe0f0ad360b9465d3fa796207cea7a63f6cb943ca18d55)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putBackend", [value]))
@jsii.member(jsii_name="putCdnPolicy")
def put_cdn_policy(
self,
*,
cache_key_policy: typing.Optional[typing.Union["ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy", typing.Dict[builtins.str, typing.Any]]] = None,
cache_mode: typing.Optional[builtins.str] = None,
client_ttl: typing.Optional[jsii.Number] = None,
default_ttl: typing.Optional[jsii.Number] = None,
max_ttl: typing.Optional[jsii.Number] = None,
negative_caching: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
negative_caching_policy: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy", typing.Dict[builtins.str, typing.Any]]]]] = None,
serve_while_stale: typing.Optional[jsii.Number] = None,
signed_url_cache_max_age_sec: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param cache_key_policy: cache_key_policy block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cache_key_policy ComputeRegionBackendService#cache_key_policy}
:param cache_mode: Specifies the cache setting for all responses from this backend. The possible values are: USE_ORIGIN_HEADERS, FORCE_CACHE_ALL and CACHE_ALL_STATIC Possible values: ["USE_ORIGIN_HEADERS", "FORCE_CACHE_ALL", "CACHE_ALL_STATIC"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cache_mode ComputeRegionBackendService#cache_mode}
:param client_ttl: Specifies the maximum allowed TTL for cached content served by this origin. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#client_ttl ComputeRegionBackendService#client_ttl}
:param default_ttl: Specifies the default TTL for cached content served by this origin for responses that do not have an existing valid TTL (max-age or s-max-age). Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#default_ttl ComputeRegionBackendService#default_ttl}
:param max_ttl: Specifies the maximum allowed TTL for cached content served by this origin. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_ttl ComputeRegionBackendService#max_ttl}
:param negative_caching: Negative caching allows per-status code TTLs to be set, in order to apply fine-grained caching for common errors or redirects. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#negative_caching ComputeRegionBackendService#negative_caching}
:param negative_caching_policy: negative_caching_policy block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#negative_caching_policy ComputeRegionBackendService#negative_caching_policy}
:param serve_while_stale: Serve existing content from the cache (if available) when revalidating content with the origin, or when an error is encountered when refreshing the cache. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#serve_while_stale ComputeRegionBackendService#serve_while_stale}
:param signed_url_cache_max_age_sec: Maximum number of seconds the response to a signed URL request will be considered fresh, defaults to 1hr (3600s). After this time period, the response will be revalidated before being served. When serving responses to signed URL requests, Cloud CDN will internally behave as though all responses from this backend had a "Cache-Control: public, max-age=[TTL]" header, regardless of any existing Cache-Control header. The actual headers served in responses will not be altered. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#signed_url_cache_max_age_sec ComputeRegionBackendService#signed_url_cache_max_age_sec}
'''
value = ComputeRegionBackendServiceCdnPolicy(
cache_key_policy=cache_key_policy,
cache_mode=cache_mode,
client_ttl=client_ttl,
default_ttl=default_ttl,
max_ttl=max_ttl,
negative_caching=negative_caching,
negative_caching_policy=negative_caching_policy,
serve_while_stale=serve_while_stale,
signed_url_cache_max_age_sec=signed_url_cache_max_age_sec,
)
return typing.cast(None, jsii.invoke(self, "putCdnPolicy", [value]))
@jsii.member(jsii_name="putCircuitBreakers")
def put_circuit_breakers(
self,
*,
max_connections: typing.Optional[jsii.Number] = None,
max_pending_requests: typing.Optional[jsii.Number] = None,
max_requests: typing.Optional[jsii.Number] = None,
max_requests_per_connection: typing.Optional[jsii.Number] = None,
max_retries: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param max_connections: The maximum number of connections to the backend cluster. Defaults to 1024. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections ComputeRegionBackendService#max_connections}
:param max_pending_requests: The maximum number of pending requests to the backend cluster. Defaults to 1024. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_pending_requests ComputeRegionBackendService#max_pending_requests}
:param max_requests: The maximum number of parallel requests to the backend cluster. Defaults to 1024. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_requests ComputeRegionBackendService#max_requests}
:param max_requests_per_connection: Maximum requests for a single backend connection. This parameter is respected by both the HTTP/1.1 and HTTP/2 implementations. If not specified, there is no limit. Setting this parameter to 1 will effectively disable keep alive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_requests_per_connection ComputeRegionBackendService#max_requests_per_connection}
:param max_retries: The maximum number of parallel retries to the backend cluster. Defaults to 3. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_retries ComputeRegionBackendService#max_retries}
'''
value = ComputeRegionBackendServiceCircuitBreakers(
max_connections=max_connections,
max_pending_requests=max_pending_requests,
max_requests=max_requests,
max_requests_per_connection=max_requests_per_connection,
max_retries=max_retries,
)
return typing.cast(None, jsii.invoke(self, "putCircuitBreakers", [value]))
@jsii.member(jsii_name="putConsistentHash")
def put_consistent_hash(
self,
*,
http_cookie: typing.Optional[typing.Union["ComputeRegionBackendServiceConsistentHashHttpCookie", typing.Dict[builtins.str, typing.Any]]] = None,
http_header_name: typing.Optional[builtins.str] = None,
minimum_ring_size: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param http_cookie: http_cookie block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#http_cookie ComputeRegionBackendService#http_cookie}
:param http_header_name: The hash based on the value of the specified header field. This field is applicable if the sessionAffinity is set to HEADER_FIELD. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#http_header_name ComputeRegionBackendService#http_header_name}
:param minimum_ring_size: The minimum number of virtual nodes to use for the hash ring. Larger ring sizes result in more granular load distributions. If the number of hosts in the load balancing pool is larger than the ring size, each host will be assigned a single virtual node. Defaults to 1024. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#minimum_ring_size ComputeRegionBackendService#minimum_ring_size}
'''
value = ComputeRegionBackendServiceConsistentHash(
http_cookie=http_cookie,
http_header_name=http_header_name,
minimum_ring_size=minimum_ring_size,
)
return typing.cast(None, jsii.invoke(self, "putConsistentHash", [value]))
@jsii.member(jsii_name="putFailoverPolicy")
def put_failover_policy(
self,
*,
disable_connection_drain_on_failover: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
drop_traffic_if_unhealthy: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
failover_ratio: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param disable_connection_drain_on_failover: On failover or failback, this field indicates whether connection drain will be honored. Setting this to true has the following effect: connections to the old active pool are not drained. Connections to the new active pool use the timeout of 10 min (currently fixed). Setting to false has the following effect: both old and new connections will have a drain timeout of 10 min. This can be set to true only if the protocol is TCP. The default is false. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#disable_connection_drain_on_failover ComputeRegionBackendService#disable_connection_drain_on_failover}
:param drop_traffic_if_unhealthy: This option is used only when no healthy VMs are detected in the primary and backup instance groups. When set to true, traffic is dropped. When set to false, new connections are sent across all VMs in the primary group. The default is false. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#drop_traffic_if_unhealthy ComputeRegionBackendService#drop_traffic_if_unhealthy}
:param failover_ratio: The value of the field must be in [0, 1]. If the ratio of the healthy VMs in the primary backend is at or below this number, traffic arriving at the load-balanced IP will be directed to the failover backend. In case where 'failoverRatio' is not set or all the VMs in the backup backend are unhealthy, the traffic will be directed back to the primary backend in the "force" mode, where traffic will be spread to the healthy VMs with the best effort, or to all VMs when no VM is healthy. This field is only used with l4 load balancing. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#failover_ratio ComputeRegionBackendService#failover_ratio}
'''
value = ComputeRegionBackendServiceFailoverPolicy(
disable_connection_drain_on_failover=disable_connection_drain_on_failover,
drop_traffic_if_unhealthy=drop_traffic_if_unhealthy,
failover_ratio=failover_ratio,
)
return typing.cast(None, jsii.invoke(self, "putFailoverPolicy", [value]))
@jsii.member(jsii_name="putIap")
def put_iap(
self,
*,
oauth2_client_id: builtins.str,
oauth2_client_secret: builtins.str,
) -> None:
'''
:param oauth2_client_id: OAuth2 Client ID for IAP. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#oauth2_client_id ComputeRegionBackendService#oauth2_client_id}
:param oauth2_client_secret: OAuth2 Client Secret for IAP. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#oauth2_client_secret ComputeRegionBackendService#oauth2_client_secret}
'''
value = ComputeRegionBackendServiceIap(
oauth2_client_id=oauth2_client_id,
oauth2_client_secret=oauth2_client_secret,
)
return typing.cast(None, jsii.invoke(self, "putIap", [value]))
@jsii.member(jsii_name="putLogConfig")
def put_log_config(
self,
*,
enable: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
sample_rate: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param enable: Whether to enable logging for the load balancer traffic served by this backend service. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enable ComputeRegionBackendService#enable}
:param sample_rate: This field can only be specified if logging is enabled for this backend service. The value of the field must be in [0, 1]. This configures the sampling rate of requests to the load balancer where 1.0 means all logged requests are reported and 0.0 means no logged requests are reported. The default value is 1.0. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#sample_rate ComputeRegionBackendService#sample_rate}
'''
value = ComputeRegionBackendServiceLogConfig(
enable=enable, sample_rate=sample_rate
)
return typing.cast(None, jsii.invoke(self, "putLogConfig", [value]))
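# Illustrative call (assumes `service` is an existing ComputeRegionBackendService):
#   service.put_log_config(enable=True, sample_rate=0.5)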
@jsii.member(jsii_name="putOutlierDetection")
def put_outlier_detection(
self,
*,
base_ejection_time: typing.Optional[typing.Union["ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime", typing.Dict[builtins.str, typing.Any]]] = None,
consecutive_errors: typing.Optional[jsii.Number] = None,
consecutive_gateway_failure: typing.Optional[jsii.Number] = None,
enforcing_consecutive_errors: typing.Optional[jsii.Number] = None,
enforcing_consecutive_gateway_failure: typing.Optional[jsii.Number] = None,
enforcing_success_rate: typing.Optional[jsii.Number] = None,
interval: typing.Optional[typing.Union["ComputeRegionBackendServiceOutlierDetectionInterval", typing.Dict[builtins.str, typing.Any]]] = None,
max_ejection_percent: typing.Optional[jsii.Number] = None,
success_rate_minimum_hosts: typing.Optional[jsii.Number] = None,
success_rate_request_volume: typing.Optional[jsii.Number] = None,
success_rate_stdev_factor: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param base_ejection_time: base_ejection_time block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#base_ejection_time ComputeRegionBackendService#base_ejection_time}
:param consecutive_errors: Number of errors before a host is ejected from the connection pool. When the backend host is accessed over HTTP, a 5xx return code qualifies as an error. Defaults to 5. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consecutive_errors ComputeRegionBackendService#consecutive_errors}
:param consecutive_gateway_failure: The number of consecutive gateway failures (502, 503, 504 status or connection errors that are mapped to one of those status codes) before a consecutive gateway failure ejection occurs. Defaults to 5. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consecutive_gateway_failure ComputeRegionBackendService#consecutive_gateway_failure}
:param enforcing_consecutive_errors: The percentage chance that a host will be actually ejected when an outlier status is detected through consecutive 5xx. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 100. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_consecutive_errors ComputeRegionBackendService#enforcing_consecutive_errors}
:param enforcing_consecutive_gateway_failure: The percentage chance that a host will be actually ejected when an outlier status is detected through consecutive gateway failures. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 0. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_consecutive_gateway_failure ComputeRegionBackendService#enforcing_consecutive_gateway_failure}
:param enforcing_success_rate: The percentage chance that a host will be actually ejected when an outlier status is detected through success rate statistics. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 100. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_success_rate ComputeRegionBackendService#enforcing_success_rate}
:param interval: interval block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#interval ComputeRegionBackendService#interval}
:param max_ejection_percent: Maximum percentage of hosts in the load balancing pool for the backend service that can be ejected. Defaults to 10%. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_ejection_percent ComputeRegionBackendService#max_ejection_percent}
:param success_rate_minimum_hosts: The number of hosts in a cluster that must have enough request volume to detect success rate outliers. If the number of hosts is less than this setting, outlier detection via success rate statistics is not performed for any host in the cluster. Defaults to 5. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_minimum_hosts ComputeRegionBackendService#success_rate_minimum_hosts}
:param success_rate_request_volume: The minimum number of total requests that must be collected in one interval (as defined by the interval duration above) to include this host in success rate based outlier detection. If the volume is lower than this setting, outlier detection via success rate statistics is not performed for that host. Defaults to 100. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_request_volume ComputeRegionBackendService#success_rate_request_volume}
:param success_rate_stdev_factor: This factor is used to determine the ejection threshold for success rate outlier ejection. The ejection threshold is the difference between the mean success rate, and the product of this factor and the standard deviation of the mean success rate: mean - (stdev * success_rate_stdev_factor). This factor is divided by a thousand to get a double. That is, if the desired factor is 1.9, the runtime value should be 1900. Defaults to 1900. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_stdev_factor ComputeRegionBackendService#success_rate_stdev_factor}
'''
value = ComputeRegionBackendServiceOutlierDetection(
base_ejection_time=base_ejection_time,
consecutive_errors=consecutive_errors,
consecutive_gateway_failure=consecutive_gateway_failure,
enforcing_consecutive_errors=enforcing_consecutive_errors,
enforcing_consecutive_gateway_failure=enforcing_consecutive_gateway_failure,
enforcing_success_rate=enforcing_success_rate,
interval=interval,
max_ejection_percent=max_ejection_percent,
success_rate_minimum_hosts=success_rate_minimum_hosts,
success_rate_request_volume=success_rate_request_volume,
success_rate_stdev_factor=success_rate_stdev_factor,
)
return typing.cast(None, jsii.invoke(self, "putOutlierDetection", [value]))
@jsii.member(jsii_name="putTimeouts")
def put_timeouts(
self,
*,
create: typing.Optional[builtins.str] = None,
delete: typing.Optional[builtins.str] = None,
update: typing.Optional[builtins.str] = None,
) -> None:
'''
:param create: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#create ComputeRegionBackendService#create}.
:param delete: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#delete ComputeRegionBackendService#delete}.
:param update: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#update ComputeRegionBackendService#update}.
'''
value = ComputeRegionBackendServiceTimeouts(
create=create, delete=delete, update=update
)
return typing.cast(None, jsii.invoke(self, "putTimeouts", [value]))
@jsii.member(jsii_name="resetAffinityCookieTtlSec")
def reset_affinity_cookie_ttl_sec(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetAffinityCookieTtlSec", []))
@jsii.member(jsii_name="resetBackend")
def reset_backend(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetBackend", []))
@jsii.member(jsii_name="resetCdnPolicy")
def reset_cdn_policy(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCdnPolicy", []))
@jsii.member(jsii_name="resetCircuitBreakers")
def reset_circuit_breakers(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCircuitBreakers", []))
@jsii.member(jsii_name="resetConnectionDrainingTimeoutSec")
def reset_connection_draining_timeout_sec(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetConnectionDrainingTimeoutSec", []))
@jsii.member(jsii_name="resetConsistentHash")
def reset_consistent_hash(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetConsistentHash", []))
@jsii.member(jsii_name="resetDescription")
def reset_description(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDescription", []))
@jsii.member(jsii_name="resetEnableCdn")
def reset_enable_cdn(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEnableCdn", []))
@jsii.member(jsii_name="resetFailoverPolicy")
def reset_failover_policy(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetFailoverPolicy", []))
@jsii.member(jsii_name="resetHealthChecks")
def reset_health_checks(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHealthChecks", []))
@jsii.member(jsii_name="resetIap")
def reset_iap(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetIap", []))
@jsii.member(jsii_name="resetId")
def reset_id(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetId", []))
@jsii.member(jsii_name="resetLoadBalancingScheme")
def reset_load_balancing_scheme(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetLoadBalancingScheme", []))
@jsii.member(jsii_name="resetLocalityLbPolicy")
def reset_locality_lb_policy(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetLocalityLbPolicy", []))
@jsii.member(jsii_name="resetLogConfig")
def reset_log_config(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetLogConfig", []))
@jsii.member(jsii_name="resetNetwork")
def reset_network(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetNetwork", []))
@jsii.member(jsii_name="resetOutlierDetection")
def reset_outlier_detection(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetOutlierDetection", []))
@jsii.member(jsii_name="resetPortName")
def reset_port_name(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetPortName", []))
@jsii.member(jsii_name="resetProject")
def reset_project(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetProject", []))
@jsii.member(jsii_name="resetProtocol")
def reset_protocol(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetProtocol", []))
@jsii.member(jsii_name="resetRegion")
def reset_region(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetRegion", []))
@jsii.member(jsii_name="resetSessionAffinity")
def reset_session_affinity(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSessionAffinity", []))
@jsii.member(jsii_name="resetTimeouts")
def reset_timeouts(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetTimeouts", []))
@jsii.member(jsii_name="resetTimeoutSec")
def reset_timeout_sec(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetTimeoutSec", []))
@jsii.member(jsii_name="synthesizeAttributes")
def _synthesize_attributes(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "synthesizeAttributes", []))
@jsii.python.classproperty
@jsii.member(jsii_name="tfResourceType")
def TF_RESOURCE_TYPE(cls) -> builtins.str:
return typing.cast(builtins.str, jsii.sget(cls, "tfResourceType"))
@builtins.property
@jsii.member(jsii_name="backend")
def backend(self) -> "ComputeRegionBackendServiceBackendList":
return typing.cast("ComputeRegionBackendServiceBackendList", jsii.get(self, "backend"))
@builtins.property
@jsii.member(jsii_name="cdnPolicy")
def cdn_policy(self) -> "ComputeRegionBackendServiceCdnPolicyOutputReference":
return typing.cast("ComputeRegionBackendServiceCdnPolicyOutputReference", jsii.get(self, "cdnPolicy"))
@builtins.property
@jsii.member(jsii_name="circuitBreakers")
def circuit_breakers(
self,
) -> "ComputeRegionBackendServiceCircuitBreakersOutputReference":
return typing.cast("ComputeRegionBackendServiceCircuitBreakersOutputReference", jsii.get(self, "circuitBreakers"))
@builtins.property
@jsii.member(jsii_name="consistentHash")
def consistent_hash(
self,
) -> "ComputeRegionBackendServiceConsistentHashOutputReference":
return typing.cast("ComputeRegionBackendServiceConsistentHashOutputReference", jsii.get(self, "consistentHash"))
@builtins.property
@jsii.member(jsii_name="creationTimestamp")
def creation_timestamp(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "creationTimestamp"))
@builtins.property
@jsii.member(jsii_name="failoverPolicy")
def failover_policy(
self,
) -> "ComputeRegionBackendServiceFailoverPolicyOutputReference":
return typing.cast("ComputeRegionBackendServiceFailoverPolicyOutputReference", jsii.get(self, "failoverPolicy"))
@builtins.property
@jsii.member(jsii_name="fingerprint")
def fingerprint(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "fingerprint"))
@builtins.property
@jsii.member(jsii_name="iap")
def iap(self) -> "ComputeRegionBackendServiceIapOutputReference":
return typing.cast("ComputeRegionBackendServiceIapOutputReference", jsii.get(self, "iap"))
@builtins.property
@jsii.member(jsii_name="logConfig")
def log_config(self) -> "ComputeRegionBackendServiceLogConfigOutputReference":
return typing.cast("ComputeRegionBackendServiceLogConfigOutputReference", jsii.get(self, "logConfig"))
@builtins.property
@jsii.member(jsii_name="outlierDetection")
def outlier_detection(
self,
) -> "ComputeRegionBackendServiceOutlierDetectionOutputReference":
return typing.cast("ComputeRegionBackendServiceOutlierDetectionOutputReference", jsii.get(self, "outlierDetection"))
@builtins.property
@jsii.member(jsii_name="selfLink")
def self_link(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "selfLink"))
@builtins.property
@jsii.member(jsii_name="timeouts")
def timeouts(self) -> "ComputeRegionBackendServiceTimeoutsOutputReference":
return typing.cast("ComputeRegionBackendServiceTimeoutsOutputReference", jsii.get(self, "timeouts"))
@builtins.property
@jsii.member(jsii_name="affinityCookieTtlSecInput")
def affinity_cookie_ttl_sec_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "affinityCookieTtlSecInput"))
@builtins.property
@jsii.member(jsii_name="backendInput")
def backend_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["ComputeRegionBackendServiceBackend"]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["ComputeRegionBackendServiceBackend"]]], jsii.get(self, "backendInput"))
@builtins.property
@jsii.member(jsii_name="cdnPolicyInput")
def cdn_policy_input(
self,
) -> typing.Optional["ComputeRegionBackendServiceCdnPolicy"]:
return typing.cast(typing.Optional["ComputeRegionBackendServiceCdnPolicy"], jsii.get(self, "cdnPolicyInput"))
@builtins.property
@jsii.member(jsii_name="circuitBreakersInput")
def circuit_breakers_input(
self,
) -> typing.Optional["ComputeRegionBackendServiceCircuitBreakers"]:
return typing.cast(typing.Optional["ComputeRegionBackendServiceCircuitBreakers"], jsii.get(self, "circuitBreakersInput"))
@builtins.property
@jsii.member(jsii_name="connectionDrainingTimeoutSecInput")
def connection_draining_timeout_sec_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "connectionDrainingTimeoutSecInput"))
@builtins.property
@jsii.member(jsii_name="consistentHashInput")
def consistent_hash_input(
self,
) -> typing.Optional["ComputeRegionBackendServiceConsistentHash"]:
return typing.cast(typing.Optional["ComputeRegionBackendServiceConsistentHash"], jsii.get(self, "consistentHashInput"))
@builtins.property
@jsii.member(jsii_name="descriptionInput")
def description_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "descriptionInput"))
@builtins.property
@jsii.member(jsii_name="enableCdnInput")
def enable_cdn_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "enableCdnInput"))
@builtins.property
@jsii.member(jsii_name="failoverPolicyInput")
def failover_policy_input(
self,
) -> typing.Optional["ComputeRegionBackendServiceFailoverPolicy"]:
return typing.cast(typing.Optional["ComputeRegionBackendServiceFailoverPolicy"], jsii.get(self, "failoverPolicyInput"))
@builtins.property
@jsii.member(jsii_name="healthChecksInput")
def health_checks_input(self) -> typing.Optional[typing.List[builtins.str]]:
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "healthChecksInput"))
@builtins.property
@jsii.member(jsii_name="iapInput")
def iap_input(self) -> typing.Optional["ComputeRegionBackendServiceIap"]:
return typing.cast(typing.Optional["ComputeRegionBackendServiceIap"], jsii.get(self, "iapInput"))
@builtins.property
@jsii.member(jsii_name="idInput")
def id_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "idInput"))
@builtins.property
@jsii.member(jsii_name="loadBalancingSchemeInput")
def load_balancing_scheme_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loadBalancingSchemeInput"))
@builtins.property
@jsii.member(jsii_name="localityLbPolicyInput")
def locality_lb_policy_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "localityLbPolicyInput"))
@builtins.property
@jsii.member(jsii_name="logConfigInput")
def log_config_input(
self,
) -> typing.Optional["ComputeRegionBackendServiceLogConfig"]:
return typing.cast(typing.Optional["ComputeRegionBackendServiceLogConfig"], jsii.get(self, "logConfigInput"))
@builtins.property
@jsii.member(jsii_name="nameInput")
def name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "nameInput"))
@builtins.property
@jsii.member(jsii_name="networkInput")
def network_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "networkInput"))
@builtins.property
@jsii.member(jsii_name="outlierDetectionInput")
def outlier_detection_input(
self,
) -> typing.Optional["ComputeRegionBackendServiceOutlierDetection"]:
return typing.cast(typing.Optional["ComputeRegionBackendServiceOutlierDetection"], jsii.get(self, "outlierDetectionInput"))
@builtins.property
@jsii.member(jsii_name="portNameInput")
def port_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "portNameInput"))
@builtins.property
@jsii.member(jsii_name="projectInput")
def project_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "projectInput"))
@builtins.property
@jsii.member(jsii_name="protocolInput")
def protocol_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "protocolInput"))
@builtins.property
@jsii.member(jsii_name="regionInput")
def region_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "regionInput"))
@builtins.property
@jsii.member(jsii_name="sessionAffinityInput")
def session_affinity_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "sessionAffinityInput"))
@builtins.property
@jsii.member(jsii_name="timeoutSecInput")
def timeout_sec_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "timeoutSecInput"))
@builtins.property
@jsii.member(jsii_name="timeoutsInput")
def timeouts_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, "ComputeRegionBackendServiceTimeouts"]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, "ComputeRegionBackendServiceTimeouts"]], jsii.get(self, "timeoutsInput"))
@builtins.property
@jsii.member(jsii_name="affinityCookieTtlSec")
def affinity_cookie_ttl_sec(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "affinityCookieTtlSec"))
@affinity_cookie_ttl_sec.setter
def affinity_cookie_ttl_sec(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__52c9500e4e9fa93a91b304c7e3e135ccf3d7f2a324b03504bc533c94c5a2df32)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "affinityCookieTtlSec", value)
@builtins.property
@jsii.member(jsii_name="connectionDrainingTimeoutSec")
def connection_draining_timeout_sec(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "connectionDrainingTimeoutSec"))
@connection_draining_timeout_sec.setter
def connection_draining_timeout_sec(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__562c8e062732203e76fac17c68d7268e9b836f2e291e4137b9d74474d5235398)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "connectionDrainingTimeoutSec", value)
@builtins.property
@jsii.member(jsii_name="description")
def description(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "description"))
@description.setter
def description(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__306edbc75d0eb09cd430efdaaa8fe4ba6cca369f0343b0685ace0323a1801649)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "description", value)
@builtins.property
@jsii.member(jsii_name="enableCdn")
def enable_cdn(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "enableCdn"))
@enable_cdn.setter
def enable_cdn(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__74f99af0ba03bc970ff0be137026d8fb594305f0ca1bd95bda538343ad7fae74)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "enableCdn", value)
@builtins.property
@jsii.member(jsii_name="healthChecks")
def health_checks(self) -> typing.List[builtins.str]:
return typing.cast(typing.List[builtins.str], jsii.get(self, "healthChecks"))
@health_checks.setter
def health_checks(self, value: typing.List[builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0ba169d163fd0341ebbbfce11a9608f6e5b63e1a95308893e2731f7c62e35e35)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "healthChecks", value)
@builtins.property
@jsii.member(jsii_name="id")
def id(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "id"))
@id.setter
def id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0969e93341a32202ba7e523eca85fd1ee559e04f9d426de5310155b1cbbb02c4)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "id", value)
@builtins.property
@jsii.member(jsii_name="loadBalancingScheme")
def load_balancing_scheme(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "loadBalancingScheme"))
@load_balancing_scheme.setter
def load_balancing_scheme(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__09da2d78ec50a281748ef4e20814f3f26e79186dea33f8ac2e95723aec8c35e0)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "loadBalancingScheme", value)
@builtins.property
@jsii.member(jsii_name="localityLbPolicy")
def locality_lb_policy(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "localityLbPolicy"))
@locality_lb_policy.setter
def locality_lb_policy(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8506a937369ced4764e829b71f8065eedb9f3a7c2570925faea401e3f50db486)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "localityLbPolicy", value)
@builtins.property
@jsii.member(jsii_name="name")
def name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "name"))
@name.setter
def name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__55621c9e6967bdd01d61bfa1a858a6202f0b5315cd29ba41c2e209758ce86117)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "name", value)
@builtins.property
@jsii.member(jsii_name="network")
def network(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "network"))
@network.setter
def network(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__67ebfe702142c520c92089bdb02cba19284e5d5acb56a2fd2c617b5686d4f37e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "network", value)
@builtins.property
@jsii.member(jsii_name="portName")
def port_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "portName"))
@port_name.setter
def port_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c666b75001d11f84a61aa1c90cebb2ab4672e55a574076652e3a0bc126fbaf9d)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "portName", value)
@builtins.property
@jsii.member(jsii_name="project")
def project(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "project"))
@project.setter
def project(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__333ec9777f8f7c081037d0b0ddccd0b02c8518c449592f3684754045d9a1f749)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "project", value)
@builtins.property
@jsii.member(jsii_name="protocol")
def protocol(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "protocol"))
@protocol.setter
def protocol(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__1c43d834b72384b1d30c53e2abadcde998a0e82887efb5b9facc6b030754f79a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "protocol", value)
@builtins.property
@jsii.member(jsii_name="region")
def region(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "region"))
@region.setter
def region(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__88dfda43761cc9369b3092358ef4f0ead987146f7b0686be2cd2398686706989)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "region", value)
@builtins.property
@jsii.member(jsii_name="sessionAffinity")
def session_affinity(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "sessionAffinity"))
@session_affinity.setter
def session_affinity(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__94abaa06d37269786924a92c0395e3fcd0057dce5ec470ee0d4ad0525e8e87dc)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "sessionAffinity", value)
@builtins.property
@jsii.member(jsii_name="timeoutSec")
def timeout_sec(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "timeoutSec"))
@timeout_sec.setter
def timeout_sec(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3297937f636756385624c47a28ecd8391715e93182b1a77e7f5a54f6a1c8b2e4)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "timeoutSec", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceBackend",
jsii_struct_bases=[],
name_mapping={
"group": "group",
"balancing_mode": "balancingMode",
"capacity_scaler": "capacityScaler",
"description": "description",
"failover": "failover",
"max_connections": "maxConnections",
"max_connections_per_endpoint": "maxConnectionsPerEndpoint",
"max_connections_per_instance": "maxConnectionsPerInstance",
"max_rate": "maxRate",
"max_rate_per_endpoint": "maxRatePerEndpoint",
"max_rate_per_instance": "maxRatePerInstance",
"max_utilization": "maxUtilization",
},
)
class ComputeRegionBackendServiceBackend:
def __init__(
self,
*,
group: builtins.str,
balancing_mode: typing.Optional[builtins.str] = None,
capacity_scaler: typing.Optional[jsii.Number] = None,
description: typing.Optional[builtins.str] = None,
failover: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
max_connections: typing.Optional[jsii.Number] = None,
max_connections_per_endpoint: typing.Optional[jsii.Number] = None,
max_connections_per_instance: typing.Optional[jsii.Number] = None,
max_rate: typing.Optional[jsii.Number] = None,
max_rate_per_endpoint: typing.Optional[jsii.Number] = None,
max_rate_per_instance: typing.Optional[jsii.Number] = None,
max_utilization: typing.Optional[jsii.Number] = None,
) -> None:
'''
        :param group: The fully-qualified URL of an Instance Group or Network Endpoint Group resource. In the case of an instance group this defines the list of instances that serve traffic. Member virtual machine instances from each instance group must live in the same zone as the instance group itself. No two backends in a backend service are allowed to use the same Instance Group resource. For Network Endpoint Groups this defines the list of endpoints. All endpoints of the Network Endpoint Group must be hosted on instances located in the same zone as the Network Endpoint Group. Backend services cannot mix Instance Group and Network Endpoint Group backends. When the 'load_balancing_scheme' is INTERNAL, only instance groups are supported. Note that you must specify an Instance Group or Network Endpoint Group resource using the fully-qualified URL, rather than a partial URL. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#group ComputeRegionBackendService#group}
        :param balancing_mode: Specifies the balancing mode for this backend. See the `Backend Services Overview <https://cloud.google.com/load-balancing/docs/backend-service#balancing-mode>`_ for an explanation of load balancing modes. Default value: "CONNECTION". Possible values: ["UTILIZATION", "RATE", "CONNECTION"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#balancing_mode ComputeRegionBackendService#balancing_mode}
        :param capacity_scaler: A multiplier applied to the group's maximum servicing capacity (based on UTILIZATION, RATE or CONNECTION). ~>**NOTE**: This field cannot be set for INTERNAL region backend services (default loadBalancingScheme), but is required for non-INTERNAL backend services. The total capacity_scaler for all backends must be non-zero. A setting of 0 means the group is completely drained, offering 0% of its available capacity. Valid range is [0.0, 1.0]. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#capacity_scaler ComputeRegionBackendService#capacity_scaler}
:param description: An optional description of this resource. Provide this property when you create the resource. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#description ComputeRegionBackendService#description}
:param failover: This field designates whether this is a failover backend. More than one failover backend can be configured for a given RegionBackendService. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#failover ComputeRegionBackendService#failover}
:param max_connections: The max number of simultaneous connections for the group. Can be used with either CONNECTION or UTILIZATION balancing modes. Cannot be set for INTERNAL backend services. For CONNECTION mode, either maxConnections or one of maxConnectionsPerInstance or maxConnectionsPerEndpoint, as appropriate for group type, must be set. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections ComputeRegionBackendService#max_connections}
:param max_connections_per_endpoint: The max number of simultaneous connections that a single backend network endpoint can handle. Cannot be set for INTERNAL backend services. This is used to calculate the capacity of the group. Can be used in either CONNECTION or UTILIZATION balancing modes. For CONNECTION mode, either maxConnections or maxConnectionsPerEndpoint must be set. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections_per_endpoint ComputeRegionBackendService#max_connections_per_endpoint}
:param max_connections_per_instance: The max number of simultaneous connections that a single backend instance can handle. Cannot be set for INTERNAL backend services. This is used to calculate the capacity of the group. Can be used in either CONNECTION or UTILIZATION balancing modes. For CONNECTION mode, either maxConnections or maxConnectionsPerInstance must be set. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections_per_instance ComputeRegionBackendService#max_connections_per_instance}
        :param max_rate: The max requests per second (RPS) of the group. Cannot be set for INTERNAL backend services. Can be used with either RATE or UTILIZATION balancing modes, but is required in RATE mode. Either maxRate or one of maxRatePerInstance or maxRatePerEndpoint, as appropriate for group type, must be set. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_rate ComputeRegionBackendService#max_rate}
:param max_rate_per_endpoint: The max requests per second (RPS) that a single backend network endpoint can handle. This is used to calculate the capacity of the group. Can be used in either balancing mode. For RATE mode, either maxRate or maxRatePerEndpoint must be set. Cannot be set for INTERNAL backend services. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_rate_per_endpoint ComputeRegionBackendService#max_rate_per_endpoint}
:param max_rate_per_instance: The max requests per second (RPS) that a single backend instance can handle. This is used to calculate the capacity of the group. Can be used in either balancing mode. For RATE mode, either maxRate or maxRatePerInstance must be set. Cannot be set for INTERNAL backend services. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_rate_per_instance ComputeRegionBackendService#max_rate_per_instance}
:param max_utilization: Used when balancingMode is UTILIZATION. This ratio defines the CPU utilization target for the group. Valid range is [0.0, 1.0]. Cannot be set for INTERNAL backend services. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_utilization ComputeRegionBackendService#max_utilization}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a84ffd6da46f54b7caae12c227efde4a29b7de94f73341429b4bb6ce82854084)
check_type(argname="argument group", value=group, expected_type=type_hints["group"])
check_type(argname="argument balancing_mode", value=balancing_mode, expected_type=type_hints["balancing_mode"])
check_type(argname="argument capacity_scaler", value=capacity_scaler, expected_type=type_hints["capacity_scaler"])
check_type(argname="argument description", value=description, expected_type=type_hints["description"])
check_type(argname="argument failover", value=failover, expected_type=type_hints["failover"])
check_type(argname="argument max_connections", value=max_connections, expected_type=type_hints["max_connections"])
check_type(argname="argument max_connections_per_endpoint", value=max_connections_per_endpoint, expected_type=type_hints["max_connections_per_endpoint"])
check_type(argname="argument max_connections_per_instance", value=max_connections_per_instance, expected_type=type_hints["max_connections_per_instance"])
check_type(argname="argument max_rate", value=max_rate, expected_type=type_hints["max_rate"])
check_type(argname="argument max_rate_per_endpoint", value=max_rate_per_endpoint, expected_type=type_hints["max_rate_per_endpoint"])
check_type(argname="argument max_rate_per_instance", value=max_rate_per_instance, expected_type=type_hints["max_rate_per_instance"])
check_type(argname="argument max_utilization", value=max_utilization, expected_type=type_hints["max_utilization"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"group": group,
}
if balancing_mode is not None:
self._values["balancing_mode"] = balancing_mode
if capacity_scaler is not None:
self._values["capacity_scaler"] = capacity_scaler
if description is not None:
self._values["description"] = description
if failover is not None:
self._values["failover"] = failover
if max_connections is not None:
self._values["max_connections"] = max_connections
if max_connections_per_endpoint is not None:
self._values["max_connections_per_endpoint"] = max_connections_per_endpoint
if max_connections_per_instance is not None:
self._values["max_connections_per_instance"] = max_connections_per_instance
if max_rate is not None:
self._values["max_rate"] = max_rate
if max_rate_per_endpoint is not None:
self._values["max_rate_per_endpoint"] = max_rate_per_endpoint
if max_rate_per_instance is not None:
self._values["max_rate_per_instance"] = max_rate_per_instance
if max_utilization is not None:
self._values["max_utilization"] = max_utilization
@builtins.property
def group(self) -> builtins.str:
'''The fully-qualified URL of an Instance Group or Network Endpoint Group resource.
        In the case of an instance group this defines the list
        of instances that serve traffic. Member virtual machine
        instances from each instance group must live in the same zone as
        the instance group itself. No two backends in a backend service
        are allowed to use the same Instance Group resource.
        For Network Endpoint Groups this defines the list of endpoints. All
        endpoints of the Network Endpoint Group must be hosted on instances
        located in the same zone as the Network Endpoint Group.
Backend services cannot mix Instance Group and
Network Endpoint Group backends.
When the 'load_balancing_scheme' is INTERNAL, only instance groups
are supported.
Note that you must specify an Instance Group or Network Endpoint
Group resource using the fully-qualified URL, rather than a
partial URL.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#group ComputeRegionBackendService#group}
'''
result = self._values.get("group")
assert result is not None, "Required property 'group' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def balancing_mode(self) -> typing.Optional[builtins.str]:
'''Specifies the balancing mode for this backend.
See the `Backend Services Overview <https://cloud.google.com/load-balancing/docs/backend-service#balancing-mode>`_
        for an explanation of load balancing modes. Default value: "CONNECTION". Possible values: ["UTILIZATION", "RATE", "CONNECTION"]
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#balancing_mode ComputeRegionBackendService#balancing_mode}
'''
result = self._values.get("balancing_mode")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def capacity_scaler(self) -> typing.Optional[jsii.Number]:
'''A multiplier applied to the group's maximum servicing capacity (based on UTILIZATION, RATE or CONNECTION).
~>**NOTE**: This field cannot be set for
INTERNAL region backend services (default loadBalancingScheme),
        but is required for non-INTERNAL backend services. The total
        capacity_scaler for all backends must be non-zero.
        A setting of 0 means the group is completely drained, offering
        0% of its available capacity. Valid range is [0.0, 1.0].
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#capacity_scaler ComputeRegionBackendService#capacity_scaler}
'''
result = self._values.get("capacity_scaler")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def description(self) -> typing.Optional[builtins.str]:
'''An optional description of this resource. Provide this property when you create the resource.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#description ComputeRegionBackendService#description}
'''
result = self._values.get("description")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def failover(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''This field designates whether this is a failover backend.
More
than one failover backend can be configured for a given RegionBackendService.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#failover ComputeRegionBackendService#failover}
'''
result = self._values.get("failover")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def max_connections(self) -> typing.Optional[jsii.Number]:
'''The max number of simultaneous connections for the group.
Can
be used with either CONNECTION or UTILIZATION balancing modes.
Cannot be set for INTERNAL backend services.
For CONNECTION mode, either maxConnections or one
of maxConnectionsPerInstance or maxConnectionsPerEndpoint,
as appropriate for group type, must be set.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections ComputeRegionBackendService#max_connections}
'''
result = self._values.get("max_connections")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_connections_per_endpoint(self) -> typing.Optional[jsii.Number]:
'''The max number of simultaneous connections that a single backend network endpoint can handle. Cannot be set for INTERNAL backend services.
This is used to calculate the capacity of the group. Can be
used in either CONNECTION or UTILIZATION balancing modes. For
CONNECTION mode, either maxConnections or
maxConnectionsPerEndpoint must be set.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections_per_endpoint ComputeRegionBackendService#max_connections_per_endpoint}
'''
result = self._values.get("max_connections_per_endpoint")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_connections_per_instance(self) -> typing.Optional[jsii.Number]:
'''The max number of simultaneous connections that a single backend instance can handle. Cannot be set for INTERNAL backend services.
This is used to calculate the capacity of the group.
Can be used in either CONNECTION or UTILIZATION balancing modes.
For CONNECTION mode, either maxConnections or
maxConnectionsPerInstance must be set.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections_per_instance ComputeRegionBackendService#max_connections_per_instance}
'''
result = self._values.get("max_connections_per_instance")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_rate(self) -> typing.Optional[jsii.Number]:
'''The max requests per second (RPS) of the group. Cannot be set for INTERNAL backend services.
Can be used with either RATE or UTILIZATION balancing modes,
        but is required in RATE mode. Either maxRate or one
of maxRatePerInstance or maxRatePerEndpoint, as appropriate for
group type, must be set.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_rate ComputeRegionBackendService#max_rate}
'''
result = self._values.get("max_rate")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_rate_per_endpoint(self) -> typing.Optional[jsii.Number]:
'''The max requests per second (RPS) that a single backend network endpoint can handle.
This is used to calculate the capacity of
the group. Can be used in either balancing mode. For RATE mode,
either maxRate or maxRatePerEndpoint must be set. Cannot be set
for INTERNAL backend services.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_rate_per_endpoint ComputeRegionBackendService#max_rate_per_endpoint}
'''
result = self._values.get("max_rate_per_endpoint")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_rate_per_instance(self) -> typing.Optional[jsii.Number]:
'''The max requests per second (RPS) that a single backend instance can handle.
This is used to calculate the capacity of
the group. Can be used in either balancing mode. For RATE mode,
either maxRate or maxRatePerInstance must be set. Cannot be set
for INTERNAL backend services.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_rate_per_instance ComputeRegionBackendService#max_rate_per_instance}
'''
result = self._values.get("max_rate_per_instance")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_utilization(self) -> typing.Optional[jsii.Number]:
'''Used when balancingMode is UTILIZATION.
This ratio defines the
CPU utilization target for the group. Valid range is [0.0, 1.0].
Cannot be set for INTERNAL backend services.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_utilization ComputeRegionBackendService#max_utilization}
'''
result = self._values.get("max_utilization")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceBackend(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class ComputeRegionBackendServiceBackendList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceBackendList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param wraps_set: Whether the list is wrapping a set (a tolist() will be added to allow accessing an item via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__33ad42b62c7034037df6fc0e372a47ce12ed5f93d1047398fff955799fe8755c)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(
self,
index: jsii.Number,
) -> "ComputeRegionBackendServiceBackendOutputReference":
'''
        :param index: The index of the item to return.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c02ae8c7d4c6acec6fc1cb094ec69c8816cc8abeb6c377db26a31e7ec4c4004a)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("ComputeRegionBackendServiceBackendOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__028f467394e4f5484d38e2d9f947ae8a9de767c733136d6400bc0df293924768)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b9871a41436307fb3773b0f8858a5f8765984313d301f66c5350dd168a360ba8)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
        '''Whether the list is wrapping a set (a tolist() will be added to allow accessing an item via an index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__459f5d087073020ef953274782f3c5fb40b6e8081ac3f9b7b6bb4b7a26ff62f3)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceBackend]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceBackend]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceBackend]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__73780d72a57dc4a7a334bb18f154754c4810a2a6351aaaaf535b669624adaedf)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class ComputeRegionBackendServiceBackendOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceBackendOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
        :param complex_object_index: The index of this item in the list.
        :param complex_object_is_from_set: Whether the list is wrapping a set (a tolist() will be added to allow accessing an item via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d60ce72a29b9b8de82524f272e9c3c7f49ec5af24b170508559ba4020de545dc)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@jsii.member(jsii_name="resetBalancingMode")
def reset_balancing_mode(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetBalancingMode", []))
@jsii.member(jsii_name="resetCapacityScaler")
def reset_capacity_scaler(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCapacityScaler", []))
@jsii.member(jsii_name="resetDescription")
def reset_description(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDescription", []))
@jsii.member(jsii_name="resetFailover")
def reset_failover(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetFailover", []))
@jsii.member(jsii_name="resetMaxConnections")
def reset_max_connections(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxConnections", []))
@jsii.member(jsii_name="resetMaxConnectionsPerEndpoint")
def reset_max_connections_per_endpoint(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxConnectionsPerEndpoint", []))
@jsii.member(jsii_name="resetMaxConnectionsPerInstance")
def reset_max_connections_per_instance(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxConnectionsPerInstance", []))
@jsii.member(jsii_name="resetMaxRate")
def reset_max_rate(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxRate", []))
@jsii.member(jsii_name="resetMaxRatePerEndpoint")
def reset_max_rate_per_endpoint(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxRatePerEndpoint", []))
@jsii.member(jsii_name="resetMaxRatePerInstance")
def reset_max_rate_per_instance(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxRatePerInstance", []))
@jsii.member(jsii_name="resetMaxUtilization")
def reset_max_utilization(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxUtilization", []))
@builtins.property
@jsii.member(jsii_name="balancingModeInput")
def balancing_mode_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "balancingModeInput"))
@builtins.property
@jsii.member(jsii_name="capacityScalerInput")
def capacity_scaler_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "capacityScalerInput"))
@builtins.property
@jsii.member(jsii_name="descriptionInput")
def description_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "descriptionInput"))
@builtins.property
@jsii.member(jsii_name="failoverInput")
def failover_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "failoverInput"))
@builtins.property
@jsii.member(jsii_name="groupInput")
def group_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "groupInput"))
@builtins.property
@jsii.member(jsii_name="maxConnectionsInput")
def max_connections_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxConnectionsInput"))
@builtins.property
@jsii.member(jsii_name="maxConnectionsPerEndpointInput")
def max_connections_per_endpoint_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxConnectionsPerEndpointInput"))
@builtins.property
@jsii.member(jsii_name="maxConnectionsPerInstanceInput")
def max_connections_per_instance_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxConnectionsPerInstanceInput"))
@builtins.property
@jsii.member(jsii_name="maxRateInput")
def max_rate_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxRateInput"))
@builtins.property
@jsii.member(jsii_name="maxRatePerEndpointInput")
def max_rate_per_endpoint_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxRatePerEndpointInput"))
@builtins.property
@jsii.member(jsii_name="maxRatePerInstanceInput")
def max_rate_per_instance_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxRatePerInstanceInput"))
@builtins.property
@jsii.member(jsii_name="maxUtilizationInput")
def max_utilization_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxUtilizationInput"))
@builtins.property
@jsii.member(jsii_name="balancingMode")
def balancing_mode(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "balancingMode"))
@balancing_mode.setter
def balancing_mode(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__e500bbc3bbb861cca45fe1d2c899d6b659e32cf42f577fbbc9906756eb18f8a2)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "balancingMode", value)
@builtins.property
@jsii.member(jsii_name="capacityScaler")
def capacity_scaler(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "capacityScaler"))
@capacity_scaler.setter
def capacity_scaler(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b9b15898ea02857144bba2101c6cf8424fa9a6b06e92c8e33cdeb1c21df9f1ed)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "capacityScaler", value)
@builtins.property
@jsii.member(jsii_name="description")
def description(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "description"))
@description.setter
def description(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__cbdf14163bf15227bed8d3a287ff6d0ebad8f3b56bbc987dc8936910f03004a8)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "description", value)
@builtins.property
@jsii.member(jsii_name="failover")
def failover(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "failover"))
@failover.setter
def failover(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__356f1c06306b3b0e72ac3b00a4bf9e195bab9f8c9adf46c55c59e2c408f7f302)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "failover", value)
@builtins.property
@jsii.member(jsii_name="group")
def group(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "group"))
@group.setter
def group(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__cb817b5f899ab00fc085a04229b510b7725dda0bdd7ccf99554a25346aa4c50a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "group", value)
@builtins.property
@jsii.member(jsii_name="maxConnections")
def max_connections(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxConnections"))
@max_connections.setter
def max_connections(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__4c5e880db719f7758dd97ae75b9f11e62b368ea91165c429ac2a66e9766eade1)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxConnections", value)
@builtins.property
@jsii.member(jsii_name="maxConnectionsPerEndpoint")
def max_connections_per_endpoint(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxConnectionsPerEndpoint"))
@max_connections_per_endpoint.setter
def max_connections_per_endpoint(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5590596f24602ec0f459ce85a1e674563336573ebd3ad4c4d7c496f1e7d93d3b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxConnectionsPerEndpoint", value)
@builtins.property
@jsii.member(jsii_name="maxConnectionsPerInstance")
def max_connections_per_instance(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxConnectionsPerInstance"))
@max_connections_per_instance.setter
def max_connections_per_instance(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3f7a98a74dd102bfc173678cff8473b9ed57de54141e2446488e61ffbdd98298)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxConnectionsPerInstance", value)
@builtins.property
@jsii.member(jsii_name="maxRate")
def max_rate(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxRate"))
@max_rate.setter
def max_rate(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__fa9caa2aee99e9ccd5d0e14d437bc570087206c9dc240392a23f02978917c0bb)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxRate", value)
@builtins.property
@jsii.member(jsii_name="maxRatePerEndpoint")
def max_rate_per_endpoint(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxRatePerEndpoint"))
@max_rate_per_endpoint.setter
def max_rate_per_endpoint(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b0d2bf64589f55d182149e66f228a3c2fb527656d1644b8df4013fa02ad4c9bc)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxRatePerEndpoint", value)
@builtins.property
@jsii.member(jsii_name="maxRatePerInstance")
def max_rate_per_instance(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxRatePerInstance"))
@max_rate_per_instance.setter
def max_rate_per_instance(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__bf0d747f033db0b1f1be871e7383d114ae1edabfa84ae0f195d4ef04a789fc24)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxRatePerInstance", value)
@builtins.property
@jsii.member(jsii_name="maxUtilization")
def max_utilization(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxUtilization"))
@max_utilization.setter
def max_utilization(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3174f5c4fe5f7edf9a50e38789ba5b00700fcc4b7000f11540b8b77875d31e01)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxUtilization", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceBackend]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceBackend]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceBackend]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ce8a1426035a988f574e8c5b459f65b49e20b11aa45ea89b8ecdcaf66fa0034b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCdnPolicy",
jsii_struct_bases=[],
name_mapping={
"cache_key_policy": "cacheKeyPolicy",
"cache_mode": "cacheMode",
"client_ttl": "clientTtl",
"default_ttl": "defaultTtl",
"max_ttl": "maxTtl",
"negative_caching": "negativeCaching",
"negative_caching_policy": "negativeCachingPolicy",
"serve_while_stale": "serveWhileStale",
"signed_url_cache_max_age_sec": "signedUrlCacheMaxAgeSec",
},
)
class ComputeRegionBackendServiceCdnPolicy:
def __init__(
self,
*,
cache_key_policy: typing.Optional[typing.Union["ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy", typing.Dict[builtins.str, typing.Any]]] = None,
cache_mode: typing.Optional[builtins.str] = None,
client_ttl: typing.Optional[jsii.Number] = None,
default_ttl: typing.Optional[jsii.Number] = None,
max_ttl: typing.Optional[jsii.Number] = None,
negative_caching: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
negative_caching_policy: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union["ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy", typing.Dict[builtins.str, typing.Any]]]]] = None,
serve_while_stale: typing.Optional[jsii.Number] = None,
signed_url_cache_max_age_sec: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param cache_key_policy: cache_key_policy block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cache_key_policy ComputeRegionBackendService#cache_key_policy}
        :param cache_mode: Specifies the cache setting for all responses from this backend. Possible values: ["USE_ORIGIN_HEADERS", "FORCE_CACHE_ALL", "CACHE_ALL_STATIC"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cache_mode ComputeRegionBackendService#cache_mode}
:param client_ttl: Specifies the maximum allowed TTL for cached content served by this origin. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#client_ttl ComputeRegionBackendService#client_ttl}
:param default_ttl: Specifies the default TTL for cached content served by this origin for responses that do not have an existing valid TTL (max-age or s-max-age). Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#default_ttl ComputeRegionBackendService#default_ttl}
:param max_ttl: Specifies the maximum allowed TTL for cached content served by this origin. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_ttl ComputeRegionBackendService#max_ttl}
:param negative_caching: Negative caching allows per-status code TTLs to be set, in order to apply fine-grained caching for common errors or redirects. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#negative_caching ComputeRegionBackendService#negative_caching}
:param negative_caching_policy: negative_caching_policy block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#negative_caching_policy ComputeRegionBackendService#negative_caching_policy}
:param serve_while_stale: Serve existing content from the cache (if available) when revalidating content with the origin, or when an error is encountered when refreshing the cache. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#serve_while_stale ComputeRegionBackendService#serve_while_stale}
:param signed_url_cache_max_age_sec: Maximum number of seconds the response to a signed URL request will be considered fresh, defaults to 1hr (3600s). After this time period, the response will be revalidated before being served. When serving responses to signed URL requests, Cloud CDN will internally behave as though all responses from this backend had a "Cache-Control: public, max-age=[TTL]" header, regardless of any existing Cache-Control header. The actual headers served in responses will not be altered. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#signed_url_cache_max_age_sec ComputeRegionBackendService#signed_url_cache_max_age_sec}
'''
if isinstance(cache_key_policy, dict):
cache_key_policy = ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy(**cache_key_policy)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3f762ad61dc54ad3d79699c5aed3f9b69e4e6b50d5e56cb4ffdcb354b86c3172)
check_type(argname="argument cache_key_policy", value=cache_key_policy, expected_type=type_hints["cache_key_policy"])
check_type(argname="argument cache_mode", value=cache_mode, expected_type=type_hints["cache_mode"])
check_type(argname="argument client_ttl", value=client_ttl, expected_type=type_hints["client_ttl"])
check_type(argname="argument default_ttl", value=default_ttl, expected_type=type_hints["default_ttl"])
check_type(argname="argument max_ttl", value=max_ttl, expected_type=type_hints["max_ttl"])
check_type(argname="argument negative_caching", value=negative_caching, expected_type=type_hints["negative_caching"])
check_type(argname="argument negative_caching_policy", value=negative_caching_policy, expected_type=type_hints["negative_caching_policy"])
check_type(argname="argument serve_while_stale", value=serve_while_stale, expected_type=type_hints["serve_while_stale"])
check_type(argname="argument signed_url_cache_max_age_sec", value=signed_url_cache_max_age_sec, expected_type=type_hints["signed_url_cache_max_age_sec"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if cache_key_policy is not None:
self._values["cache_key_policy"] = cache_key_policy
if cache_mode is not None:
self._values["cache_mode"] = cache_mode
if client_ttl is not None:
self._values["client_ttl"] = client_ttl
if default_ttl is not None:
self._values["default_ttl"] = default_ttl
if max_ttl is not None:
self._values["max_ttl"] = max_ttl
if negative_caching is not None:
self._values["negative_caching"] = negative_caching
if negative_caching_policy is not None:
self._values["negative_caching_policy"] = negative_caching_policy
if serve_while_stale is not None:
self._values["serve_while_stale"] = serve_while_stale
if signed_url_cache_max_age_sec is not None:
self._values["signed_url_cache_max_age_sec"] = signed_url_cache_max_age_sec
@builtins.property
def cache_key_policy(
self,
) -> typing.Optional["ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy"]:
'''cache_key_policy block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cache_key_policy ComputeRegionBackendService#cache_key_policy}
'''
result = self._values.get("cache_key_policy")
return typing.cast(typing.Optional["ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy"], result)
@builtins.property
def cache_mode(self) -> typing.Optional[builtins.str]:
'''Specifies the cache setting for all responses from this backend.
The possible values are "USE_ORIGIN_HEADERS", "FORCE_CACHE_ALL", and "CACHE_ALL_STATIC".
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cache_mode ComputeRegionBackendService#cache_mode}
'''
result = self._values.get("cache_mode")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def client_ttl(self) -> typing.Optional[jsii.Number]:
'''Specifies the maximum allowed TTL for cached content served by this origin.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#client_ttl ComputeRegionBackendService#client_ttl}
'''
result = self._values.get("client_ttl")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def default_ttl(self) -> typing.Optional[jsii.Number]:
'''Specifies the default TTL for cached content served by this origin for responses that do not have an existing valid TTL (max-age or s-max-age).
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#default_ttl ComputeRegionBackendService#default_ttl}
'''
result = self._values.get("default_ttl")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_ttl(self) -> typing.Optional[jsii.Number]:
'''Specifies the maximum allowed TTL for cached content served by this origin.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_ttl ComputeRegionBackendService#max_ttl}
'''
result = self._values.get("max_ttl")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def negative_caching(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Negative caching allows per-status code TTLs to be set, in order to apply fine-grained caching for common errors or redirects.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#negative_caching ComputeRegionBackendService#negative_caching}
'''
result = self._values.get("negative_caching")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def negative_caching_policy(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy"]]]:
'''negative_caching_policy block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#negative_caching_policy ComputeRegionBackendService#negative_caching_policy}
'''
result = self._values.get("negative_caching_policy")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List["ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy"]]], result)
@builtins.property
def serve_while_stale(self) -> typing.Optional[jsii.Number]:
'''Serve existing content from the cache (if available) when revalidating content with the origin, or when an error is encountered when refreshing the cache.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#serve_while_stale ComputeRegionBackendService#serve_while_stale}
'''
result = self._values.get("serve_while_stale")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def signed_url_cache_max_age_sec(self) -> typing.Optional[jsii.Number]:
'''Maximum number of seconds the response to a signed URL request will be considered fresh, defaults to 1hr (3600s).
After this time period, the response will be revalidated before being
served. When serving responses to signed URL requests, Cloud CDN will
internally behave as though all responses from this backend had a
"Cache-Control: public, max-age=[TTL]" header, regardless of any existing
Cache-Control header. The actual headers served in responses will not be
altered.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#signed_url_cache_max_age_sec ComputeRegionBackendService#signed_url_cache_max_age_sec}
'''
result = self._values.get("signed_url_cache_max_age_sec")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceCdnPolicy(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
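# A minimal, hypothetical sketch of building this struct directly; the cache
# mode and TTL values below are illustrative choices, not provider defaults:
#
#     cdn_policy = ComputeRegionBackendServiceCdnPolicy(
#         cache_mode="CACHE_ALL_STATIC",
#         default_ttl=3600,
#         max_ttl=86400,
#         negative_caching=True,
#     )
#
# The resulting struct is typically passed as the `cdn_policy` argument of a
# `ComputeRegionBackendService` resource defined elsewhere in the stack.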
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy",
jsii_struct_bases=[],
name_mapping={
"include_host": "includeHost",
"include_named_cookies": "includeNamedCookies",
"include_protocol": "includeProtocol",
"include_query_string": "includeQueryString",
"query_string_blacklist": "queryStringBlacklist",
"query_string_whitelist": "queryStringWhitelist",
},
)
class ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy:
def __init__(
self,
*,
include_host: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
include_named_cookies: typing.Optional[typing.Sequence[builtins.str]] = None,
include_protocol: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
include_query_string: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
query_string_blacklist: typing.Optional[typing.Sequence[builtins.str]] = None,
query_string_whitelist: typing.Optional[typing.Sequence[builtins.str]] = None,
) -> None:
'''
:param include_host: If true, requests to different hosts will be cached separately. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_host ComputeRegionBackendService#include_host}
:param include_named_cookies: Names of cookies to include in cache keys. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_named_cookies ComputeRegionBackendService#include_named_cookies}
:param include_protocol: If true, http and https requests will be cached separately. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_protocol ComputeRegionBackendService#include_protocol}
:param include_query_string: If true, include query string parameters in the cache key according to query_string_whitelist and query_string_blacklist. If neither is set, the entire query string will be included. If false, the query string will be excluded from the cache key entirely. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_query_string ComputeRegionBackendService#include_query_string}
:param query_string_blacklist: Names of query string parameters to exclude from cache keys. All other parameters will be included. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and '=' will be percent encoded and not treated as delimiters. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#query_string_blacklist ComputeRegionBackendService#query_string_blacklist}
:param query_string_whitelist: Names of query string parameters to include in cache keys. All other parameters will be excluded. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and '=' will be percent encoded and not treated as delimiters. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#query_string_whitelist ComputeRegionBackendService#query_string_whitelist}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__379568ce93934ee39c05535e079cd33a2e12de9b3f2e29276b6a8637f132bedf)
check_type(argname="argument include_host", value=include_host, expected_type=type_hints["include_host"])
check_type(argname="argument include_named_cookies", value=include_named_cookies, expected_type=type_hints["include_named_cookies"])
check_type(argname="argument include_protocol", value=include_protocol, expected_type=type_hints["include_protocol"])
check_type(argname="argument include_query_string", value=include_query_string, expected_type=type_hints["include_query_string"])
check_type(argname="argument query_string_blacklist", value=query_string_blacklist, expected_type=type_hints["query_string_blacklist"])
check_type(argname="argument query_string_whitelist", value=query_string_whitelist, expected_type=type_hints["query_string_whitelist"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if include_host is not None:
self._values["include_host"] = include_host
if include_named_cookies is not None:
self._values["include_named_cookies"] = include_named_cookies
if include_protocol is not None:
self._values["include_protocol"] = include_protocol
if include_query_string is not None:
self._values["include_query_string"] = include_query_string
if query_string_blacklist is not None:
self._values["query_string_blacklist"] = query_string_blacklist
if query_string_whitelist is not None:
self._values["query_string_whitelist"] = query_string_whitelist
@builtins.property
def include_host(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''If true, requests to different hosts will be cached separately.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_host ComputeRegionBackendService#include_host}
'''
result = self._values.get("include_host")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def include_named_cookies(self) -> typing.Optional[typing.List[builtins.str]]:
'''Names of cookies to include in cache keys.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_named_cookies ComputeRegionBackendService#include_named_cookies}
'''
result = self._values.get("include_named_cookies")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
@builtins.property
def include_protocol(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''If true, http and https requests will be cached separately.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_protocol ComputeRegionBackendService#include_protocol}
'''
result = self._values.get("include_protocol")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def include_query_string(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''If true, include query string parameters in the cache key according to query_string_whitelist and query_string_blacklist.
If neither is set, the entire query string will be included. If false, the
query string will be excluded from the cache key entirely.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_query_string ComputeRegionBackendService#include_query_string}
'''
result = self._values.get("include_query_string")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def query_string_blacklist(self) -> typing.Optional[typing.List[builtins.str]]:
'''Names of query string parameters to exclude from cache keys.
All other parameters will be included. Either specify
query_string_whitelist or query_string_blacklist, not both.
'&' and '=' will be percent encoded and not treated as
delimiters.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#query_string_blacklist ComputeRegionBackendService#query_string_blacklist}
'''
result = self._values.get("query_string_blacklist")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
@builtins.property
def query_string_whitelist(self) -> typing.Optional[typing.List[builtins.str]]:
'''Names of query string parameters to include in cache keys.
All other parameters will be excluded. Either specify
query_string_whitelist or query_string_blacklist, not both.
'&' and '=' will be percent encoded and not treated as
delimiters.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#query_string_whitelist ComputeRegionBackendService#query_string_whitelist}
'''
result = self._values.get("query_string_whitelist")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
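# A hedged example of a cache key policy that keys cached responses on host,
# protocol, and a single whitelisted query parameter; the parameter name
# "version" is purely illustrative:
#
#     cache_key_policy = ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy(
#         include_host=True,
#         include_protocol=True,
#         include_query_string=True,
#         query_string_whitelist=["version"],
#     )
#
# As the docstrings above note, query_string_whitelist and
# query_string_blacklist are mutually exclusive.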
class ComputeRegionBackendServiceCdnPolicyCacheKeyPolicyOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCdnPolicyCacheKeyPolicyOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__199c372215c35b0865ae27599f11e07814fdf9ec61c6b06c34c3458bc268261e)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetIncludeHost")
def reset_include_host(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetIncludeHost", []))
@jsii.member(jsii_name="resetIncludeNamedCookies")
def reset_include_named_cookies(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetIncludeNamedCookies", []))
@jsii.member(jsii_name="resetIncludeProtocol")
def reset_include_protocol(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetIncludeProtocol", []))
@jsii.member(jsii_name="resetIncludeQueryString")
def reset_include_query_string(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetIncludeQueryString", []))
@jsii.member(jsii_name="resetQueryStringBlacklist")
def reset_query_string_blacklist(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetQueryStringBlacklist", []))
@jsii.member(jsii_name="resetQueryStringWhitelist")
def reset_query_string_whitelist(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetQueryStringWhitelist", []))
@builtins.property
@jsii.member(jsii_name="includeHostInput")
def include_host_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "includeHostInput"))
@builtins.property
@jsii.member(jsii_name="includeNamedCookiesInput")
def include_named_cookies_input(self) -> typing.Optional[typing.List[builtins.str]]:
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "includeNamedCookiesInput"))
@builtins.property
@jsii.member(jsii_name="includeProtocolInput")
def include_protocol_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "includeProtocolInput"))
@builtins.property
@jsii.member(jsii_name="includeQueryStringInput")
def include_query_string_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "includeQueryStringInput"))
@builtins.property
@jsii.member(jsii_name="queryStringBlacklistInput")
def query_string_blacklist_input(
self,
) -> typing.Optional[typing.List[builtins.str]]:
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "queryStringBlacklistInput"))
@builtins.property
@jsii.member(jsii_name="queryStringWhitelistInput")
def query_string_whitelist_input(
self,
) -> typing.Optional[typing.List[builtins.str]]:
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "queryStringWhitelistInput"))
@builtins.property
@jsii.member(jsii_name="includeHost")
def include_host(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "includeHost"))
@include_host.setter
def include_host(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__7aa71c2205f76cc9daee07d33568428a7b868016707f7d68fb3c720cc6231765)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "includeHost", value)
@builtins.property
@jsii.member(jsii_name="includeNamedCookies")
def include_named_cookies(self) -> typing.List[builtins.str]:
return typing.cast(typing.List[builtins.str], jsii.get(self, "includeNamedCookies"))
@include_named_cookies.setter
def include_named_cookies(self, value: typing.List[builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__647d5d507b4b2c0685f384e891132dc149015b3867ed296427358ac552ad7ea4)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "includeNamedCookies", value)
@builtins.property
@jsii.member(jsii_name="includeProtocol")
def include_protocol(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "includeProtocol"))
@include_protocol.setter
def include_protocol(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__065faa1b914207b3e2105d5b9ac053e3c6c206d5402b5e154d1a9f334cb27434)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "includeProtocol", value)
@builtins.property
@jsii.member(jsii_name="includeQueryString")
def include_query_string(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "includeQueryString"))
@include_query_string.setter
def include_query_string(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__432d5cac0d48501477135da33b0c1480bfb775faa681aac7bc5864969dc717aa)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "includeQueryString", value)
@builtins.property
@jsii.member(jsii_name="queryStringBlacklist")
def query_string_blacklist(self) -> typing.List[builtins.str]:
return typing.cast(typing.List[builtins.str], jsii.get(self, "queryStringBlacklist"))
@query_string_blacklist.setter
def query_string_blacklist(self, value: typing.List[builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__63dc0e3e76553beea908d3dd430b8f00f6c84b3a226cb383c8602357a250133c)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "queryStringBlacklist", value)
@builtins.property
@jsii.member(jsii_name="queryStringWhitelist")
def query_string_whitelist(self) -> typing.List[builtins.str]:
return typing.cast(typing.List[builtins.str], jsii.get(self, "queryStringWhitelist"))
@query_string_whitelist.setter
def query_string_whitelist(self, value: typing.List[builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__7de07ebb57db02540135ca77305cf6ea4b2fa8cb06dcab09b4e7d5857ba3d521)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "queryStringWhitelist", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d5d8275ffbd9c4685f84025e63e43f56ef49d2c0a892cb094d32090271ae5903)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy",
jsii_struct_bases=[],
name_mapping={"code": "code"},
)
class ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy:
def __init__(self, *, code: typing.Optional[jsii.Number] = None) -> None:
'''
:param code: The HTTP status code to define a TTL against. Only HTTP status codes 300, 301, 308, 404, 405, 410, 421, 451 and 501 can be specified as values, and you cannot specify a status code more than once. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#code ComputeRegionBackendService#code}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ab605d31c904330c0eaff74295beb59394008283eba9048b27c64a7ad8eff93a)
check_type(argname="argument code", value=code, expected_type=type_hints["code"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if code is not None:
self._values["code"] = code
@builtins.property
def code(self) -> typing.Optional[jsii.Number]:
'''The HTTP status code to define a TTL against.
Only HTTP status codes 300, 301, 308, 404, 405, 410, 421, 451 and 501
can be specified as values, and you cannot specify a status code more than once.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#code ComputeRegionBackendService#code}
'''
result = self._values.get("code")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
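# A sketch of per-status-code negative caching entries; the codes used here
# (404 and 410) are drawn from the allowed set listed in the docstring above:
#
#     negative_caching_policy = [
#         ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy(code=404),
#         ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy(code=410),
#     ]
#
# This list would be supplied as the `negative_caching_policy` argument of
# `ComputeRegionBackendServiceCdnPolicy`, together with
# `negative_caching=True`.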
class ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyList(
_cdktf_9a9027ec.ComplexList,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyList",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param wraps_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5f22ea4a5a32175e50467b7c9884a3bb0ee0837e874757ab185255d32fd511d5)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument wraps_set", value=wraps_set, expected_type=type_hints["wraps_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, wraps_set])
@jsii.member(jsii_name="get")
def get(
self,
index: jsii.Number,
) -> "ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyOutputReference":
'''
:param index: the index of the item to return.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__bae51cc8232aba4e45810dbcb7366a432badac90c16fe950a8884267b06656d8)
check_type(argname="argument index", value=index, expected_type=type_hints["index"])
return typing.cast("ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyOutputReference", jsii.invoke(self, "get", [index]))
@builtins.property
@jsii.member(jsii_name="terraformAttribute")
def _terraform_attribute(self) -> builtins.str:
'''The attribute on the parent resource this class is referencing.'''
return typing.cast(builtins.str, jsii.get(self, "terraformAttribute"))
@_terraform_attribute.setter
def _terraform_attribute(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b00b710afed83d789074b329b34c3efca94bfe67bc48300c57d7b694acc92488)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformAttribute", value)
@builtins.property
@jsii.member(jsii_name="terraformResource")
def _terraform_resource(self) -> _cdktf_9a9027ec.IInterpolatingParent:
'''The parent resource.'''
return typing.cast(_cdktf_9a9027ec.IInterpolatingParent, jsii.get(self, "terraformResource"))
@_terraform_resource.setter
def _terraform_resource(self, value: _cdktf_9a9027ec.IInterpolatingParent) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2e99505de1cdbc633a081027b2938957ed3ab8cb9c81d0975f444fd5d65cfa0b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "terraformResource", value)
@builtins.property
@jsii.member(jsii_name="wrapsSet")
def _wraps_set(self) -> builtins.bool:
'''whether the list is wrapping a set (will add tolist() to be able to access an item via an index).'''
return typing.cast(builtins.bool, jsii.get(self, "wrapsSet"))
@_wraps_set.setter
def _wraps_set(self, value: builtins.bool) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f317bd613505711d880fe6b6b3a07db7e455d0589f2c21c85b8d3189b7703abb)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "wrapsSet", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5c8ce4267a145c0945f579aef11116d86bae77795092937c475709b93572898f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
:param complex_object_index: the index of this item in the list.
:param complex_object_is_from_set: whether the list is wrapping a set (will add tolist() to be able to access an item via an index).
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f044a9b2b9291f7bf321a3ceb7f065717aaadab5a80ff64b968a62db155d0239)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
check_type(argname="argument complex_object_index", value=complex_object_index, expected_type=type_hints["complex_object_index"])
check_type(argname="argument complex_object_is_from_set", value=complex_object_is_from_set, expected_type=type_hints["complex_object_is_from_set"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute, complex_object_index, complex_object_is_from_set])
@jsii.member(jsii_name="resetCode")
def reset_code(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCode", []))
@builtins.property
@jsii.member(jsii_name="codeInput")
def code_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "codeInput"))
@builtins.property
@jsii.member(jsii_name="code")
def code(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "code"))
@code.setter
def code(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f120e1419fd3a83795d7034b40bf7092c0432ffcaabef0d5dcde262757bc0dc2)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "code", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__762e126574a8f2a98fe6f6b8ab38ea3112bf683014ffb108c0c2706eb9ae32a2)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class ComputeRegionBackendServiceCdnPolicyOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCdnPolicyOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__1afcc3fcf86329631cc19ce6537e0b1b9730cdee91cf19e622991e3886d18a3f)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="putCacheKeyPolicy")
def put_cache_key_policy(
self,
*,
include_host: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
include_named_cookies: typing.Optional[typing.Sequence[builtins.str]] = None,
include_protocol: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
include_query_string: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
query_string_blacklist: typing.Optional[typing.Sequence[builtins.str]] = None,
query_string_whitelist: typing.Optional[typing.Sequence[builtins.str]] = None,
) -> None:
'''
:param include_host: If true, requests to different hosts will be cached separately. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_host ComputeRegionBackendService#include_host}
:param include_named_cookies: Names of cookies to include in cache keys. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_named_cookies ComputeRegionBackendService#include_named_cookies}
:param include_protocol: If true, http and https requests will be cached separately. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_protocol ComputeRegionBackendService#include_protocol}
:param include_query_string: If true, include query string parameters in the cache key according to query_string_whitelist and query_string_blacklist. If neither is set, the entire query string will be included. If false, the query string will be excluded from the cache key entirely. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#include_query_string ComputeRegionBackendService#include_query_string}
:param query_string_blacklist: Names of query string parameters to exclude from cache keys. All other parameters will be included. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and '=' will be percent encoded and not treated as delimiters. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#query_string_blacklist ComputeRegionBackendService#query_string_blacklist}
:param query_string_whitelist: Names of query string parameters to include in cache keys. All other parameters will be excluded. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and '=' will be percent encoded and not treated as delimiters. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#query_string_whitelist ComputeRegionBackendService#query_string_whitelist}
'''
value = ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy(
include_host=include_host,
include_named_cookies=include_named_cookies,
include_protocol=include_protocol,
include_query_string=include_query_string,
query_string_blacklist=query_string_blacklist,
query_string_whitelist=query_string_whitelist,
)
return typing.cast(None, jsii.invoke(self, "putCacheKeyPolicy", [value]))
@jsii.member(jsii_name="putNegativeCachingPolicy")
def put_negative_caching_policy(
self,
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
'''
:param value: -
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2fafe9e418813c27bb86b466da4afe5cf9926e19d5abac5eaddca8df7f6a3b95)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
return typing.cast(None, jsii.invoke(self, "putNegativeCachingPolicy", [value]))
@jsii.member(jsii_name="resetCacheKeyPolicy")
def reset_cache_key_policy(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCacheKeyPolicy", []))
@jsii.member(jsii_name="resetCacheMode")
def reset_cache_mode(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCacheMode", []))
@jsii.member(jsii_name="resetClientTtl")
def reset_client_ttl(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetClientTtl", []))
@jsii.member(jsii_name="resetDefaultTtl")
def reset_default_ttl(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDefaultTtl", []))
@jsii.member(jsii_name="resetMaxTtl")
def reset_max_ttl(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxTtl", []))
@jsii.member(jsii_name="resetNegativeCaching")
def reset_negative_caching(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetNegativeCaching", []))
@jsii.member(jsii_name="resetNegativeCachingPolicy")
def reset_negative_caching_policy(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetNegativeCachingPolicy", []))
@jsii.member(jsii_name="resetServeWhileStale")
def reset_serve_while_stale(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetServeWhileStale", []))
@jsii.member(jsii_name="resetSignedUrlCacheMaxAgeSec")
def reset_signed_url_cache_max_age_sec(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSignedUrlCacheMaxAgeSec", []))
@builtins.property
@jsii.member(jsii_name="cacheKeyPolicy")
def cache_key_policy(
self,
) -> ComputeRegionBackendServiceCdnPolicyCacheKeyPolicyOutputReference:
return typing.cast(ComputeRegionBackendServiceCdnPolicyCacheKeyPolicyOutputReference, jsii.get(self, "cacheKeyPolicy"))
@builtins.property
@jsii.member(jsii_name="negativeCachingPolicy")
def negative_caching_policy(
self,
) -> ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyList:
return typing.cast(ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyList, jsii.get(self, "negativeCachingPolicy"))
@builtins.property
@jsii.member(jsii_name="cacheKeyPolicyInput")
def cache_key_policy_input(
self,
) -> typing.Optional[ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy], jsii.get(self, "cacheKeyPolicyInput"))
@builtins.property
@jsii.member(jsii_name="cacheModeInput")
def cache_mode_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "cacheModeInput"))
@builtins.property
@jsii.member(jsii_name="clientTtlInput")
def client_ttl_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "clientTtlInput"))
@builtins.property
@jsii.member(jsii_name="defaultTtlInput")
def default_ttl_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "defaultTtlInput"))
@builtins.property
@jsii.member(jsii_name="maxTtlInput")
def max_ttl_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxTtlInput"))
@builtins.property
@jsii.member(jsii_name="negativeCachingInput")
def negative_caching_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "negativeCachingInput"))
@builtins.property
@jsii.member(jsii_name="negativeCachingPolicyInput")
def negative_caching_policy_input(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]]], jsii.get(self, "negativeCachingPolicyInput"))
@builtins.property
@jsii.member(jsii_name="serveWhileStaleInput")
def serve_while_stale_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "serveWhileStaleInput"))
@builtins.property
@jsii.member(jsii_name="signedUrlCacheMaxAgeSecInput")
def signed_url_cache_max_age_sec_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "signedUrlCacheMaxAgeSecInput"))
@builtins.property
@jsii.member(jsii_name="cacheMode")
def cache_mode(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "cacheMode"))
@cache_mode.setter
def cache_mode(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0ea9599261f5800799bf190ce789cbb1bd8b50f25ce1c95441e7e6e72a78de93)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "cacheMode", value)
@builtins.property
@jsii.member(jsii_name="clientTtl")
def client_ttl(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "clientTtl"))
@client_ttl.setter
def client_ttl(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f741e54aa48e0ba4d404b2c7a0a5800c2d17feb3493760085e9b8afca785b64c)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "clientTtl", value)
@builtins.property
@jsii.member(jsii_name="defaultTtl")
def default_ttl(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "defaultTtl"))
@default_ttl.setter
def default_ttl(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__091e61f296ec1f9e760ed81656614ce61bec3da6415a857c08fb51882995e0ad)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "defaultTtl", value)
@builtins.property
@jsii.member(jsii_name="maxTtl")
def max_ttl(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxTtl"))
@max_ttl.setter
def max_ttl(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__9a9d12a8353403405940014d2fe4f33866be6cae9285093a77a5366d565d15fd)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxTtl", value)
@builtins.property
@jsii.member(jsii_name="negativeCaching")
def negative_caching(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "negativeCaching"))
@negative_caching.setter
def negative_caching(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__d841e23d6d935d9ad3a75aaa84820fb2fe7ddd846840311bdecd266180a80000)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "negativeCaching", value)
@builtins.property
@jsii.member(jsii_name="serveWhileStale")
def serve_while_stale(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "serveWhileStale"))
@serve_while_stale.setter
def serve_while_stale(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c89df93bccd2c6efb1d0c9d89a266e435387de7707231f7a2a68d8da30f06a37)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "serveWhileStale", value)
@builtins.property
@jsii.member(jsii_name="signedUrlCacheMaxAgeSec")
def signed_url_cache_max_age_sec(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "signedUrlCacheMaxAgeSec"))
@signed_url_cache_max_age_sec.setter
def signed_url_cache_max_age_sec(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__908d3e436e2044802a3d1eec93b50b82c90ad0cec10dd19d6e12eaafdb49cc1d)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "signedUrlCacheMaxAgeSec", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(self) -> typing.Optional[ComputeRegionBackendServiceCdnPolicy]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceCdnPolicy], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceCdnPolicy],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6ffb386b50fdf9e0c7b64ea26c0a5022760515a83b4fa6b585c9b68c6f3dd1dd)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCircuitBreakers",
jsii_struct_bases=[],
name_mapping={
"max_connections": "maxConnections",
"max_pending_requests": "maxPendingRequests",
"max_requests": "maxRequests",
"max_requests_per_connection": "maxRequestsPerConnection",
"max_retries": "maxRetries",
},
)
class ComputeRegionBackendServiceCircuitBreakers:
def __init__(
self,
*,
max_connections: typing.Optional[jsii.Number] = None,
max_pending_requests: typing.Optional[jsii.Number] = None,
max_requests: typing.Optional[jsii.Number] = None,
max_requests_per_connection: typing.Optional[jsii.Number] = None,
max_retries: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param max_connections: The maximum number of connections to the backend cluster. Defaults to 1024. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections ComputeRegionBackendService#max_connections}
:param max_pending_requests: The maximum number of pending requests to the backend cluster. Defaults to 1024. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_pending_requests ComputeRegionBackendService#max_pending_requests}
:param max_requests: The maximum number of parallel requests to the backend cluster. Defaults to 1024. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_requests ComputeRegionBackendService#max_requests}
:param max_requests_per_connection: Maximum requests for a single backend connection. This parameter is respected by both the HTTP/1.1 and HTTP/2 implementations. If not specified, there is no limit. Setting this parameter to 1 will effectively disable keep-alive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_requests_per_connection ComputeRegionBackendService#max_requests_per_connection}
:param max_retries: The maximum number of parallel retries to the backend cluster. Defaults to 3. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_retries ComputeRegionBackendService#max_retries}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__266a5682df9681a59744226a311495815409a6484e6aa17c82ee4d35af09af83)
check_type(argname="argument max_connections", value=max_connections, expected_type=type_hints["max_connections"])
check_type(argname="argument max_pending_requests", value=max_pending_requests, expected_type=type_hints["max_pending_requests"])
check_type(argname="argument max_requests", value=max_requests, expected_type=type_hints["max_requests"])
check_type(argname="argument max_requests_per_connection", value=max_requests_per_connection, expected_type=type_hints["max_requests_per_connection"])
check_type(argname="argument max_retries", value=max_retries, expected_type=type_hints["max_retries"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if max_connections is not None:
self._values["max_connections"] = max_connections
if max_pending_requests is not None:
self._values["max_pending_requests"] = max_pending_requests
if max_requests is not None:
self._values["max_requests"] = max_requests
if max_requests_per_connection is not None:
self._values["max_requests_per_connection"] = max_requests_per_connection
if max_retries is not None:
self._values["max_retries"] = max_retries
@builtins.property
def max_connections(self) -> typing.Optional[jsii.Number]:
'''The maximum number of connections to the backend cluster. Defaults to 1024.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_connections ComputeRegionBackendService#max_connections}
'''
result = self._values.get("max_connections")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_pending_requests(self) -> typing.Optional[jsii.Number]:
'''The maximum number of pending requests to the backend cluster. Defaults to 1024.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_pending_requests ComputeRegionBackendService#max_pending_requests}
'''
result = self._values.get("max_pending_requests")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_requests(self) -> typing.Optional[jsii.Number]:
'''The maximum number of parallel requests to the backend cluster. Defaults to 1024.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_requests ComputeRegionBackendService#max_requests}
'''
result = self._values.get("max_requests")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_requests_per_connection(self) -> typing.Optional[jsii.Number]:
'''Maximum requests for a single backend connection.
This parameter is respected by both the HTTP/1.1 and HTTP/2
implementations. If not specified, there is no limit. Setting this
parameter to 1 will effectively disable keep-alive.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_requests_per_connection ComputeRegionBackendService#max_requests_per_connection}
'''
result = self._values.get("max_requests_per_connection")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def max_retries(self) -> typing.Optional[jsii.Number]:
'''The maximum number of parallel retries to the backend cluster. Defaults to 3.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_retries ComputeRegionBackendService#max_retries}
'''
result = self._values.get("max_retries")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceCircuitBreakers(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
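# A minimal example of overriding the circuit-breaker defaults documented
# above; the concrete limits are illustrative only:
#
#     circuit_breakers = ComputeRegionBackendServiceCircuitBreakers(
#         max_connections=2048,
#         max_pending_requests=512,
#         max_retries=5,
#     )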
class ComputeRegionBackendServiceCircuitBreakersOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceCircuitBreakersOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__893780769ac8d37355e438525acbd4aed3a005fc80fd17d25190253cfe09411d)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetMaxConnections")
def reset_max_connections(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxConnections", []))
@jsii.member(jsii_name="resetMaxPendingRequests")
def reset_max_pending_requests(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxPendingRequests", []))
@jsii.member(jsii_name="resetMaxRequests")
def reset_max_requests(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxRequests", []))
@jsii.member(jsii_name="resetMaxRequestsPerConnection")
def reset_max_requests_per_connection(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxRequestsPerConnection", []))
@jsii.member(jsii_name="resetMaxRetries")
def reset_max_retries(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxRetries", []))
@builtins.property
@jsii.member(jsii_name="maxConnectionsInput")
def max_connections_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxConnectionsInput"))
@builtins.property
@jsii.member(jsii_name="maxPendingRequestsInput")
def max_pending_requests_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxPendingRequestsInput"))
@builtins.property
@jsii.member(jsii_name="maxRequestsInput")
def max_requests_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxRequestsInput"))
@builtins.property
@jsii.member(jsii_name="maxRequestsPerConnectionInput")
def max_requests_per_connection_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxRequestsPerConnectionInput"))
@builtins.property
@jsii.member(jsii_name="maxRetriesInput")
def max_retries_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxRetriesInput"))
@builtins.property
@jsii.member(jsii_name="maxConnections")
def max_connections(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxConnections"))
@max_connections.setter
def max_connections(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f82019ede3e01a5eeb54fe45d89e3c9d001ded87341ba441c5c02598bd71aaa3)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxConnections", value)
@builtins.property
@jsii.member(jsii_name="maxPendingRequests")
def max_pending_requests(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxPendingRequests"))
@max_pending_requests.setter
def max_pending_requests(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ecc6ff032c620d59d5bdba15c2bc5ce9d343ce3d4c0fa9a9fc64b058b8e40a67)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxPendingRequests", value)
@builtins.property
@jsii.member(jsii_name="maxRequests")
def max_requests(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxRequests"))
@max_requests.setter
def max_requests(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0c91658cb74df1dc152ca198e9700175b8784a3cef2f99491c1d0cef297a334b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxRequests", value)
@builtins.property
@jsii.member(jsii_name="maxRequestsPerConnection")
def max_requests_per_connection(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxRequestsPerConnection"))
@max_requests_per_connection.setter
def max_requests_per_connection(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__600c89c29a6a6fd707558664e49aecee221b8ae2860bff5f0819d34f324f36e9)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxRequestsPerConnection", value)
@builtins.property
@jsii.member(jsii_name="maxRetries")
def max_retries(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxRetries"))
@max_retries.setter
def max_retries(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__4a28d216d508cd81aa3bb776b09b29429a1e999d3438f84f5504cb93a0a89b45)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxRetries", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceCircuitBreakers]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceCircuitBreakers], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceCircuitBreakers],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__2a0ed6399f9ad92500995d35e4410b7fa0c21fcd0bd76414bf350ccedb8ae9b1)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
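# Usage sketch (illustrative, not part of the generated API): the circuit
# breaker settings are normally supplied as a struct on the resource and read
# back through this output reference. All field values below are hypothetical.
#
#   breakers = ComputeRegionBackendServiceCircuitBreakers(
#       max_connections=1024,
#       max_pending_requests=256,
#       max_retries=3,
#   )
#   # A resource's .circuit_breakers output reference then exposes the
#   # resolved values, e.g. service.circuit_breakers.max_retries.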
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceConfig",
jsii_struct_bases=[_cdktf_9a9027ec.TerraformMetaArguments],
name_mapping={
"connection": "connection",
"count": "count",
"depends_on": "dependsOn",
"for_each": "forEach",
"lifecycle": "lifecycle",
"provider": "provider",
"provisioners": "provisioners",
"name": "name",
"affinity_cookie_ttl_sec": "affinityCookieTtlSec",
"backend": "backend",
"cdn_policy": "cdnPolicy",
"circuit_breakers": "circuitBreakers",
"connection_draining_timeout_sec": "connectionDrainingTimeoutSec",
"consistent_hash": "consistentHash",
"description": "description",
"enable_cdn": "enableCdn",
"failover_policy": "failoverPolicy",
"health_checks": "healthChecks",
"iap": "iap",
"id": "id",
"load_balancing_scheme": "loadBalancingScheme",
"locality_lb_policy": "localityLbPolicy",
"log_config": "logConfig",
"network": "network",
"outlier_detection": "outlierDetection",
"port_name": "portName",
"project": "project",
"protocol": "protocol",
"region": "region",
"session_affinity": "sessionAffinity",
"timeouts": "timeouts",
"timeout_sec": "timeoutSec",
},
)
class ComputeRegionBackendServiceConfig(_cdktf_9a9027ec.TerraformMetaArguments):
def __init__(
self,
*,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
name: builtins.str,
affinity_cookie_ttl_sec: typing.Optional[jsii.Number] = None,
backend: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[ComputeRegionBackendServiceBackend, typing.Dict[builtins.str, typing.Any]]]]] = None,
cdn_policy: typing.Optional[typing.Union[ComputeRegionBackendServiceCdnPolicy, typing.Dict[builtins.str, typing.Any]]] = None,
circuit_breakers: typing.Optional[typing.Union[ComputeRegionBackendServiceCircuitBreakers, typing.Dict[builtins.str, typing.Any]]] = None,
connection_draining_timeout_sec: typing.Optional[jsii.Number] = None,
consistent_hash: typing.Optional[typing.Union["ComputeRegionBackendServiceConsistentHash", typing.Dict[builtins.str, typing.Any]]] = None,
description: typing.Optional[builtins.str] = None,
enable_cdn: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
failover_policy: typing.Optional[typing.Union["ComputeRegionBackendServiceFailoverPolicy", typing.Dict[builtins.str, typing.Any]]] = None,
health_checks: typing.Optional[typing.Sequence[builtins.str]] = None,
iap: typing.Optional[typing.Union["ComputeRegionBackendServiceIap", typing.Dict[builtins.str, typing.Any]]] = None,
id: typing.Optional[builtins.str] = None,
load_balancing_scheme: typing.Optional[builtins.str] = None,
locality_lb_policy: typing.Optional[builtins.str] = None,
log_config: typing.Optional[typing.Union["ComputeRegionBackendServiceLogConfig", typing.Dict[builtins.str, typing.Any]]] = None,
network: typing.Optional[builtins.str] = None,
outlier_detection: typing.Optional[typing.Union["ComputeRegionBackendServiceOutlierDetection", typing.Dict[builtins.str, typing.Any]]] = None,
port_name: typing.Optional[builtins.str] = None,
project: typing.Optional[builtins.str] = None,
protocol: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
session_affinity: typing.Optional[builtins.str] = None,
timeouts: typing.Optional[typing.Union["ComputeRegionBackendServiceTimeouts", typing.Dict[builtins.str, typing.Any]]] = None,
timeout_sec: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param connection:
:param count:
:param depends_on:
:param for_each:
:param lifecycle:
:param provider:
:param provisioners:
        :param name: Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression '[a-z]([-a-z0-9]*[a-z0-9])?' which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#name ComputeRegionBackendService#name}
:param affinity_cookie_ttl_sec: Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE. If set to 0, the cookie is non-persistent and lasts only until the end of the browser session (or equivalent). The maximum allowed value for TTL is one day. When the load balancing scheme is INTERNAL, this field is not used. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#affinity_cookie_ttl_sec ComputeRegionBackendService#affinity_cookie_ttl_sec}
:param backend: backend block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#backend ComputeRegionBackendService#backend}
:param cdn_policy: cdn_policy block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cdn_policy ComputeRegionBackendService#cdn_policy}
:param circuit_breakers: circuit_breakers block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#circuit_breakers ComputeRegionBackendService#circuit_breakers}
        :param connection_draining_timeout_sec: Time for which instances will be drained (not accepting new connections, but still working to finish started ones). Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#connection_draining_timeout_sec ComputeRegionBackendService#connection_draining_timeout_sec}
:param consistent_hash: consistent_hash block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consistent_hash ComputeRegionBackendService#consistent_hash}
:param description: An optional description of this resource. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#description ComputeRegionBackendService#description}
:param enable_cdn: If true, enable Cloud CDN for this RegionBackendService. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enable_cdn ComputeRegionBackendService#enable_cdn}
:param failover_policy: failover_policy block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#failover_policy ComputeRegionBackendService#failover_policy}
:param health_checks: The set of URLs to HealthCheck resources for health checking this RegionBackendService. Currently at most one health check can be specified. A health check must be specified unless the backend service uses an internet or serverless NEG as a backend. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#health_checks ComputeRegionBackendService#health_checks}
:param iap: iap block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#iap ComputeRegionBackendService#iap}
:param id: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#id ComputeRegionBackendService#id}. Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2. If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
:param load_balancing_scheme: Indicates what kind of load balancing this regional backend service will be used for. A backend service created for one type of load balancing cannot be used with the other(s). For more information, refer to `Choosing a load balancer <https://cloud.google.com/load-balancing/docs/backend-service>`_. Default value: "INTERNAL" Possible values: ["EXTERNAL", "EXTERNAL_MANAGED", "INTERNAL", "INTERNAL_MANAGED"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#load_balancing_scheme ComputeRegionBackendService#load_balancing_scheme}
        :param locality_lb_policy: The load balancing algorithm used within the scope of the locality. The possible values are: 'ROUND_ROBIN': This is a simple policy in which each healthy backend is selected in round robin order. 'LEAST_REQUEST': An O(1) algorithm which selects two random healthy hosts and picks the host which has fewer active requests. 'RING_HASH': The ring/modulo hash load balancer implements consistent hashing to backends. The algorithm has the property that the addition/removal of a host from a set of N hosts only affects 1/N of the requests. 'RANDOM': The load balancer selects a random healthy host. 'ORIGINAL_DESTINATION': Backend host is selected based on the client connection metadata, i.e., connections are opened to the same address as the destination address of the incoming connection before the connection was redirected to the load balancer. 'MAGLEV': used as a drop-in replacement for the ring hash load balancer. Maglev is not as stable as ring hash but has faster table lookup build times and host selection times. For more information about Maglev, refer to https://ai.google/research/pubs/pub44824 'WEIGHTED_MAGLEV': Per-instance weighted Load Balancing via health check reported weights. If set, the Backend Service must configure a non-legacy HTTP-based Health Check, and health check replies are expected to contain non-standard HTTP response header field X-Load-Balancing-Endpoint-Weight to specify the per-instance weights. If set, Load Balancing is weighted based on the per-instance weights reported in the last processed health check replies, as long as every instance either reported a valid weight or had UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains equal-weight. This field is applicable to either: A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, and loadBalancingScheme set to INTERNAL_MANAGED. A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. A regional backend service with loadBalancingScheme set to EXTERNAL (External Network Load Balancing). Only MAGLEV and WEIGHTED_MAGLEV values are possible for External Network Load Balancing. The default is MAGLEV. If session_affinity is not NONE, and this field is not set to MAGLEV, WEIGHTED_MAGLEV, or RING_HASH, session affinity settings will not take effect. Only ROUND_ROBIN and RING_HASH are supported when the backend service is referenced by a URL map that is bound to a target gRPC proxy that has validate_for_proxyless field set to true. Possible values: ["ROUND_ROBIN", "LEAST_REQUEST", "RING_HASH", "RANDOM", "ORIGINAL_DESTINATION", "MAGLEV", "WEIGHTED_MAGLEV"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#locality_lb_policy ComputeRegionBackendService#locality_lb_policy}
:param log_config: log_config block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#log_config ComputeRegionBackendService#log_config}
:param network: The URL of the network to which this backend service belongs. This field can only be specified when the load balancing scheme is set to INTERNAL. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#network ComputeRegionBackendService#network}
:param outlier_detection: outlier_detection block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#outlier_detection ComputeRegionBackendService#outlier_detection}
:param port_name: A named port on a backend instance group representing the port for communication to the backend VMs in that group. Required when the loadBalancingScheme is EXTERNAL, EXTERNAL_MANAGED, INTERNAL_MANAGED, or INTERNAL_SELF_MANAGED and the backends are instance groups. The named port must be defined on each backend instance group. This parameter has no meaning if the backends are NEGs. API sets a default of "http" if not given. Must be omitted when the loadBalancingScheme is INTERNAL (Internal TCP/UDP Load Balancing). Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#port_name ComputeRegionBackendService#port_name}
:param project: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#project ComputeRegionBackendService#project}.
:param protocol: The protocol this RegionBackendService uses to communicate with backends. The default is HTTP. **NOTE**: HTTP2 is only valid for beta HTTP/2 load balancer types and may result in errors if used with the GA API. Possible values: ["HTTP", "HTTPS", "HTTP2", "SSL", "TCP", "UDP", "GRPC", "UNSPECIFIED"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#protocol ComputeRegionBackendService#protocol}
:param region: The Region in which the created backend service should reside. If it is not provided, the provider region is used. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#region ComputeRegionBackendService#region}
:param session_affinity: Type of session affinity to use. The default is NONE. Session affinity is not applicable if the protocol is UDP. Possible values: ["NONE", "CLIENT_IP", "CLIENT_IP_PORT_PROTO", "CLIENT_IP_PROTO", "GENERATED_COOKIE", "HEADER_FIELD", "HTTP_COOKIE", "CLIENT_IP_NO_DESTINATION"] Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#session_affinity ComputeRegionBackendService#session_affinity}
:param timeouts: timeouts block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#timeouts ComputeRegionBackendService#timeouts}
:param timeout_sec: How many seconds to wait for the backend before considering it a failed request. Default is 30 seconds. Valid range is [1, 86400]. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#timeout_sec ComputeRegionBackendService#timeout_sec}
'''
if isinstance(lifecycle, dict):
lifecycle = _cdktf_9a9027ec.TerraformResourceLifecycle(**lifecycle)
if isinstance(cdn_policy, dict):
cdn_policy = ComputeRegionBackendServiceCdnPolicy(**cdn_policy)
if isinstance(circuit_breakers, dict):
circuit_breakers = ComputeRegionBackendServiceCircuitBreakers(**circuit_breakers)
if isinstance(consistent_hash, dict):
consistent_hash = ComputeRegionBackendServiceConsistentHash(**consistent_hash)
if isinstance(failover_policy, dict):
failover_policy = ComputeRegionBackendServiceFailoverPolicy(**failover_policy)
if isinstance(iap, dict):
iap = ComputeRegionBackendServiceIap(**iap)
if isinstance(log_config, dict):
log_config = ComputeRegionBackendServiceLogConfig(**log_config)
if isinstance(outlier_detection, dict):
outlier_detection = ComputeRegionBackendServiceOutlierDetection(**outlier_detection)
if isinstance(timeouts, dict):
timeouts = ComputeRegionBackendServiceTimeouts(**timeouts)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__942d906da0191287bd3769b8797469069205f1e711375baabdb5fd74249f6e12)
check_type(argname="argument connection", value=connection, expected_type=type_hints["connection"])
check_type(argname="argument count", value=count, expected_type=type_hints["count"])
check_type(argname="argument depends_on", value=depends_on, expected_type=type_hints["depends_on"])
check_type(argname="argument for_each", value=for_each, expected_type=type_hints["for_each"])
check_type(argname="argument lifecycle", value=lifecycle, expected_type=type_hints["lifecycle"])
check_type(argname="argument provider", value=provider, expected_type=type_hints["provider"])
check_type(argname="argument provisioners", value=provisioners, expected_type=type_hints["provisioners"])
check_type(argname="argument name", value=name, expected_type=type_hints["name"])
check_type(argname="argument affinity_cookie_ttl_sec", value=affinity_cookie_ttl_sec, expected_type=type_hints["affinity_cookie_ttl_sec"])
check_type(argname="argument backend", value=backend, expected_type=type_hints["backend"])
check_type(argname="argument cdn_policy", value=cdn_policy, expected_type=type_hints["cdn_policy"])
check_type(argname="argument circuit_breakers", value=circuit_breakers, expected_type=type_hints["circuit_breakers"])
check_type(argname="argument connection_draining_timeout_sec", value=connection_draining_timeout_sec, expected_type=type_hints["connection_draining_timeout_sec"])
check_type(argname="argument consistent_hash", value=consistent_hash, expected_type=type_hints["consistent_hash"])
check_type(argname="argument description", value=description, expected_type=type_hints["description"])
check_type(argname="argument enable_cdn", value=enable_cdn, expected_type=type_hints["enable_cdn"])
check_type(argname="argument failover_policy", value=failover_policy, expected_type=type_hints["failover_policy"])
check_type(argname="argument health_checks", value=health_checks, expected_type=type_hints["health_checks"])
check_type(argname="argument iap", value=iap, expected_type=type_hints["iap"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
check_type(argname="argument load_balancing_scheme", value=load_balancing_scheme, expected_type=type_hints["load_balancing_scheme"])
check_type(argname="argument locality_lb_policy", value=locality_lb_policy, expected_type=type_hints["locality_lb_policy"])
check_type(argname="argument log_config", value=log_config, expected_type=type_hints["log_config"])
check_type(argname="argument network", value=network, expected_type=type_hints["network"])
check_type(argname="argument outlier_detection", value=outlier_detection, expected_type=type_hints["outlier_detection"])
check_type(argname="argument port_name", value=port_name, expected_type=type_hints["port_name"])
check_type(argname="argument project", value=project, expected_type=type_hints["project"])
check_type(argname="argument protocol", value=protocol, expected_type=type_hints["protocol"])
check_type(argname="argument region", value=region, expected_type=type_hints["region"])
check_type(argname="argument session_affinity", value=session_affinity, expected_type=type_hints["session_affinity"])
check_type(argname="argument timeouts", value=timeouts, expected_type=type_hints["timeouts"])
check_type(argname="argument timeout_sec", value=timeout_sec, expected_type=type_hints["timeout_sec"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"name": name,
}
if connection is not None:
self._values["connection"] = connection
if count is not None:
self._values["count"] = count
if depends_on is not None:
self._values["depends_on"] = depends_on
if for_each is not None:
self._values["for_each"] = for_each
if lifecycle is not None:
self._values["lifecycle"] = lifecycle
if provider is not None:
self._values["provider"] = provider
if provisioners is not None:
self._values["provisioners"] = provisioners
if affinity_cookie_ttl_sec is not None:
self._values["affinity_cookie_ttl_sec"] = affinity_cookie_ttl_sec
if backend is not None:
self._values["backend"] = backend
if cdn_policy is not None:
self._values["cdn_policy"] = cdn_policy
if circuit_breakers is not None:
self._values["circuit_breakers"] = circuit_breakers
if connection_draining_timeout_sec is not None:
self._values["connection_draining_timeout_sec"] = connection_draining_timeout_sec
if consistent_hash is not None:
self._values["consistent_hash"] = consistent_hash
if description is not None:
self._values["description"] = description
if enable_cdn is not None:
self._values["enable_cdn"] = enable_cdn
if failover_policy is not None:
self._values["failover_policy"] = failover_policy
if health_checks is not None:
self._values["health_checks"] = health_checks
if iap is not None:
self._values["iap"] = iap
if id is not None:
self._values["id"] = id
if load_balancing_scheme is not None:
self._values["load_balancing_scheme"] = load_balancing_scheme
if locality_lb_policy is not None:
self._values["locality_lb_policy"] = locality_lb_policy
if log_config is not None:
self._values["log_config"] = log_config
if network is not None:
self._values["network"] = network
if outlier_detection is not None:
self._values["outlier_detection"] = outlier_detection
if port_name is not None:
self._values["port_name"] = port_name
if project is not None:
self._values["project"] = project
if protocol is not None:
self._values["protocol"] = protocol
if region is not None:
self._values["region"] = region
if session_affinity is not None:
self._values["session_affinity"] = session_affinity
if timeouts is not None:
self._values["timeouts"] = timeouts
if timeout_sec is not None:
self._values["timeout_sec"] = timeout_sec
@builtins.property
def connection(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]]:
'''
:stability: experimental
'''
result = self._values.get("connection")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, _cdktf_9a9027ec.WinrmProvisionerConnection]], result)
@builtins.property
def count(
self,
) -> typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]]:
'''
:stability: experimental
'''
result = self._values.get("count")
return typing.cast(typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]], result)
@builtins.property
def depends_on(
self,
) -> typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]]:
'''
:stability: experimental
'''
result = self._values.get("depends_on")
return typing.cast(typing.Optional[typing.List[_cdktf_9a9027ec.ITerraformDependable]], result)
@builtins.property
def for_each(self) -> typing.Optional[_cdktf_9a9027ec.ITerraformIterator]:
'''
:stability: experimental
'''
result = self._values.get("for_each")
return typing.cast(typing.Optional[_cdktf_9a9027ec.ITerraformIterator], result)
@builtins.property
def lifecycle(self) -> typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle]:
'''
:stability: experimental
'''
result = self._values.get("lifecycle")
return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformResourceLifecycle], result)
@builtins.property
def provider(self) -> typing.Optional[_cdktf_9a9027ec.TerraformProvider]:
'''
:stability: experimental
'''
result = self._values.get("provider")
return typing.cast(typing.Optional[_cdktf_9a9027ec.TerraformProvider], result)
@builtins.property
def provisioners(
self,
) -> typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]]:
'''
:stability: experimental
'''
result = self._values.get("provisioners")
return typing.cast(typing.Optional[typing.List[typing.Union[_cdktf_9a9027ec.FileProvisioner, _cdktf_9a9027ec.LocalExecProvisioner, _cdktf_9a9027ec.RemoteExecProvisioner]]], result)
@builtins.property
def name(self) -> builtins.str:
'''Name of the resource.
Provided by the client when the resource is
created. The name must be 1-63 characters long, and comply with
RFC1035. Specifically, the name must be 1-63 characters long and match
        the regular expression '[a-z]([-a-z0-9]*[a-z0-9])?' which means the
first character must be a lowercase letter, and all following
characters must be a dash, lowercase letter, or digit, except the last
character, which cannot be a dash.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#name ComputeRegionBackendService#name}
'''
result = self._values.get("name")
assert result is not None, "Required property 'name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def affinity_cookie_ttl_sec(self) -> typing.Optional[jsii.Number]:
'''Lifetime of cookies in seconds if session_affinity is GENERATED_COOKIE.
If set to 0, the cookie is non-persistent and lasts
only until the end of the browser session (or equivalent). The
maximum allowed value for TTL is one day.
When the load balancing scheme is INTERNAL, this field is not used.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#affinity_cookie_ttl_sec ComputeRegionBackendService#affinity_cookie_ttl_sec}
'''
result = self._values.get("affinity_cookie_ttl_sec")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def backend(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceBackend]]]:
'''backend block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#backend ComputeRegionBackendService#backend}
'''
result = self._values.get("backend")
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceBackend]]], result)
@builtins.property
def cdn_policy(self) -> typing.Optional[ComputeRegionBackendServiceCdnPolicy]:
'''cdn_policy block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#cdn_policy ComputeRegionBackendService#cdn_policy}
'''
result = self._values.get("cdn_policy")
return typing.cast(typing.Optional[ComputeRegionBackendServiceCdnPolicy], result)
@builtins.property
def circuit_breakers(
self,
) -> typing.Optional[ComputeRegionBackendServiceCircuitBreakers]:
'''circuit_breakers block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#circuit_breakers ComputeRegionBackendService#circuit_breakers}
'''
result = self._values.get("circuit_breakers")
return typing.cast(typing.Optional[ComputeRegionBackendServiceCircuitBreakers], result)
@builtins.property
def connection_draining_timeout_sec(self) -> typing.Optional[jsii.Number]:
        '''Time for which instances will be drained (not accepting new connections, but still working to finish started ones).
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#connection_draining_timeout_sec ComputeRegionBackendService#connection_draining_timeout_sec}
'''
result = self._values.get("connection_draining_timeout_sec")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def consistent_hash(
self,
) -> typing.Optional["ComputeRegionBackendServiceConsistentHash"]:
'''consistent_hash block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consistent_hash ComputeRegionBackendService#consistent_hash}
'''
result = self._values.get("consistent_hash")
return typing.cast(typing.Optional["ComputeRegionBackendServiceConsistentHash"], result)
@builtins.property
def description(self) -> typing.Optional[builtins.str]:
'''An optional description of this resource.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#description ComputeRegionBackendService#description}
'''
result = self._values.get("description")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def enable_cdn(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''If true, enable Cloud CDN for this RegionBackendService.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enable_cdn ComputeRegionBackendService#enable_cdn}
'''
result = self._values.get("enable_cdn")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def failover_policy(
self,
) -> typing.Optional["ComputeRegionBackendServiceFailoverPolicy"]:
'''failover_policy block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#failover_policy ComputeRegionBackendService#failover_policy}
'''
result = self._values.get("failover_policy")
return typing.cast(typing.Optional["ComputeRegionBackendServiceFailoverPolicy"], result)
@builtins.property
def health_checks(self) -> typing.Optional[typing.List[builtins.str]]:
'''The set of URLs to HealthCheck resources for health checking this RegionBackendService. Currently at most one health check can be specified.
A health check must be specified unless the backend service uses an internet
or serverless NEG as a backend.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#health_checks ComputeRegionBackendService#health_checks}
'''
result = self._values.get("health_checks")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
@builtins.property
def iap(self) -> typing.Optional["ComputeRegionBackendServiceIap"]:
'''iap block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#iap ComputeRegionBackendService#iap}
'''
result = self._values.get("iap")
return typing.cast(typing.Optional["ComputeRegionBackendServiceIap"], result)
@builtins.property
def id(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#id ComputeRegionBackendService#id}.
Please be aware that the id field is automatically added to all resources in Terraform providers using a Terraform provider SDK version below 2.
If you experience problems setting this value it might not be settable. Please take a look at the provider documentation to ensure it should be settable.
'''
result = self._values.get("id")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def load_balancing_scheme(self) -> typing.Optional[builtins.str]:
'''Indicates what kind of load balancing this regional backend service will be used for.
A backend service created for one type of load
balancing cannot be used with the other(s). For more information, refer to
`Choosing a load balancer <https://cloud.google.com/load-balancing/docs/backend-service>`_. Default value: "INTERNAL" Possible values: ["EXTERNAL", "EXTERNAL_MANAGED", "INTERNAL", "INTERNAL_MANAGED"]
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#load_balancing_scheme ComputeRegionBackendService#load_balancing_scheme}
'''
result = self._values.get("load_balancing_scheme")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def locality_lb_policy(self) -> typing.Optional[builtins.str]:
        '''The load balancing algorithm used within the scope of the locality. The possible values are:
'ROUND_ROBIN': This is a simple policy in which each healthy backend
is selected in round robin order.
'LEAST_REQUEST': An O(1) algorithm which selects two random healthy
hosts and picks the host which has fewer active requests.
'RING_HASH': The ring/modulo hash load balancer implements consistent
hashing to backends. The algorithm has the property that the
addition/removal of a host from a set of N hosts only affects
1/N of the requests.
'RANDOM': The load balancer selects a random healthy host.
'ORIGINAL_DESTINATION': Backend host is selected based on the client
connection metadata, i.e., connections are opened
to the same address as the destination address of
the incoming connection before the connection
was redirected to the load balancer.
        'MAGLEV': used as a drop-in replacement for the ring hash load balancer.
Maglev is not as stable as ring hash but has faster table lookup
build times and host selection times. For more information about
Maglev, refer to https://ai.google/research/pubs/pub44824
'WEIGHTED_MAGLEV': Per-instance weighted Load Balancing via health check
reported weights. If set, the Backend Service must
        configure a non-legacy HTTP-based Health Check, and
health check replies are expected to contain
non-standard HTTP response header field
X-Load-Balancing-Endpoint-Weight to specify the
        per-instance weights. If set, Load Balancing is weighted
based on the per-instance weights reported in the last
processed health check replies, as long as every
instance either reported a valid weight or had
UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains
equal-weight.
This field is applicable to either:
A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2,
and loadBalancingScheme set to INTERNAL_MANAGED.
A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED.
A regional backend service with loadBalancingScheme set to EXTERNAL (External Network
Load Balancing). Only MAGLEV and WEIGHTED_MAGLEV values are possible for External
Network Load Balancing. The default is MAGLEV.
If session_affinity is not NONE, and this field is not set to MAGLEV, WEIGHTED_MAGLEV,
or RING_HASH, session affinity settings will not take effect.
Only ROUND_ROBIN and RING_HASH are supported when the backend service is referenced
        by a URL map that is bound to a target gRPC proxy that has validate_for_proxyless
field set to true. Possible values: ["ROUND_ROBIN", "LEAST_REQUEST", "RING_HASH", "RANDOM", "ORIGINAL_DESTINATION", "MAGLEV", "WEIGHTED_MAGLEV"]
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#locality_lb_policy ComputeRegionBackendService#locality_lb_policy}
'''
result = self._values.get("locality_lb_policy")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def log_config(self) -> typing.Optional["ComputeRegionBackendServiceLogConfig"]:
'''log_config block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#log_config ComputeRegionBackendService#log_config}
'''
result = self._values.get("log_config")
return typing.cast(typing.Optional["ComputeRegionBackendServiceLogConfig"], result)
@builtins.property
def network(self) -> typing.Optional[builtins.str]:
'''The URL of the network to which this backend service belongs.
This field can only be specified when the load balancing scheme is set to INTERNAL.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#network ComputeRegionBackendService#network}
'''
result = self._values.get("network")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def outlier_detection(
self,
) -> typing.Optional["ComputeRegionBackendServiceOutlierDetection"]:
'''outlier_detection block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#outlier_detection ComputeRegionBackendService#outlier_detection}
'''
result = self._values.get("outlier_detection")
return typing.cast(typing.Optional["ComputeRegionBackendServiceOutlierDetection"], result)
@builtins.property
def port_name(self) -> typing.Optional[builtins.str]:
'''A named port on a backend instance group representing the port for communication to the backend VMs in that group.
Required when the
loadBalancingScheme is EXTERNAL, EXTERNAL_MANAGED, INTERNAL_MANAGED, or INTERNAL_SELF_MANAGED
and the backends are instance groups. The named port must be defined on each
backend instance group. This parameter has no meaning if the backends are NEGs. API sets a
default of "http" if not given.
Must be omitted when the loadBalancingScheme is INTERNAL (Internal TCP/UDP Load Balancing).
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#port_name ComputeRegionBackendService#port_name}
'''
result = self._values.get("port_name")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def project(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#project ComputeRegionBackendService#project}.'''
result = self._values.get("project")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def protocol(self) -> typing.Optional[builtins.str]:
'''The protocol this RegionBackendService uses to communicate with backends.
The default is HTTP. **NOTE**: HTTP2 is only valid for beta HTTP/2 load balancer
types and may result in errors if used with the GA API. Possible values: ["HTTP", "HTTPS", "HTTP2", "SSL", "TCP", "UDP", "GRPC", "UNSPECIFIED"]
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#protocol ComputeRegionBackendService#protocol}
'''
result = self._values.get("protocol")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def region(self) -> typing.Optional[builtins.str]:
'''The Region in which the created backend service should reside. If it is not provided, the provider region is used.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#region ComputeRegionBackendService#region}
'''
result = self._values.get("region")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def session_affinity(self) -> typing.Optional[builtins.str]:
'''Type of session affinity to use.
The default is NONE. Session affinity is
not applicable if the protocol is UDP. Possible values: ["NONE", "CLIENT_IP", "CLIENT_IP_PORT_PROTO", "CLIENT_IP_PROTO", "GENERATED_COOKIE", "HEADER_FIELD", "HTTP_COOKIE", "CLIENT_IP_NO_DESTINATION"]
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#session_affinity ComputeRegionBackendService#session_affinity}
'''
result = self._values.get("session_affinity")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def timeouts(self) -> typing.Optional["ComputeRegionBackendServiceTimeouts"]:
'''timeouts block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#timeouts ComputeRegionBackendService#timeouts}
'''
result = self._values.get("timeouts")
return typing.cast(typing.Optional["ComputeRegionBackendServiceTimeouts"], result)
@builtins.property
def timeout_sec(self) -> typing.Optional[jsii.Number]:
'''How many seconds to wait for the backend before considering it a failed request.
Default is 30 seconds. Valid range is [1, 86400].
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#timeout_sec ComputeRegionBackendService#timeout_sec}
'''
result = self._values.get("timeout_sec")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceConfig(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
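# Usage sketch (illustrative; the enclosing cdktf stack and provider wiring
# are assumed, not defined here): only "name" is required, all other fields
# are optional. Values below are hypothetical.
#
#   config = ComputeRegionBackendServiceConfig(
#       name="my-backend-service",
#       region="us-central1",              # falls back to the provider region
#       load_balancing_scheme="INTERNAL",  # the documented default
#       protocol="TCP",
#       timeout_sec=30,
#   )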
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceConsistentHash",
jsii_struct_bases=[],
name_mapping={
"http_cookie": "httpCookie",
"http_header_name": "httpHeaderName",
"minimum_ring_size": "minimumRingSize",
},
)
class ComputeRegionBackendServiceConsistentHash:
def __init__(
self,
*,
http_cookie: typing.Optional[typing.Union["ComputeRegionBackendServiceConsistentHashHttpCookie", typing.Dict[builtins.str, typing.Any]]] = None,
http_header_name: typing.Optional[builtins.str] = None,
minimum_ring_size: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param http_cookie: http_cookie block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#http_cookie ComputeRegionBackendService#http_cookie}
:param http_header_name: The hash based on the value of the specified header field. This field is applicable if the sessionAffinity is set to HEADER_FIELD. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#http_header_name ComputeRegionBackendService#http_header_name}
:param minimum_ring_size: The minimum number of virtual nodes to use for the hash ring. Larger ring sizes result in more granular load distributions. If the number of hosts in the load balancing pool is larger than the ring size, each host will be assigned a single virtual node. Defaults to 1024. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#minimum_ring_size ComputeRegionBackendService#minimum_ring_size}
'''
if isinstance(http_cookie, dict):
http_cookie = ComputeRegionBackendServiceConsistentHashHttpCookie(**http_cookie)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__186f5d67feab665bb2d4876bd04486790631383e709402ca1ff6c65f62dc27f9)
check_type(argname="argument http_cookie", value=http_cookie, expected_type=type_hints["http_cookie"])
check_type(argname="argument http_header_name", value=http_header_name, expected_type=type_hints["http_header_name"])
check_type(argname="argument minimum_ring_size", value=minimum_ring_size, expected_type=type_hints["minimum_ring_size"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if http_cookie is not None:
self._values["http_cookie"] = http_cookie
if http_header_name is not None:
self._values["http_header_name"] = http_header_name
if minimum_ring_size is not None:
self._values["minimum_ring_size"] = minimum_ring_size
@builtins.property
def http_cookie(
self,
) -> typing.Optional["ComputeRegionBackendServiceConsistentHashHttpCookie"]:
'''http_cookie block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#http_cookie ComputeRegionBackendService#http_cookie}
'''
result = self._values.get("http_cookie")
return typing.cast(typing.Optional["ComputeRegionBackendServiceConsistentHashHttpCookie"], result)
@builtins.property
def http_header_name(self) -> typing.Optional[builtins.str]:
'''The hash based on the value of the specified header field.
This field is applicable if the sessionAffinity is set to HEADER_FIELD.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#http_header_name ComputeRegionBackendService#http_header_name}
'''
result = self._values.get("http_header_name")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def minimum_ring_size(self) -> typing.Optional[jsii.Number]:
'''The minimum number of virtual nodes to use for the hash ring.
Larger ring sizes result in more granular load
distributions. If the number of hosts in the load balancing pool
is larger than the ring size, each host will be assigned a single
virtual node.
Defaults to 1024.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#minimum_ring_size ComputeRegionBackendService#minimum_ring_size}
'''
result = self._values.get("minimum_ring_size")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceConsistentHash(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
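# Usage sketch (illustrative): hashing on a request header, which per the
# docstrings above only takes effect when session affinity is HEADER_FIELD.
# The header name is hypothetical.
#
#   consistent_hash = ComputeRegionBackendServiceConsistentHash(
#       http_header_name="x-session-id",
#       minimum_ring_size=2048,   # default is 1024
#   )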
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceConsistentHashHttpCookie",
jsii_struct_bases=[],
name_mapping={"name": "name", "path": "path", "ttl": "ttl"},
)
class ComputeRegionBackendServiceConsistentHashHttpCookie:
def __init__(
self,
*,
name: typing.Optional[builtins.str] = None,
path: typing.Optional[builtins.str] = None,
ttl: typing.Optional[typing.Union["ComputeRegionBackendServiceConsistentHashHttpCookieTtl", typing.Dict[builtins.str, typing.Any]]] = None,
) -> None:
'''
:param name: Name of the cookie. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#name ComputeRegionBackendService#name}
:param path: Path to set for the cookie. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#path ComputeRegionBackendService#path}
:param ttl: ttl block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#ttl ComputeRegionBackendService#ttl}
'''
if isinstance(ttl, dict):
ttl = ComputeRegionBackendServiceConsistentHashHttpCookieTtl(**ttl)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6a117844a8b60b6aea7c8b6199be86460b3f62731975b7223126cba9fba95cc5)
check_type(argname="argument name", value=name, expected_type=type_hints["name"])
check_type(argname="argument path", value=path, expected_type=type_hints["path"])
check_type(argname="argument ttl", value=ttl, expected_type=type_hints["ttl"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if name is not None:
self._values["name"] = name
if path is not None:
self._values["path"] = path
if ttl is not None:
self._values["ttl"] = ttl
@builtins.property
def name(self) -> typing.Optional[builtins.str]:
'''Name of the cookie.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#name ComputeRegionBackendService#name}
'''
result = self._values.get("name")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def path(self) -> typing.Optional[builtins.str]:
'''Path to set for the cookie.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#path ComputeRegionBackendService#path}
'''
result = self._values.get("path")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def ttl(
self,
) -> typing.Optional["ComputeRegionBackendServiceConsistentHashHttpCookieTtl"]:
'''ttl block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#ttl ComputeRegionBackendService#ttl}
'''
result = self._values.get("ttl")
return typing.cast(typing.Optional["ComputeRegionBackendServiceConsistentHashHttpCookieTtl"], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceConsistentHashHttpCookie(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
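# Usage sketch (illustrative): the nested ttl block may be passed as a plain
# dict; the __init__ above coerces it into the ...HttpCookieTtl struct. The
# cookie name and path are hypothetical.
#
#   cookie = ComputeRegionBackendServiceConsistentHashHttpCookie(
#       name="session",
#       path="/",
#       ttl={"seconds": 3600},
#   )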
class ComputeRegionBackendServiceConsistentHashHttpCookieOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceConsistentHashHttpCookieOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__073af85ae8f145e89c0d5a6c02390e588b2b6158f11b6834ad5e3eedf8f6eafb)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="putTtl")
def put_ttl(
self,
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
:param nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
value = ComputeRegionBackendServiceConsistentHashHttpCookieTtl(
seconds=seconds, nanos=nanos
)
return typing.cast(None, jsii.invoke(self, "putTtl", [value]))
@jsii.member(jsii_name="resetName")
def reset_name(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetName", []))
@jsii.member(jsii_name="resetPath")
def reset_path(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetPath", []))
@jsii.member(jsii_name="resetTtl")
def reset_ttl(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetTtl", []))
@builtins.property
@jsii.member(jsii_name="ttl")
def ttl(
self,
) -> "ComputeRegionBackendServiceConsistentHashHttpCookieTtlOutputReference":
return typing.cast("ComputeRegionBackendServiceConsistentHashHttpCookieTtlOutputReference", jsii.get(self, "ttl"))
@builtins.property
@jsii.member(jsii_name="nameInput")
def name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "nameInput"))
@builtins.property
@jsii.member(jsii_name="pathInput")
def path_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "pathInput"))
@builtins.property
@jsii.member(jsii_name="ttlInput")
def ttl_input(
self,
) -> typing.Optional["ComputeRegionBackendServiceConsistentHashHttpCookieTtl"]:
return typing.cast(typing.Optional["ComputeRegionBackendServiceConsistentHashHttpCookieTtl"], jsii.get(self, "ttlInput"))
@builtins.property
@jsii.member(jsii_name="name")
def name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "name"))
@name.setter
def name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b81d61906c2938b5058b5b3d96276cd846669df06a2d7be3ff48e0580b42b86e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "name", value)
@builtins.property
@jsii.member(jsii_name="path")
def path(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "path"))
@path.setter
def path(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3495691e0fed2f3a351c6eb3e1c93e43d07afaa46d80c5875ccb512e64235d07)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "path", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookie]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookie], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookie],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b9a667f311f1b74f716e42760ebf889e3384726f1561a58a4f19ec74276d95df)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceConsistentHashHttpCookieTtl",
jsii_struct_bases=[],
name_mapping={"seconds": "seconds", "nanos": "nanos"},
)
class ComputeRegionBackendServiceConsistentHashHttpCookieTtl:
def __init__(
self,
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
:param nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 seconds field and a positive nanos field. Must be from 0 to 999,999,999 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__95b9966f17083683869b8e88c1cd71c1a24155c4b3a05ef6336b2b68d9e8c4c3)
check_type(argname="argument seconds", value=seconds, expected_type=type_hints["seconds"])
check_type(argname="argument nanos", value=nanos, expected_type=type_hints["nanos"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"seconds": seconds,
}
if nanos is not None:
self._values["nanos"] = nanos
@builtins.property
def seconds(self) -> jsii.Number:
'''Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
'''
result = self._values.get("seconds")
assert result is not None, "Required property 'seconds' is missing"
return typing.cast(jsii.Number, result)
@builtins.property
def nanos(self) -> typing.Optional[jsii.Number]:
'''Span of time that's a fraction of a second at nanosecond resolution.
Durations less than one second are represented
with a 0 seconds field and a positive nanos field. Must
be from 0 to 999,999,999 inclusive.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
result = self._values.get("nanos")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceConsistentHashHttpCookieTtl(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
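

# Illustrative sketch (not part of the generated bindings; values hypothetical):
# constructing a TTL for the consistent-hash HTTP cookie. A duration of 1.5
# seconds is written as seconds=1 plus nanos=500_000_000, because 'nanos'
# carries only the sub-second fraction (0 to 999,999,999 inclusive):
#
#     cookie_ttl = ComputeRegionBackendServiceConsistentHashHttpCookieTtl(
#         seconds=1,
#         nanos=500_000_000,
#     )
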
class ComputeRegionBackendServiceConsistentHashHttpCookieTtlOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceConsistentHashHttpCookieTtlOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5360707d67fb0c194038bc663fda444dd849b87b5ade6f5bc97540aba8b40692)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetNanos")
def reset_nanos(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetNanos", []))
@builtins.property
@jsii.member(jsii_name="nanosInput")
def nanos_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "nanosInput"))
@builtins.property
@jsii.member(jsii_name="secondsInput")
def seconds_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "secondsInput"))
@builtins.property
@jsii.member(jsii_name="nanos")
def nanos(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "nanos"))
@nanos.setter
def nanos(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__fc497e516adef4ade1a0ff25b07115fee1c668e30888d712360d2b8e311d1b14)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "nanos", value)
@builtins.property
@jsii.member(jsii_name="seconds")
def seconds(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "seconds"))
@seconds.setter
def seconds(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6ca4459ee7821e229e9ab5c67900a99758d62700ffb4ce37d823db6e56d7098f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "seconds", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookieTtl]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookieTtl], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookieTtl],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a44d4a364777f5e9c836721c84e417a1767afb63f2f4cbb4419ebc476a261eb8)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class ComputeRegionBackendServiceConsistentHashOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceConsistentHashOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__af2fa8ba75b1ca993fdb1e4a1b65887c59accc42b8dd809ae58e1694dda3dbf8)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="putHttpCookie")
def put_http_cookie(
self,
*,
name: typing.Optional[builtins.str] = None,
path: typing.Optional[builtins.str] = None,
ttl: typing.Optional[typing.Union[ComputeRegionBackendServiceConsistentHashHttpCookieTtl, typing.Dict[builtins.str, typing.Any]]] = None,
) -> None:
'''
:param name: Name of the cookie. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#name ComputeRegionBackendService#name}
:param path: Path to set for the cookie. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#path ComputeRegionBackendService#path}
:param ttl: ttl block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#ttl ComputeRegionBackendService#ttl}
'''
value = ComputeRegionBackendServiceConsistentHashHttpCookie(
name=name, path=path, ttl=ttl
)
return typing.cast(None, jsii.invoke(self, "putHttpCookie", [value]))
@jsii.member(jsii_name="resetHttpCookie")
def reset_http_cookie(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHttpCookie", []))
@jsii.member(jsii_name="resetHttpHeaderName")
def reset_http_header_name(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetHttpHeaderName", []))
@jsii.member(jsii_name="resetMinimumRingSize")
def reset_minimum_ring_size(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMinimumRingSize", []))
@builtins.property
@jsii.member(jsii_name="httpCookie")
def http_cookie(
self,
) -> ComputeRegionBackendServiceConsistentHashHttpCookieOutputReference:
return typing.cast(ComputeRegionBackendServiceConsistentHashHttpCookieOutputReference, jsii.get(self, "httpCookie"))
@builtins.property
@jsii.member(jsii_name="httpCookieInput")
def http_cookie_input(
self,
) -> typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookie]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookie], jsii.get(self, "httpCookieInput"))
@builtins.property
@jsii.member(jsii_name="httpHeaderNameInput")
def http_header_name_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "httpHeaderNameInput"))
@builtins.property
@jsii.member(jsii_name="minimumRingSizeInput")
def minimum_ring_size_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "minimumRingSizeInput"))
@builtins.property
@jsii.member(jsii_name="httpHeaderName")
def http_header_name(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "httpHeaderName"))
@http_header_name.setter
def http_header_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__098d8871222d597232b4ff357617d254894e28ca18b5eae2e590eeee7fefc59a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "httpHeaderName", value)
@builtins.property
@jsii.member(jsii_name="minimumRingSize")
def minimum_ring_size(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "minimumRingSize"))
@minimum_ring_size.setter
def minimum_ring_size(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__93de2d2732df7a7bc8a92b1f6fcc21733a3fb155471dc957412fd690d82568a2)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "minimumRingSize", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceConsistentHash]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceConsistentHash], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceConsistentHash],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__557cf970794dc4fd129c66c4f38e0caf6700d46d8f42557a6318623f6d4c7e53)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceFailoverPolicy",
jsii_struct_bases=[],
name_mapping={
"disable_connection_drain_on_failover": "disableConnectionDrainOnFailover",
"drop_traffic_if_unhealthy": "dropTrafficIfUnhealthy",
"failover_ratio": "failoverRatio",
},
)
class ComputeRegionBackendServiceFailoverPolicy:
def __init__(
self,
*,
disable_connection_drain_on_failover: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
drop_traffic_if_unhealthy: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
failover_ratio: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param disable_connection_drain_on_failover: On failover or failback, this field indicates whether connection drain will be honored. Setting this to true has the following effect: connections to the old active pool are not drained. Connections to the new active pool use the timeout of 10 min (currently fixed). Setting to false has the following effect: both old and new connections will have a drain timeout of 10 min. This can be set to true only if the protocol is TCP. The default is false. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#disable_connection_drain_on_failover ComputeRegionBackendService#disable_connection_drain_on_failover}
:param drop_traffic_if_unhealthy: This option is used only when no healthy VMs are detected in the primary and backup instance groups. When set to true, traffic is dropped. When set to false, new connections are sent across all VMs in the primary group. The default is false. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#drop_traffic_if_unhealthy ComputeRegionBackendService#drop_traffic_if_unhealthy}
        :param failover_ratio: The value of the field must be in [0, 1]. If the ratio of the healthy VMs in the primary backend is at or below this number, traffic arriving at the load-balanced IP will be directed to the failover backend. If 'failoverRatio' is not set, or all the VMs in the backup backend are unhealthy, traffic is directed back to the primary backend in "force" mode, where it is spread across the healthy VMs on a best-effort basis, or across all VMs when no VM is healthy. This field is only used with L4 load balancing. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#failover_ratio ComputeRegionBackendService#failover_ratio}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b6284ed9b159f56d80a68611ca0b561c5a0fa9ff4c4f6d4b57864ec2d5c4c4c3)
check_type(argname="argument disable_connection_drain_on_failover", value=disable_connection_drain_on_failover, expected_type=type_hints["disable_connection_drain_on_failover"])
check_type(argname="argument drop_traffic_if_unhealthy", value=drop_traffic_if_unhealthy, expected_type=type_hints["drop_traffic_if_unhealthy"])
check_type(argname="argument failover_ratio", value=failover_ratio, expected_type=type_hints["failover_ratio"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if disable_connection_drain_on_failover is not None:
self._values["disable_connection_drain_on_failover"] = disable_connection_drain_on_failover
if drop_traffic_if_unhealthy is not None:
self._values["drop_traffic_if_unhealthy"] = drop_traffic_if_unhealthy
if failover_ratio is not None:
self._values["failover_ratio"] = failover_ratio
@builtins.property
def disable_connection_drain_on_failover(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''On failover or failback, this field indicates whether connection drain will be honored.
Setting this to true has the following effect: connections
to the old active pool are not drained. Connections to the new active pool
use the timeout of 10 min (currently fixed). Setting to false has the
following effect: both old and new connections will have a drain timeout
of 10 min.
This can be set to true only if the protocol is TCP.
The default is false.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#disable_connection_drain_on_failover ComputeRegionBackendService#disable_connection_drain_on_failover}
'''
result = self._values.get("disable_connection_drain_on_failover")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def drop_traffic_if_unhealthy(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''This option is used only when no healthy VMs are detected in the primary and backup instance groups.
When set to true, traffic is dropped. When
set to false, new connections are sent across all VMs in the primary group.
The default is false.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#drop_traffic_if_unhealthy ComputeRegionBackendService#drop_traffic_if_unhealthy}
'''
result = self._values.get("drop_traffic_if_unhealthy")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def failover_ratio(self) -> typing.Optional[jsii.Number]:
'''The value of the field must be in [0, 1].
If the ratio of the healthy
VMs in the primary backend is at or below this number, traffic arriving
at the load-balanced IP will be directed to the failover backend.
        If 'failoverRatio' is not set, or all the VMs in the backup
        backend are unhealthy, traffic is directed back to the primary
        backend in "force" mode, where it is spread across the healthy
        VMs on a best-effort basis, or across all VMs when no VM is healthy.
        This field is only used with L4 load balancing.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#failover_ratio ComputeRegionBackendService#failover_ratio}
'''
result = self._values.get("failover_ratio")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceFailoverPolicy(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
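

# Illustrative sketch (values hypothetical): a failover policy that drops
# traffic when no backends are healthy and fails over once the healthy share
# of primary VMs is at or below 10% (failover_ratio must lie in [0, 1]):
#
#     failover = ComputeRegionBackendServiceFailoverPolicy(
#         disable_connection_drain_on_failover=False,
#         drop_traffic_if_unhealthy=True,
#         failover_ratio=0.1,
#     )
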
class ComputeRegionBackendServiceFailoverPolicyOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceFailoverPolicyOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__17f7c0552e4067873dc9d263beba6d8d2a9214effc8b9d4460061fff573b8466)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetDisableConnectionDrainOnFailover")
def reset_disable_connection_drain_on_failover(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDisableConnectionDrainOnFailover", []))
@jsii.member(jsii_name="resetDropTrafficIfUnhealthy")
def reset_drop_traffic_if_unhealthy(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDropTrafficIfUnhealthy", []))
@jsii.member(jsii_name="resetFailoverRatio")
def reset_failover_ratio(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetFailoverRatio", []))
@builtins.property
@jsii.member(jsii_name="disableConnectionDrainOnFailoverInput")
def disable_connection_drain_on_failover_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "disableConnectionDrainOnFailoverInput"))
@builtins.property
@jsii.member(jsii_name="dropTrafficIfUnhealthyInput")
def drop_traffic_if_unhealthy_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "dropTrafficIfUnhealthyInput"))
@builtins.property
@jsii.member(jsii_name="failoverRatioInput")
def failover_ratio_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "failoverRatioInput"))
@builtins.property
@jsii.member(jsii_name="disableConnectionDrainOnFailover")
def disable_connection_drain_on_failover(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "disableConnectionDrainOnFailover"))
@disable_connection_drain_on_failover.setter
def disable_connection_drain_on_failover(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5ae2c82c68cb277cf0b8b9e729afec5be6df66bfc5b7c93db6b52d8852cc7362)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "disableConnectionDrainOnFailover", value)
@builtins.property
@jsii.member(jsii_name="dropTrafficIfUnhealthy")
def drop_traffic_if_unhealthy(
self,
) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "dropTrafficIfUnhealthy"))
@drop_traffic_if_unhealthy.setter
def drop_traffic_if_unhealthy(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__cd88abb42f8372b9f02b2b941f46ab10cdbf30850188b49c268c209efd5b7c6b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "dropTrafficIfUnhealthy", value)
@builtins.property
@jsii.member(jsii_name="failoverRatio")
def failover_ratio(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "failoverRatio"))
@failover_ratio.setter
def failover_ratio(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5f29fbbe3ae76726c9fd92dc85e94a761b043d5e3ee926cb0e966cf866d6823b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "failoverRatio", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceFailoverPolicy]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceFailoverPolicy], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceFailoverPolicy],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__41c1420fa32d0e7b6544d46ff33d21f615f33521b2cbb93814eaebf4cda6238a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceIap",
jsii_struct_bases=[],
name_mapping={
"oauth2_client_id": "oauth2ClientId",
"oauth2_client_secret": "oauth2ClientSecret",
},
)
class ComputeRegionBackendServiceIap:
def __init__(
self,
*,
oauth2_client_id: builtins.str,
oauth2_client_secret: builtins.str,
) -> None:
'''
:param oauth2_client_id: OAuth2 Client ID for IAP. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#oauth2_client_id ComputeRegionBackendService#oauth2_client_id}
:param oauth2_client_secret: OAuth2 Client Secret for IAP. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#oauth2_client_secret ComputeRegionBackendService#oauth2_client_secret}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f7d6ececf941f643a1e253241ed5cdd3b118d1ce0799c7500a5881efda2d4f1b)
check_type(argname="argument oauth2_client_id", value=oauth2_client_id, expected_type=type_hints["oauth2_client_id"])
check_type(argname="argument oauth2_client_secret", value=oauth2_client_secret, expected_type=type_hints["oauth2_client_secret"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"oauth2_client_id": oauth2_client_id,
"oauth2_client_secret": oauth2_client_secret,
}
@builtins.property
def oauth2_client_id(self) -> builtins.str:
'''OAuth2 Client ID for IAP.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#oauth2_client_id ComputeRegionBackendService#oauth2_client_id}
'''
result = self._values.get("oauth2_client_id")
assert result is not None, "Required property 'oauth2_client_id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def oauth2_client_secret(self) -> builtins.str:
'''OAuth2 Client Secret for IAP.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#oauth2_client_secret ComputeRegionBackendService#oauth2_client_secret}
'''
result = self._values.get("oauth2_client_secret")
assert result is not None, "Required property 'oauth2_client_secret' is missing"
return typing.cast(builtins.str, result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceIap(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
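

# Illustrative sketch: wiring Identity-Aware Proxy credentials. Both fields
# are required; the literals below are placeholders, and in practice the
# client secret should come from a Terraform variable or a secret store
# rather than source code:
#
#     iap = ComputeRegionBackendServiceIap(
#         oauth2_client_id="example-id.apps.googleusercontent.com",  # placeholder
#         oauth2_client_secret="example-secret",  # placeholder; never hard-code real secrets
#     )
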
class ComputeRegionBackendServiceIapOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceIapOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__393a5e670cd1080a97347f695ca218be5b8b65fdbcf5b9a660f574e128317df2)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@builtins.property
@jsii.member(jsii_name="oauth2ClientSecretSha256")
def oauth2_client_secret_sha256(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "oauth2ClientSecretSha256"))
@builtins.property
@jsii.member(jsii_name="oauth2ClientIdInput")
def oauth2_client_id_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "oauth2ClientIdInput"))
@builtins.property
@jsii.member(jsii_name="oauth2ClientSecretInput")
def oauth2_client_secret_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "oauth2ClientSecretInput"))
@builtins.property
@jsii.member(jsii_name="oauth2ClientId")
def oauth2_client_id(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "oauth2ClientId"))
@oauth2_client_id.setter
def oauth2_client_id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__26f4d193408e9ba6f0d7d60ea8ab47c19e0c8257fafce87e00dc47be842f81a4)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "oauth2ClientId", value)
@builtins.property
@jsii.member(jsii_name="oauth2ClientSecret")
def oauth2_client_secret(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "oauth2ClientSecret"))
@oauth2_client_secret.setter
def oauth2_client_secret(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__9f917ad4a226fcfd428e938a1ec29f5bf7137a8e710c69b0a95f82df21ef35ea)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "oauth2ClientSecret", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(self) -> typing.Optional[ComputeRegionBackendServiceIap]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceIap], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceIap],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__0bf5be8938c09405557128de3592a24365a490c956a44ad68f98ecd30300fd63)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceLogConfig",
jsii_struct_bases=[],
name_mapping={"enable": "enable", "sample_rate": "sampleRate"},
)
class ComputeRegionBackendServiceLogConfig:
def __init__(
self,
*,
enable: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
sample_rate: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param enable: Whether to enable logging for the load balancer traffic served by this backend service. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enable ComputeRegionBackendService#enable}
:param sample_rate: This field can only be specified if logging is enabled for this backend service. The value of the field must be in [0, 1]. This configures the sampling rate of requests to the load balancer where 1.0 means all logged requests are reported and 0.0 means no logged requests are reported. The default value is 1.0. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#sample_rate ComputeRegionBackendService#sample_rate}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__e185711bb1ea583316001ddc5812a92d7ecca9eb17279f9d4a8a3cb58e3199fa)
check_type(argname="argument enable", value=enable, expected_type=type_hints["enable"])
check_type(argname="argument sample_rate", value=sample_rate, expected_type=type_hints["sample_rate"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if enable is not None:
self._values["enable"] = enable
if sample_rate is not None:
self._values["sample_rate"] = sample_rate
@builtins.property
def enable(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
'''Whether to enable logging for the load balancer traffic served by this backend service.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enable ComputeRegionBackendService#enable}
'''
result = self._values.get("enable")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], result)
@builtins.property
def sample_rate(self) -> typing.Optional[jsii.Number]:
'''This field can only be specified if logging is enabled for this backend service.
The value of
the field must be in [0, 1]. This configures the sampling rate of requests to the load balancer
where 1.0 means all logged requests are reported and 0.0 means no logged requests are reported.
The default value is 1.0.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#sample_rate ComputeRegionBackendService#sample_rate}
'''
result = self._values.get("sample_rate")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceLogConfig(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
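

# Illustrative sketch (values hypothetical): enabling load-balancer logging
# and reporting half of the logged requests. sample_rate must lie in [0, 1]
# and is only meaningful when logging is enabled:
#
#     log_config = ComputeRegionBackendServiceLogConfig(
#         enable=True,
#         sample_rate=0.5,
#     )
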
class ComputeRegionBackendServiceLogConfigOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceLogConfigOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a816928e8483eb7fae1bb464b0e5cc3d5d67175b4adeca118360b8db38fa0c42)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetEnable")
def reset_enable(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEnable", []))
@jsii.member(jsii_name="resetSampleRate")
def reset_sample_rate(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSampleRate", []))
@builtins.property
@jsii.member(jsii_name="enableInput")
def enable_input(
self,
) -> typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]]:
return typing.cast(typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]], jsii.get(self, "enableInput"))
@builtins.property
@jsii.member(jsii_name="sampleRateInput")
def sample_rate_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "sampleRateInput"))
@builtins.property
@jsii.member(jsii_name="enable")
def enable(self) -> typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]:
return typing.cast(typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable], jsii.get(self, "enable"))
@enable.setter
def enable(
self,
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b9c8edd729f954a3ff969d7ced5931df429a474af2c9ffa47e7a63175bc252c4)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "enable", value)
@builtins.property
@jsii.member(jsii_name="sampleRate")
def sample_rate(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "sampleRate"))
@sample_rate.setter
def sample_rate(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__9835ef1c1f7f2077e32cfb4f7cf972a9424a5d92b18829ae455648f70112f9aa)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "sampleRate", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(self) -> typing.Optional[ComputeRegionBackendServiceLogConfig]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceLogConfig], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceLogConfig],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__5469b01a71f51ffce1fc43497651cd8cd367fc147b71216876ecc9468966575f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceOutlierDetection",
jsii_struct_bases=[],
name_mapping={
"base_ejection_time": "baseEjectionTime",
"consecutive_errors": "consecutiveErrors",
"consecutive_gateway_failure": "consecutiveGatewayFailure",
"enforcing_consecutive_errors": "enforcingConsecutiveErrors",
"enforcing_consecutive_gateway_failure": "enforcingConsecutiveGatewayFailure",
"enforcing_success_rate": "enforcingSuccessRate",
"interval": "interval",
"max_ejection_percent": "maxEjectionPercent",
"success_rate_minimum_hosts": "successRateMinimumHosts",
"success_rate_request_volume": "successRateRequestVolume",
"success_rate_stdev_factor": "successRateStdevFactor",
},
)
class ComputeRegionBackendServiceOutlierDetection:
def __init__(
self,
*,
base_ejection_time: typing.Optional[typing.Union["ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime", typing.Dict[builtins.str, typing.Any]]] = None,
consecutive_errors: typing.Optional[jsii.Number] = None,
consecutive_gateway_failure: typing.Optional[jsii.Number] = None,
enforcing_consecutive_errors: typing.Optional[jsii.Number] = None,
enforcing_consecutive_gateway_failure: typing.Optional[jsii.Number] = None,
enforcing_success_rate: typing.Optional[jsii.Number] = None,
interval: typing.Optional[typing.Union["ComputeRegionBackendServiceOutlierDetectionInterval", typing.Dict[builtins.str, typing.Any]]] = None,
max_ejection_percent: typing.Optional[jsii.Number] = None,
success_rate_minimum_hosts: typing.Optional[jsii.Number] = None,
success_rate_request_volume: typing.Optional[jsii.Number] = None,
success_rate_stdev_factor: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param base_ejection_time: base_ejection_time block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#base_ejection_time ComputeRegionBackendService#base_ejection_time}
:param consecutive_errors: Number of errors before a host is ejected from the connection pool. When the backend host is accessed over HTTP, a 5xx return code qualifies as an error. Defaults to 5. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consecutive_errors ComputeRegionBackendService#consecutive_errors}
:param consecutive_gateway_failure: The number of consecutive gateway failures (502, 503, 504 status or connection errors that are mapped to one of those status codes) before a consecutive gateway failure ejection occurs. Defaults to 5. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consecutive_gateway_failure ComputeRegionBackendService#consecutive_gateway_failure}
        :param enforcing_consecutive_errors: The percentage chance that a host will actually be ejected when an outlier status is detected through consecutive 5xx errors. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 100. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_consecutive_errors ComputeRegionBackendService#enforcing_consecutive_errors}
        :param enforcing_consecutive_gateway_failure: The percentage chance that a host will actually be ejected when an outlier status is detected through consecutive gateway failures. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 0. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_consecutive_gateway_failure ComputeRegionBackendService#enforcing_consecutive_gateway_failure}
        :param enforcing_success_rate: The percentage chance that a host will actually be ejected when an outlier status is detected through success rate statistics. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 100. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_success_rate ComputeRegionBackendService#enforcing_success_rate}
:param interval: interval block. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#interval ComputeRegionBackendService#interval}
:param max_ejection_percent: Maximum percentage of hosts in the load balancing pool for the backend service that can be ejected. Defaults to 10%. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_ejection_percent ComputeRegionBackendService#max_ejection_percent}
:param success_rate_minimum_hosts: The number of hosts in a cluster that must have enough request volume to detect success rate outliers. If the number of hosts is less than this setting, outlier detection via success rate statistics is not performed for any host in the cluster. Defaults to 5. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_minimum_hosts ComputeRegionBackendService#success_rate_minimum_hosts}
:param success_rate_request_volume: The minimum number of total requests that must be collected in one interval (as defined by the interval duration above) to include this host in success rate based outlier detection. If the volume is lower than this setting, outlier detection via success rate statistics is not performed for that host. Defaults to 100. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_request_volume ComputeRegionBackendService#success_rate_request_volume}
:param success_rate_stdev_factor: This factor is used to determine the ejection threshold for success rate outlier ejection. The ejection threshold is the difference between the mean success rate, and the product of this factor and the standard deviation of the mean success rate: mean - (stdev * success_rate_stdev_factor). This factor is divided by a thousand to get a double. That is, if the desired factor is 1.9, the runtime value should be 1900. Defaults to 1900. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_stdev_factor ComputeRegionBackendService#success_rate_stdev_factor}
'''
if isinstance(base_ejection_time, dict):
base_ejection_time = ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime(**base_ejection_time)
if isinstance(interval, dict):
interval = ComputeRegionBackendServiceOutlierDetectionInterval(**interval)
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__31264aa696be4bc65965ff2b65434998bab515628e1db3edb2f5cd054c701f28)
check_type(argname="argument base_ejection_time", value=base_ejection_time, expected_type=type_hints["base_ejection_time"])
check_type(argname="argument consecutive_errors", value=consecutive_errors, expected_type=type_hints["consecutive_errors"])
check_type(argname="argument consecutive_gateway_failure", value=consecutive_gateway_failure, expected_type=type_hints["consecutive_gateway_failure"])
check_type(argname="argument enforcing_consecutive_errors", value=enforcing_consecutive_errors, expected_type=type_hints["enforcing_consecutive_errors"])
check_type(argname="argument enforcing_consecutive_gateway_failure", value=enforcing_consecutive_gateway_failure, expected_type=type_hints["enforcing_consecutive_gateway_failure"])
check_type(argname="argument enforcing_success_rate", value=enforcing_success_rate, expected_type=type_hints["enforcing_success_rate"])
check_type(argname="argument interval", value=interval, expected_type=type_hints["interval"])
check_type(argname="argument max_ejection_percent", value=max_ejection_percent, expected_type=type_hints["max_ejection_percent"])
check_type(argname="argument success_rate_minimum_hosts", value=success_rate_minimum_hosts, expected_type=type_hints["success_rate_minimum_hosts"])
check_type(argname="argument success_rate_request_volume", value=success_rate_request_volume, expected_type=type_hints["success_rate_request_volume"])
check_type(argname="argument success_rate_stdev_factor", value=success_rate_stdev_factor, expected_type=type_hints["success_rate_stdev_factor"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if base_ejection_time is not None:
self._values["base_ejection_time"] = base_ejection_time
if consecutive_errors is not None:
self._values["consecutive_errors"] = consecutive_errors
if consecutive_gateway_failure is not None:
self._values["consecutive_gateway_failure"] = consecutive_gateway_failure
if enforcing_consecutive_errors is not None:
self._values["enforcing_consecutive_errors"] = enforcing_consecutive_errors
if enforcing_consecutive_gateway_failure is not None:
self._values["enforcing_consecutive_gateway_failure"] = enforcing_consecutive_gateway_failure
if enforcing_success_rate is not None:
self._values["enforcing_success_rate"] = enforcing_success_rate
if interval is not None:
self._values["interval"] = interval
if max_ejection_percent is not None:
self._values["max_ejection_percent"] = max_ejection_percent
if success_rate_minimum_hosts is not None:
self._values["success_rate_minimum_hosts"] = success_rate_minimum_hosts
if success_rate_request_volume is not None:
self._values["success_rate_request_volume"] = success_rate_request_volume
if success_rate_stdev_factor is not None:
self._values["success_rate_stdev_factor"] = success_rate_stdev_factor
@builtins.property
def base_ejection_time(
self,
) -> typing.Optional["ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime"]:
'''base_ejection_time block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#base_ejection_time ComputeRegionBackendService#base_ejection_time}
'''
result = self._values.get("base_ejection_time")
return typing.cast(typing.Optional["ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime"], result)
@builtins.property
def consecutive_errors(self) -> typing.Optional[jsii.Number]:
'''Number of errors before a host is ejected from the connection pool.
When the
backend host is accessed over HTTP, a 5xx return code qualifies as an error.
Defaults to 5.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consecutive_errors ComputeRegionBackendService#consecutive_errors}
'''
result = self._values.get("consecutive_errors")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def consecutive_gateway_failure(self) -> typing.Optional[jsii.Number]:
'''The number of consecutive gateway failures (502, 503, 504 status or connection errors that are mapped to one of those status codes) before a consecutive gateway failure ejection occurs.
Defaults to 5.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#consecutive_gateway_failure ComputeRegionBackendService#consecutive_gateway_failure}
'''
result = self._values.get("consecutive_gateway_failure")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def enforcing_consecutive_errors(self) -> typing.Optional[jsii.Number]:
        '''The percentage chance that a host will actually be ejected when an outlier status is detected through consecutive 5xx errors.
This setting can be used to disable
ejection or to ramp it up slowly. Defaults to 100.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_consecutive_errors ComputeRegionBackendService#enforcing_consecutive_errors}
'''
result = self._values.get("enforcing_consecutive_errors")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def enforcing_consecutive_gateway_failure(self) -> typing.Optional[jsii.Number]:
        '''The percentage chance that a host will actually be ejected when an outlier status is detected through consecutive gateway failures.
This setting can be
used to disable ejection or to ramp it up slowly. Defaults to 0.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_consecutive_gateway_failure ComputeRegionBackendService#enforcing_consecutive_gateway_failure}
'''
result = self._values.get("enforcing_consecutive_gateway_failure")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def enforcing_success_rate(self) -> typing.Optional[jsii.Number]:
        '''The percentage chance that a host will actually be ejected when an outlier status is detected through success rate statistics.
This setting can be used to
disable ejection or to ramp it up slowly. Defaults to 100.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#enforcing_success_rate ComputeRegionBackendService#enforcing_success_rate}
'''
result = self._values.get("enforcing_success_rate")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def interval(
self,
) -> typing.Optional["ComputeRegionBackendServiceOutlierDetectionInterval"]:
'''interval block.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#interval ComputeRegionBackendService#interval}
'''
result = self._values.get("interval")
return typing.cast(typing.Optional["ComputeRegionBackendServiceOutlierDetectionInterval"], result)
@builtins.property
def max_ejection_percent(self) -> typing.Optional[jsii.Number]:
'''Maximum percentage of hosts in the load balancing pool for the backend service that can be ejected. Defaults to 10%.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#max_ejection_percent ComputeRegionBackendService#max_ejection_percent}
'''
result = self._values.get("max_ejection_percent")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def success_rate_minimum_hosts(self) -> typing.Optional[jsii.Number]:
'''The number of hosts in a cluster that must have enough request volume to detect success rate outliers.
If the number of hosts is less than this setting, outlier
detection via success rate statistics is not performed for any host in the
cluster. Defaults to 5.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_minimum_hosts ComputeRegionBackendService#success_rate_minimum_hosts}
'''
result = self._values.get("success_rate_minimum_hosts")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def success_rate_request_volume(self) -> typing.Optional[jsii.Number]:
'''The minimum number of total requests that must be collected in one interval (as defined by the interval duration above) to include this host in success rate based outlier detection.
If the volume is lower than this setting, outlier
detection via success rate statistics is not performed for that host. Defaults
to 100.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_request_volume ComputeRegionBackendService#success_rate_request_volume}
'''
result = self._values.get("success_rate_request_volume")
return typing.cast(typing.Optional[jsii.Number], result)
@builtins.property
def success_rate_stdev_factor(self) -> typing.Optional[jsii.Number]:
'''This factor is used to determine the ejection threshold for success rate outlier ejection.
The ejection threshold is the difference between the mean success
rate, and the product of this factor and the standard deviation of the mean
success rate: mean - (stdev * success_rate_stdev_factor). This factor is divided
by a thousand to get a double. That is, if the desired factor is 1.9, the
runtime value should be 1900. Defaults to 1900.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#success_rate_stdev_factor ComputeRegionBackendService#success_rate_stdev_factor}
'''
result = self._values.get("success_rate_stdev_factor")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceOutlierDetection(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
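

# Illustrative sketch (values hypothetical): an outlier-detection block. Two
# details from the docstrings above are worth noting: nested blocks may be
# passed as plain dicts (they are coerced to the data classes in __init__),
# and success_rate_stdev_factor is the desired factor times 1000, so a factor
# of 1.9 -- giving an ejection threshold of mean - (stdev * 1.9) -- is written
# as 1900:
#
#     outlier_detection = ComputeRegionBackendServiceOutlierDetection(
#         base_ejection_time={"seconds": 30},
#         interval={"seconds": 10},
#         consecutive_errors=5,
#         max_ejection_percent=10,
#         success_rate_stdev_factor=1900,
#     )
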
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime",
jsii_struct_bases=[],
name_mapping={"seconds": "seconds", "nanos": "nanos"},
)
class ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime:
def __init__(
self,
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
:param nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 'seconds' field and a positive 'nanos' field. Must be from 0 to 999,999,999 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__fa64565ae7dcd9029c04ae62f02eebf905da65efb0e65322f28fff1ca350b66f)
check_type(argname="argument seconds", value=seconds, expected_type=type_hints["seconds"])
check_type(argname="argument nanos", value=nanos, expected_type=type_hints["nanos"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"seconds": seconds,
}
if nanos is not None:
self._values["nanos"] = nanos
@builtins.property
def seconds(self) -> jsii.Number:
'''Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
'''
result = self._values.get("seconds")
assert result is not None, "Required property 'seconds' is missing"
return typing.cast(jsii.Number, result)
@builtins.property
def nanos(self) -> typing.Optional[jsii.Number]:
'''Span of time that's a fraction of a second at nanosecond resolution.
Durations
less than one second are represented with a 0 'seconds' field and a positive
'nanos' field. Must be from 0 to 999,999,999 inclusive.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
result = self._values.get("nanos")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
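

# Illustrative note: like the other jsii data classes in this module, this
# struct has value semantics -- __eq__ compares the stored _values dicts, so
# two instances built from the same arguments compare equal:
#
#     a = ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime(seconds=30)
#     b = ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime(seconds=30)
#     assert a == b
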
class ComputeRegionBackendServiceOutlierDetectionBaseEjectionTimeOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceOutlierDetectionBaseEjectionTimeOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c1b4d52c6f097b8af762f1527243c3b216c70026e93351580e81f3f6acc4328a)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetNanos")
def reset_nanos(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetNanos", []))
@builtins.property
@jsii.member(jsii_name="nanosInput")
def nanos_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "nanosInput"))
@builtins.property
@jsii.member(jsii_name="secondsInput")
def seconds_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "secondsInput"))
@builtins.property
@jsii.member(jsii_name="nanos")
def nanos(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "nanos"))
@nanos.setter
def nanos(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ba796b9116b9f9f5c6f3ddf003d644e37b80d8c5105fd939c9cedd65c15a8e24)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "nanos", value)
@builtins.property
@jsii.member(jsii_name="seconds")
def seconds(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "seconds"))
@seconds.setter
def seconds(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6d372452a1f948a5936fb55c52dcf828b625c72c56a708e70bc25456fc0f0d3b)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "seconds", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__8328d8e4d73ad479db129c3d66f759aa237db727c3a73a42f9fc7ed7087855e9)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceOutlierDetectionInterval",
jsii_struct_bases=[],
name_mapping={"seconds": "seconds", "nanos": "nanos"},
)
class ComputeRegionBackendServiceOutlierDetectionInterval:
def __init__(
self,
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
:param nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 'seconds' field and a positive 'nanos' field. Must be from 0 to 999,999,999 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__6e194a33b9788b599baf9e9987fafc1b67df3cc422a71b885a1df2d4c22b3aa5)
check_type(argname="argument seconds", value=seconds, expected_type=type_hints["seconds"])
check_type(argname="argument nanos", value=nanos, expected_type=type_hints["nanos"])
self._values: typing.Dict[builtins.str, typing.Any] = {
"seconds": seconds,
}
if nanos is not None:
self._values["nanos"] = nanos
@builtins.property
def seconds(self) -> jsii.Number:
'''Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive.
Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
'''
result = self._values.get("seconds")
assert result is not None, "Required property 'seconds' is missing"
return typing.cast(jsii.Number, result)
@builtins.property
def nanos(self) -> typing.Optional[jsii.Number]:
        '''Span of time that's a fraction of a second at nanosecond resolution.

        Durations less than one second are represented with a 0 'seconds' field
        and a positive 'nanos' field. Must be from 0 to 999,999,999 inclusive.

        Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
result = self._values.get("nanos")
return typing.cast(typing.Optional[jsii.Number], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceOutlierDetectionInterval(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class ComputeRegionBackendServiceOutlierDetectionIntervalOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceOutlierDetectionIntervalOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a2c083d002cae0d7253b1124a155d8f8b65d09644ebc3e28a8d6aaa2266a294e)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetNanos")
def reset_nanos(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetNanos", []))
@builtins.property
@jsii.member(jsii_name="nanosInput")
def nanos_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "nanosInput"))
@builtins.property
@jsii.member(jsii_name="secondsInput")
def seconds_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "secondsInput"))
@builtins.property
@jsii.member(jsii_name="nanos")
def nanos(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "nanos"))
@nanos.setter
def nanos(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__c294b7121abc0666e7afe5f202a9c24f79033158cec3434ee099492a1393cf87)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "nanos", value)
@builtins.property
@jsii.member(jsii_name="seconds")
def seconds(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "seconds"))
@seconds.setter
def seconds(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__bb099efbdf1d5e67d416fa237ef093cc2a8dfdf205b48e18f44fca817e7ce56d)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "seconds", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceOutlierDetectionInterval]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceOutlierDetectionInterval], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceOutlierDetectionInterval],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__17d27e21b509b1b8ac0f5fba6d837e21835a63adb2f48421dcda762ff2e60faa)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
class ComputeRegionBackendServiceOutlierDetectionOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceOutlierDetectionOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__011b9b06afbe37fea03099005c40b0dfd101dc018f7abdf8fe26c52848193861)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="putBaseEjectionTime")
def put_base_ejection_time(
self,
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
:param nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 'seconds' field and a positive 'nanos' field. Must be from 0 to 999,999,999 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
value = ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime(
seconds=seconds, nanos=nanos
)
return typing.cast(None, jsii.invoke(self, "putBaseEjectionTime", [value]))
@jsii.member(jsii_name="putInterval")
def put_interval(
self,
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
'''
:param seconds: Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#seconds ComputeRegionBackendService#seconds}
:param nanos: Span of time that's a fraction of a second at nanosecond resolution. Durations less than one second are represented with a 0 'seconds' field and a positive 'nanos' field. Must be from 0 to 999,999,999 inclusive. Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#nanos ComputeRegionBackendService#nanos}
'''
value = ComputeRegionBackendServiceOutlierDetectionInterval(
seconds=seconds, nanos=nanos
)
return typing.cast(None, jsii.invoke(self, "putInterval", [value]))
@jsii.member(jsii_name="resetBaseEjectionTime")
def reset_base_ejection_time(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetBaseEjectionTime", []))
@jsii.member(jsii_name="resetConsecutiveErrors")
def reset_consecutive_errors(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetConsecutiveErrors", []))
@jsii.member(jsii_name="resetConsecutiveGatewayFailure")
def reset_consecutive_gateway_failure(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetConsecutiveGatewayFailure", []))
@jsii.member(jsii_name="resetEnforcingConsecutiveErrors")
def reset_enforcing_consecutive_errors(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEnforcingConsecutiveErrors", []))
@jsii.member(jsii_name="resetEnforcingConsecutiveGatewayFailure")
def reset_enforcing_consecutive_gateway_failure(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEnforcingConsecutiveGatewayFailure", []))
@jsii.member(jsii_name="resetEnforcingSuccessRate")
def reset_enforcing_success_rate(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetEnforcingSuccessRate", []))
@jsii.member(jsii_name="resetInterval")
def reset_interval(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetInterval", []))
@jsii.member(jsii_name="resetMaxEjectionPercent")
def reset_max_ejection_percent(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetMaxEjectionPercent", []))
@jsii.member(jsii_name="resetSuccessRateMinimumHosts")
def reset_success_rate_minimum_hosts(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSuccessRateMinimumHosts", []))
@jsii.member(jsii_name="resetSuccessRateRequestVolume")
def reset_success_rate_request_volume(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSuccessRateRequestVolume", []))
@jsii.member(jsii_name="resetSuccessRateStdevFactor")
def reset_success_rate_stdev_factor(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetSuccessRateStdevFactor", []))
@builtins.property
@jsii.member(jsii_name="baseEjectionTime")
def base_ejection_time(
self,
) -> ComputeRegionBackendServiceOutlierDetectionBaseEjectionTimeOutputReference:
return typing.cast(ComputeRegionBackendServiceOutlierDetectionBaseEjectionTimeOutputReference, jsii.get(self, "baseEjectionTime"))
@builtins.property
@jsii.member(jsii_name="interval")
def interval(
self,
) -> ComputeRegionBackendServiceOutlierDetectionIntervalOutputReference:
return typing.cast(ComputeRegionBackendServiceOutlierDetectionIntervalOutputReference, jsii.get(self, "interval"))
@builtins.property
@jsii.member(jsii_name="baseEjectionTimeInput")
def base_ejection_time_input(
self,
) -> typing.Optional[ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime], jsii.get(self, "baseEjectionTimeInput"))
@builtins.property
@jsii.member(jsii_name="consecutiveErrorsInput")
def consecutive_errors_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "consecutiveErrorsInput"))
@builtins.property
@jsii.member(jsii_name="consecutiveGatewayFailureInput")
def consecutive_gateway_failure_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "consecutiveGatewayFailureInput"))
@builtins.property
@jsii.member(jsii_name="enforcingConsecutiveErrorsInput")
def enforcing_consecutive_errors_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "enforcingConsecutiveErrorsInput"))
@builtins.property
@jsii.member(jsii_name="enforcingConsecutiveGatewayFailureInput")
def enforcing_consecutive_gateway_failure_input(
self,
) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "enforcingConsecutiveGatewayFailureInput"))
@builtins.property
@jsii.member(jsii_name="enforcingSuccessRateInput")
def enforcing_success_rate_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "enforcingSuccessRateInput"))
@builtins.property
@jsii.member(jsii_name="intervalInput")
def interval_input(
self,
) -> typing.Optional[ComputeRegionBackendServiceOutlierDetectionInterval]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceOutlierDetectionInterval], jsii.get(self, "intervalInput"))
@builtins.property
@jsii.member(jsii_name="maxEjectionPercentInput")
def max_ejection_percent_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "maxEjectionPercentInput"))
@builtins.property
@jsii.member(jsii_name="successRateMinimumHostsInput")
def success_rate_minimum_hosts_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "successRateMinimumHostsInput"))
@builtins.property
@jsii.member(jsii_name="successRateRequestVolumeInput")
def success_rate_request_volume_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "successRateRequestVolumeInput"))
@builtins.property
@jsii.member(jsii_name="successRateStdevFactorInput")
def success_rate_stdev_factor_input(self) -> typing.Optional[jsii.Number]:
return typing.cast(typing.Optional[jsii.Number], jsii.get(self, "successRateStdevFactorInput"))
@builtins.property
@jsii.member(jsii_name="consecutiveErrors")
def consecutive_errors(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "consecutiveErrors"))
@consecutive_errors.setter
def consecutive_errors(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__88c5ee37a5fd33884c73f335a3b4f82e5466b509c5e1816e0796dccc38730d76)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "consecutiveErrors", value)
@builtins.property
@jsii.member(jsii_name="consecutiveGatewayFailure")
def consecutive_gateway_failure(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "consecutiveGatewayFailure"))
@consecutive_gateway_failure.setter
def consecutive_gateway_failure(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ffeb92147c6e36e8a37a33f8670602fd763b17a8a99e41f1a99d5d6556c9687a)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "consecutiveGatewayFailure", value)
@builtins.property
@jsii.member(jsii_name="enforcingConsecutiveErrors")
def enforcing_consecutive_errors(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "enforcingConsecutiveErrors"))
@enforcing_consecutive_errors.setter
def enforcing_consecutive_errors(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__56b0379c16d60c2adf12895aebb98bbb86526fd099736e00571045e6c3dd47cd)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "enforcingConsecutiveErrors", value)
@builtins.property
@jsii.member(jsii_name="enforcingConsecutiveGatewayFailure")
def enforcing_consecutive_gateway_failure(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "enforcingConsecutiveGatewayFailure"))
@enforcing_consecutive_gateway_failure.setter
def enforcing_consecutive_gateway_failure(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__f47c19b03ea44b765caf71d1a888d0b60e8851dca3fd58da440b96fa549a9f5c)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "enforcingConsecutiveGatewayFailure", value)
@builtins.property
@jsii.member(jsii_name="enforcingSuccessRate")
def enforcing_success_rate(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "enforcingSuccessRate"))
@enforcing_success_rate.setter
def enforcing_success_rate(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__3dd36605d052be670fb357374c368fd6f0d0554fe4965be9ab6ea99f9badaa3c)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "enforcingSuccessRate", value)
@builtins.property
@jsii.member(jsii_name="maxEjectionPercent")
def max_ejection_percent(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "maxEjectionPercent"))
@max_ejection_percent.setter
def max_ejection_percent(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__a39040ddd37b0234eeae24abfa9d9071f75ec893b631ecbe1b681549471fcf3d)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "maxEjectionPercent", value)
@builtins.property
@jsii.member(jsii_name="successRateMinimumHosts")
def success_rate_minimum_hosts(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "successRateMinimumHosts"))
@success_rate_minimum_hosts.setter
def success_rate_minimum_hosts(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__1d10cf732b79a8e90d66da46fbda026047a15fbcdffe4cd5ac40c0ba2da603f2)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "successRateMinimumHosts", value)
@builtins.property
@jsii.member(jsii_name="successRateRequestVolume")
def success_rate_request_volume(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "successRateRequestVolume"))
@success_rate_request_volume.setter
def success_rate_request_volume(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__89d7d66d813561d17dcdb15c85f72841df60791a1776e23b07c8a556bdbd5483)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "successRateRequestVolume", value)
@builtins.property
@jsii.member(jsii_name="successRateStdevFactor")
def success_rate_stdev_factor(self) -> jsii.Number:
return typing.cast(jsii.Number, jsii.get(self, "successRateStdevFactor"))
@success_rate_stdev_factor.setter
def success_rate_stdev_factor(self, value: jsii.Number) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__77075c5fdda55a634fd4c2a99bf33e2ae5df6a7d8b7a6d4ae5e0b6341945221f)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "successRateStdevFactor", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[ComputeRegionBackendServiceOutlierDetection]:
return typing.cast(typing.Optional[ComputeRegionBackendServiceOutlierDetection], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[ComputeRegionBackendServiceOutlierDetection],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__ed348d2a88da51d2c1b969848cc895cc8569f7ee22fee82d176246d2dd18c30e)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
@jsii.data_type(
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceTimeouts",
jsii_struct_bases=[],
name_mapping={"create": "create", "delete": "delete", "update": "update"},
)
class ComputeRegionBackendServiceTimeouts:
def __init__(
self,
*,
create: typing.Optional[builtins.str] = None,
delete: typing.Optional[builtins.str] = None,
update: typing.Optional[builtins.str] = None,
) -> None:
'''
:param create: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#create ComputeRegionBackendService#create}.
:param delete: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#delete ComputeRegionBackendService#delete}.
:param update: Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#update ComputeRegionBackendService#update}.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b18cc7d41a5763bea88d6890ef53a58815c2a431a2ce1701601f37fec23f383f)
check_type(argname="argument create", value=create, expected_type=type_hints["create"])
check_type(argname="argument delete", value=delete, expected_type=type_hints["delete"])
check_type(argname="argument update", value=update, expected_type=type_hints["update"])
self._values: typing.Dict[builtins.str, typing.Any] = {}
if create is not None:
self._values["create"] = create
if delete is not None:
self._values["delete"] = delete
if update is not None:
self._values["update"] = update
@builtins.property
def create(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#create ComputeRegionBackendService#create}.'''
result = self._values.get("create")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def delete(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#delete ComputeRegionBackendService#delete}.'''
result = self._values.get("delete")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def update(self) -> typing.Optional[builtins.str]:
'''Docs at Terraform Registry: {@link https://registry.terraform.io/providers/hashicorp/google/4.80.0/docs/resources/compute_region_backend_service#update ComputeRegionBackendService#update}.'''
result = self._values.get("update")
return typing.cast(typing.Optional[builtins.str], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ComputeRegionBackendServiceTimeouts(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class ComputeRegionBackendServiceTimeoutsOutputReference(
_cdktf_9a9027ec.ComplexObject,
metaclass=jsii.JSIIMeta,
jsii_type="@cdktf/provider-google.computeRegionBackendService.ComputeRegionBackendServiceTimeoutsOutputReference",
):
def __init__(
self,
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
'''
:param terraform_resource: The parent resource.
:param terraform_attribute: The attribute on the parent resource this class is referencing.
'''
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__7db88d4d9ee44acb375b3abb4ebb88e33da2212efdc92198816b1721d42f28f5)
check_type(argname="argument terraform_resource", value=terraform_resource, expected_type=type_hints["terraform_resource"])
check_type(argname="argument terraform_attribute", value=terraform_attribute, expected_type=type_hints["terraform_attribute"])
jsii.create(self.__class__, self, [terraform_resource, terraform_attribute])
@jsii.member(jsii_name="resetCreate")
def reset_create(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetCreate", []))
@jsii.member(jsii_name="resetDelete")
def reset_delete(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetDelete", []))
@jsii.member(jsii_name="resetUpdate")
def reset_update(self) -> None:
return typing.cast(None, jsii.invoke(self, "resetUpdate", []))
@builtins.property
@jsii.member(jsii_name="createInput")
def create_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "createInput"))
@builtins.property
@jsii.member(jsii_name="deleteInput")
def delete_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "deleteInput"))
@builtins.property
@jsii.member(jsii_name="updateInput")
def update_input(self) -> typing.Optional[builtins.str]:
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "updateInput"))
@builtins.property
@jsii.member(jsii_name="create")
def create(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "create"))
@create.setter
def create(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b496f041f0d5cef9ffd80a6f446f92be5ead56b92ff58f2c574ff2c4b299dada)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "create", value)
@builtins.property
@jsii.member(jsii_name="delete")
def delete(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "delete"))
@delete.setter
def delete(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__1fe07316d81bb70c9ec70f7de1664f0abcab07eaefb28ede6201ec2fc4b21b96)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "delete", value)
@builtins.property
@jsii.member(jsii_name="update")
def update(self) -> builtins.str:
return typing.cast(builtins.str, jsii.get(self, "update"))
@update.setter
def update(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__67203a5d196ab7aeb3045ad9eac685da31577d319cf0561d479e7bf7ce3d92f1)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "update", value)
@builtins.property
@jsii.member(jsii_name="internalValue")
def internal_value(
self,
) -> typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceTimeouts]]:
return typing.cast(typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceTimeouts]], jsii.get(self, "internalValue"))
@internal_value.setter
def internal_value(
self,
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceTimeouts]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(_typecheckingstub__b93aa246a7bb917b5ae90ffb2d74ad3dc9d55a81a1f1a2b1ac14a09aabd74a26)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "internalValue", value)
__all__ = [
"ComputeRegionBackendService",
"ComputeRegionBackendServiceBackend",
"ComputeRegionBackendServiceBackendList",
"ComputeRegionBackendServiceBackendOutputReference",
"ComputeRegionBackendServiceCdnPolicy",
"ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy",
"ComputeRegionBackendServiceCdnPolicyCacheKeyPolicyOutputReference",
"ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy",
"ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyList",
"ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicyOutputReference",
"ComputeRegionBackendServiceCdnPolicyOutputReference",
"ComputeRegionBackendServiceCircuitBreakers",
"ComputeRegionBackendServiceCircuitBreakersOutputReference",
"ComputeRegionBackendServiceConfig",
"ComputeRegionBackendServiceConsistentHash",
"ComputeRegionBackendServiceConsistentHashHttpCookie",
"ComputeRegionBackendServiceConsistentHashHttpCookieOutputReference",
"ComputeRegionBackendServiceConsistentHashHttpCookieTtl",
"ComputeRegionBackendServiceConsistentHashHttpCookieTtlOutputReference",
"ComputeRegionBackendServiceConsistentHashOutputReference",
"ComputeRegionBackendServiceFailoverPolicy",
"ComputeRegionBackendServiceFailoverPolicyOutputReference",
"ComputeRegionBackendServiceIap",
"ComputeRegionBackendServiceIapOutputReference",
"ComputeRegionBackendServiceLogConfig",
"ComputeRegionBackendServiceLogConfigOutputReference",
"ComputeRegionBackendServiceOutlierDetection",
"ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime",
"ComputeRegionBackendServiceOutlierDetectionBaseEjectionTimeOutputReference",
"ComputeRegionBackendServiceOutlierDetectionInterval",
"ComputeRegionBackendServiceOutlierDetectionIntervalOutputReference",
"ComputeRegionBackendServiceOutlierDetectionOutputReference",
"ComputeRegionBackendServiceTimeouts",
"ComputeRegionBackendServiceTimeoutsOutputReference",
]
publication.publish()
def _typecheckingstub__7a7d193a662f1e9eb4abcaa96ce0135d0dd9f195f8f24db270c1567f4623d748(
scope: _constructs_77d1e7e8.Construct,
id_: builtins.str,
*,
name: builtins.str,
affinity_cookie_ttl_sec: typing.Optional[jsii.Number] = None,
backend: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[ComputeRegionBackendServiceBackend, typing.Dict[builtins.str, typing.Any]]]]] = None,
cdn_policy: typing.Optional[typing.Union[ComputeRegionBackendServiceCdnPolicy, typing.Dict[builtins.str, typing.Any]]] = None,
circuit_breakers: typing.Optional[typing.Union[ComputeRegionBackendServiceCircuitBreakers, typing.Dict[builtins.str, typing.Any]]] = None,
connection_draining_timeout_sec: typing.Optional[jsii.Number] = None,
consistent_hash: typing.Optional[typing.Union[ComputeRegionBackendServiceConsistentHash, typing.Dict[builtins.str, typing.Any]]] = None,
description: typing.Optional[builtins.str] = None,
enable_cdn: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
failover_policy: typing.Optional[typing.Union[ComputeRegionBackendServiceFailoverPolicy, typing.Dict[builtins.str, typing.Any]]] = None,
health_checks: typing.Optional[typing.Sequence[builtins.str]] = None,
iap: typing.Optional[typing.Union[ComputeRegionBackendServiceIap, typing.Dict[builtins.str, typing.Any]]] = None,
id: typing.Optional[builtins.str] = None,
load_balancing_scheme: typing.Optional[builtins.str] = None,
locality_lb_policy: typing.Optional[builtins.str] = None,
log_config: typing.Optional[typing.Union[ComputeRegionBackendServiceLogConfig, typing.Dict[builtins.str, typing.Any]]] = None,
network: typing.Optional[builtins.str] = None,
outlier_detection: typing.Optional[typing.Union[ComputeRegionBackendServiceOutlierDetection, typing.Dict[builtins.str, typing.Any]]] = None,
port_name: typing.Optional[builtins.str] = None,
project: typing.Optional[builtins.str] = None,
protocol: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
session_affinity: typing.Optional[builtins.str] = None,
timeouts: typing.Optional[typing.Union[ComputeRegionBackendServiceTimeouts, typing.Dict[builtins.str, typing.Any]]] = None,
timeout_sec: typing.Optional[jsii.Number] = None,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__705381bd496efb783ebe0f0ad360b9465d3fa796207cea7a63f6cb943ca18d55(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[ComputeRegionBackendServiceBackend, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__52c9500e4e9fa93a91b304c7e3e135ccf3d7f2a324b03504bc533c94c5a2df32(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__562c8e062732203e76fac17c68d7268e9b836f2e291e4137b9d74474d5235398(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__306edbc75d0eb09cd430efdaaa8fe4ba6cca369f0343b0685ace0323a1801649(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__74f99af0ba03bc970ff0be137026d8fb594305f0ca1bd95bda538343ad7fae74(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0ba169d163fd0341ebbbfce11a9608f6e5b63e1a95308893e2731f7c62e35e35(
value: typing.List[builtins.str],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0969e93341a32202ba7e523eca85fd1ee559e04f9d426de5310155b1cbbb02c4(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__09da2d78ec50a281748ef4e20814f3f26e79186dea33f8ac2e95723aec8c35e0(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8506a937369ced4764e829b71f8065eedb9f3a7c2570925faea401e3f50db486(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__55621c9e6967bdd01d61bfa1a858a6202f0b5315cd29ba41c2e209758ce86117(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__67ebfe702142c520c92089bdb02cba19284e5d5acb56a2fd2c617b5686d4f37e(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c666b75001d11f84a61aa1c90cebb2ab4672e55a574076652e3a0bc126fbaf9d(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__333ec9777f8f7c081037d0b0ddccd0b02c8518c449592f3684754045d9a1f749(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1c43d834b72384b1d30c53e2abadcde998a0e82887efb5b9facc6b030754f79a(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__88dfda43761cc9369b3092358ef4f0ead987146f7b0686be2cd2398686706989(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__94abaa06d37269786924a92c0395e3fcd0057dce5ec470ee0d4ad0525e8e87dc(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3297937f636756385624c47a28ecd8391715e93182b1a77e7f5a54f6a1c8b2e4(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a84ffd6da46f54b7caae12c227efde4a29b7de94f73341429b4bb6ce82854084(
*,
group: builtins.str,
balancing_mode: typing.Optional[builtins.str] = None,
capacity_scaler: typing.Optional[jsii.Number] = None,
description: typing.Optional[builtins.str] = None,
failover: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
max_connections: typing.Optional[jsii.Number] = None,
max_connections_per_endpoint: typing.Optional[jsii.Number] = None,
max_connections_per_instance: typing.Optional[jsii.Number] = None,
max_rate: typing.Optional[jsii.Number] = None,
max_rate_per_endpoint: typing.Optional[jsii.Number] = None,
max_rate_per_instance: typing.Optional[jsii.Number] = None,
max_utilization: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__33ad42b62c7034037df6fc0e372a47ce12ed5f93d1047398fff955799fe8755c(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c02ae8c7d4c6acec6fc1cb094ec69c8816cc8abeb6c377db26a31e7ec4c4004a(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__028f467394e4f5484d38e2d9f947ae8a9de767c733136d6400bc0df293924768(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b9871a41436307fb3773b0f8858a5f8765984313d301f66c5350dd168a360ba8(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__459f5d087073020ef953274782f3c5fb40b6e8081ac3f9b7b6bb4b7a26ff62f3(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__73780d72a57dc4a7a334bb18f154754c4810a2a6351aaaaf535b669624adaedf(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceBackend]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d60ce72a29b9b8de82524f272e9c3c7f49ec5af24b170508559ba4020de545dc(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__e500bbc3bbb861cca45fe1d2c899d6b659e32cf42f577fbbc9906756eb18f8a2(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b9b15898ea02857144bba2101c6cf8424fa9a6b06e92c8e33cdeb1c21df9f1ed(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__cbdf14163bf15227bed8d3a287ff6d0ebad8f3b56bbc987dc8936910f03004a8(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__356f1c06306b3b0e72ac3b00a4bf9e195bab9f8c9adf46c55c59e2c408f7f302(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__cb817b5f899ab00fc085a04229b510b7725dda0bdd7ccf99554a25346aa4c50a(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__4c5e880db719f7758dd97ae75b9f11e62b368ea91165c429ac2a66e9766eade1(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5590596f24602ec0f459ce85a1e674563336573ebd3ad4c4d7c496f1e7d93d3b(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3f7a98a74dd102bfc173678cff8473b9ed57de54141e2446488e61ffbdd98298(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__fa9caa2aee99e9ccd5d0e14d437bc570087206c9dc240392a23f02978917c0bb(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b0d2bf64589f55d182149e66f228a3c2fb527656d1644b8df4013fa02ad4c9bc(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__bf0d747f033db0b1f1be871e7383d114ae1edabfa84ae0f195d4ef04a789fc24(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3174f5c4fe5f7edf9a50e38789ba5b00700fcc4b7000f11540b8b77875d31e01(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ce8a1426035a988f574e8c5b459f65b49e20b11aa45ea89b8ecdcaf66fa0034b(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceBackend]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3f762ad61dc54ad3d79699c5aed3f9b69e4e6b50d5e56cb4ffdcb354b86c3172(
*,
cache_key_policy: typing.Optional[typing.Union[ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy, typing.Dict[builtins.str, typing.Any]]] = None,
cache_mode: typing.Optional[builtins.str] = None,
client_ttl: typing.Optional[jsii.Number] = None,
default_ttl: typing.Optional[jsii.Number] = None,
max_ttl: typing.Optional[jsii.Number] = None,
negative_caching: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
negative_caching_policy: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy, typing.Dict[builtins.str, typing.Any]]]]] = None,
serve_while_stale: typing.Optional[jsii.Number] = None,
signed_url_cache_max_age_sec: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__379568ce93934ee39c05535e079cd33a2e12de9b3f2e29276b6a8637f132bedf(
*,
include_host: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
include_named_cookies: typing.Optional[typing.Sequence[builtins.str]] = None,
include_protocol: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
include_query_string: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
query_string_blacklist: typing.Optional[typing.Sequence[builtins.str]] = None,
query_string_whitelist: typing.Optional[typing.Sequence[builtins.str]] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__199c372215c35b0865ae27599f11e07814fdf9ec61c6b06c34c3458bc268261e(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7aa71c2205f76cc9daee07d33568428a7b868016707f7d68fb3c720cc6231765(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__647d5d507b4b2c0685f384e891132dc149015b3867ed296427358ac552ad7ea4(
value: typing.List[builtins.str],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__065faa1b914207b3e2105d5b9ac053e3c6c206d5402b5e154d1a9f334cb27434(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__432d5cac0d48501477135da33b0c1480bfb775faa681aac7bc5864969dc717aa(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__63dc0e3e76553beea908d3dd430b8f00f6c84b3a226cb383c8602357a250133c(
value: typing.List[builtins.str],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7de07ebb57db02540135ca77305cf6ea4b2fa8cb06dcab09b4e7d5857ba3d521(
value: typing.List[builtins.str],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d5d8275ffbd9c4685f84025e63e43f56ef49d2c0a892cb094d32090271ae5903(
value: typing.Optional[ComputeRegionBackendServiceCdnPolicyCacheKeyPolicy],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ab605d31c904330c0eaff74295beb59394008283eba9048b27c64a7ad8eff93a(
*,
code: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5f22ea4a5a32175e50467b7c9884a3bb0ee0837e874757ab185255d32fd511d5(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
wraps_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__bae51cc8232aba4e45810dbcb7366a432badac90c16fe950a8884267b06656d8(
index: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b00b710afed83d789074b329b34c3efca94bfe67bc48300c57d7b694acc92488(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2e99505de1cdbc633a081027b2938957ed3ab8cb9c81d0975f444fd5d65cfa0b(
value: _cdktf_9a9027ec.IInterpolatingParent,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f317bd613505711d880fe6b6b3a07db7e455d0589f2c21c85b8d3189b7703abb(
value: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5c8ce4267a145c0945f579aef11116d86bae77795092937c475709b93572898f(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.List[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f044a9b2b9291f7bf321a3ceb7f065717aaadab5a80ff64b968a62db155d0239(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
complex_object_index: jsii.Number,
complex_object_is_from_set: builtins.bool,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f120e1419fd3a83795d7034b40bf7092c0432ffcaabef0d5dcde262757bc0dc2(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__762e126574a8f2a98fe6f6b8ab38ea3112bf683014ffb108c0c2706eb9ae32a2(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1afcc3fcf86329631cc19ce6537e0b1b9730cdee91cf19e622991e3886d18a3f(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2fafe9e418813c27bb86b466da4afe5cf9926e19d5abac5eaddca8df7f6a3b95(
value: typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[ComputeRegionBackendServiceCdnPolicyNegativeCachingPolicy, typing.Dict[builtins.str, typing.Any]]]],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0ea9599261f5800799bf190ce789cbb1bd8b50f25ce1c95441e7e6e72a78de93(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f741e54aa48e0ba4d404b2c7a0a5800c2d17feb3493760085e9b8afca785b64c(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__091e61f296ec1f9e760ed81656614ce61bec3da6415a857c08fb51882995e0ad(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__9a9d12a8353403405940014d2fe4f33866be6cae9285093a77a5366d565d15fd(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__d841e23d6d935d9ad3a75aaa84820fb2fe7ddd846840311bdecd266180a80000(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c89df93bccd2c6efb1d0c9d89a266e435387de7707231f7a2a68d8da30f06a37(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__908d3e436e2044802a3d1eec93b50b82c90ad0cec10dd19d6e12eaafdb49cc1d(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6ffb386b50fdf9e0c7b64ea26c0a5022760515a83b4fa6b585c9b68c6f3dd1dd(
value: typing.Optional[ComputeRegionBackendServiceCdnPolicy],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__266a5682df9681a59744226a311495815409a6484e6aa17c82ee4d35af09af83(
*,
max_connections: typing.Optional[jsii.Number] = None,
max_pending_requests: typing.Optional[jsii.Number] = None,
max_requests: typing.Optional[jsii.Number] = None,
max_requests_per_connection: typing.Optional[jsii.Number] = None,
max_retries: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__893780769ac8d37355e438525acbd4aed3a005fc80fd17d25190253cfe09411d(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f82019ede3e01a5eeb54fe45d89e3c9d001ded87341ba441c5c02598bd71aaa3(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ecc6ff032c620d59d5bdba15c2bc5ce9d343ce3d4c0fa9a9fc64b058b8e40a67(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0c91658cb74df1dc152ca198e9700175b8784a3cef2f99491c1d0cef297a334b(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__600c89c29a6a6fd707558664e49aecee221b8ae2860bff5f0819d34f324f36e9(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__4a28d216d508cd81aa3bb776b09b29429a1e999d3438f84f5504cb93a0a89b45(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__2a0ed6399f9ad92500995d35e4410b7fa0c21fcd0bd76414bf350ccedb8ae9b1(
value: typing.Optional[ComputeRegionBackendServiceCircuitBreakers],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__942d906da0191287bd3769b8797469069205f1e711375baabdb5fd74249f6e12(
*,
connection: typing.Optional[typing.Union[typing.Union[_cdktf_9a9027ec.SSHProvisionerConnection, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.WinrmProvisionerConnection, typing.Dict[builtins.str, typing.Any]]]] = None,
count: typing.Optional[typing.Union[jsii.Number, _cdktf_9a9027ec.TerraformCount]] = None,
depends_on: typing.Optional[typing.Sequence[_cdktf_9a9027ec.ITerraformDependable]] = None,
for_each: typing.Optional[_cdktf_9a9027ec.ITerraformIterator] = None,
lifecycle: typing.Optional[typing.Union[_cdktf_9a9027ec.TerraformResourceLifecycle, typing.Dict[builtins.str, typing.Any]]] = None,
provider: typing.Optional[_cdktf_9a9027ec.TerraformProvider] = None,
provisioners: typing.Optional[typing.Sequence[typing.Union[typing.Union[_cdktf_9a9027ec.FileProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.LocalExecProvisioner, typing.Dict[builtins.str, typing.Any]], typing.Union[_cdktf_9a9027ec.RemoteExecProvisioner, typing.Dict[builtins.str, typing.Any]]]]] = None,
name: builtins.str,
affinity_cookie_ttl_sec: typing.Optional[jsii.Number] = None,
backend: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, typing.Sequence[typing.Union[ComputeRegionBackendServiceBackend, typing.Dict[builtins.str, typing.Any]]]]] = None,
cdn_policy: typing.Optional[typing.Union[ComputeRegionBackendServiceCdnPolicy, typing.Dict[builtins.str, typing.Any]]] = None,
circuit_breakers: typing.Optional[typing.Union[ComputeRegionBackendServiceCircuitBreakers, typing.Dict[builtins.str, typing.Any]]] = None,
connection_draining_timeout_sec: typing.Optional[jsii.Number] = None,
consistent_hash: typing.Optional[typing.Union[ComputeRegionBackendServiceConsistentHash, typing.Dict[builtins.str, typing.Any]]] = None,
description: typing.Optional[builtins.str] = None,
enable_cdn: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
failover_policy: typing.Optional[typing.Union[ComputeRegionBackendServiceFailoverPolicy, typing.Dict[builtins.str, typing.Any]]] = None,
health_checks: typing.Optional[typing.Sequence[builtins.str]] = None,
iap: typing.Optional[typing.Union[ComputeRegionBackendServiceIap, typing.Dict[builtins.str, typing.Any]]] = None,
id: typing.Optional[builtins.str] = None,
load_balancing_scheme: typing.Optional[builtins.str] = None,
locality_lb_policy: typing.Optional[builtins.str] = None,
log_config: typing.Optional[typing.Union[ComputeRegionBackendServiceLogConfig, typing.Dict[builtins.str, typing.Any]]] = None,
network: typing.Optional[builtins.str] = None,
outlier_detection: typing.Optional[typing.Union[ComputeRegionBackendServiceOutlierDetection, typing.Dict[builtins.str, typing.Any]]] = None,
port_name: typing.Optional[builtins.str] = None,
project: typing.Optional[builtins.str] = None,
protocol: typing.Optional[builtins.str] = None,
region: typing.Optional[builtins.str] = None,
session_affinity: typing.Optional[builtins.str] = None,
timeouts: typing.Optional[typing.Union[ComputeRegionBackendServiceTimeouts, typing.Dict[builtins.str, typing.Any]]] = None,
timeout_sec: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__186f5d67feab665bb2d4876bd04486790631383e709402ca1ff6c65f62dc27f9(
*,
http_cookie: typing.Optional[typing.Union[ComputeRegionBackendServiceConsistentHashHttpCookie, typing.Dict[builtins.str, typing.Any]]] = None,
http_header_name: typing.Optional[builtins.str] = None,
minimum_ring_size: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6a117844a8b60b6aea7c8b6199be86460b3f62731975b7223126cba9fba95cc5(
*,
name: typing.Optional[builtins.str] = None,
path: typing.Optional[builtins.str] = None,
ttl: typing.Optional[typing.Union[ComputeRegionBackendServiceConsistentHashHttpCookieTtl, typing.Dict[builtins.str, typing.Any]]] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__073af85ae8f145e89c0d5a6c02390e588b2b6158f11b6834ad5e3eedf8f6eafb(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b81d61906c2938b5058b5b3d96276cd846669df06a2d7be3ff48e0580b42b86e(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3495691e0fed2f3a351c6eb3e1c93e43d07afaa46d80c5875ccb512e64235d07(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b9a667f311f1b74f716e42760ebf889e3384726f1561a58a4f19ec74276d95df(
value: typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookie],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__95b9966f17083683869b8e88c1cd71c1a24155c4b3a05ef6336b2b68d9e8c4c3(
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5360707d67fb0c194038bc663fda444dd849b87b5ade6f5bc97540aba8b40692(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__fc497e516adef4ade1a0ff25b07115fee1c668e30888d712360d2b8e311d1b14(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6ca4459ee7821e229e9ab5c67900a99758d62700ffb4ce37d823db6e56d7098f(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a44d4a364777f5e9c836721c84e417a1767afb63f2f4cbb4419ebc476a261eb8(
value: typing.Optional[ComputeRegionBackendServiceConsistentHashHttpCookieTtl],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__af2fa8ba75b1ca993fdb1e4a1b65887c59accc42b8dd809ae58e1694dda3dbf8(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__098d8871222d597232b4ff357617d254894e28ca18b5eae2e590eeee7fefc59a(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__93de2d2732df7a7bc8a92b1f6fcc21733a3fb155471dc957412fd690d82568a2(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__557cf970794dc4fd129c66c4f38e0caf6700d46d8f42557a6318623f6d4c7e53(
value: typing.Optional[ComputeRegionBackendServiceConsistentHash],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b6284ed9b159f56d80a68611ca0b561c5a0fa9ff4c4f6d4b57864ec2d5c4c4c3(
*,
disable_connection_drain_on_failover: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
drop_traffic_if_unhealthy: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
failover_ratio: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__17f7c0552e4067873dc9d263beba6d8d2a9214effc8b9d4460061fff573b8466(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5ae2c82c68cb277cf0b8b9e729afec5be6df66bfc5b7c93db6b52d8852cc7362(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__cd88abb42f8372b9f02b2b941f46ab10cdbf30850188b49c268c209efd5b7c6b(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5f29fbbe3ae76726c9fd92dc85e94a761b043d5e3ee926cb0e966cf866d6823b(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__41c1420fa32d0e7b6544d46ff33d21f615f33521b2cbb93814eaebf4cda6238a(
value: typing.Optional[ComputeRegionBackendServiceFailoverPolicy],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f7d6ececf941f643a1e253241ed5cdd3b118d1ce0799c7500a5881efda2d4f1b(
*,
oauth2_client_id: builtins.str,
oauth2_client_secret: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__393a5e670cd1080a97347f695ca218be5b8b65fdbcf5b9a660f574e128317df2(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__26f4d193408e9ba6f0d7d60ea8ab47c19e0c8257fafce87e00dc47be842f81a4(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__9f917ad4a226fcfd428e938a1ec29f5bf7137a8e710c69b0a95f82df21ef35ea(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__0bf5be8938c09405557128de3592a24365a490c956a44ad68f98ecd30300fd63(
value: typing.Optional[ComputeRegionBackendServiceIap],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__e185711bb1ea583316001ddc5812a92d7ecca9eb17279f9d4a8a3cb58e3199fa(
*,
enable: typing.Optional[typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable]] = None,
sample_rate: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a816928e8483eb7fae1bb464b0e5cc3d5d67175b4adeca118360b8db38fa0c42(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b9c8edd729f954a3ff969d7ced5931df429a474af2c9ffa47e7a63175bc252c4(
value: typing.Union[builtins.bool, _cdktf_9a9027ec.IResolvable],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__9835ef1c1f7f2077e32cfb4f7cf972a9424a5d92b18829ae455648f70112f9aa(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__5469b01a71f51ffce1fc43497651cd8cd367fc147b71216876ecc9468966575f(
value: typing.Optional[ComputeRegionBackendServiceLogConfig],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__31264aa696be4bc65965ff2b65434998bab515628e1db3edb2f5cd054c701f28(
*,
base_ejection_time: typing.Optional[typing.Union[ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime, typing.Dict[builtins.str, typing.Any]]] = None,
consecutive_errors: typing.Optional[jsii.Number] = None,
consecutive_gateway_failure: typing.Optional[jsii.Number] = None,
enforcing_consecutive_errors: typing.Optional[jsii.Number] = None,
enforcing_consecutive_gateway_failure: typing.Optional[jsii.Number] = None,
enforcing_success_rate: typing.Optional[jsii.Number] = None,
interval: typing.Optional[typing.Union[ComputeRegionBackendServiceOutlierDetectionInterval, typing.Dict[builtins.str, typing.Any]]] = None,
max_ejection_percent: typing.Optional[jsii.Number] = None,
success_rate_minimum_hosts: typing.Optional[jsii.Number] = None,
success_rate_request_volume: typing.Optional[jsii.Number] = None,
success_rate_stdev_factor: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__fa64565ae7dcd9029c04ae62f02eebf905da65efb0e65322f28fff1ca350b66f(
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c1b4d52c6f097b8af762f1527243c3b216c70026e93351580e81f3f6acc4328a(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ba796b9116b9f9f5c6f3ddf003d644e37b80d8c5105fd939c9cedd65c15a8e24(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6d372452a1f948a5936fb55c52dcf828b625c72c56a708e70bc25456fc0f0d3b(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__8328d8e4d73ad479db129c3d66f759aa237db727c3a73a42f9fc7ed7087855e9(
value: typing.Optional[ComputeRegionBackendServiceOutlierDetectionBaseEjectionTime],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__6e194a33b9788b599baf9e9987fafc1b67df3cc422a71b885a1df2d4c22b3aa5(
*,
seconds: jsii.Number,
nanos: typing.Optional[jsii.Number] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a2c083d002cae0d7253b1124a155d8f8b65d09644ebc3e28a8d6aaa2266a294e(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__c294b7121abc0666e7afe5f202a9c24f79033158cec3434ee099492a1393cf87(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__bb099efbdf1d5e67d416fa237ef093cc2a8dfdf205b48e18f44fca817e7ce56d(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__17d27e21b509b1b8ac0f5fba6d837e21835a63adb2f48421dcda762ff2e60faa(
value: typing.Optional[ComputeRegionBackendServiceOutlierDetectionInterval],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__011b9b06afbe37fea03099005c40b0dfd101dc018f7abdf8fe26c52848193861(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__88c5ee37a5fd33884c73f335a3b4f82e5466b509c5e1816e0796dccc38730d76(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ffeb92147c6e36e8a37a33f8670602fd763b17a8a99e41f1a99d5d6556c9687a(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__56b0379c16d60c2adf12895aebb98bbb86526fd099736e00571045e6c3dd47cd(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__f47c19b03ea44b765caf71d1a888d0b60e8851dca3fd58da440b96fa549a9f5c(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__3dd36605d052be670fb357374c368fd6f0d0554fe4965be9ab6ea99f9badaa3c(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__a39040ddd37b0234eeae24abfa9d9071f75ec893b631ecbe1b681549471fcf3d(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1d10cf732b79a8e90d66da46fbda026047a15fbcdffe4cd5ac40c0ba2da603f2(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__89d7d66d813561d17dcdb15c85f72841df60791a1776e23b07c8a556bdbd5483(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__77075c5fdda55a634fd4c2a99bf33e2ae5df6a7d8b7a6d4ae5e0b6341945221f(
value: jsii.Number,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__ed348d2a88da51d2c1b969848cc895cc8569f7ee22fee82d176246d2dd18c30e(
value: typing.Optional[ComputeRegionBackendServiceOutlierDetection],
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b18cc7d41a5763bea88d6890ef53a58815c2a431a2ce1701601f37fec23f383f(
*,
create: typing.Optional[builtins.str] = None,
delete: typing.Optional[builtins.str] = None,
update: typing.Optional[builtins.str] = None,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__7db88d4d9ee44acb375b3abb4ebb88e33da2212efdc92198816b1721d42f28f5(
terraform_resource: _cdktf_9a9027ec.IInterpolatingParent,
terraform_attribute: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b496f041f0d5cef9ffd80a6f446f92be5ead56b92ff58f2c574ff2c4b299dada(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__1fe07316d81bb70c9ec70f7de1664f0abcab07eaefb28ede6201ec2fc4b21b96(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__67203a5d196ab7aeb3045ad9eac685da31577d319cf0561d479e7bf7ce3d92f1(
value: builtins.str,
) -> None:
"""Type checking stubs"""
pass
def _typecheckingstub__b93aa246a7bb917b5ae90ffb2d74ad3dc9d55a81a1f1a2b1ac14a09aabd74a26(
value: typing.Optional[typing.Union[_cdktf_9a9027ec.IResolvable, ComputeRegionBackendServiceTimeouts]],
) -> None:
"""Type checking stubs"""
pass
|
PypiClean
|
/obj.mpp-2023.3-py3-none-any.whl/brick/config/config.py
|
from __future__ import annotations
import argparse as ap_
from enum import Enum as enum_t
from typing import Any, Callable, ClassVar, Dict, List, Optional, Sequence, Tuple, Union
import brick.interface.io.reporting as rp_
from brick.config.parameter import parameter_t
from brick.config.section import SectionLabelFromName, section_t
from brick.data.config.specification import (
CONFIG_SPECIFICATION,
missing_required_value_t,
)
from brick.data.config.specification import parameter_t as static_parameter_t
from brick.data.config.specification import section_t as static_section_t
from brick.data.config.std_labels import std_label_e
from brick.data.type import pl_path_t
# noinspection PyArgumentList
config_completeness_e = enum_t("config_completeness_e", "MINIMAL BASIC FULL")
# MINIMAL: everything that has no default value
# BASIC: everything tagged as basic in the specification
# FULL: everything, surprisingly
raw_config_h = Dict[std_label_e, Dict[str, Any]]
light_config_h = Dict[std_label_e, Sequence[parameter_t]]
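# Shape sketch (illustrative values, not taken from the actual specification):
# a raw config maps section labels to {parameter name: untyped value} dicts,
# e.g. {std_label_e.sct_range_units: {"pixel": "1.0"}}, whereas a light
# config keeps typed parameter_t instances per section.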
class config_t(dict):
INI_DEFAULT_SECTION: ClassVar[str] = "DEFAULT"
INI_VALUE_ASSIGNEMENT: ClassVar[str] = "="
INI_MOD_ELM_SEPARATOR: ClassVar[str] = ":"
INI_COMMENT_MARKER: ClassVar[str] = "#"
# On the command line, specify the INI document file with the option: --ini_document INI_document_path
INI_DOCUMENT_OPTION: ClassVar[str] = "ini_document"
# dict already has a __dict__, so __slots__ here would be meaningless
_slots = ("ini_document", "has_default_value", "issues")
ini_document: pl_path_t
has_default_value: Tuple[str, ...]
issues: List[str]
def __init__(self):
#
super().__init__()
for slot in config_t._slots:
setattr(self, slot, None)
self.issues = []
@classmethod
def Standard(cls, completeness: config_completeness_e) -> config_t:
#
instance = cls()
for section_name, section_elements in CONFIG_SPECIFICATION.items():
static_section = section_elements[0]
static_parameters = section_elements[1:]
if not _SectionMatchesCompleteness(static_section, completeness):
continue
parameters = []
n_parameters = 0
n_basic_prms = 0
for static_parameter in static_parameters:
n_parameters += 1
if static_parameter.basic:
n_basic_prms += 1
if _ParameterMatchesCompleteness(static_parameter, completeness):
parameter = parameter_t.FromSpecification(
static_parameter, section_name
)
parameters.append(parameter)
section = section_t.FromSpecification(
section_name, static_section, parameters
)
instance[section_name] = section
return instance
@classmethod
def NewEmpty(cls, ini_document: Optional[Union[str, pl_path_t]] = None) -> config_t:
""""""
instance = cls()
if ini_document is not None:
if isinstance(ini_document, str):
ini_document = pl_path_t(ini_document)
instance.ini_document = ini_document
return instance
@classmethod
def NewFromRawVersion(
cls,
GetRawConfig: Callable[[Any], raw_config_h],
from_file: Optional[Union[str, pl_path_t]] = None,
arguments: Optional[Union[Dict[str, str], ap_.Namespace]] = None,
) -> Tuple[Optional[config_t], bool, Optional[light_config_h]]:
""""""
if from_file is None:
instance = cls.Standard(config_completeness_e.FULL)
else:
raw_config = GetRawConfig(from_file)
if raw_config is None:
return None, False, None
instance = cls.NewEmpty(ini_document=from_file)
instance.SetFromRawConfig(raw_config)
instance.AddDefaults()
if arguments is not None:
instance.OverwriteWithCommandLineArguments(arguments)
# print("--------------- CONFIG INSTANCE")
# for key, values in instance.items():
# for value in values:
# print(" ", value)
config_is_valid, for_deferred_check = instance.Validity()
instance.Finalize()
return instance, config_is_valid, for_deferred_check
def SetFromRawConfig(self, raw_config: raw_config_h) -> None:
""""""
for sct_name, sct_prms in raw_config.items():
if sct_name == self.__class__.INI_DEFAULT_SECTION:
continue
std_sct_name = SectionLabelFromName(sct_name)
if std_sct_name is None:
rp_.ReportIP(sct_name.value, "section", section_t.VALID_SECTION_NAMES)
self.issues.append(
f"{sct_name}: Section {section_t.VALID_SECTION_NAMES}"
) # TODO: Adjust message
continue
sct_name = std_sct_name
parameters = []
for prm_name, prm_value_as_str in sct_prms.items():
parameter = parameter_t.FromUntypedEntry(
prm_name,
prm_value_as_str,
self.__class__.INI_COMMENT_MARKER,
sct_name,
)
parameters.append(parameter)
section = section_t.FromSpecification(
sct_name, CONFIG_SPECIFICATION[sct_name][0], parameters
)
self[sct_name] = section
def AddDefaults(self) -> None:
#
has_default_value = []
default_config = self.__class__.Standard(config_completeness_e.FULL)
for default_section in default_config.values():
section_name = default_section.name
if section_name in self:
section = self[section_name]
for parameter in default_section:
if section.ParameterWithName(parameter.name) is None:
section.append(parameter)
has_default_value.append(parameter.UId)
else:
self[section_name] = default_section
has_default_value.extend(_prm.UId for _prm in default_section)
self.has_default_value = tuple(has_default_value)
def OverwriteWithCommandLineArguments(
self, arguments: Union[Dict[str, str], ap_.Namespace]
) -> None:
#
if isinstance(arguments, ap_.Namespace):
arguments = vars(arguments)
del arguments[self.__class__.INI_DOCUMENT_OPTION]
for prm_uid, value in arguments.items():
if not isinstance(value, missing_required_value_t):
sct_name, prm_name = parameter_t.SectionAndParameterFromUId(prm_uid)
# It would be better to include the SectionLabelFromName call in SectionAndParameterFromUId, but this
# cannot be done easily without circular imports.
sct_name = SectionLabelFromName(sct_name)
section = self[sct_name]
parameter = section.ParameterWithName(prm_name)
if parameter is None:
parameter = parameter_t.FromUntypedEntry(
prm_name, value, self.__class__.INI_COMMENT_MARKER, sct_name
)
section.append(parameter)
else:
parameter.SetTypesAndValueFromString(value)
def Finalize(self) -> None:
#
if self.ini_document is None:
ini_doc_folder = None
else:
ini_doc_folder = self.ini_document.parent
unit_conversions = {
_prm.name: _prm.value for _prm in self[std_label_e.sct_range_units]
}
for section in self.values():
for parameter in section:
value = parameter.value
unit = parameter.unit
if unit is not None:
conversion_factor = unit_conversions.get(unit)
if conversion_factor is None:
self.issues.append(
f'{section.name.value}.{parameter.name}: Has undefined unit "{unit}"'
)
elif conversion_factor != 1.0:
if isinstance(value, Sequence):
converted_value = tuple(
conversion_factor * _val for _val in value
)
else:
converted_value = conversion_factor * value
parameter.value = converted_value
for sct_name, prm_name in parameter_t.PATH_PARAMETERS:
parameter = self[sct_name].ParameterWithName(prm_name)
if (parameter is None) or (parameter.value is None):
continue
# Some parameters can be, e.g., a range or a path to an image; hence the type must be tested.
if isinstance(parameter.value, str):
if parameter.value.__len__() > 0:
value = pl_path_t(parameter.value)
else:
value = None
else:
value = parameter.value
if isinstance(value, pl_path_t):
if (ini_doc_folder is not None) and not value.is_absolute():
value = (ini_doc_folder / value).resolve()
parameter.value = value
elif value is None:
parameter.value = value
# for prm_name, (
# sct_name,
# doc_prm_name,
# ) in parameter_t.DOC_ELM_SPLITABLE_PRMS.items():
# section = self[sct_name]
# parameter = section.ParameterWithName(prm_name)
# if (parameter is not None) and isinstance(parameter.value, str):
# document, element = _SplittedDocumentAndElement(parameter.value)
# parameter.value = element
# split_prm = parameter_t.ForProgrammaticEntry(
# doc_prm_name, document, sct_name
# )
# section.append(split_prm)
def Validity(self) -> Tuple[bool, light_config_h]:
#
is_valid = True
for_deferred_check = {}
for section in self.values():
# section_name in CONFIG_SPECIFICATION: necessarily true; see FromINIDocument
section_name = section.name
if CONFIG_SPECIFICATION[section_name].__len__() > 1:
valid_parameters = tuple(
_prm.name for _prm in CONFIG_SPECIFICATION[section_name][1:]
)
for parameter in section:
if parameter.name not in valid_parameters:
rp_.ReportIP(
parameter.name,
f"parameter in [{section_name}]",
valid_parameters,
)
is_valid = False
elif isinstance(parameter.value, missing_required_value_t):
# print("--------- IN CONFIG")
rp_.ReportE(
f"[{section_name}] {parameter.name}",
"Missing required parameter",
)
is_valid = False
else:
# Skip section with optional-only parameters (parameters of external functions, although some are
# provided by Obj.MPP). The parameters must be checked later on (e.g. by CheckPassedParameters).
for_deferred_check[section_name] = section
return is_valid, for_deferred_check
def AsRawDict(self) -> raw_config_h:
""""""
output = {_key: _val.AsDict() for _key, _val in self.items()}
for prm_name, (
sct_name,
doc_prm_name,
) in parameter_t.DOC_ELM_SPLITABLE_PRMS.items():
if (sct_name in output) and (prm_name in output[sct_name]):
parameter = output[sct_name][prm_name]
if isinstance(parameter, str):
document, element = _SplittedDocumentAndElement(parameter)
output[sct_name][prm_name] = element
output[sct_name][doc_prm_name] = document
return output
def _SectionMatchesCompleteness(
section: static_section_t, completeness: config_completeness_e
) -> bool:
""""""
return (
(completeness == config_completeness_e.FULL)
or ((completeness == config_completeness_e.BASIC) and section.basic)
or ((completeness == config_completeness_e.MINIMAL) and not section.optional)
)
def _ParameterMatchesCompleteness(
parameter: static_parameter_t, completeness: config_completeness_e
) -> bool:
""""""
return (
(completeness == config_completeness_e.FULL)
or ((completeness == config_completeness_e.BASIC) and parameter.basic)
or (
(completeness == config_completeness_e.MINIMAL)
and isinstance(parameter.default, missing_required_value_t)
)
)
def _SplittedDocumentAndElement(value: str) -> Tuple[Optional[pl_path_t], str]:
#
if config_t.INI_MOD_ELM_SEPARATOR in value:
components = value.split(config_t.INI_MOD_ELM_SEPARATOR)
document = "".join(components[:-1])
element = components[-1]
if document.__len__() > 0:
document = pl_path_t(document)
else:
document = None
else:
document = None
element = value
return document, element
def CheckSpecificationValidity() -> None:
#
issues = []
for section_name, section_elements in CONFIG_SPECIFICATION.items():
static_section = section_elements[0]
static_parameters = section_elements[1:]
if not (static_section.basic or static_section.optional):
issues.append(f"{section_name}: Section is not basic but not optional")
n_parameters = 0
n_basic_prms = 0
for static_parameter in static_parameters:
more_issues = _CheckParameterValidity(section_name, static_section, static_parameter)
issues.extend(more_issues)
n_parameters += 1
if static_parameter.basic:
n_basic_prms += 1
if static_section.basic and (n_parameters > 0) and (n_basic_prms == 0):
issues.append(f"{section_name}: Basic section without any basic parameters")
if issues.__len__() > 0:
print("\n".join(issues))
raise rp_.silent_exception_t()
def _CheckParameterValidity(
section_name: std_label_e, section: static_section_t, parameter: static_parameter_t
) -> List[str]:
""""""
output = []
if (not section.basic) and parameter.basic:
output.append(f"{section_name}.{parameter.name}: Basic parameter in a non-basic section")
if not (parameter.basic or parameter.optional):
output.append(f"{section_name}.{parameter.name}: Parameter is not basic but not optional")
if isinstance(parameter.default, missing_required_value_t):
if parameter.optional:
output.append(f"{section_name}.{parameter.name}: Required parameter declared optional")
if None in parameter.types:
output.append(f'{section_name}.{parameter.name}: Required parameter with "None" among its possible types')
if parameter.default.types != parameter.types:
output.append(f'{section_name}.{parameter.name}: Mismatch between "type" and "default.type"')
return output
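if __name__ == "__main__":
    # Minimal smoke-test sketch (not part of the original module): check that
    # the built-in specification is self-consistent, then build the default
    # configuration at full completeness. Assumes the brick package and its
    # data files are importable.
    CheckSpecificationValidity()
    default_config = config_t.Standard(config_completeness_e.FULL)
    print(f"Default configuration holds {len(default_config)} section(s)")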
|
PypiClean
|
/contracts-lib-py-1.0.4.tar.gz/contracts-lib-py-1.0.4/contracts_lib_py/templates/template_manager.py
|
from collections import namedtuple
from contracts_lib_py import ContractBase
AgreementTemplate = namedtuple(
'AgreementTemplate',
('state', 'owner', 'updated_by', 'block_number_updated')
)
class TemplateStoreManager(ContractBase):
"""Class representing the TemplateStoreManager contract."""
CONTRACT_NAME = 'TemplateStoreManager'
def get_template(self, template_id):
"""
Get the template for a given template id.
:param template_id: id of the template, str
:return:
"""
template = self.contract.caller.getTemplate(template_id)
if template and len(template) == 4:
return AgreementTemplate(*template)
return None
def propose_template(self, template_id, from_account):
"""Propose a template.
:param template_id: id of the template, str
:param from_account: Account
:return: bool
"""
tx_hash = self.send_transaction(
'proposeTemplate',
(template_id,),
transact={'from': from_account.address,
'passphrase': from_account.password,
'keyfile': from_account.key_file}
)
return self.is_tx_successful(tx_hash)
def approve_template(self, template_id, from_account):
"""
Approve a template.
:param template_id: id of the template, str
:param from_account: Account
:return:
"""
tx_hash = self.send_transaction(
'approveTemplate',
(template_id,),
transact={'from': from_account.address,
'passphrase': from_account.password,
'keyfile': from_account.key_file}
)
return self.is_tx_successful(tx_hash)
def revoke_template(self, template_id, from_account):
"""
Revoke a template.
:param template_id: id of the template, str
:param from_account: Account
:return: bool
"""
tx_hash = self.send_transaction(
'revokeTemplate',
(template_id,),
transact={'from': from_account.address,
'passphrase': from_account.password,
'keyfile': from_account.key_file}
)
return self.is_tx_successful(tx_hash)
def is_template_approved(self, template_id):
"""
True if the template is approved.
:param template_id: id of the template, str
:return: bool
"""
return self.contract.caller.isTemplateApproved(template_id)
def get_num_templates(self):
"""
Return the number of templates on-chain.
:return: number of templates, int
"""
return self.contract.caller.getTemplateListSize()
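# Illustrative lifecycle sketch (comments only; how to instantiate the manager
# and the Account objects follows contracts_lib_py conventions not shown here):
#
#     manager = ...  # a TemplateStoreManager bound to a deployed contract
#     if manager.propose_template(template_id, proposer_account):
#         manager.approve_template(template_id, owner_account)
#     assert manager.is_template_approved(template_id)
#     print(manager.get_num_templates())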
|
PypiClean
|
/google-cloud-dialogflow-2.23.3.tar.gz/google-cloud-dialogflow-2.23.3/google/cloud/dialogflow_v2/services/knowledge_bases/client.py
|
from collections import OrderedDict
import os
import re
from typing import (
Dict,
Mapping,
MutableMapping,
MutableSequence,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.dialogflow_v2 import gapic_version as package_version
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.cloud.location import locations_pb2 # type: ignore
from google.longrunning import operations_pb2
from google.protobuf import field_mask_pb2 # type: ignore
from google.cloud.dialogflow_v2.services.knowledge_bases import pagers
from google.cloud.dialogflow_v2.types import knowledge_base as gcd_knowledge_base
from google.cloud.dialogflow_v2.types import knowledge_base
from .transports.base import DEFAULT_CLIENT_INFO, KnowledgeBasesTransport
from .transports.grpc import KnowledgeBasesGrpcTransport
from .transports.grpc_asyncio import KnowledgeBasesGrpcAsyncIOTransport
from .transports.rest import KnowledgeBasesRestTransport
class KnowledgeBasesClientMeta(type):
"""Metaclass for the KnowledgeBases client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[KnowledgeBasesTransport]]
_transport_registry["grpc"] = KnowledgeBasesGrpcTransport
_transport_registry["grpc_asyncio"] = KnowledgeBasesGrpcAsyncIOTransport
_transport_registry["rest"] = KnowledgeBasesRestTransport
def get_transport_class(
cls,
label: Optional[str] = None,
) -> Type[KnowledgeBasesTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class KnowledgeBasesClient(metaclass=KnowledgeBasesClientMeta):
"""Service for managing
[KnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBase].
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "dialogflow.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
KnowledgeBasesClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
KnowledgeBasesClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
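# Usage sketch (the key file path is illustrative):
#   client = KnowledgeBasesClient.from_service_account_file("sa-key.json")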
@property
def transport(self) -> KnowledgeBasesTransport:
"""Returns the transport used by the client instance.
Returns:
KnowledgeBasesTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def knowledge_base_path(
project: str,
knowledge_base: str,
) -> str:
"""Returns a fully-qualified knowledge_base string."""
return "projects/{project}/knowledgeBases/{knowledge_base}".format(
project=project,
knowledge_base=knowledge_base,
)
@staticmethod
def parse_knowledge_base_path(path: str) -> Dict[str, str]:
"""Parses a knowledge_base path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/knowledgeBases/(?P<knowledge_base>.+?)$", path
)
return m.groupdict() if m else {}
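# Round-trip sketch for the path helpers above (values are illustrative):
#   knowledge_base_path("my-proj", "kb-123")
#     -> "projects/my-proj/knowledgeBases/kb-123"
#   parse_knowledge_base_path("projects/my-proj/knowledgeBases/kb-123")
#     -> {"project": "my-proj", "knowledge_base": "kb-123"}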
@staticmethod
def common_billing_account_path(
billing_account: str,
) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(
folder: str,
) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(
folder=folder,
)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(
organization: str,
) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(
organization=organization,
)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(
project: str,
) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(
project=project,
)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(
project: str,
location: str,
) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project,
location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[client_options_lib.ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint,
else use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_client_cert not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
if use_mtls_endpoint not in ("auto", "never", "always"):
raise MutualTLSChannelError(
"Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
)
# Figure out the client cert source to use.
client_cert_source = None
if use_client_cert == "true":
if client_options.client_cert_source:
client_cert_source = client_options.client_cert_source
elif mtls.has_default_client_cert_source():
client_cert_source = mtls.default_client_cert_source()
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
elif use_mtls_endpoint == "always" or (
use_mtls_endpoint == "auto" and client_cert_source
):
api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = cls.DEFAULT_ENDPOINT
return api_endpoint, client_cert_source
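# Decision sketch (illustrative): with no client_options overrides and the
# default environment (GOOGLE_API_USE_CLIENT_CERTIFICATE="false",
# GOOGLE_API_USE_MTLS_ENDPOINT="auto"), this resolves to
# (DEFAULT_ENDPOINT, None); setting GOOGLE_API_USE_MTLS_ENDPOINT="always"
# yields (DEFAULT_MTLS_ENDPOINT, None) instead.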
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Optional[Union[str, KnowledgeBasesTransport]] = None,
client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the knowledge bases client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, KnowledgeBasesTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
client_options = cast(client_options_lib.ClientOptions, client_options)
api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
client_options
)
api_key_value = getattr(client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
"client_options.api_key and credentials are mutually exclusive"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, KnowledgeBasesTransport):
# transport is a KnowledgeBasesTransport instance.
if credentials or client_options.credentials_file or api_key_value:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
import google.auth._default # type: ignore
if api_key_value and hasattr(
google.auth._default, "get_api_key_credentials"
):
credentials = google.auth._default.get_api_key_credentials(
api_key_value
)
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
api_audience=client_options.api_audience,
)
def list_knowledge_bases(
self,
request: Optional[Union[knowledge_base.ListKnowledgeBasesRequest, dict]] = None,
*,
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListKnowledgeBasesPager:
r"""Returns the list of all knowledge bases of the
specified agent.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflow_v2
def sample_list_knowledge_bases():
# Create a client
client = dialogflow_v2.KnowledgeBasesClient()
# Initialize request argument(s)
request = dialogflow_v2.ListKnowledgeBasesRequest(
parent="parent_value",
)
# Make the request
page_result = client.list_knowledge_bases(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.dialogflow_v2.types.ListKnowledgeBasesRequest, dict]):
The request object. Request message for
[KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBases.ListKnowledgeBases].
parent (str):
Required. The project to list knowledge bases for.
Format:
``projects/<Project ID>/locations/<Location ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.services.knowledge_bases.pagers.ListKnowledgeBasesPager:
Response message for
[KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2.KnowledgeBases.ListKnowledgeBases].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a knowledge_base.ListKnowledgeBasesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, knowledge_base.ListKnowledgeBasesRequest):
request = knowledge_base.ListKnowledgeBasesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_knowledge_bases]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListKnowledgeBasesPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
def get_knowledge_base(
self,
request: Optional[Union[knowledge_base.GetKnowledgeBaseRequest, dict]] = None,
*,
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> knowledge_base.KnowledgeBase:
r"""Retrieves the specified knowledge base.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflow_v2
def sample_get_knowledge_base():
# Create a client
client = dialogflow_v2.KnowledgeBasesClient()
# Initialize request argument(s)
request = dialogflow_v2.GetKnowledgeBaseRequest(
name="name_value",
)
# Make the request
response = client.get_knowledge_base(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflow_v2.types.GetKnowledgeBaseRequest, dict]):
The request object. Request message for
[KnowledgeBases.GetKnowledgeBase][google.cloud.dialogflow.v2.KnowledgeBases.GetKnowledgeBase].
name (str):
Required. The name of the knowledge base to retrieve.
Format
``projects/<Project ID>/locations/<Location ID>/knowledgeBases/<Knowledge Base ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.KnowledgeBase:
A knowledge base represents a collection of knowledge documents that you
provide to Dialogflow. Your knowledge documents
contain information that may be useful during
conversations with end-users. Some Dialogflow
features use knowledge bases when looking for a
response to an end-user input.
For more information, see the [knowledge base
guide](\ https://cloud.google.com/dialogflow/docs/how/knowledge-bases).
Note: The projects.agent.knowledgeBases resource is
deprecated; only use projects.knowledgeBases.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a knowledge_base.GetKnowledgeBaseRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, knowledge_base.GetKnowledgeBaseRequest):
request = knowledge_base.GetKnowledgeBaseRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_knowledge_base]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def create_knowledge_base(
self,
request: Optional[
Union[gcd_knowledge_base.CreateKnowledgeBaseRequest, dict]
] = None,
*,
parent: Optional[str] = None,
knowledge_base: Optional[gcd_knowledge_base.KnowledgeBase] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcd_knowledge_base.KnowledgeBase:
r"""Creates a knowledge base.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflow_v2
def sample_create_knowledge_base():
# Create a client
client = dialogflow_v2.KnowledgeBasesClient()
# Initialize request argument(s)
knowledge_base = dialogflow_v2.KnowledgeBase()
knowledge_base.display_name = "display_name_value"
request = dialogflow_v2.CreateKnowledgeBaseRequest(
parent="parent_value",
knowledge_base=knowledge_base,
)
# Make the request
response = client.create_knowledge_base(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflow_v2.types.CreateKnowledgeBaseRequest, dict]):
The request object. Request message for
[KnowledgeBases.CreateKnowledgeBase][google.cloud.dialogflow.v2.KnowledgeBases.CreateKnowledgeBase].
parent (str):
Required. The project to create a knowledge base for.
Format:
``projects/<Project ID>/locations/<Location ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
knowledge_base (google.cloud.dialogflow_v2.types.KnowledgeBase):
Required. The knowledge base to
create.
This corresponds to the ``knowledge_base`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.KnowledgeBase:
A knowledge base represents a collection of knowledge documents that you
provide to Dialogflow. Your knowledge documents
contain information that may be useful during
conversations with end-users. Some Dialogflow
features use knowledge bases when looking for a
response to an end-user input.
For more information, see the [knowledge base
guide](\ https://cloud.google.com/dialogflow/docs/how/knowledge-bases).
Note: The projects.agent.knowledgeBases resource is
deprecated; only use projects.knowledgeBases.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, knowledge_base])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a gcd_knowledge_base.CreateKnowledgeBaseRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, gcd_knowledge_base.CreateKnowledgeBaseRequest):
request = gcd_knowledge_base.CreateKnowledgeBaseRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if knowledge_base is not None:
request.knowledge_base = knowledge_base
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.create_knowledge_base]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def delete_knowledge_base(
self,
request: Optional[
Union[knowledge_base.DeleteKnowledgeBaseRequest, dict]
] = None,
*,
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes the specified knowledge base.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflow_v2
def sample_delete_knowledge_base():
# Create a client
client = dialogflow_v2.KnowledgeBasesClient()
# Initialize request argument(s)
request = dialogflow_v2.DeleteKnowledgeBaseRequest(
name="name_value",
)
# Make the request
client.delete_knowledge_base(request=request)
Args:
request (Union[google.cloud.dialogflow_v2.types.DeleteKnowledgeBaseRequest, dict]):
The request object. Request message for
[KnowledgeBases.DeleteKnowledgeBase][google.cloud.dialogflow.v2.KnowledgeBases.DeleteKnowledgeBase].
name (str):
Required. The name of the knowledge base to delete.
Format:
``projects/<Project ID>/locations/<Location ID>/knowledgeBases/<Knowledge Base ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a knowledge_base.DeleteKnowledgeBaseRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, knowledge_base.DeleteKnowledgeBaseRequest):
request = knowledge_base.DeleteKnowledgeBaseRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.delete_knowledge_base]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
def update_knowledge_base(
self,
request: Optional[
Union[gcd_knowledge_base.UpdateKnowledgeBaseRequest, dict]
] = None,
*,
knowledge_base: Optional[gcd_knowledge_base.KnowledgeBase] = None,
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcd_knowledge_base.KnowledgeBase:
r"""Updates the specified knowledge base.
.. code-block:: python
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflow_v2
def sample_update_knowledge_base():
# Create a client
client = dialogflow_v2.KnowledgeBasesClient()
# Initialize request argument(s)
knowledge_base = dialogflow_v2.KnowledgeBase()
knowledge_base.display_name = "display_name_value"
request = dialogflow_v2.UpdateKnowledgeBaseRequest(
knowledge_base=knowledge_base,
)
# Make the request
response = client.update_knowledge_base(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.dialogflow_v2.types.UpdateKnowledgeBaseRequest, dict]):
The request object. Request message for
[KnowledgeBases.UpdateKnowledgeBase][google.cloud.dialogflow.v2.KnowledgeBases.UpdateKnowledgeBase].
knowledge_base (google.cloud.dialogflow_v2.types.KnowledgeBase):
Required. The knowledge base to
update.
This corresponds to the ``knowledge_base`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Not specified means ``update all``. Currently,
only ``display_name`` can be updated; an InvalidArgument
error will be returned for attempts to update other fields.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.KnowledgeBase:
A knowledge base represents a collection of knowledge documents that you
provide to Dialogflow. Your knowledge documents
contain information that may be useful during
conversations with end-users. Some Dialogflow
features use knowledge bases when looking for a
response to an end-user input.
For more information, see the [knowledge base
guide](\ https://cloud.google.com/dialogflow/docs/how/knowledge-bases).
Note: The projects.agent.knowledgeBases resource is
deprecated; only use projects.knowledgeBases.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([knowledge_base, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a gcd_knowledge_base.UpdateKnowledgeBaseRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, gcd_knowledge_base.UpdateKnowledgeBaseRequest):
request = gcd_knowledge_base.UpdateKnowledgeBaseRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if knowledge_base is not None:
request.knowledge_base = knowledge_base
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.update_knowledge_base]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("knowledge_base.name", request.knowledge_base.name),)
),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def __enter__(self) -> "KnowledgeBasesClient":
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
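# A minimal sketch of the context-manager pattern warned about above,
# assuming default credentials are available; the knowledge base name
# below is a hypothetical placeholder:
#
#     with KnowledgeBasesClient() as client:
#         kb = dialogflow_v2.KnowledgeBase(
#             name="projects/my-project/knowledgeBases/my-kb",
#             display_name="new display name",
#         )
#         client.update_knowledge_base(knowledge_base=kb)
#     # the transport is closed here; the client must not be reused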
def list_operations(
self,
request: Optional[operations_pb2.ListOperationsRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
Args:
request (:class:`~.operations_pb2.ListOperationsRequest`):
The request object. Request message for
`ListOperations` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.ListOperationsRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._transport.list_operations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def get_operation(
self,
request: Optional[operations_pb2.GetOperationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
Args:
request (:class:`~.operations_pb2.GetOperationRequest`):
The request object. Request message for
`GetOperation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.GetOperationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._transport.get_operation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def cancel_operation(
self,
request: Optional[operations_pb2.CancelOperationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
The server makes a best effort to cancel the operation, but success
is not guaranteed. If the server doesn't support this method, it returns
`google.rpc.Code.UNIMPLEMENTED`.
Args:
request (:class:`~.operations_pb2.CancelOperationRequest`):
The request object. Request message for
`CancelOperation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
None
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = operations_pb2.CancelOperationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._transport.cancel_operation,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
def get_location(
self,
request: Optional[locations_pb2.GetLocationRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.Location:
r"""Gets information about a location.
Args:
request (:class:`~.location_pb2.GetLocationRequest`):
The request object. Request message for
`GetLocation` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.Location:
Location object.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.GetLocationRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._transport.get_location,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def list_locations(
self,
request: Optional[locations_pb2.ListLocationsRequest] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> locations_pb2.ListLocationsResponse:
r"""Lists information about the supported locations for this service.
Args:
request (:class:`~.location_pb2.ListLocationsRequest`):
The request object. Request message for
`ListLocations` method.
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.location_pb2.ListLocationsResponse:
Response message for ``ListLocations`` method.
"""
# Create or coerce a protobuf request object.
# The request isn't a proto-plus wrapped type,
# so it must be constructed via keyword expansion.
if isinstance(request, dict):
request = locations_pb2.ListLocationsRequest(**request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method.wrap_method(
self._transport.list_locations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=package_version.__version__
)
__all__ = ("KnowledgeBasesClient",)
/django_kmuhelper-1.7.2-py3-none-any.whl/kmuhelper/modules/pdfgeneration/order/forms.py
from django import forms
from kmuhelper import constants
class PDFOrderForm(forms.Form):
# Fieldsets
fieldsets = [
{'fields': ['preset']},
{'fields': ['title', 'text', 'language'], 'name': 'Text & Sprache'},
{'fields': ['do_print'], 'name': 'Optionen'},
{'fields': ['do_download'], 'name': 'Ausgabe'},
]
# Fields
preset = forms.ChoiceField(
label="Vorlage",
choices=(
('invoice', 'Rechnung'),
('delivery-note', 'Lieferschein'),
('payment-reminder', 'Zahlungserinnerung'),
),
required=True,
help_text="Die Vorlage definiert das Layout und die Inhalte der PDF-Datei",
)
title = forms.CharField(
label="Titel",
required=False,
max_length=32,
help_text="Z. B. 'Rechnung' oder 'Lieferschein' - Leer lassen für Standardwert der Vorlage",
)
text = forms.CharField(
label="Text",
required=False,
widget=forms.Textarea,
help_text="Dieser Text wird unterhalb des Titels angezeigt - Leer lassen für Standardwert der Vorlage",
)
language = forms.ChoiceField(
label="Sprache",
choices=constants.LANGUAGES,
required=True,
help_text="Die Sprache, in der die PDF-Datei generiert werden soll"
)
do_print = forms.BooleanField(
label="Druckversion?",
required=False,
help_text="Wenn aktiviert, wird die PDF-Datei ohne Schnittmarker generiert"
)
do_download = forms.BooleanField(
label="Herunterladen?",
required=False,
help_text="Datei automatisch heruntergeladen (dieses Verhalten kann je nach Browser variieren)"
)
def get_url_params(self):
result = f"?custom&preset={self.cleaned_data['preset']}&language={self.cleaned_data['language']}"
if self.cleaned_data['do_print']:
result += '&print'
if self.cleaned_data['do_download']:
result += '&download'
return result
def update_order_settings(self, order):
order.pdf_title = self.cleaned_data['title']
order.pdf_text = self.cleaned_data['text']
order.save()
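# A minimal usage sketch (hypothetical view code, not part of this module):
# the form validates the POST data, stores the PDF settings on the order and
# builds the query string for the PDF endpoint.
#
#     form = PDFOrderForm(request.POST)
#     if form.is_valid():
#         form.update_order_settings(order)
#         return redirect(pdf_url + form.get_url_params())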
/scrapy-rabbitmq-scheduler-neo-1.0.6.tar.gz/scrapy-rabbitmq-scheduler-neo-1.0.6/src/scrapy_rabbitmq_scheduler/middleware.py
import pika
import logging
from scrapy.exceptions import IgnoreRequest
logger = logging.getLogger(__name__)
class RabbitMQMiddleware(object):
""" Middleware used to close message from current queue or
send unsuccessful messages to be rescheduled.
"""
def __init__(self, settings):
self.requeue_list = settings.get('SCHEDULER_REQUEUE_ON_STATUS', [])
self.init = True
@classmethod
def from_settings(cls, settings):
return cls(settings)
@classmethod
def from_crawler(cls, crawler):
return cls(crawler.settings)
def ensure_init(self, spider):
if self.init:
self.spider = spider
self.scheduler = spider.crawler.engine.slot.scheduler
self.stats = spider.crawler.stats
self.init = False
def process_response(self, request, response, spider):
self.ensure_init(spider)
if not is_a_picture(response):
if response.status in self.requeue_list:
self.requeue(response)
self.ack(request, response)
request.meta['requeued'] = True
raise IgnoreRequest
else:
self.ack(request, response)
else:
self.process_picture(response)
return response
def has_delivery_tag(self, request):
if self.spider.settings.get('RABBITMQ_CONFIRM_DELIVERY', True) is not True:
return False
if 'delivery_tag' not in request.meta:
logger.error('Request %(request)s does not have a delivery tag.' %
{'request': request})
return False
return True
def process_picture(self, response):
logger.info('Picture (%(status)d): %(url)s', {
'url': response.url,
'status': response.status
})
self.inc_stat('picture')
def requeue(self, response):
self.scheduler.requeue_message(response.url)
logger.info('Requeued (%(status)d): %(url)s', {
'url': response.url,
'status': response.status
})
self.inc_stat('requeued')
def ack(self, request, response):
if self.has_delivery_tag(request):
delivery_tag = request.meta.get('delivery_tag')
self.scheduler.ack_message(delivery_tag)
logger.info('Acked (%(status)d): %(url)s' % {
'url': response.url,
'status': response.status
})
self.inc_stat('acked')
def inc_stat(self, stat):
self.stats.inc_value('scheduler/acking/%(stat)s/rabbitmq' %
{'stat': stat},
spider=self.spider)
def is_a_picture(response):
picture_exts = ['.png', '.jpg']
return any(response.url.endswith(ext) for ext in picture_exts)
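# A minimal sketch of how this middleware might be enabled in a project's
# settings.py; the import path follows this package's layout, while the
# priority 543 and the status list are illustrative assumptions:
#
#     DOWNLOADER_MIDDLEWARES = {
#         "scrapy_rabbitmq_scheduler.middleware.RabbitMQMiddleware": 543,
#     }
#     SCHEDULER_REQUEUE_ON_STATUS = [500, 502, 503]
#     RABBITMQ_CONFIRM_DELIVERY = True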
/tensorflow_edwin-2.10.1-cp38-cp38-win_amd64.whl/tensorflow/python/ops/init_ops_v2.py
"""Initializers for TF 2."""
import math
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_linalg_ops
from tensorflow.python.ops import linalg_ops_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import stateless_random_ops
from tensorflow.python.ops.init_ops import _compute_fans
from tensorflow.python.util.tf_export import tf_export
_PARTITION_SHAPE = "partition_shape"
_PARTITION_OFFSET = "partition_offset"
class Initializer:
"""Initializer base class: all initializers inherit from this class.
Initializers should implement a `__call__` method with the following
signature:
```python
def __call__(self, shape, dtype=None, **kwargs):
# returns a tensor of shape `shape` and dtype `dtype`
# containing values drawn from a distribution of your choice.
```
"""
def __call__(self, shape, dtype=None, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. If not provided will return tensor
of `tf.float32`.
**kwargs: Additional keyword arguments. Accepted values:
`partition_shape` and `partition_offset`. Used when creating a single
partition in a partitioned variable. `partition_shape` is the shape of
the partition (i.e. the shape of the returned tensor) and
`partition_offset` is a tuple of `int` specifying the offset of this
partition w.r.t each axis. For example, a tensor of shape `(30, 100)`
can be partitioned into two partitions: `p0` of shape `(10, 100)` and
`p1` of shape `(20, 100)`; if the initializer is called with
`partition_shape=(20, 100)` and `partition_offset=(10, 0)`, it should
return the value for `p1`.
"""
raise NotImplementedError
def get_config(self):
"""Returns the configuration of the initializer as a JSON-serializable dict.
Returns:
A JSON-serializable Python dict.
"""
return {}
@classmethod
def from_config(cls, config):
"""Instantiates an initializer from a configuration dictionary.
Example:
```python
initializer = RandomUniform(-1, 1)
config = initializer.get_config()
initializer = RandomUniform.from_config(config)
```
Args:
config: A Python dictionary.
It will typically be the output of `get_config`.
Returns:
An Initializer instance.
"""
config.pop("dtype", None)
return cls(**config)
def _validate_kwargs(self, kwargs, support_partition=True):
for kwarg in kwargs:
if kwarg not in [_PARTITION_SHAPE, _PARTITION_OFFSET]:
raise TypeError(
"Keyword argument should be one of "
f"{list([_PARTITION_SHAPE, _PARTITION_OFFSET])}. Received: {kwarg}")
elif not support_partition:
raise ValueError(
f"{self.__class__.__name__} initializer doesn't support "
"partition-related arguments")
@tf_export("zeros_initializer", v1=[])
class Zeros(Initializer):
"""Initializer that generates tensors initialized to 0.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.zeros_initializer())
>>> v1
<tf.Variable ... shape=(3,) ... numpy=array([0., 0., 0.], dtype=float32)>
>>> v2
<tf.Variable ... shape=(3, 3) ... numpy=
array([[0., 0., 0.],
[0., 0., 0.],
[0., 0., 0.]], dtype=float32)>
>>> make_variables(4, tf.random_uniform_initializer(minval=-1., maxval=1.))
(<tf.Variable...shape=(4,) dtype=float32...>, <tf.Variable...shape=(4, 4) ...
"""
def __call__(self, shape, dtype=dtypes.float32, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only numeric or boolean dtypes are
supported.
**kwargs: Additional keyword arguments.
Raises:
ValueError: If the dtype is not numeric or boolean.
"""
self._validate_kwargs(kwargs)
dtype = dtypes.as_dtype(dtype)
if not dtype.is_numpy_compatible or dtype == dtypes.string:
raise ValueError("Argument `dtype` expected to be numeric or boolean. "
f"Received {dtype}.")
if _PARTITION_SHAPE in kwargs:
shape = kwargs[_PARTITION_SHAPE]
return array_ops.zeros(shape, dtype)
@tf_export("ones_initializer", v1=[])
class Ones(Initializer):
"""Initializer that generates tensors initialized to 1.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.ones_initializer())
>>> v1
<tf.Variable ... shape=(3,) ... numpy=array([1., 1., 1.], dtype=float32)>
>>> v2
<tf.Variable ... shape=(3, 3) ... numpy=
array([[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.]], dtype=float32)>
>>> make_variables(4, tf.random_uniform_initializer(minval=-1., maxval=1.))
(<tf.Variable...shape=(4,) dtype=float32...>, <tf.Variable...shape=(4, 4) ...
"""
def __call__(self, shape, dtype=dtypes.float32, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only numeric or boolean dtypes are
supported.
**kwargs: Additional keyword arguments.
Raises:
ValueError: If the dtype is not numeric or boolean.
"""
self._validate_kwargs(kwargs)
dtype = dtypes.as_dtype(dtype)
if not dtype.is_numpy_compatible or dtype == dtypes.string:
raise ValueError("Argument `dtype` expected to be numeric or boolean. "
f"Received {dtype}.")
if _PARTITION_SHAPE in kwargs:
shape = kwargs[_PARTITION_SHAPE]
return array_ops.ones(shape, dtype)
@tf_export("constant_initializer", v1=[])
class Constant(Initializer):
"""Initializer that generates tensors with constant values.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
`tf.constant_initializer` returns an object which when called returns a tensor
populated with the `value` specified in the constructor. This `value` must be
convertible to the requested `dtype`.
The argument `value` can be a scalar constant value, or a list of
values. Scalars broadcast to whichever shape is requested from the
initializer.
If `value` is a list, then the length of the list must be equal to the number
of elements implied by the desired shape of the tensor. If the total number of
elements in `value` is not equal to the number of elements required by the
tensor shape, the initializer will raise a `TypeError`.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.constant_initializer(2.))
>>> v1
<tf.Variable ... shape=(3,) ... numpy=array([2., 2., 2.], dtype=float32)>
>>> v2
<tf.Variable ... shape=(3, 3) ... numpy=
array([[2., 2., 2.],
[2., 2., 2.],
[2., 2., 2.]], dtype=float32)>
>>> make_variables(4, tf.random_uniform_initializer(minval=-1., maxval=1.))
(<tf.Variable...shape=(4,) dtype=float32...>, <tf.Variable...shape=(4, 4) ...
>>> value = [0, 1, 2, 3, 4, 5, 6, 7]
>>> init = tf.constant_initializer(value)
>>> # Fitting shape
>>> tf.Variable(init(shape=[2, 4], dtype=tf.float32))
<tf.Variable ...
array([[0., 1., 2., 3.],
[4., 5., 6., 7.]], dtype=float32)>
>>> # Larger shape
>>> tf.Variable(init(shape=[3, 4], dtype=tf.float32))
Traceback (most recent call last):
...
TypeError: ...value has 8 elements, shape is (3, 4) with 12 elements...
>>> # Smaller shape
>>> tf.Variable(init(shape=[2, 3], dtype=tf.float32))
Traceback (most recent call last):
...
TypeError: ...value has 8 elements, shape is (2, 3) with 6 elements...
Args:
value: A Python scalar, list or tuple of values, or a N-dimensional numpy
array. All elements of the initialized variable will be set to the
corresponding value in the `value` argument.
Raises:
TypeError: If the input `value` is not one of the expected types.
"""
def __init__(self, value=0):
if not (np.isscalar(value) or isinstance(value, (list, tuple, np.ndarray))):
raise TypeError(
f"Invalid type for initial value: {type(value).__name__}. Expected "
"Python scalar, list or tuple of values, or numpy.ndarray.")
self.value = value
def __call__(self, shape, dtype=None, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. If not provided the dtype of the
tensor created will be the type of the initial value.
**kwargs: Additional keyword arguments.
Raises:
TypeError: If the initializer cannot create a tensor of the requested
dtype.
"""
self._validate_kwargs(kwargs, support_partition=False)
if dtype is not None:
dtype = dtypes.as_dtype(dtype)
return constant_op.constant(self.value, dtype=dtype, shape=shape)
def get_config(self):
return {"value": self.value}
@tf_export("random_uniform_initializer", v1=[])
class RandomUniform(Initializer):
"""Initializer that generates tensors with a uniform distribution.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.random_uniform_initializer())
>>> v1
<tf.Variable ... shape=(3,) ... numpy=array([...], dtype=float32)>
>>> v2
<tf.Variable ... shape=(3, 3) ... numpy=
...
>>> make_variables(4, tf.random_uniform_initializer(minval=-1., maxval=1.))
(<tf.Variable...shape=(4,) dtype=float32...>, <tf.Variable...shape=(4, 4) ...
Args:
minval: A python scalar or a scalar tensor. Lower bound of the range of
random values to generate (inclusive).
maxval: A python scalar or a scalar tensor. Upper bound of the range of
random values to generate (exclusive).
seed: A Python integer. Used to create random seeds. See
`tf.random.set_seed` for behavior.
"""
def __init__(self, minval=-0.05, maxval=0.05, seed=None):
self.minval = minval
self.maxval = maxval
self.seed = seed
self._random_generator = _RandomGenerator(seed)
def __call__(self, shape, dtype=dtypes.float32, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only floating point and integer
types are supported.
**kwargs: Additional keyword arguments.
Raises:
ValueError: If the dtype is not numeric.
"""
self._validate_kwargs(kwargs)
dtype = dtypes.as_dtype(dtype)
if not dtype.is_floating and not dtype.is_integer:
raise ValueError("Argument `dtype` expected to be numeric or boolean. "
f"Received {dtype}.")
if _PARTITION_SHAPE in kwargs:
shape = kwargs[_PARTITION_SHAPE]
return self._random_generator.random_uniform(shape, self.minval,
self.maxval, dtype)
def get_config(self):
return {
"minval": self.minval,
"maxval": self.maxval,
"seed": self.seed
}
@tf_export("random_normal_initializer", v1=[])
class RandomNormal(Initializer):
"""Initializer that generates tensors with a normal distribution.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3,
... tf.random_normal_initializer(mean=1., stddev=2.))
>>> v1
<tf.Variable ... shape=(3,) ... numpy=array([...], dtype=float32)>
>>> v2
<tf.Variable ... shape=(3, 3) ... numpy=
...
>>> make_variables(4, tf.random_uniform_initializer(minval=-1., maxval=1.))
(<tf.Variable...shape=(4,) dtype=float32...>, <tf.Variable...shape=(4, 4) ...
Args:
mean: a python scalar or a scalar tensor. Mean of the random values to
generate.
stddev: a python scalar or a scalar tensor. Standard deviation of the random
values to generate.
seed: A Python integer. Used to create random seeds. See
`tf.random.set_seed` for behavior.
"""
def __init__(self, mean=0.0, stddev=0.05, seed=None):
self.mean = mean
self.stddev = stddev
self.seed = seed
self._random_generator = _RandomGenerator(seed)
def __call__(self, shape, dtype=dtypes.float32, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only floating point types are
supported.
**kwargs: Additional keyword arguments.
Raises:
ValueError: If the dtype is not floating point
"""
self._validate_kwargs(kwargs)
dtype = _assert_float_dtype(dtype)
if _PARTITION_SHAPE in kwargs:
shape = kwargs[_PARTITION_SHAPE]
return self._random_generator.random_normal(shape, self.mean, self.stddev,
dtype)
def get_config(self):
return {
"mean": self.mean,
"stddev": self.stddev,
"seed": self.seed
}
class TruncatedNormal(Initializer):
"""Initializer that generates a truncated normal distribution.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
These values are similar to values from a `tf.initializers.RandomNormal`
except that values more than two standard deviations from the mean are
discarded and re-drawn. This is the recommended initializer for neural network
weights and filters.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(
... 3, tf.initializers.TruncatedNormal(mean=1., stddev=2.))
>>> v1
<tf.Variable ... shape=(3,) ... numpy=array([...], dtype=float32)>
>>> v2
<tf.Variable ... shape=(3, 3) ... numpy=
...
>>> make_variables(4, tf.initializers.RandomUniform(minval=-1., maxval=1.))
(<tf.Variable...shape=(4,) dtype=float32...>, <tf.Variable...shape=(4, 4) ...
Args:
mean: a python scalar or a scalar tensor. Mean of the random values
to generate.
stddev: a python scalar or a scalar tensor. Standard deviation of the
random values to generate.
seed: A Python integer. Used to create random seeds. See
`tf.random.set_seed` for behavior.
"""
def __init__(self, mean=0.0, stddev=0.05, seed=None):
self.mean = mean
self.stddev = stddev
self.seed = seed
self._random_generator = _RandomGenerator(seed)
def __call__(self, shape, dtype=dtypes.float32, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only floating point types are
supported.
**kwargs: Additional keyword arguments.
Raises:
ValueError: If the dtype is not floating point
"""
self._validate_kwargs(kwargs)
dtype = _assert_float_dtype(dtype)
if _PARTITION_SHAPE in kwargs:
shape = kwargs[_PARTITION_SHAPE]
return self._random_generator.truncated_normal(shape, self.mean,
self.stddev, dtype)
def get_config(self):
return {
"mean": self.mean,
"stddev": self.stddev,
"seed": self.seed
}
class VarianceScaling(Initializer):
"""Initializer capable of adapting its scale to the shape of weights tensors.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
With `distribution="truncated_normal" or "untruncated_normal"`, samples are
drawn from a truncated/untruncated normal distribution with a mean of zero and
a standard deviation (after truncation, if used) `stddev = sqrt(scale / n)`
where n is:
- number of input units in the weight tensor, if mode = "fan_in"
- number of output units, if mode = "fan_out"
- average of the numbers of input and output units, if mode = "fan_avg"
With `distribution="uniform"`, samples are drawn from a uniform distribution
within [-limit, limit], with `limit = sqrt(3 * scale / n)`.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.initializers.VarianceScaling(scale=1.))
>>> v1
<tf.Variable ... shape=(3,) ... numpy=array([...], dtype=float32)>
>>> v2
<tf.Variable ... shape=(3, 3) ... numpy=
...
>>> make_variables(4, tf.initializers.VarianceScaling(distribution='uniform'))
(<tf.Variable...shape=(4,) dtype=float32...>, <tf.Variable...shape=(4, 4) ...
Args:
scale: Scaling factor (positive float).
mode: One of "fan_in", "fan_out", "fan_avg".
distribution: Random distribution to use. One of "truncated_normal",
"untruncated_normal" and "uniform".
seed: A Python integer. Used to create random seeds. See
`tf.random.set_seed` for behavior.
Raises:
ValueError: In case of an invalid value for the "scale", mode" or
"distribution" arguments.
"""
def __init__(self,
scale=1.0,
mode="fan_in",
distribution="truncated_normal",
seed=None):
if scale <= 0.:
raise ValueError("Argument `scale` must be a positive float. Received: "
f"{scale}")
if mode not in {"fan_in", "fan_out", "fan_avg"}:
raise ValueError("Argument `mode` should be one of ('fan_in', 'fan_out', "
f"'fan_avg'). Received: {mode}")
distribution = distribution.lower()
# Compatibility with keras-team/keras.
if distribution == "normal":
distribution = "truncated_normal"
if distribution not in {"uniform", "truncated_normal",
"untruncated_normal"}:
raise ValueError("Argument `distribution` should be one of ('uniform', "
"'truncated_normal', 'untruncated_normal'). Received: "
f"{distribution}")
self.scale = scale
self.mode = mode
self.distribution = distribution
self.seed = seed
self._random_generator = _RandomGenerator(seed)
def __call__(self, shape, dtype=dtypes.float32, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only floating point types are
supported.
**kwargs: Additional keyword arguments.
Raises:
ValueError: If the dtype is not floating point
"""
self._validate_kwargs(kwargs)
dtype = _assert_float_dtype(dtype)
scale = self.scale
fan_in, fan_out = _compute_fans(shape)
if _PARTITION_SHAPE in kwargs:
shape = kwargs[_PARTITION_SHAPE]
if self.mode == "fan_in":
scale /= max(1., fan_in)
elif self.mode == "fan_out":
scale /= max(1., fan_out)
else:
scale /= max(1., (fan_in + fan_out) / 2.)
if self.distribution == "truncated_normal":
# constant from scipy.stats.truncnorm.std(a=-2, b=2, loc=0., scale=1.)
stddev = math.sqrt(scale) / .87962566103423978
return self._random_generator.truncated_normal(shape, 0.0, stddev, dtype)
elif self.distribution == "untruncated_normal":
stddev = math.sqrt(scale)
return self._random_generator.random_normal(shape, 0.0, stddev, dtype)
else:
limit = math.sqrt(3.0 * scale)
return self._random_generator.random_uniform(shape, -limit, limit, dtype)
def get_config(self):
return {
"scale": self.scale,
"mode": self.mode,
"distribution": self.distribution,
"seed": self.seed
}
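# Worked example of the scaling rule above: for a dense weight matrix of
# shape (784, 256), _compute_fans gives fan_in=784 and fan_out=256, so the
# defaults (scale=1.0, mode="fan_in", distribution="truncated_normal") draw
# with stddev = sqrt(1.0 / 784) / 0.8796... ≈ 0.0406, whereas
# distribution="uniform" would use limit = sqrt(3.0 * 1.0 / 784) ≈ 0.0619.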
class Orthogonal(Initializer):
"""Initializer that generates an orthogonal matrix.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
If the shape of the tensor to initialize is two-dimensional, it is initialized
with an orthogonal matrix obtained from the QR decomposition of a matrix of
random numbers drawn from a normal distribution.
If the matrix has fewer rows than columns then the output will have orthogonal
rows. Otherwise, the output will have orthogonal columns.
If the shape of the tensor to initialize is more than two-dimensional,
a matrix of shape `(shape[0] * ... * shape[n - 2], shape[n - 1])`
is initialized, where `n` is the length of the shape vector.
The matrix is subsequently reshaped to give a tensor of the desired shape.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k, k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.initializers.Orthogonal())
>>> v1
<tf.Variable ... shape=(3, 3) ...
>>> v2
<tf.Variable ... shape=(3, 3, 3) ...
>>> make_variables(4, tf.initializers.Orthogonal(gain=0.5))
(<tf.Variable ... shape=(4, 4) dtype=float32...
<tf.Variable ... shape=(4, 4, 4) dtype=float32...
Args:
gain: multiplicative factor to apply to the orthogonal matrix
seed: A Python integer. Used to create random seeds. See
`tf.random.set_seed` for behavior.
References:
[Saxe et al., 2014](https://openreview.net/forum?id=_wzZwKpTDF_9C)
([pdf](https://arxiv.org/pdf/1312.6120.pdf))
"""
def __init__(self, gain=1.0, seed=None):
self.gain = gain
self.seed = seed
self._random_generator = _RandomGenerator(seed)
def __call__(self, shape, dtype=dtypes.float32, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only floating point types are
supported.
**kwargs: Additional keyword arguments.
Raises:
ValueError: If the dtype is not floating point or the input shape is not
valid.
"""
self._validate_kwargs(kwargs, support_partition=False)
dtype = _assert_float_dtype(dtype)
# Check the shape
if len(shape) < 2:
raise ValueError("The tensor to initialize, specified by argument `shape`"
" must be at least two-dimensional. Received shape="
f"{shape}")
# Flatten the input shape with the last dimension remaining
# its original shape so it works for conv2d
num_rows = 1
for dim in shape[:-1]:
num_rows *= dim
num_cols = shape[-1]
flat_shape = (max(num_cols, num_rows), min(num_cols, num_rows))
# Generate a random matrix
a = self._random_generator.random_normal(flat_shape, dtype=dtype)
# Compute the qr factorization
q, r = gen_linalg_ops.qr(a, full_matrices=False)
# Make Q uniform
d = array_ops.diag_part(r)
q *= math_ops.sign(d)
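# Multiplying each column of q by the sign of the corresponding diagonal
# entry of r fixes the sign ambiguity of the QR factorization, so q is
# uniformly distributed over orthogonal matrices (Haar measure).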
if num_rows < num_cols:
q = array_ops.matrix_transpose(q)
return self.gain * array_ops.reshape(q, shape)
def get_config(self):
return {"gain": self.gain, "seed": self.seed}
class Identity(Initializer):
"""Initializer that generates the identity matrix.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Only usable for generating 2D matrices.
Examples:
>>> def make_variable(k, initializer):
... return tf.Variable(initializer(shape=[k, k], dtype=tf.float32))
>>> make_variable(2, tf.initializers.Identity())
<tf.Variable ... shape=(2, 2) dtype=float32, numpy=
array([[1., 0.],
[0., 1.]], dtype=float32)>
>>> make_variable(3, tf.initializers.Identity(gain=0.5))
<tf.Variable ... shape=(3, 3) dtype=float32, numpy=
array([[0.5, 0. , 0. ],
[0. , 0.5, 0. ],
[0. , 0. , 0.5]], dtype=float32)>
Args:
gain: Multiplicative factor to apply to the identity matrix.
"""
def __init__(self, gain=1.0):
self.gain = gain
def __call__(self, shape, dtype=dtypes.float32, **kwargs):
"""Returns a tensor object initialized as specified by the initializer.
Args:
shape: Shape of the tensor.
dtype: Optional dtype of the tensor. Only floating point types are
supported.
**kwargs: Additional keyword arguments.
Raises:
ValueError: If the dtype is not floating point
ValueError: If the requested shape does not have exactly two axes.
"""
self._validate_kwargs(kwargs, support_partition=False)
dtype = _assert_float_dtype(dtype)
if len(shape) != 2:
raise ValueError("The tensor to initialize, specified by argument `shape`"
" must be at least two-dimensional. Received shape="
f"{shape}")
initializer = linalg_ops_impl.eye(*shape, dtype=dtype)
return self.gain * initializer
def get_config(self):
return {"gain": self.gain}
class GlorotUniform(VarianceScaling):
"""The Glorot uniform initializer, also called Xavier uniform initializer.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Draws samples from a uniform distribution within [-limit, limit] where `limit`
is `sqrt(6 / (fan_in + fan_out))` where `fan_in` is the number of input units
in the weight tensor and `fan_out` is the number of output units in the weight
tensor.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k, k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.initializers.GlorotUniform())
>>> v1
<tf.Variable ... shape=(3, 3) ...
>>> v2
<tf.Variable ... shape=(3, 3, 3) ...
>>> make_variables(4, tf.initializers.RandomNormal())
(<tf.Variable ... shape=(4, 4) dtype=float32...
<tf.Variable ... shape=(4, 4, 4) dtype=float32...
Args:
seed: A Python integer. Used to create random seeds. See
`tf.random.set_seed` for behavior.
References:
[Glorot et al., 2010](http://proceedings.mlr.press/v9/glorot10a.html)
([pdf](http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf))
"""
def __init__(self, seed=None):
super(GlorotUniform, self).__init__(
scale=1.0,
mode="fan_avg",
distribution="uniform",
seed=seed)
def get_config(self):
return {"seed": self.seed}
class GlorotNormal(VarianceScaling):
"""The Glorot normal initializer, also called Xavier normal initializer.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Draws samples from a truncated normal distribution centered on 0 with `stddev
= sqrt(2 / (fan_in + fan_out))` where `fan_in` is the number of input units in
the weight tensor and `fan_out` is the number of output units in the weight
tensor.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k, k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.initializers.GlorotNormal())
>>> v1
<tf.Variable ... shape=(3, 3) ...
>>> v2
<tf.Variable ... shape=(3, 3, 3) ...
>>> make_variables(4, tf.initializers.RandomNormal())
(<tf.Variable ... shape=(4, 4) dtype=float32...
<tf.Variable ... shape=(4, 4, 4) dtype=float32...
Args:
seed: A Python integer. Used to create random seeds. See
`tf.random.set_seed` for behavior.
References:
[Glorot et al., 2010](http://proceedings.mlr.press/v9/glorot10a.html)
([pdf](http://jmlr.org/proceedings/papers/v9/glorot10a/glorot10a.pdf))
"""
def __init__(self, seed=None):
super(GlorotNormal, self).__init__(
scale=1.0,
mode="fan_avg",
distribution="truncated_normal",
seed=seed)
def get_config(self):
return {"seed": self.seed}
# Aliases.
# pylint: disable=invalid-name
zeros_initializer = Zeros
ones_initializer = Ones
constant_initializer = Constant
random_uniform_initializer = RandomUniform
random_normal_initializer = RandomNormal
truncated_normal_initializer = TruncatedNormal
variance_scaling_initializer = VarianceScaling
glorot_uniform_initializer = GlorotUniform
glorot_normal_initializer = GlorotNormal
orthogonal_initializer = Orthogonal
identity_initializer = Identity
# pylint: enable=invalid-name
def lecun_normal(seed=None):
"""LeCun normal initializer.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Draws samples from a truncated normal distribution centered on 0 with `stddev
= sqrt(1 / fan_in)` where `fan_in` is the number of input units in the weight
tensor.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k, k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.initializers.lecun_normal())
>>> v1
<tf.Variable ... shape=(3, 3) ...
>>> v2
<tf.Variable ... shape=(3, 3, 3) ...
>>> make_variables(4, tf.initializers.RandomNormal())
(<tf.Variable ... shape=(4, 4) dtype=float32...
<tf.Variable ... shape=(4, 4, 4) dtype=float32...
Args:
seed: A Python integer. Used to seed the random generator.
Returns:
A callable Initializer with `shape` and `dtype` arguments which generates a
tensor.
References:
- Self-Normalizing Neural Networks,
[Klambauer et al., 2017]
(https://papers.nips.cc/paper/6698-self-normalizing-neural-networks)
([pdf]
(https://papers.nips.cc/paper/6698-self-normalizing-neural-networks.pdf))
- Efficient Backprop,
[Lecun et al., 1998](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)
"""
return VarianceScaling(
scale=1., mode="fan_in", distribution="truncated_normal", seed=seed)
def lecun_uniform(seed=None):
"""LeCun uniform initializer.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Draws samples from a uniform distribution within [-limit, limit] where `limit`
is `sqrt(3 / fan_in)` where `fan_in` is the number of input units in the
weight tensor.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k, k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.initializers.lecun_uniform())
>>> v1
<tf.Variable ... shape=(3, 3) ...
>>> v2
<tf.Variable ... shape=(3, 3, 3) ...
>>> make_variables(4, tf.initializers.RandomNormal())
(<tf.Variable ... shape=(4, 4) dtype=float32...
<tf.Variable ... shape=(4, 4, 4) dtype=float32...
Args:
seed: A Python integer. Used to seed the random generator.
Returns:
A callable Initializer with `shape` and `dtype` arguments which generates a
tensor.
References:
- Self-Normalizing Neural Networks,
[Klambauer et al., 2017](https://papers.nips.cc/paper/6698-self-normalizing-neural-networks) # pylint: disable=line-too-long
([pdf](https://papers.nips.cc/paper/6698-self-normalizing-neural-networks.pdf))
- Efficient Backprop,
[Lecun et al., 1998](http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf)
"""
return VarianceScaling(
scale=1., mode="fan_in", distribution="uniform", seed=seed)
def he_normal(seed=None):
"""He normal initializer.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
It draws samples from a truncated normal distribution centered on 0 with
`stddev = sqrt(2 / fan_in)` where `fan_in` is the number of input units in the
weight tensor.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k, k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.initializers.he_normal())
>>> v1
<tf.Variable ... shape=(3, 3) ...
>>> v2
<tf.Variable ... shape=(3, 3, 3) ...
>>> make_variables(4, tf.initializers.RandomNormal())
(<tf.Variable ... shape=(4, 4) dtype=float32...
<tf.Variable ... shape=(4, 4, 4) dtype=float32...
Args:
seed: A Python integer. Used to seed the random generator.
Returns:
A callable Initializer with `shape` and `dtype` arguments which generates a
tensor.
References:
[He et al., 2015](https://www.cv-foundation.org/openaccess/content_iccv_2015/html/He_Delving_Deep_into_ICCV_2015_paper.html) # pylint: disable=line-too-long
([pdf](https://www.cv-foundation.org/openaccess/content_iccv_2015/papers/He_Delving_Deep_into_ICCV_2015_paper.pdf))
"""
return VarianceScaling(
scale=2., mode="fan_in", distribution="truncated_normal", seed=seed)
def he_uniform(seed=None):
"""He uniform variance scaling initializer.
Initializers allow you to pre-specify an initialization strategy, encoded in
the Initializer object, without knowing the shape and dtype of the variable
being initialized.
Draws samples from a uniform distribution within [-limit, limit] where `limit`
is `sqrt(6 / fan_in)` where `fan_in` is the number of input units in the
weight tensor.
Examples:
>>> def make_variables(k, initializer):
... return (tf.Variable(initializer(shape=[k, k], dtype=tf.float32)),
... tf.Variable(initializer(shape=[k, k, k], dtype=tf.float32)))
>>> v1, v2 = make_variables(3, tf.initializers.he_uniform())
>>> v1
<tf.Variable ... shape=(3, 3) ...
>>> v2
<tf.Variable ... shape=(3, 3, 3) ...
>>> make_variables(4, tf.initializers.RandomNormal())
(<tf.Variable ... shape=(4, 4) dtype=float32...
<tf.Variable ... shape=(4, 4, 4) dtype=float32...
Args:
seed: A Python integer. Used to seed the random generator.
Returns:
A callable Initializer with `shape` and `dtype` arguments which generates a
tensor.
References:
[He et al., 2015](https://www.cv-foundation.org/openaccess/content_iccv_2015/html/He_Delving_Deep_into_ICCV_2015_paper.html) # pylint: disable=line-too-long
([pdf](https://www.cv-foundation.org/openaccess/content_iccv_2015/papers/He_Delving_Deep_into_ICCV_2015_paper.pdf))
"""
return VarianceScaling(
scale=2., mode="fan_in", distribution="uniform", seed=seed)
# Utility functions.
def _assert_float_dtype(dtype):
"""Validate and return floating point type based on `dtype`.
`dtype` must be a floating point type.
Args:
dtype: The data type to validate.
Returns:
Validated type.
Raises:
ValueError: if `dtype` is not a floating point type.
"""
dtype = dtypes.as_dtype(dtype)
if not dtype.is_floating:
raise ValueError("Argument `dtype` is expected to be floating point. "
f"Received: {dtype}.")
return dtype
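# For example, _assert_float_dtype("float32") returns the float32 dtype,
# while _assert_float_dtype(dtypes.int32) raises ValueError.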
class _RandomGenerator:
"""Random generator that selects appropriate random ops."""
def __init__(self, seed=None):
super(_RandomGenerator, self).__init__()
if seed is not None:
# Stateless random ops requires 2-int seed.
self.seed = [seed, 0]
else:
self.seed = None
def random_normal(self, shape, mean=0.0, stddev=1, dtype=dtypes.float32):
"""A deterministic random normal if seed is passed."""
if self.seed:
op = stateless_random_ops.stateless_random_normal
else:
op = random_ops.random_normal
return op(
shape=shape, mean=mean, stddev=stddev, dtype=dtype, seed=self.seed)
def random_uniform(self, shape, minval, maxval, dtype):
"""A deterministic random uniform if seed is passed."""
if self.seed:
op = stateless_random_ops.stateless_random_uniform
else:
op = random_ops.random_uniform
return op(
shape=shape, minval=minval, maxval=maxval, dtype=dtype, seed=self.seed)
def truncated_normal(self, shape, mean, stddev, dtype):
"""A deterministic truncated normal if seed is passed."""
if self.seed:
op = stateless_random_ops.stateless_truncated_normal
else:
op = random_ops.truncated_normal
return op(
shape=shape, mean=mean, stddev=stddev, dtype=dtype, seed=self.seed)
# Compatibility aliases
# pylint: disable=invalid-name
zero = zeros = Zeros
one = ones = Ones
constant = Constant
uniform = random_uniform = RandomUniform
normal = random_normal = RandomNormal
truncated_normal = TruncatedNormal
identity = Identity
orthogonal = Orthogonal
glorot_normal = GlorotNormal
glorot_uniform = GlorotUniform
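# A quick sanity-check sketch, assuming the public `tensorflow` package is
# installed (the names mirror the aliases exported above):
#
#     import tensorflow as tf
#     init = tf.initializers.GlorotUniform(seed=1)
#     w = init(shape=(784, 256))
#     # limit = sqrt(6 / (784 + 256)) ≈ 0.0760, so every entry lies in
#     # the open interval (-0.0760, 0.0760)
#     assert float(tf.reduce_max(tf.abs(w))) < 0.0760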
/lmflow-evaluator-0.0.3.tar.gz/lmflow-evaluator-0.0.3/src/lmflow/pipeline/raft_aligner.py
import logging
import numpy as np
import os
import sys
import time
from itertools import chain
import torch
import torch.distributed as dist
import transformers
from datasets import (
set_caching_enabled,
Dataset,
DatasetDict,
)
from transformers import (
default_data_collator,
pipeline,
set_seed,
)
from transformers.testing_utils import CaptureLogger
from lmflow.args import DatasetArguments
from lmflow.datasets.dataset import Dataset as LMFlowDataset
from lmflow.pipeline.base_aligner import BaseAligner
from lmflow.pipeline.utils.raft_trainer import RaftTrainer
logger = logging.getLogger(__name__)
class RaftAligner(BaseAligner):
"""
Initializes the `RaftAligner` class with given arguments.
Parameters
------------
model_args : ModelArguments object.
Contains the arguments required to load the model.
data_args : DatasetArguments object.
Contains the arguments required to load the dataset.
raft_aligner_args : RaftAlignerArguments object.
Contains the arguments required to perform alignment.
args : Optional.
Positional arguments.
kwargs : Optional.
Keyword arguments.
"""
def __init__(self, model_args, data_args, aligner_args, *args, **kwargs):
self.model_args = model_args
self.data_args = data_args
self.aligner_args = aligner_args
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
handlers=[logging.StreamHandler(sys.stdout)],
)
logger.setLevel(logging.INFO)
output_reward_path = aligner_args.output_reward_path
if output_reward_path is not None:
os.makedirs(os.path.dirname(output_reward_path), exist_ok=True)
# Delete the output file if it already exists
try:
os.remove(output_reward_path)
except OSError:
pass
def _initialize_trainer(self, model, tokenizer, training_args):
"""
This function takes the model and tokenizer as input and initializes the trainer.
"""
trainer = RaftTrainer(
model=model,
args=training_args,
train_dataset=Dataset.from_dict({"text": [ " " ] }),
eval_dataset=Dataset.from_dict({}),
tokenizer=tokenizer,
data_collator=default_data_collator,
compute_metrics=None,
preprocess_logits_for_metrics=None,
)
return trainer
def _load_dataset(
self,
selected_dataset,
model,
tokenizer,
model_args,
data_args,
training_args,
):
"""
This function prepares the dataset for every iteration.
"""
raw_datasets = selected_dataset
if training_args.do_train:
column_names = list(raw_datasets["train"].features)
else:
column_names = list(raw_datasets["validation"].features)
text_column_name = "text" if "text" in column_names else column_names[0]
# Since this function will be pickled (to avoid a _LazyModule error in Hasher), force logger loading before tokenize_function.
tok_logger = transformers.utils.logging.get_logger("transformers.tokenization_utils_base")
def tokenize_function(examples):
with CaptureLogger(tok_logger) as cl:
output = tokenizer(examples[text_column_name])
# clm input could be much much longer than block_size
if "Token indices sequence length is longer than the" in cl.out:
tok_logger.warning(
"^^^^^^^^^^^^^^^^ Please ignore the warning above - this long input will be chunked into smaller bits"
" before being passed to the model."
)
return output
with training_args.main_process_first(desc="dataset map tokenization"):
if not data_args.streaming:
tokenized_datasets = raw_datasets.map(
tokenize_function,
batched=True,
num_proc=data_args.preprocessing_num_workers,
remove_columns=column_names,
load_from_cache_file=not data_args.overwrite_cache,
desc="Running tokenizer on dataset",
)
else:
tokenized_datasets = raw_datasets.map(
tokenize_function,
batched=True,
remove_columns=column_names,
)
if data_args.block_size is None:
block_size = tokenizer.model_max_length
if block_size > 1024:
logger.warning(
"The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value"
" of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can"
" override this default with `--block_size xxx`."
)
block_size = 512
else:
if data_args.block_size > tokenizer.model_max_length:
logger.warning(
f"The block_size passed ({data_args.block_size}) is larger than the maximum length for the model"
f"({tokenizer.model_max_length}). Using block_size={tokenizer.model_max_length}."
)
block_size = min(data_args.block_size, tokenizer.model_max_length)
# Main data processing function that will concatenate all texts from our dataset and generate chunks of block_size.
def group_texts(examples):
# Concatenate all texts.
concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()}
total_length = len(concatenated_examples[list(examples.keys())[0]])
# We drop the small remainder, we could add padding if the model supported it instead of this drop, you can
# customize this part to your needs.
if total_length >= block_size:
total_length = (total_length // block_size) * block_size
# Split by chunks of max_len.
result = {
k: [t[i : i + block_size] for i in range(0, total_length, block_size)]
for k, t in concatenated_examples.items()
}
result["labels"] = result["input_ids"].copy()
return result
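# For example, with block_size=512 a batch whose concatenated length is
# 2,500 tokens yields 4 chunks of 512 tokens; the trailing 452 tokens are
# dropped by the truncation above.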
# Note that with `batched=True`, this map processes 1,000 texts together, so group_texts throws away a remainder
# for each of those groups of 1,000 texts. You can adjust that batch_size here but a higher value might be slower
# to preprocess.
#
# To speed up this part, we use multiprocessing. See the documentation of the map method for more information:
# https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.map
with training_args.main_process_first(desc="grouping texts together"):
group_batch_size = 1000
if data_args.disable_group_texts:
group_batch_size = 1
if not data_args.streaming:
lm_datasets = tokenized_datasets.map(
group_texts,
batched=True,
batch_size=group_batch_size,
num_proc=data_args.preprocessing_num_workers,
load_from_cache_file=not data_args.overwrite_cache,
desc=f"Grouping texts in chunks of {block_size}",
)
else:
lm_datasets = tokenized_datasets.map(
group_texts,
batched=True,
batch_size=group_batch_size,
)
if training_args.do_train:
if "train" not in tokenized_datasets:
raise ValueError("--do_train requires a train dataset")
train_dataset = lm_datasets["train"]
if data_args.max_train_samples is not None:
max_train_samples = min(len(train_dataset), data_args.max_train_samples)
train_dataset = train_dataset.select(range(max_train_samples))
return train_dataset
def _load_input_dataset(self, dataset, tokenizer):
"""
Load input dataset (i.e. prompt/question dataset) for training.
Args:
dataset: A Dataset object.
The dataset to be loaded.
Returns:
dataloader (`torch.utils.data.DataLoader`):
The dataloader for the dataset.
"""
ds = dataset.get_backend_dataset()
def tokenize(sample):
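# Note: only the first 30 *characters* of each prompt (not 30 tokens) are
# encoded and kept as the generation prefix.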
sample["input_ids"] = tokenizer.encode(sample["text"][:30])
sample['input'] = tokenizer.decode(sample["input_ids"])
return sample
ds = ds.map(tokenize, batched=False)
ds.set_format(type='torch')
return ds
def _get_batch_dataset_top(
self,
model,
batch_input,
alpha=0.2,
iter_id=0,
local_rank=0,
output_min_length=16,
output_max_length=48,
infer_batch_size=8,
generation_kwargs={},
tokenizer=None,
training_args=None,
reward_model=None,
output_reward_path=None,
):
"""
:param batch_input: input prompts
"""
# we will get the batch dataset via Dataset.from_dict
start_time = time.time()
output_data = []
query_tensors = batch_input['input_ids']
querys = batch_input['input']
data_size = len(querys)
cnt = 0
reward_eva = []
reward_train = []
out_put_dataset_eval = {}
data_eval = []
input_texts = []
responses = []
for i, query_tensor in enumerate(query_tensors):
query = querys[i]
input_texts.append(query)
if (i + 1) % infer_batch_size == 0:
gen_len = np.random.randint(output_min_length, output_max_length)
generation_kwargs["max_new_tokens"] = gen_len
inputs = tokenizer(input_texts, return_tensors="pt", padding=True).to(training_args.device)
with torch.no_grad():
outputs = model.generate(**inputs, **generation_kwargs)
generated_texts = tokenizer.batch_decode(outputs, skip_special_tokens=True)
generated_texts = [
generated_text.replace(input_texts[i], "") for i, generated_text in enumerate(generated_texts)
]
texts_for_rewards = [q + r for q, r in zip(input_texts, generated_texts)]
texts_for_reward_dataset = LMFlowDataset.create_from_dict({
"type": "text_only",
"instances": [
{ "text": text } for text in texts_for_rewards
],
})
reward_dataset = reward_model.inference(texts_for_reward_dataset)
rewards = [ sample["value"] for sample in reward_dataset.to_dict()["instances"] ]
reward_eva.extend(rewards)
responses.extend(generated_texts)
input_texts = []
data = []
idx = np.argsort(reward_eva)[::-1][:int(data_size * alpha)]
for j in range(len(reward_eva)):
sample = {}
sample["input"] = querys[j]
sample["output"] = [responses[j]]
data.append(sample)
output_data = [data[j] for j in idx]
world_size = int(os.getenv("WORLD_SIZE", "1"))
all_process_list = [{}] * world_size
data_to_send = [[data[i], reward_eva[i]] for i in range(len(data))]
dist.all_gather_object(all_process_list, data_to_send)
gathered_data = []
gathered_reward = []
for i in range(world_size):
tmp_data = [tmp[0] for tmp in all_process_list[i]]
gathered_data.extend(tmp_data)
tmp_reward = [tmp[1] for tmp in all_process_list[i]]
gathered_reward.extend(tmp_reward)
idx = np.argsort(gathered_reward)[::-1][:int(len(gathered_reward) * alpha)]
gathered_data = [gathered_data[j] for j in idx]
reward_train = [gathered_reward[j] for j in idx]
logger.info(f"collected data of {len(gathered_data)}")
logger.info([np.mean(gathered_reward), np.mean(reward_train)])
if training_args.local_rank == 0 and output_reward_path is not None:
with open(output_reward_path, mode='a') as fout:
fout.write('mean reward: ' + str(np.mean(gathered_reward)) + '; mean reward in training set: ' + str(np.mean(reward_train)))
fout.write("\n")
prompt_structure = "{definition}{input}{output}"
output_dataset = {
"text": [ prompt_structure.format(
definition="", input=sample["input"], output=sample["output"][0]
) for sample in gathered_data
]
}
return DatasetDict({ "train": Dataset.from_dict(output_dataset) })
def align(self, model, dataset, reward_model):
"""
Perform alignment for a model
Parameters
------------
model : BaseModel object.
dataset: Dataset object.
Input dataset for model to generate outputs. The input and output
will then be feed into reward model to get the reward for
alignment.
reward_model: RegressionModel object.
"""
tokenizer = model.get_tokenizer()
tokenizer.pad_token = tokenizer.eos_token
tokenizer.pad_token_id = tokenizer.eos_token_id
tokenizer.padding_side = "left"
dataset = self._load_input_dataset(dataset, tokenizer)
set_caching_enabled(False)
wrapped_model = model
model = model.get_backend_model()
generation_kwargs = {
"min_length": -1,
"top_k": 0.0,
"top_p": 1.0,
"do_sample": True,
"pad_token_id": tokenizer.eos_token_id,
"temperature":0.7
}
aligner_args = self.aligner_args
training_args = aligner_args
model_args = self.model_args
data_args = self.data_args
set_seed(42 + training_args.local_rank)
ITERATION = aligner_args.num_raft_iteration
M = aligner_args.raft_batch_size
alpha = aligner_args.top_reward_percentage
data_size = len(dataset['input'])
reward_seq = []
lr = training_args.learning_rate
raft_trainer = self._initialize_trainer(model, tokenizer, training_args)
raft_trainer.train(resume_from_checkpoint=False, is_first_time=True)
##############
for iteration in range(ITERATION):
set_seed(88 + training_args.local_rank + 4 * (iteration+1))
batch_input = dataset.select(np.random.randint(low=0, high=data_size, size=M))
selected_dataset = self._get_batch_dataset_top(
raft_trainer.tmp_model,
batch_input,
alpha,
iteration,
training_args.local_rank,
output_min_length=aligner_args.output_min_length,
output_max_length=aligner_args.output_max_length,
infer_batch_size=aligner_args.inference_batch_size_per_device,
generation_kwargs=generation_kwargs,
tokenizer=tokenizer,
training_args=training_args,
reward_model=reward_model,
output_reward_path=aligner_args.output_reward_path,
)
raft_trainer.train_dataset = self._load_dataset(
selected_dataset,
raft_trainer.tmp_model,
tokenizer,
model_args,
data_args,
training_args,
)
logger.info(f"iter {iteration}")
start_time = time.time()
train_result = raft_trainer.train(resume_from_checkpoint=False)
end_time = time.time()
logger.info("It takes %.2f s to train one stage", end_time - start_time)
self._get_batch_dataset_top(
raft_trainer.tmp_model,
batch_input, alpha,
iteration,
training_args.local_rank,
output_min_length=aligner_args.output_min_length,
output_max_length=aligner_args.output_max_length,
infer_batch_size=aligner_args.inference_batch_size_per_device,
generation_kwargs=generation_kwargs,
tokenizer=tokenizer,
training_args=training_args,
reward_model=reward_model,
output_reward_path=aligner_args.output_reward_path,
)
if aligner_args.output_dir is not None:
wrapped_model.save(aligner_args.output_dir)
return wrapped_model
|
PypiClean
|
/py-libtrust-1.0.5.tar.gz/py-libtrust-1.0.5/libtrust/keys/ec_key.py
|
from typing import BinaryIO, Dict, Optional, Type, Union
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature, encode_dss_signature
from cryptography.utils import int_to_bytes
from libtrust.keys.utils import decode_ec_coordinate, encode_ec_coordinate, encode_key_id_from_crypto_key
__all__ = ["ECPublicKey", "ECPrivateKey", "generate_private_key"]
class ECPublicKey:
"""Usage for ECPublicKey:
call ``verify`` to verify the signature for data in buffer.
call ``to_pem/from_pem`` to serialize/deserialize the PEM format encoding.
call ``to_jwk/from_jwk`` to serialize/deserialize the JWK format encoding.
"""
def __init__(self, key: ec.EllipticCurvePublicKey):
if not isinstance(key, ec.EllipticCurvePublicKey):
raise ValueError("`key` is not an EllipticCurvePublicKey")
self._public_key = key
def __eq__(self, other):
if not isinstance(other, ECPublicKey):
return False
return self.crypto_public_key().public_numbers() == other.crypto_public_key().public_numbers()
@classmethod
def key_type(cls) -> str:
return "EC"
def key_id(self) -> str:
return encode_key_id_from_crypto_key(self.crypto_public_key())
def to_pem(self) -> str:
return (
self.crypto_public_key()
.public_bytes(
serialization.Encoding.PEM,
serialization.PublicFormat.SubjectPublicKeyInfo,
)
.decode()
)
@classmethod
def from_pem(cls, pem: Union[str, bytes]) -> "ECPublicKey":
if isinstance(pem, str):
pem = pem.encode()
return cls(serialization.load_pem_public_key(pem, default_backend()))
def crypto_public_key(self) -> ec.EllipticCurvePublicKey:
return self._public_key
def to_jwk(self) -> Dict:
crypto_public_key = self.crypto_public_key()
public_numbers = crypto_public_key.public_numbers()
return {
"kty": self.key_type(),
"kid": self.key_id(),
"crv": self.curve_name(),
"x": encode_ec_coordinate(public_numbers.x, crypto_public_key.curve),
"y": encode_ec_coordinate(public_numbers.y, crypto_public_key.curve),
}
@classmethod
def from_jwk(cls, jwk: Dict) -> "ECPublicKey":
assert jwk["kty"] == cls.key_type()
crv = jwk["crv"]
if crv not in _curve_names_map_to_curves:
raise Exception(f"JWK EC Public Key curve identifier not supported: {crv}")
curve = _curve_names_map_to_curves[crv]()
x = decode_ec_coordinate(jwk["x"], curve)
y = decode_ec_coordinate(jwk["y"], curve)
return cls(ec.EllipticCurvePublicNumbers(x, y, curve).public_key(default_backend()))
def verify(
self,
buffer: BinaryIO,
alg: str,
signature: bytes,
*,
raise_exception: bool = True,
) -> bool:
crypto_public_key = self.crypto_public_key()
curve_name = self.curve_name()
if _curve_names_map_to_alg[curve_name] != alg:
raise Exception(
f"unable to verify signature: "
f"EC Public Key with curve {curve_name} does not support signature algorithm {alg}"
)
expected_octet_length = 2 * ((crypto_public_key.curve.key_size + 7) >> 3)
if expected_octet_length != len(signature):
raise Exception(f"signature length is {len(signature)} octets long, should be {expected_octet_length}")
sig_length = len(signature)
r_bytes, s_bytes = signature[: sig_length // 2], signature[sig_length // 2 :]
r = int.from_bytes(r_bytes, "big")
s = int.from_bytes(s_bytes, "big")
signature = encode_dss_signature(r, s)
hash_algorithm = _hash_algorithm_maps[_curve_names_map_to_alg[curve_name]]
verifier = crypto_public_key.verifier(signature, ec.ECDSA(hash_algorithm))
while True:
d = buffer.read(1024)
if not d:
break
verifier.update(d)
try:
verifier.verify()
return True
except Exception:
if raise_exception:
raise
return False
def curve_name(self) -> str:
return _curves_map_to_curve_names[type(self.crypto_public_key().curve)]
class ECPrivateKey(ECPublicKey):
"""Usage for ECPrivateKey:
call ``sign`` to sign data in buffer.
call ``verify`` to verify the signature for data in buffer.
call ``to_pem/from_pem`` to serialize/deserialize the PEM format encoding.
call ``to_jwk/from_jwk`` to serialize/deserialize the JWK format encoding.
"""
def __init__(self, key: ec.EllipticCurvePrivateKeyWithSerialization):
if not isinstance(key, ec.EllipticCurvePrivateKeyWithSerialization):
raise ValueError("`key` is not an EllipticCurvePrivateKeyWithSerialization")
super(ECPrivateKey, self).__init__(key.public_key())
self._private_key = key
def __eq__(self, other):
if not isinstance(other, ECPrivateKey):
return False
return self.crypto_private_key().private_numbers() == other.crypto_private_key().private_numbers()
def public_key(self) -> ECPublicKey:
return ECPublicKey(self.crypto_public_key())
def crypto_private_key(self) -> ec.EllipticCurvePrivateKeyWithSerialization:
return self._private_key
def to_pem(self) -> str:
return (
self.crypto_private_key()
.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.NoEncryption(),
)
.decode()
)
@classmethod
def from_pem(cls, pem: Union[str, bytes], password: Optional[bytes] = None) -> "ECPrivateKey":
if isinstance(pem, str):
pem = pem.encode()
return cls(serialization.load_pem_private_key(pem, password, default_backend()))
def to_jwk(self) -> Dict:
jwk = super().to_jwk()
jwk["d"] = encode_ec_coordinate(
self.crypto_private_key().private_numbers().private_value,
self.crypto_public_key().curve,
)
return jwk
@classmethod
def from_jwk(cls, jwk: Dict) -> "ECPrivateKey":
assert jwk["kty"] == cls.key_type()
crv = jwk["crv"]
if crv not in _curve_names_map_to_curves:
raise Exception(f"JWK EC Public Key curve identifier not supported: {crv}")
curve = _curve_names_map_to_curves[crv]()
x = decode_ec_coordinate(jwk["x"], curve)
y = decode_ec_coordinate(jwk["y"], curve)
d = decode_ec_coordinate(jwk["d"], curve)
return cls(
ec.EllipticCurvePrivateNumbers(d, ec.EllipticCurvePublicNumbers(x, y, curve)).private_key(
default_backend()
)
)
def sign(self, buffer: BinaryIO, hash_id: hashes.HashAlgorithm):
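# Note: hash_id is accepted for interface symmetry, but the hash algorithm
# actually used is derived from the key's curve below.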
crypto_private_key = self.crypto_private_key()
crypto_algorithm = _curve_names_map_to_alg[self.curve_name()]
hash_algorithm = _hash_algorithm_maps[crypto_algorithm]
signer = crypto_private_key.signer(ec.ECDSA(hash_algorithm))
while True:
d = buffer.read(1024)
if not d:
break
signer.update(d)
r, s = decode_dss_signature(signer.finalize())
r_bytes = int_to_bytes(r)
s_bytes = int_to_bytes(s)
octet_length = (crypto_private_key.curve.key_size + 7) >> 3
r_bytes = b"\x00" * (octet_length - len(r_bytes)) + r_bytes
s_bytes = b"\x00" * (octet_length - len(s_bytes)) + s_bytes
signature = r_bytes + s_bytes
return signature, crypto_algorithm
_curves_map_to_curve_names: Dict[Type[ec.EllipticCurve], str] = {
ec.SECP256R1: "P-256",
ec.SECP384R1: "P-384",
ec.SECP521R1: "P-521",
}
_curve_names_map_to_curves: Dict[str, Type[ec.EllipticCurve]] = {v: k for k, v in _curves_map_to_curve_names.items()}
_curve_names_map_to_alg: Dict[str, str] = {
"P-256": "ES256",
"P-384": "ES384",
"P-521": "ES521",
}
_alg_map_to_curve_names = {v: k for k, v in _curve_names_map_to_alg.items()}
_hash_algorithm_maps: Dict[str, hashes.HashAlgorithm] = {
"ES256": hashes.SHA256(),
"ES384": hashes.SHA384(),
"ES521": hashes.SHA512(),
}
def generate_private_key(curve_name: str = "P-256") -> ECPrivateKey:
if curve_name not in _curve_names_map_to_curves:
raise Exception(f"Does not support curve {curve_name} does not support")
curve = _curve_names_map_to_curves[curve_name]()
return ECPrivateKey(ec.generate_private_key(curve, default_backend()))
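# Hedged usage sketch (assumes the cryptography version pinned by this
# package, which still provides the streaming signer/verifier API):
#   import io
#   private_key = generate_private_key("P-256")
#   payload = b"example payload"
#   signature, alg = private_key.sign(io.BytesIO(payload), hashes.SHA256())
#   assert private_key.public_key().verify(io.BytesIO(payload), alg, signature)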
|
PypiClean
|
/appthreat_vulnerability_db-5.2.5-py3-none-any.whl/vdb/lib/osv.py
|
from zipfile import ZipFile
import httpx
from vdb.lib import CustomNamedTemporaryFile, config
from vdb.lib.nvd import NvdSource
from vdb.lib.utils import (
compress_str,
convert_score_severity,
get_cvss3_from_vector,
get_default_cve_data,
parse_purl,
)
# Size of the stream to read and write to the file
download_chunk_size = 4096
try:
import orjson
ORJSON_AVAILABLE = True
except ImportError:
import json
ORJSON_AVAILABLE = False
json_lib = orjson if ORJSON_AVAILABLE else json
vendor_overrides = {
"apk": "alpine",
"deb": "debian",
"go": "golang",
"crates.io": "crates",
}
class OSVSource(NvdSource):
"""OSV CVE source"""
def download_all(self, local_store=True):
"""Download all cve data"""
# For performance do not retain the whole data in-memory
# See: https://github.com/AppThreat/vulnerability-db/issues/27
data_list = []
for lang, url in config.osv_url_dict.items():
data = self.fetch(url)
if not data:
continue
if local_store:
self.store(data)
return data_list
def download_recent(self, local_store=True):
raise NotImplementedError
def fetch(self, url):
ret_data = []
client = httpx.Client(http2=True, follow_redirects=True, timeout=180)
with CustomNamedTemporaryFile() as tf:
try:
with client.stream("GET", url) as r:
for chunk in r.iter_bytes(chunk_size=download_chunk_size):
tf.write(chunk)
tf.flush()
except Exception:
return ret_data
with ZipFile(tf.name, "r") as zipfp:
for zf in zipfp.namelist():
if zf.endswith(".json"):
with zipfp.open(zf) as jsonf:
cve_data = jsonf.read()
try:
json_data = json_lib.loads(cve_data)
ret_data += self.convert(json_data)
except Exception:
pass
return ret_data
def convert(self, cve_data):
if cve_data.get("withdrawn"):
return []
return self.to_vuln(cve_data)
def to_vuln(self, cve_data):
ret_data = []
if not cve_data.get("affected"):
return ret_data
cve_id = cve_data.get("id")
cwe_id = ""
edition = "*"
cve_references = cve_data.get("references", [])
aliases = cve_data.get("aliases", [])
aliases_block = ""
if aliases and len(aliases) > 1:
aliases_block = """
## Related CVE(s)
{}
""".format(
", ".join(aliases)
)
description = """# {}
{}
{}
""".format(
cve_data.get("summary", "Summary"),
cve_data.get("details", ""),
aliases_block,
)
if "** DISPUTED **" in description or "** REJECT **" in description:
return ret_data
references = []
# Change the key from type to name in references
for aref in cve_references:
references.append({"name": aref.get("type", "url"), "url": aref.get("url")})
references = json_lib.dumps(references)
if isinstance(references, bytes):
references = references.decode("utf-8", "ignore")
# Try to locate the CVE id from the aliases section
if not cve_id.startswith("CVE") and not cve_id.startswith("RUSTSEC"):
for i in aliases:
if not i.startswith("OSV"):
cve_id = i
break
assigner = "OSV"
vectorString = ""
if cve_id.startswith("GHSA"):
assigner = "@github"
elif cve_id.startswith("CVE"):
assigner = "[email protected]"
elif cve_id.startswith("NPM"):
assigner = "@npm"
severity = "LOW"
if cve_data.get("severity"):
severity_list = cve_data.get("severity")
for sv in severity_list:
if sv["type"] == "CVSS_V3":
vectorString = sv["score"]
# Issue 58
cve_database_specific = cve_data.get("database_specific")
cve_ecosystem_specific = cve_data.get("ecosystem_specific")
if cve_database_specific:
if cve_database_specific.get("severity"):
severity = cve_database_specific.get("severity")
if cve_database_specific.get("cwe_ids"):
cwes = cve_database_specific.get("cwe_ids")
if isinstance(cwes, list):
cwe_id = ",".join(cwes)
if cve_ecosystem_specific and cve_ecosystem_specific.get("severity"):
severity = cve_ecosystem_specific.get("severity")
for pkg_data in cve_data.get("affected"):
if pkg_data.get("ecosystem_specific"):
ecosystem_specific = pkg_data.get("ecosystem_specific")
if ecosystem_specific.get("severity"):
severity = ecosystem_specific.get("severity")
if pkg_data.get("database_specific"):
database_specific = pkg_data.get("database_specific")
if database_specific.get("cwes"):
cwes = database_specific.get("cwes")
if isinstance(cwes, list):
cwe_id = cwes[0].get("cweId")
if database_specific.get("cvss"):
cvss = database_specific.get("cvss")
if isinstance(cvss, dict):
if cvss.get("severity"):
severity = cvss.get("severity", "").upper()
if not vectorString and cvss.get("vectorString"):
vectorString = cvss.get("vectorString")
if cvss.get("score"):
score = cvss.get("score")
severity = convert_score_severity(score)
if vectorString:
cvss3_obj = get_cvss3_from_vector(vectorString)
score = cvss3_obj.get("baseScore")
severity = cvss3_obj.get("baseSeverity")
exploitabilityScore = cvss3_obj.get("temporalScore")
attackComplexity = cvss3_obj.get("attackComplexity")
else:
score, severity, dvectorString, attackComplexity = get_default_cve_data(
severity
)
# Set the default vector string only if unavailable
if not vectorString and dvectorString:
vectorString = dvectorString
exploitabilityScore = score
ranges = pkg_data.get("ranges", [])
vendor_ecosystem = pkg_data.get("package", {}).get("ecosystem", "").lower()
vendor = vendor_ecosystem
pkg_name = pkg_data.get("package", {}).get("name", "")
pkg_name_list = []
purl = parse_purl(pkg_data.get("package", {}).get("purl", ""))
if purl:
if purl.get("namespace"):
vendor = purl["namespace"]
if purl.get("name"):
pkg_name = purl["name"]
if ":" in pkg_name:
tmpA = pkg_name.split(":")
if len(tmpA) == 2:
vendor = tmpA[0]
pkg_name = tmpA[-1]
pkg_name_list.append(pkg_name)
# For OS packages (such as alpine), OSV appends the OS version to the vendor.
# Remove it here and add it to the package name instead.
if ":" in vendor_ecosystem and (
"alpine" in vendor
or "debian" in vendor
or "almalinux" in vendor
or "rocky" in vendor
):
tmpV = vendor_ecosystem.split(":")
vendor = tmpV[0].lower().replace(" ", "").replace("-", "")
vdistro = tmpV[1]
if vendor == "alpine":
vdistro = vdistro.replace("v", "")
# In os-release, ID for rockylinux is simply rocky
if "rocky" in vendor:
vendor = vendor.replace("linux", "")
edition = f"{vendor}-{vdistro}"
# Only use the precise version for os packages
if (
"debian" in vendor
or "alpine" in vendor
or "almalinux" in vendor
or "rocky" in vendor
):
pkg_name_list = [f"{edition}/{pkg_name}"]
else:
pkg_name_list.append(f"{edition}/{pkg_name}")
# Substitute alpine for apk and debian for deb
if vendor_overrides.get(vendor):
vendor = vendor_overrides.get(vendor)
for r in ranges:
events = r.get("events")
versions_list = r.get("versions", [])
version_end_including = ""
version_start_excluding = ""
version_end_excluding = ""
fix_version_start_including = ""
fix_version_end_including = ""
fix_version_start_excluding = ""
fix_version_end_excluding = ""
version_start_including = ""
if versions_list:
version_start_including = versions_list[0]
if (
len(versions_list) > 1
and version_start_including != versions_list[-1]
):
version_end_including = versions_list[-1]
for ev in events:
# Reset all versions for introduced event
if ev.get("introduced"):
version_end_including = ""
version_start_excluding = ""
version_end_excluding = ""
fix_version_start_including = ""
fix_version_end_including = ""
fix_version_start_excluding = ""
fix_version_end_excluding = ""
version_start_including = ev.get("introduced").split(":")[-1]
if ev.get("fixed"):
fix_version_start_including = ev.get("fixed").split(":")[-1]
fix_version_end_including = ev.get("fixed").split(":")[-1]
version_end_excluding = ev.get("fixed").split(":")[-1]
if ev.get("last_affected"):
version_end_including = ev.get("last_affected").split(":")[-1]
if ev.get("limit"):
version_end_excluding = ev.get("limit").split(":")[-1]
# Create an entry for each introduced + fixed/limit event
if version_start_including and (
fix_version_start_including
or version_end_including
or version_end_excluding
or len(events) == 1
):
for full_pkg in pkg_name_list:
tdata = config.CVE_TPL % dict(
cve_id=cve_id,
cwe_id=cwe_id,
assigner=assigner,
references=references,
description="",
vectorString=vectorString,
vendor=vendor,
product=full_pkg,
version="*",
edition=edition,
version_start_including=version_start_including,
version_end_including=version_end_including,
version_start_excluding=version_start_excluding,
version_end_excluding=version_end_excluding,
fix_version_start_including=fix_version_start_including,
fix_version_end_including=fix_version_end_including,
fix_version_start_excluding=fix_version_start_excluding,
fix_version_end_excluding=fix_version_end_excluding,
severity=severity,
attackComplexity=attackComplexity,
score=score,
exploitabilityScore=exploitabilityScore,
publishedDate=cve_data.get("published"),
lastModifiedDate=cve_data.get("modified"),
)
try:
vuln = NvdSource.convert_vuln(json_lib.loads(tdata))
vuln.description = compress_str(description)
ret_data.append(vuln)
except Exception:
pass
return ret_data
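# Hedged usage sketch: download and convert every OSV advisory configured in
# config.osv_url_dict, storing the results locally (download_all deliberately
# returns an empty list to keep memory usage low, see the comment above):
#   source = OSVSource()
#   source.download_all(local_store=True)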
|
PypiClean
|
/jsonschema_validator_new-0.1.0-py3-none-any.whl/jsonschema_validator/json_schema.py
|
from typing import Any, Dict, List, Literal, TypedDict, Union
CORE_SCHEMA_META_SCHEMA_DEFAULT = True
"""Default value of the field path 'JSONSchema'"""
JSONSchema = Union["JSONSchemaItem", bool]
"""
Core schema meta-schema.
default: True
"""
# default: True
JSONSchemaItem = TypedDict(
"JSONSchemaItem",
{
# format: uri-reference
"$id": str,
# format: uri
"$schema": str,
# format: uri-reference
"$ref": str,
"$comment": str,
"title": str,
"description": str,
"default": Any,
# default: False
"readOnly": bool,
# default: False
"writeOnly": bool,
"examples": List[Any],
# exclusiveMinimum: 0
"multipleOf": Union[int, float],
"maximum": Union[int, float],
"exclusiveMaximum": Union[int, float],
"minimum": Union[int, float],
"exclusiveMinimum": Union[int, float],
"maxLength": "_NonNegativeInteger",
"minLength": "_NonNegativeIntegerDefault0",
# format: regex
"pattern": str,
"additionalItems": "JSONSchema",
# default: True
"items": Union["JSONSchema", "_SchemaArray"],
"maxItems": "_NonNegativeInteger",
"minItems": "_NonNegativeIntegerDefault0",
# default: False
"uniqueItems": bool,
"contains": "JSONSchema",
"maxProperties": "_NonNegativeInteger",
"minProperties": "_NonNegativeIntegerDefault0",
"required": "_StringArray",
"additionalProperties": "JSONSchema",
# default:
# {}
"definitions": Dict[str, "JSONSchema"],
# default:
# {}
"properties": Dict[str, "JSONSchema"],
# propertyNames:
# format: regex
# default:
# {}
"patternProperties": Dict[str, "JSONSchema"],
"dependencies": Dict[str, Union["JSONSchema", "_StringArray"]],
"propertyNames": "JSONSchema",
"const": Any,
# minItems: 1
# uniqueItems: True
"enum": List[Any],
"type": Union["_SimpleTypes", "_CoreSchemaMetaSchemaObjectTypeAnyof1"],
"format": str,
"contentMediaType": str,
"contentEncoding": str,
"if": "JSONSchema",
"then": "JSONSchema",
"else": "JSONSchema",
"allOf": "_SchemaArray",
"anyOf": "_SchemaArray",
"oneOf": "_SchemaArray",
"not": "JSONSchema",
},
total=False,
)
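# Hedged usage sketch (_EXAMPLE_SCHEMA is illustrative only): a literal that
# type-checks against the JSONSchemaItem definition above.
#   _EXAMPLE_SCHEMA: JSONSchema = {
#       "type": "object",
#       "properties": {"name": {"type": "string"}},
#       "required": ["name"],
#   }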
_CORE_SCHEMA_META_SCHEMA_OBJECT_DEFINITIONS_DEFAULT: Dict[str, Any] = {}
"""Default value of the field path 'Core schema meta-schema object definitions'"""
_CORE_SCHEMA_META_SCHEMA_OBJECT_ITEMS_DEFAULT = True
"""Default value of the field path 'Core schema meta-schema object items'"""
_CORE_SCHEMA_META_SCHEMA_OBJECT_PATTERNPROPERTIES_DEFAULT: Dict[str, Any] = {}
"""Default value of the field path 'Core schema meta-schema object patternProperties'"""
_CORE_SCHEMA_META_SCHEMA_OBJECT_PROPERTIES_DEFAULT: Dict[str, Any] = {}
"""Default value of the field path 'Core schema meta-schema object properties'"""
_CORE_SCHEMA_META_SCHEMA_OBJECT_READONLY_DEFAULT = False
"""Default value of the field path 'Core schema meta-schema object readOnly'"""
_CORE_SCHEMA_META_SCHEMA_OBJECT_REQUIRED_DEFAULT: List[Any] = []
"""Default value of the field path 'Core schema meta-schema object required'"""
_CORE_SCHEMA_META_SCHEMA_OBJECT_UNIQUEITEMS_DEFAULT = False
"""Default value of the field path 'Core schema meta-schema object uniqueItems'"""
_CORE_SCHEMA_META_SCHEMA_OBJECT_WRITEONLY_DEFAULT = False
"""Default value of the field path 'Core schema meta-schema object writeOnly'"""
_CoreSchemaMetaSchemaObjectTypeAnyof1 = List["_SimpleTypes"]
"""
minItems: 1
uniqueItems: True
"""
_NON_NEGATIVE_INTEGER_DEFAULT0_ALLOF1_DEFAULT = 0
"""Default value of the field path 'non negative integer default0 allof1'"""
_NonNegativeInteger = int
"""minimum: 0"""
_NonNegativeIntegerDefault0 = Union["_NonNegativeInteger", "_NonNegativeIntegerDefault0Allof1"]
"""
WARNING: PEP 544 does not support an Intersection type,
so `allOf` is interpreted as a `Union` for now.
See: https://github.com/camptocamp/jsonschema-gentypes/issues/8
"""
_NonNegativeIntegerDefault0Allof1 = int
"""default: 0"""
_STRING_ARRAY_DEFAULT: List[Any] = []
"""Default value of the field path 'string array'"""
_SchemaArray = List["JSONSchema"]
"""minItems: 1"""
_SimpleTypes = Union[
Literal["array"],
Literal["boolean"],
Literal["integer"],
Literal["null"],
Literal["number"],
Literal["object"],
Literal["string"],
]
_SIMPLETYPES_ARRAY: Literal["array"] = "array"
"""The values for the '_SimpleTypes' enum"""
_SIMPLETYPES_BOOLEAN: Literal["boolean"] = "boolean"
"""The values for the '_SimpleTypes' enum"""
_SIMPLETYPES_INTEGER: Literal["integer"] = "integer"
"""The values for the '_SimpleTypes' enum"""
_SIMPLETYPES_NULL: Literal["null"] = "null"
"""The values for the '_SimpleTypes' enum"""
_SIMPLETYPES_NUMBER: Literal["number"] = "number"
"""The values for the '_SimpleTypes' enum"""
_SIMPLETYPES_OBJECT: Literal["object"] = "object"
"""The values for the '_SimpleTypes' enum"""
_SIMPLETYPES_STRING: Literal["string"] = "string"
"""The values for the '_SimpleTypes' enum"""
_StringArray = List[str]
"""
uniqueItems: True
default:
[]
"""
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/portal/v20181001/get_user_settings_with_location.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetUserSettingsWithLocationResult',
'AwaitableGetUserSettingsWithLocationResult',
'get_user_settings_with_location',
]
@pulumi.output_type
class GetUserSettingsWithLocationResult:
"""
Response to get user settings
"""
def __init__(__self__, properties=None):
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
@property
@pulumi.getter
def properties(self) -> 'outputs.UserPropertiesResponse':
"""
The cloud shell user settings properties.
"""
return pulumi.get(self, "properties")
class AwaitableGetUserSettingsWithLocationResult(GetUserSettingsWithLocationResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetUserSettingsWithLocationResult(
properties=self.properties)
def get_user_settings_with_location(location: Optional[str] = None,
user_settings_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetUserSettingsWithLocationResult:
"""
Use this data source to access information about an existing resource.
:param str location: The provider location
:param str user_settings_name: The name of the user settings
"""
__args__ = dict()
__args__['location'] = location
__args__['userSettingsName'] = user_settings_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:portal/v20181001:getUserSettingsWithLocation', __args__, opts=opts, typ=GetUserSettingsWithLocationResult).value
return AwaitableGetUserSettingsWithLocationResult(
properties=__ret__.properties)
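# Hedged usage sketch (the location and settings name are illustrative values):
#   result = get_user_settings_with_location(
#       location="eastus",
#       user_settings_name="cloudconsole",
#   )
#   pulumi.export("shell_properties", result.properties)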
|
PypiClean
|
/django-netjsonconfig-0.12.tar.gz/django-netjsonconfig-0.12/django_netjsonconfig/base/template.py
|
from collections import OrderedDict
from copy import copy
from django.contrib.admin.models import ADDITION, LogEntry
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from taggit.managers import TaggableManager
from ..settings import DEFAULT_AUTO_CERT
from .base import BaseConfig
TYPE_CHOICES = (
('generic', _('Generic')),
('vpn', _('VPN-client')),
)
def default_auto_cert():
"""
returns the default value for the auto_cert field
(this avoids hard-coding the exact default value in the database migration)
"""
return DEFAULT_AUTO_CERT
class AbstractTemplate(BaseConfig):
"""
Abstract model implementing a
netjsonconfig template
"""
tags = TaggableManager(
through='django_netjsonconfig.TaggedTemplate',
blank=True,
help_text=_(
'A comma-separated list of template tags, may be used '
'to ease auto configuration with specific settings (eg: '
'4G, mesh, WDS, VPN, ecc.)'
),
)
vpn = models.ForeignKey(
'django_netjsonconfig.Vpn',
verbose_name=_('VPN'),
blank=True,
null=True,
on_delete=models.CASCADE,
)
type = models.CharField(
_('type'),
max_length=16,
choices=TYPE_CHOICES,
default='generic',
db_index=True,
help_text=_('template type, determines which ' 'features are available'),
)
default = models.BooleanField(
_('enabled by default'),
default=False,
db_index=True,
help_text=_(
'whether new configurations will have ' 'this template enabled by default'
),
)
auto_cert = models.BooleanField(
_('auto certificate'),
default=default_auto_cert,
db_index=True,
help_text=_(
'whether x509 client certificates should '
'be automatically managed behind the scenes '
'for each configuration using this template, '
'valid only for the VPN type'
),
)
default_values = JSONField(
_('Default Values'),
default=dict,
blank=True,
help_text=_(
'A dictionary containing the default '
'values for the variables used by this '
'template; these default variables will '
'be used during schema validation.'
),
load_kwargs={'object_pairs_hook': OrderedDict},
dump_kwargs={'indent': 4},
)
__template__ = True
class Meta:
abstract = True
verbose_name = _('template')
verbose_name_plural = _('templates')
def save(self, *args, **kwargs):
"""
modifies status of related configs
if key attributes have changed (queries the database)
"""
update_related_config_status = False
if not self._state.adding:
current = self.__class__.objects.get(pk=self.pk)
for attr in ['backend', 'config']:
if getattr(self, attr) != getattr(current, attr):
update_related_config_status = True
break
# save current changes
super().save(*args, **kwargs)
# update relations
if update_related_config_status:
self._update_related_config_status()
def _update_related_config_status(self):
changing_status = list(self.config_relations.exclude(status='modified'))
self.config_relations.update(status='modified')
for config in self.config_relations.all():
# config modified signal sent regardless
config._send_config_modified_signal()
# config status changed signal sent only if status changed
if config in changing_status:
config._send_config_status_changed_signal()
def clean(self, *args, **kwargs):
"""
* ensures VPN is selected if type is VPN
* clears VPN specific fields if type is not VPN
* automatically determines configuration if necessary
"""
if self.type == 'vpn' and not self.vpn:
raise ValidationError(
{'vpn': _('A VPN must be selected when template type is "VPN"')}
)
elif self.type != 'vpn':
self.vpn = None
self.auto_cert = False
if self.type == 'vpn' and not self.config:
self.config = self.vpn.auto_client(auto_cert=self.auto_cert)
super().clean(*args, **kwargs)
def get_context(self):
context = {}
if self.default_values:
context = copy(self.default_values)
context.update(super().get_context())
return context
def clone(self, user):
clone = copy(self)
clone.name = self.__get_clone_name()
clone._state.adding = True
clone.pk = None
# avoid cloned templates being flagged as default,
# to prevent potential unwanted duplications in
# newly registered devices
clone.default = False
clone.full_clean()
clone.save()
ct = ContentType.objects.get(model='template')
LogEntry.objects.log_action(
user_id=user.id,
content_type_id=ct.pk,
object_id=clone.pk,
object_repr=clone.name,
action_flag=ADDITION,
)
return clone
def __get_clone_name(self):
name = '{} (Clone)'.format(self.name)
index = 2
while self.__class__.objects.filter(name=name).count():
name = '{} (Clone {})'.format(self.name, index)
index += 1
return name
AbstractTemplate._meta.get_field('config').blank = True
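# Hedged sketch of a concrete model built on the abstract base above; the
# Template model actually shipped by the app may define additional fields:
#   class Template(AbstractTemplate):
#       class Meta(AbstractTemplate.Meta):
#           abstract = False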
|
PypiClean
|
/titration-0.2.tar.gz/titration-0.2/titration.py
|
from __future__ import division
from scipy.interpolate import InterpolatedUnivariateSpline
from scipy.optimize import brentq
import matplotlib.pyplot as plt
from multi_key_dict import multi_key_dict
##################################################################
################ Default settings ################################
plot_precision = 1000 # Number of points for the titration curve
Temp = 25.0 # Temperature
# Almost all acid- and base pK-values are only correct for an ambient temperature
# of 25 degree Celsius
language = 'english' # use english names
##################################################################
######################## Tables ##################################
############## pK-values for acids (25° C) ######################
PKvalues_Acid = {
'Trichloressigsäure': [ 0.52,],
'Benzolsulfonsäure': [ 0.70,],
'Jodsäure': [ 0.77,],
'schweflige Säure': [ 1.81, 6.91],
'chlorige Säure': [ 2.00,],
'Phosphorsäure': [ 2.12, 7.21,12.68],
'Chloressigsäure': [ 2.85,],
'Milchsäure': [ 3.08,],
'salpetrige Säure': [ 3.37,],
'Fluorwasserstoffsäure':[ 3.45,],
'Ameisensäure': [ 3.75,],
'Benzoesäure': [ 4.19,],
'Essigsäure': [ 4.75,],
'Kohlensäure': [ 6.37,10.25],
'hypochlorige Säure': [ 7.53,],
'hypobromige Säure': [ 8.69,],
'Borsäure': [ 9.14,],
'Blausäure': [ 9.31,],
'Phenol': [ 9.89,],
'hypojodige Säure': [10.64,],
'Schwefelsäure': [-1.00, 1.92], # the first value must not be too small
'Oxalsäure': [ 1.23, 4.19],
'Phosphonsäure': [ 2.00, 6.56],
'Weinsäure': [ 3.22, 4.82],
'Schwefelwasserstoff': [ 6.89,14.15],
'Salpetersäure': [-1.00,], # strong acid
'Salzsäure': [-1.00,] # strong acid
}
############## pK-values for bases (25° C) #######################
PKvalues_Base = {
'Harnstoff': [13.90,],
'Anilin': [ 9.37,],
'Pyridin': [ 8.75,],
'Hydroxylamin': [ 7.97,],
'Nikotin': [ 5.98,],
'Morphin': [ 5.79,],
'Hydrazin': [ 5.77,],
'Ammoniak': [ 4.75,],
'Trimethylamin': [ 4.19,],
'Methylamin': [ 3.44,],
'Dimethylamin': [ 3.27,],
'Ethylamin': [ 3.19,],
'Triethylamin': [ 2.99,],
'Natronlauge': [-1.00,], # strong base
'Kalilauge': [-1.00,] # strong base
}
Chemical_Formulas = {
'Trichloressigsäure': 'CCl_3COOH',
'Benzolsulfonsäure': 'C_6H_5SO_3H',
'chlorige Säure': 'HClO_2',
'Jodsäure':             'HJO_3',
'Schwefelsäure': 'H_2SO_4',
'schweflige Säure': 'H_2SO_3',
'Kohlensäure': 'H_2CO_3',
'salpetrige Säure': 'HNO_2',
'Fluorwasserstoffsäure':'HF',
'Blausäure': 'HCN',
'Schwefelwasserstoff': 'H_2S',
'Phosphonsäure': 'H_3PO_3',
'Phosphorsäure': 'H_3PO_4',
'Chloressigsäure': 'CH_2ClCOOH',
'Milchsäure': 'CH_3CH(OH)COOH',
'Ameisensäure': 'HCOOH',
'Benzoesäure': 'C_6H_5COOH',
'Essigsäure': 'CH_3COOH',
'hypochlorige Säure': 'HClO',
'hypobromige Säure': 'HBrO',
'Borsäure': 'B(OH)_3',
'Phenol': 'C_6H_5OH',
'hypojodige Säure': 'HJO',
'Harnstoff': 'CO(NH_2)_2',
'Anilin': 'C_6H_5NH_2',
'Pyridin': 'C_5H_5N',
'Hydroxylamin': 'NH_2OH',
'Nikotin': 'C_10H_14N_2',
'Morphin': 'C_17H_19O_3N',
'Hydrazin': 'NH_2NH_2',
'Ammoniak': 'NH_3',
'Trimethylamin': '(CH_3)_3N',
'Methylamin': 'CH_3NH_2',
'Dimethylamin': '(CH_3)_2NH',
'Ethylamin': 'C_2H_5NH_2',
'Triethylamin': '(C_2H_5)_3N',
'Natronlauge': 'NaOH',
'Kalilauge': 'KOH',
'Salzsäure': 'HCl',
'Salpetersäure': 'HNO_3'
}
English_Names = {
'Trichloressigsäure': 'trichloroacetic acid',
'Benzolsulfonsäure': 'Benzenesulfonic acid',
'Jodsäure':             'iodic acid',
'schweflige Säure': 'sulfurous acid',
'chlorige Säure': 'chlorous acid',
'Phosphorsäure': 'phosphoric acid',
'Chloressigsäure':      'chloroacetic acid',
'Milchsäure': 'lactic acid',
'salpetrige Säure': 'nitrous acid',
'Fluorwasserstoffsäure':'hydrofluoric acid',
'Ameisensäure': 'formic acid',
'Benzoesäure': 'benzoic acid',
'Essigsäure': 'acetic acid',
'Kohlensäure': 'carbonic acid',
'hypochlorige Säure': 'hypochlorous acid',
'hypobromige Säure':    'hypobromous acid',
'Borsäure': 'boric acid',
'Blausäure': 'hydrocyanic acid',
'Phenol': 'phenol',
'hypojodige Säure':     'hypoiodous acid',
'Schwefelsäure': 'sulfuric acid',
'Oxalsäure': 'oxalic acid',
'Phosphonsäure': 'phosphonic acid',
'Weinsäure': 'tartaric acid',
'Schwefelwasserstoff':  'hydrogen sulfide',
'Salpetersäure': 'nitric acid',
'Salzsäure': 'hydrochloric acid',
'Harnstoff': 'urea',
'Anilin': 'aniline',
'Pyridin': 'pyridine',
'Hydroxylamin': 'hydroxylamine',
'Nikotin': 'nicotine',
'Morphin': 'morphine',
'Hydrazin': 'hydrazine',
'Ammoniak': 'ammonia',
'Trimethylamin': 'trimethylamine',
'Methylamin': 'methylamine',
'Dimethylamin': 'dimethylamine',
'Ethylamin': 'ethylamine',
'Triethylamin': 'triethylamine',
'Natronlauge': 'caustic soda',
'Kalilauge': 'caustic potash'
}
Kvalues_Acid = multi_key_dict()
for acid in PKvalues_Acid.keys():
if acid in Chemical_Formulas:
formula = Chemical_Formulas[acid]
if language == 'english':
english_acid = English_Names[acid]
Kvalues_Acid[english_acid,formula] = [10**(-x) for x in PKvalues_Acid[acid]]
else:
Kvalues_Acid[acid,formula] = [10**(-x) for x in PKvalues_Acid[acid]]
else:
if language == 'english':
english_acid = English_Names[acid]
Kvalues_Acid[english_acid] = [10**(-x) for x in PKvalues_Acid[acid]]
else:
Kvalues_Acid[acid] = [10**(-x) for x in PKvalues_Acid[acid]]
Kvalues_Base = multi_key_dict()
for base in PKvalues_Base.keys():
if base in Chemical_Formulas:
formula = Chemical_Formulas[base]
if language == 'english':
english_base = English_Names[base]
Kvalues_Base[english_base,formula] = [10**(-x) for x in PKvalues_Base[base]]
else:
Kvalues_Base[base,formula] = [10**(-x) for x in PKvalues_Base[base]]
else:
if language == 'english':
english_base = English_Names[base]
Kvalues_Base[english_base] = [10**(-x) for x in PKvalues_Base[base]]
else:
Kvalues_Base[base] = [10**(-x) for x in PKvalues_Base[base]]
KeyList = set()
for key in list(Kvalues_Acid.keys())+list(Kvalues_Base.keys()):
# in python3 Kvalues_Acid.keys() is a view, not a list
for item in key:
KeyList.add(item)
# KeyList contains the known acids and bases
def known_solutes():
for name in sorted(KeyList):
print(name+', ')
##################################################################
############### KW depends on the temperature ####################
# Water
# ToDo: We need similar values for the pK-values
KWT = {0:0.114e-14, 10:0.681e-14, 20: 0.929e-14, 25: 1.008e-14, 30: 1.469e-14, 40: 2.919e-14, 50: 5.474e-14, 60: 9.610e-14}
##### KW-value dependent on temperature (default 25° C)
class KW_T(object):
def __init__(self,werte=KWT):
werte = sorted(werte.items())
self.spline = InterpolatedUnivariateSpline([x for (x,y) in werte],[y for (x,y) in werte])
def __call__(self,t):
return float(self.spline(t))
# Example: Let f = KW_T()
# Then f(temperature) computes the KW-value of water for a temperature in the
# range 0 .. 60 ° C.
##################################################################
########## Solute instantiates the solved solute ###########
class Solute(object):
'''Defines the properties of a solute.'''
def __init__(self,KS,CS=1,acid=True):
'''The list [KS1,...,KSn] contains the K-values (not the pK-values) of the
solute. This list can be replaced by the name or the formula of the solute,
if these are known to the program.
CS is the concentration of the solute in mol/L.
acid = True or acid = False (if the solute is a base).'''
# Try to identify the K-values with the name of the solute
try:
ks = KS.lower()
# raises AttributeError if KS is not a string
n = len(ks.strip())
# Check whether exactly one name in KeyList starts with ks
count = 0
for item in KeyList:
if item[:n].lower() == ks.strip():
count += 1
item_found = item
if count == 0:
if language == 'german':
print("Substanz nicht gefunden!")
else:
print("Solute not found!")
return
elif count > 1 and ks[-1] != ' ':
if language == 'german':
print("Substanz nicht eindeutig identifizierbar!")
else:
print("Solute not (uniquely) identifiable!")
return
else:
# whether the solute is an acid or a base is determined here from its name
if ks[-1] == ' ':
item_found = item_found[:n]
# Example: for 'HCl ' not only 'HCl' but also 'HClO' and 'HClO_2' would
# match! Only 'HCl' is wanted here.
self.item_found = item_found
self.name = item_found # the name may be of interest later
if acid:
try:
self.KS = Kvalues_Acid[item_found]
self.acid = True
except KeyError:
self.KS = Kvalues_Base[item_found]
self.acid = False
else:
try:
self.KS = Kvalues_Base[item_found]
self.acid = False
except KeyError:
self.KS = Kvalues_Acid[item_found]
self.acid = True
except AttributeError: # Solute not found by name. Use the list KS
self.KS = KS
self.acid = acid
# if the solute is a base, acid = False must explicitly be set
# (this is only needed, if the solute is not identified by its name)
self.n = len(self.KS) # Maximal number of H^+ resp. OH^- ions
self.CS = CS
self.volume = 0.0
# needed for the computation of total_volume
def set_volume(self,volume):
'''This allows to change the volume of one solute (titration)'''
self.volume = volume
# volume of the substance. May be variable during a titration.
self.solution.compute_total_volume()
# Compute the total volume of the solution.
# self.solution identifies the solution (instance of class Solution)
# self is part of solution
def __call__(self,ph):
'''ph: actual pH-value.
This function returns the total charge (in mol/L) of the substance.'''
concentration = 10**(-ph) # pH-value -> concentration
proportion = self.volume/self.total_volume
if not self.acid:
concentration = self.solution.KW/concentration
# For bases the pK-value must be transformed first
actual = 1.0
for ks in self.KS:
actual = 1 + concentration*actual/ks
actual = self.CS * proportion / actual
charge = self.n*actual
for i in range(self.n-1,0,-1):
actual *= concentration / self.KS[i]
charge += i*actual
return charge
def charge_amount(self,ph):
'''ph: actual pH-value.
For a given pH-value and a solute AH_2, this function returns the amounts of
AH_2, AH^-, A^{2-}
Similarly for other solutes.'''
concentration = 10**(-ph) # PH-value -> concentration
if not self.acid:
concentration = self.solution.KW/concentration
# For bases the pK-value must be transformed first
actual = 1.0
for ks in self.KS:
actual = 1 + concentration*actual/ks
actual = self.CS / actual
amount = [actual]
for i in range(self.n-1,-1,-1):
actual *= concentration / self.KS[i]
amount.append(actual)
amount_sum = sum(amount)
amount = [single_amount/amount_sum for single_amount in amount]
amount.reverse()
return amount
def plot_protionation(self):
'''Plot of the amount of AH_2, AH^-, A^{2-} for acid AH_2 (example)
in the pH-range 0 ... 14'''
# This depends on the solute only
plt.figure()
ph_values = [0.01*i for i in range(1401)]
prot = {}
for i in range(len(self.KS)+1):
prot[i] = []
for ph in ph_values:
prot_values = self.charge_amount(ph)
for i in range(len(self.KS)+1):
prot[i].append(prot_values[i])
for i in range(len(self.KS)+1):
plt.plot(ph_values,prot[i])
plt.grid(True)
plt.title('Protonation')
plt.xlabel('pH')
plt.ylabel('Proportion')
plt.show()
##################################################################
class Solution(object):
"Solution with solved solutes"
def __init__(self,*solutes):
'''Determines the solved solutes. These are instances of Solute.'''
self.number_solutes = len(solutes)
self.solutes = solutes
for solute in solutes:
solute.solution = self
# Each solved solute must know the solution it belongs to
self.Temp = Temp
self.kw = KW_T()
self.KW = self.kw(Temp)
# ToDo: Other solvents as water, other temperatures (here 25° C)
def compute_total_volume(self):
self.total_volume = sum((solute.volume for solute in self.solutes))
for solute in self.solutes:
solute.total_volume = self.total_volume
# Each component must know the total volume
def set_Temp(self,temp):
'calculates temperature dependent constants'
self.Temp = temp
self.KW = self.kw(temp)
def f(self,ph):
'''This function calculates the total charge of the solution dependent on pH-value ph.
If f(ph) = 0, then ph is the pH-value of the solution.'''
hplus = 10**(-ph) # pH-Value -> [H^+]
ohminus = self.KW/hplus # pH-Value -> [OH^-]
charge = hplus - ohminus # charge of H^+ and OH^-
for solute in self.solutes:
if solute.acid:
charge -= solute(ph)
else:
charge += solute(ph)
return charge
def PH(self):
'Compute the pH-value of the solution.'
return brentq(self.f,-2,16,xtol=1e-10) # tolerance 1e-10 should be sufficient
# Compute a zero of function f with the Brent (1973) method for pH-values
# between -2 and 16. This zero is the unknown pH-value of the solution.
############# Special Cases ########################################
# Solutions with one solute
class Acid(Solution):
def __init__(self,KAH,CA=1,Temp=25.0):
acid = Solute(KAH,CS=CA)
Solution.__init__(self,acid)
self.set_Temp(Temp)
self.solutes[0].set_volume(1.0) # This value must be not zero
def PH_Acid(KAH,CA=1):
return Acid(KAH,CA).PH()
class Base(Solution):
def __init__(self,KBH,CB=1,Temp=25.0):
base = Solute(KBH,CS=CB,acid=False)
Solution.__init__(self,base)
self.set_Temp(Temp)
self.solutes[0].set_volume(1.0)
def PH_Base(KBH,CB=1):
return Base(KBH,CB).PH()
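# Hedged usage sketch: compute the pH of 0.1 mol/L solutions via the name
# lookup (the numbers are approximate):
#   PH_Acid('acetic acid', CA=0.1)  # ~2.9
#   PH_Base('ammonia', CB=0.1)      # ~11.1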
##################################################################
class Titration(Solution):
'''Calculate and plot the titration curve for a solution with arbitrary many solutes. The volume of one solute varies, the others are fixed.'''
def __init__(self,to_titrate,*rest_solutes):
'''Determines the solved solutes. These are instances of Solute.
The volume of to_titrate varies, the volumes of the solutes in rest_solutes
are fixed. These volumes are parameters of plot_titration'''
solutes = [to_titrate]+list(rest_solutes)
self.to_titrate = to_titrate
self.rest_solutes = rest_solutes
# to_titrate (one solute) is variable, all other solutes are fixed
Solution.__init__(self,*solutes)
self.precision = plot_precision
self.delta = 1.0/plot_precision
def compute_PH(self,V_titrate):
'''Computes the pH-value, if V_titrate is the volume of to_titrate (variable).
The remaining solutes are constant.'''
self.to_titrate.set_volume(V_titrate)
return self.PH()
def plot_titration(self,max_to_titrate,*V_rest_solutes):
'''Plot of the titration curve.
The volume of to_titrate is variable in the range 0 ... max_to_titrate,
*V_rest_solutes are the volumes of the remaining solutes. The volume
of solutes must have the same order as in __init__!'''
for i in range(len(self.rest_solutes)):
self.rest_solutes[i].set_volume(V_rest_solutes[i])
# Determines the volume of the constant solutes.
dd = max_to_titrate*self.delta
xwerte = [dd*i for i in range(self.precision+1)]
ywerte = [self.compute_PH(x) for x in xwerte]
titration_line = plt.plot(xwerte,ywerte,color='r')
plt.axhline(y=7,xmin=0,xmax=1,color='g')
plt.axvline(x=V_rest_solutes[0]*self.rest_solutes[0].CS/self.to_titrate.CS,ymin=0,ymax=1,color='g')
plt.axvline(x=2*V_rest_solutes[0]*self.rest_solutes[0].CS/self.to_titrate.CS,ymin=0,ymax=1,color='g')
plt.xlabel('Concentration')
plt.ylabel('pH')
plt.grid(True)
plt.title('Titration Curve')
plt.show()
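# Hedged usage sketch: titrate caustic soda into a fixed volume of acetic acid
# and plot the curve (concentrations and volumes are illustrative):
#   naoh = Solute('caustic soda', CS=0.1, acid=False)
#   acetic = Solute('acetic acid', CS=0.1)
#   curve = Titration(naoh, acetic)
#   curve.plot_titration(2.0, 1.0)  # NaOH volume varies 0..2, acid fixed at 1.0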
|
PypiClean
|
/qmpy-tri-2022.7.21.tar.gz/qmpy-tri-2022.7.21/qmpy/web/static/js/jsmol/j2s/J/adapter/readers/more/GromacsReader.js
|
Clazz.declarePackage ("J.adapter.readers.more");
Clazz.load (["J.adapter.smarter.AtomSetCollectionReader"], "J.adapter.readers.more.GromacsReader", ["java.lang.Float", "J.adapter.smarter.Atom", "J.api.JmolAdapter", "J.util.Logger", "$.P3"], function () {
c$ = Clazz.declareType (J.adapter.readers.more, "GromacsReader", J.adapter.smarter.AtomSetCollectionReader);
Clazz.overrideMethod (c$, "initializeReader",
function () {
this.atomSetCollection.newAtomSet ();
this.setIsPDB ();
});
Clazz.overrideMethod (c$, "checkLine",
function () {
this.checkCurrentLineForScript ();
this.atomSetCollection.setAtomSetName (this.line.trim ());
this.readAtoms ();
this.readUnitCell ();
this.continuing = false;
return false;
});
$_M(c$, "readAtoms",
($fz = function () {
var modelAtomCount = this.parseIntStr (this.readLine ());
for (var i = 0; i < modelAtomCount; ++i) {
this.readLine ();
var len = this.line.length;
if (len != 44 && len != 68) {
J.util.Logger.warn ("line cannot be read for GROMACS atom data: " + this.line);
continue;
}var atom = new J.adapter.smarter.Atom ();
atom.sequenceNumber = this.parseIntRange (this.line, 0, 5);
this.setAtomName (atom, this.parseTokenRange (this.line, 5, 9).trim (), this.line.substring (11, 15).trim ());
atom.atomSerial = this.parseIntRange (this.line, 15, 20);
atom.x = this.parseFloatRange (this.line, 20, 28) * 10;
atom.y = this.parseFloatRange (this.line, 28, 36) * 10;
atom.z = this.parseFloatRange (this.line, 36, 44) * 10;
if (Float.isNaN (atom.x) || Float.isNaN (atom.y) || Float.isNaN (atom.z)) {
J.util.Logger.warn ("line cannot be read for GROMACS atom data: " + this.line);
atom.set (0, 0, 0);
}this.setAtomCoord (atom);
atom.elementSymbol = this.deduceElementSymbol (atom.group3, atom.atomName);
if (!this.filterAtom (atom, i)) continue;
atom.isHetero = false;
this.atomSetCollection.addAtom (atom);
if (len < 68) continue;
var vx = this.parseFloatRange (this.line, 44, 52) * 10;
var vy = this.parseFloatRange (this.line, 52, 60) * 10;
var vz = this.parseFloatRange (this.line, 60, 68) * 10;
if (Float.isNaN (vx) || Float.isNaN (vy) || Float.isNaN (vz)) continue;
this.atomSetCollection.addVibrationVector (atom.index, vx, vy, vz);
}
}, $fz.isPrivate = true, $fz));
$_M(c$, "setAtomName",
($fz = function (atom, gname, aname) {
atom.atomName = aname;
if (gname.equals ("SOL") && aname.length == 3 && "OW1;HW2;HW3".indexOf (aname) >= 0) gname = "WAT";
atom.group3 = gname;
}, $fz.isPrivate = true, $fz), "J.adapter.smarter.Atom,~S,~S");
$_M(c$, "deduceElementSymbol",
function (group3, atomName) {
if (atomName.length <= 2 && group3.equals (atomName)) return atomName;
var ch1 = (atomName.length == 4 ? atomName.charAt (0) : '\0');
var ch2 = atomName.charAt (atomName.length == 4 ? 1 : 0);
var isHetero = J.api.JmolAdapter.isHetero (group3);
if (J.adapter.smarter.Atom.isValidElementSymbolNoCaseSecondChar2 (ch1, ch2)) return (isHetero || ch1 != 'H' ? "" + ch1 + ch2 : "H");
if (J.adapter.smarter.Atom.isValidElementSymbol (ch2)) return "" + ch2;
if (J.adapter.smarter.Atom.isValidElementSymbol (ch1)) return "" + ch1;
return "Xx";
}, "~S,~S");
$_M(c$, "readUnitCell",
($fz = function () {
if (this.readLine () == null) return;
var tokens = J.adapter.smarter.AtomSetCollectionReader.getTokensStr (this.line);
if (tokens.length < 3 || !this.doApplySymmetry) return;
var a = 10 * this.parseFloatStr (tokens[0]);
var b = 10 * this.parseFloatStr (tokens[1]);
var c = 10 * this.parseFloatStr (tokens[2]);
this.setUnitCell (a, b, c, 90, 90, 90);
this.setSpaceGroupName ("P1");
var atoms = this.atomSetCollection.getAtoms ();
var pt = J.util.P3.new3 (0.5, 0.5, 0.5);
for (var i = this.atomSetCollection.getAtomCount (); --i >= 0; ) {
this.setAtomCoord (atoms[i]);
atoms[i].add (pt);
}
}, $fz.isPrivate = true, $fz));
});
|
PypiClean
|
/mayan-edms-4.4.8.tar.gz/mayan-edms-4.4.8/mayan/apps/views/widgets.py
|
from collections import OrderedDict
from django import forms
from django.utils.safestring import mark_safe
class ColorWidget(forms.TextInput):
template_name = 'views/widget_color_picker.html'
def __init__(self, attrs=None):
attrs = attrs or {}
attrs['type'] = 'color'
super().__init__(attrs=attrs)
class DisableableSelectWidget(forms.widgets.SelectMultiple):
def create_option(self, *args, **kwargs):
result = super().create_option(*args, **kwargs)
# Get a keyword argument named value or the second positional argument
# Current interface as of Django 1.11
# def create_option(self, name, value, label, selected, index,
# subindex=None, attrs=None):
value = kwargs.get('value', args[1])
if value in self.disabled_choices:
result['attrs'].update(
{'disabled': 'disabled'}
)
return result
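# Hedged usage sketch: disabled_choices is expected to be assigned by the
# caller before rendering, since the attribute is not set in __init__:
#   widget = DisableableSelectWidget()
#   widget.disabled_choices = {2, 5}  # option values rendered as disabled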
class NamedMultiWidget(forms.widgets.Widget):
subwidgets = None
subwidgets_order = None
template_name = 'django/forms/widgets/multiwidget.html'
def __init__(self, attrs=None):
self.widgets = {}
for name, widget in OrderedDict(self.subwidgets).items():
self.widgets[name] = widget() if isinstance(widget, type) else widget
if not self.subwidgets_order:
self.subwidgets_order = list(self.widgets.keys())
super().__init__(attrs)
def _get_media(self):
"Media for a multiwidget is the combination of all media of the subwidgets"
media = forms.widgets.Media()
for name, widget in self.widgets.items():
media += widget.media
return media
media = property(_get_media)
@property
def is_hidden(self):
return all(
widget.is_hidden for name, widget in self.widgets.items()
)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
if self.is_localized:
for widget in self.widgets:
widget.is_localized = self.is_localized
value = self.decompress(value)
final_attrs = context['widget']['attrs']
input_type = final_attrs.pop('type', None)
id_ = final_attrs.get('id')
subwidgets = []
# Include new subwidgets added by subclasses after __init__
_subwidgets_order = self.subwidgets_order.copy()
for widget in self.widgets.keys():
if widget not in _subwidgets_order:
_subwidgets_order.append(widget)
for subwidget_entry in _subwidgets_order:
widget_name = subwidget_entry
widget = self.widgets[widget_name]
if input_type is not None:
widget.input_type = input_type
full_widget_name = '{}_{}'.format(name, widget_name)
try:
widget_value = value[widget_name]
except (KeyError, IndexError):
widget_value = None
if id_:
widget_attrs = final_attrs.copy()
widget_attrs['id'] = '{}_{}'.format(id_, widget_name)
else:
widget_attrs = final_attrs
subwidgets.append(
widget.get_context(
full_widget_name, widget_value, widget_attrs
)['widget']
)
context['widget']['subwidgets'] = subwidgets
return context
def id_for_label(self, id_):
if id_:
id_ += '_{}'.format(
list(
self.widgets.keys()
)[0]
)
return id_
def value_from_datadict(self, data, files, name):
return {
widget_name: widget.value_from_datadict(
data, files, '{}_{}'.format(name, widget_name)
) for widget_name, widget in self.widgets.items()
}
def value_omitted_from_data(self, data, files, name):
return all(
widget.value_omitted_from_data(data, files, '{}_{}'.format(name, widget_name))
for widget_name, widget in self.widgets.items()
)
@property
def needs_multipart_form(self):
return any(
widget.needs_multipart_form for name, widget in self.widgets.items()
)
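# Hedged sketch of a concrete subclass: subwidgets is any iterable of
# (name, widget-or-widget-class) pairs and decompress() must return a
# mapping keyed by those names:
#   class CoordinateWidget(NamedMultiWidget):
#       subwidgets = (
#           ('latitude', forms.NumberInput),
#           ('longitude', forms.NumberInput),
#       )
#       def decompress(self, value):
#           return value or {'latitude': None, 'longitude': None}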
class PlainWidget(forms.widgets.Widget):
"""
Class to define a form widget that effectively nulls the HTML of a
widget and reduces the output to only its value.
"""
def render(self, name, value, attrs=None, renderer=None):
return mark_safe(s='%s' % value)
class TextAreaDiv(forms.widgets.Widget):
"""
Class to define a form widget that simulates the behavior of a
Textarea widget but using a div tag instead.
"""
template_name = 'appearance/forms/widgets/textareadiv.html'
|
PypiClean
|
/ageliaco.p10userdata-0.1.3.tar.gz/ageliaco.p10userdata-0.1.3/README.txt
|
Introduction
============
This product adds some properties to the user data in order to make the best
use of the values available in the p10 LDAP.
This product depends on **plone.app.ldap**, because it sets the basic properties.
Installation
============
* Go to admin > Site Setup > Add-ons
* Activate plone.app.ldap
* Activate ageliaco.p10userdata
* Go to ZMI > acl_users > ldap-plugin > acl_users

  * reset LDAP Server
  * reset "Configure" to fit your needs (filter and groups)
There is a bug concerning plone.app.ldap: when the LDAP server is set,
it doesn't set the port number properly, and the LDAP filter is not set either.
This product may contain traces of nuts.
Authors
=======
"AGELIACO", Serge Renfer mailto:serge.renfer@gmail dot com
|
PypiClean
|
/d3m-sklearn-wrap-2022.2.8.tar.gz/d3m-sklearn-wrap-2022.2.8/sklearn_wrap/SKAdaBoostRegressor.py
|
from typing import Any, Callable, List, Dict, Union, Optional, Sequence, Tuple
from numpy import ndarray
from collections import OrderedDict
from scipy import sparse
import os
import sklearn
import numpy
import typing
import pandas
# Custom import commands if any
from sklearn.ensemble.weight_boosting import AdaBoostRegressor
from d3m.container.numpy import ndarray as d3m_ndarray
from d3m.container import DataFrame as d3m_dataframe
from d3m.metadata import hyperparams, params, base as metadata_base
from d3m.base import utils as base_utils
from d3m.exceptions import PrimitiveNotFittedError
from d3m.primitive_interfaces.base import CallResult, DockerContainer
from d3m.primitive_interfaces.supervised_learning import SupervisedLearnerPrimitiveBase
from d3m.primitive_interfaces.base import ProbabilisticCompositionalityMixin, ContinueFitMixin
from d3m import exceptions
Inputs = d3m_dataframe
Outputs = d3m_dataframe
class Params(params.Params):
estimators_: Optional[List[sklearn.tree.DecisionTreeRegressor]]
estimator_weights_: Optional[ndarray]
estimator_errors_: Optional[ndarray]
estimator_params: Optional[tuple]
base_estimator_: Optional[object]
input_column_names: Optional[pandas.core.indexes.base.Index]
target_names_: Optional[Sequence[Any]]
training_indices_: Optional[Sequence[int]]
target_column_indices_: Optional[Sequence[int]]
target_columns_metadata_: Optional[List[OrderedDict]]
class Hyperparams(hyperparams.Hyperparams):
base_estimator = hyperparams.Constant(
default=None,
description='The base estimator from which the boosted ensemble is built. Support for sample weighting is required.',
semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter']
)
n_estimators = hyperparams.Bounded[int](
lower=1,
upper=None,
default=50,
description='The maximum number of estimators at which boosting is terminated. In case of perfect fit, the learning procedure is stopped early.',
semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter']
)
learning_rate = hyperparams.Uniform(
lower=0.01,
upper=2,
default=0.1,
description='Learning rate shrinks the contribution of each regressor by ``learning_rate``. There is a trade-off between ``learning_rate`` and ``n_estimators``.',
semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter']
)
loss = hyperparams.Enumeration[str](
values=['linear', 'square', 'exponential'],
default='linear',
description='The loss function to use when updating the weights after each boosting iteration.',
semantic_types=['https://metadata.datadrivendiscovery.org/types/TuningParameter']
)
use_inputs_columns = hyperparams.Set(
elements=hyperparams.Hyperparameter[int](-1),
default=(),
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
description="A set of column indices to force primitive to use as training input. If any specified column cannot be parsed, it is skipped.",
)
use_outputs_columns = hyperparams.Set(
elements=hyperparams.Hyperparameter[int](-1),
default=(),
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
description="A set of column indices to force primitive to use as training target. If any specified column cannot be parsed, it is skipped.",
)
exclude_inputs_columns = hyperparams.Set(
elements=hyperparams.Hyperparameter[int](-1),
default=(),
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
description="A set of column indices to not use as training inputs. Applicable only if \"use_columns\" is not provided.",
)
exclude_outputs_columns = hyperparams.Set(
elements=hyperparams.Hyperparameter[int](-1),
default=(),
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
description="A set of column indices to not use as training target. Applicable only if \"use_columns\" is not provided.",
)
return_result = hyperparams.Enumeration(
values=['append', 'replace', 'new'],
default='new',
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
description="Should parsed columns be appended, should they replace original columns, or should only parsed columns be returned? This hyperparam is ignored if use_semantic_types is set to false.",
)
use_semantic_types = hyperparams.UniformBool(
default=False,
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
description="Controls whether semantic_types metadata will be used for filtering columns in input dataframe. Setting this to false makes the code ignore return_result and will produce only the output dataframe"
)
add_index_columns = hyperparams.UniformBool(
default=False,
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
description="Also include primary index columns if input data has them. Applicable only if \"return_result\" is set to \"new\".",
)
error_on_no_input = hyperparams.UniformBool(
default=True,
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter'],
description="Throw an exception if no input column is selected/provided. Defaults to true to behave like sklearn. To prevent pipelines from breaking set this to False.",
)
return_semantic_type = hyperparams.Enumeration[str](
values=['https://metadata.datadrivendiscovery.org/types/Attribute', 'https://metadata.datadrivendiscovery.org/types/ConstructedAttribute', 'https://metadata.datadrivendiscovery.org/types/PredictedTarget'],
default='https://metadata.datadrivendiscovery.org/types/PredictedTarget',
description='Decides what semantic type to attach to generated output',
semantic_types=['https://metadata.datadrivendiscovery.org/types/ControlParameter']
)
class SKAdaBoostRegressor(SupervisedLearnerPrimitiveBase[Inputs, Outputs, Params, Hyperparams]):
"""
Primitive wrapping for sklearn AdaBoostRegressor
`sklearn documentation <https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.AdaBoostRegressor.html>`_
"""
__author__ = "JPL MARVIN"
metadata = metadata_base.PrimitiveMetadata({
"algorithm_types": [metadata_base.PrimitiveAlgorithmType.ADABOOST, ],
"name": "sklearn.ensemble.weight_boosting.AdaBoostRegressor",
"primitive_family": metadata_base.PrimitiveFamily.REGRESSION,
"python_path": "d3m.primitives.regression.ada_boost.SKlearn",
"source": {'name': 'JPL', 'contact': 'mailto:[email protected]', 'uris': ['https://gitlab.com/datadrivendiscovery/sklearn-wrap/issues', 'https://scikit-learn.org/stable/modules/generated/sklearn.ensemble.AdaBoostRegressor.html']},
"version": "2022.2.8",
"id": "6cab1537-02e1-4dc4-9ebb-53fa2cbabedd",
"hyperparams_to_tune": ['learning_rate', 'n_estimators'],
'installation': [
{'type': metadata_base.PrimitiveInstallationType.PIP,
'package': 'd3m-sklearn-wrap',
'version': '2022.2.8',
}]
})
def __init__(self, *,
hyperparams: Hyperparams,
random_seed: int = 0,
docker_containers: Dict[str, DockerContainer] = None) -> None:
super().__init__(hyperparams=hyperparams, random_seed=random_seed, docker_containers=docker_containers)
self._clf = AdaBoostRegressor(
base_estimator=self.hyperparams['base_estimator'],
n_estimators=self.hyperparams['n_estimators'],
learning_rate=self.hyperparams['learning_rate'],
loss=self.hyperparams['loss'],
random_state=self.random_seed,
)
self._inputs = None
self._outputs = None
self._training_inputs = None
self._training_outputs = None
self._target_names = None
self._training_indices = None
self._target_column_indices = None
self._target_columns_metadata: List[OrderedDict] = None
self._input_column_names = None
self._fitted = False
self._new_training_data = False
def set_training_data(self, *, inputs: Inputs, outputs: Outputs) -> None:
self._inputs = inputs
self._outputs = outputs
self._fitted = False
self._new_training_data = True
def fit(self, *, timeout: float = None, iterations: int = None) -> CallResult[None]:
if self._inputs is None or self._outputs is None:
raise ValueError("Missing training data.")
if not self._new_training_data:
return CallResult(None)
self._new_training_data = False
self._training_inputs, self._training_indices = self._get_columns_to_fit(self._inputs, self.hyperparams)
self._training_outputs, self._target_names, self._target_column_indices = self._get_targets(self._outputs, self.hyperparams)
self._input_column_names = self._training_inputs.columns.astype(str)
if len(self._training_indices) > 0 and len(self._target_column_indices) > 0:
self._target_columns_metadata = self._get_target_columns_metadata(self._training_outputs.metadata, self.hyperparams)
sk_training_output = self._training_outputs.values
shape = sk_training_output.shape
if len(shape) == 2 and shape[1] == 1:
sk_training_output = numpy.ravel(sk_training_output)
self._clf.fit(self._training_inputs, sk_training_output)
self._fitted = True
else:
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
            self.logger.warning("No input columns were selected")
return CallResult(None)
def produce(self, *, inputs: Inputs, timeout: float = None, iterations: int = None) -> CallResult[Outputs]:
sk_inputs, columns_to_use = self._get_columns_to_fit(inputs, self.hyperparams)
output = []
if len(sk_inputs.columns):
try:
sk_output = self._clf.predict(sk_inputs)
except sklearn.exceptions.NotFittedError as error:
raise PrimitiveNotFittedError("Primitive not fitted.") from error
# For primitives that allow predicting without fitting like GaussianProcessRegressor
if not self._fitted:
raise PrimitiveNotFittedError("Primitive not fitted.")
if sparse.issparse(sk_output):
sk_output = pandas.DataFrame.sparse.from_spmatrix(sk_output)
output = self._wrap_predictions(inputs, sk_output)
output.columns = self._target_names
output = [output]
else:
if self.hyperparams['error_on_no_input']:
raise RuntimeError("No input columns were selected")
            self.logger.warning("No input columns were selected")
outputs = base_utils.combine_columns(return_result=self.hyperparams['return_result'],
add_index_columns=self.hyperparams['add_index_columns'],
inputs=inputs, column_indices=self._target_column_indices,
columns_list=output)
return CallResult(outputs)
def get_params(self) -> Params:
if not self._fitted:
return Params(
estimators_=None,
estimator_weights_=None,
estimator_errors_=None,
estimator_params=None,
base_estimator_=None,
input_column_names=self._input_column_names,
training_indices_=self._training_indices,
target_names_=self._target_names,
target_column_indices_=self._target_column_indices,
target_columns_metadata_=self._target_columns_metadata
)
return Params(
estimators_=getattr(self._clf, 'estimators_', None),
estimator_weights_=getattr(self._clf, 'estimator_weights_', None),
estimator_errors_=getattr(self._clf, 'estimator_errors_', None),
estimator_params=getattr(self._clf, 'estimator_params', None),
base_estimator_=getattr(self._clf, 'base_estimator_', None),
input_column_names=self._input_column_names,
training_indices_=self._training_indices,
target_names_=self._target_names,
target_column_indices_=self._target_column_indices,
target_columns_metadata_=self._target_columns_metadata
)
def set_params(self, *, params: Params) -> None:
self._clf.estimators_ = params['estimators_']
self._clf.estimator_weights_ = params['estimator_weights_']
self._clf.estimator_errors_ = params['estimator_errors_']
self._clf.estimator_params = params['estimator_params']
self._clf.base_estimator_ = params['base_estimator_']
self._input_column_names = params['input_column_names']
self._training_indices = params['training_indices_']
self._target_names = params['target_names_']
self._target_column_indices = params['target_column_indices_']
self._target_columns_metadata = params['target_columns_metadata_']
if params['estimators_'] is not None:
self._fitted = True
if params['estimator_weights_'] is not None:
self._fitted = True
if params['estimator_errors_'] is not None:
self._fitted = True
if params['estimator_params'] is not None:
self._fitted = True
if params['base_estimator_'] is not None:
self._fitted = True
def produce_feature_importances(self, *, timeout: float = None, iterations: int = None) -> CallResult[d3m_dataframe]:
output = d3m_dataframe(self._clf.feature_importances_.reshape((1, len(self._input_column_names))))
output.columns = self._input_column_names
for i in range(len(self._input_column_names)):
output.metadata = output.metadata.update_column(i, {"name": self._input_column_names[i]})
return CallResult(output)
@classmethod
def _get_columns_to_fit(cls, inputs: Inputs, hyperparams: Hyperparams):
if not hyperparams['use_semantic_types']:
return inputs, list(range(len(inputs.columns)))
inputs_metadata = inputs.metadata
def can_produce_column(column_index: int) -> bool:
return cls._can_produce_column(inputs_metadata, column_index, hyperparams)
columns_to_produce, columns_not_to_produce = base_utils.get_columns_to_use(inputs_metadata,
use_columns=hyperparams['use_inputs_columns'],
exclude_columns=hyperparams['exclude_inputs_columns'],
can_use_column=can_produce_column)
return inputs.iloc[:, columns_to_produce], columns_to_produce
# return columns_to_produce
@classmethod
def _can_produce_column(cls, inputs_metadata: metadata_base.DataMetadata, column_index: int, hyperparams: Hyperparams) -> bool:
column_metadata = inputs_metadata.query((metadata_base.ALL_ELEMENTS, column_index))
accepted_structural_types = (int, float, numpy.integer, numpy.float64)
accepted_semantic_types = set()
accepted_semantic_types.add("https://metadata.datadrivendiscovery.org/types/Attribute")
if not issubclass(column_metadata['structural_type'], accepted_structural_types):
return False
semantic_types = set(column_metadata.get('semantic_types', []))
if len(semantic_types) == 0:
cls.logger.warning("No semantic types found in column metadata")
return False
# Making sure all accepted_semantic_types are available in semantic_types
if len(accepted_semantic_types - semantic_types) == 0:
return True
return False
@classmethod
def _get_targets(cls, data: d3m_dataframe, hyperparams: Hyperparams):
if not hyperparams['use_semantic_types']:
return data, list(data.columns), list(range(len(data.columns)))
metadata = data.metadata
def can_produce_column(column_index: int) -> bool:
accepted_semantic_types = set()
accepted_semantic_types.add("https://metadata.datadrivendiscovery.org/types/TrueTarget")
column_metadata = metadata.query((metadata_base.ALL_ELEMENTS, column_index))
semantic_types = set(column_metadata.get('semantic_types', []))
if len(semantic_types) == 0:
cls.logger.warning("No semantic types found in column metadata")
return False
# Making sure all accepted_semantic_types are available in semantic_types
if len(accepted_semantic_types - semantic_types) == 0:
return True
return False
        target_column_indices, target_columns_not_to_produce = base_utils.get_columns_to_use(
            metadata,
            use_columns=hyperparams['use_outputs_columns'],
            exclude_columns=hyperparams['exclude_outputs_columns'],
            can_use_column=can_produce_column,
        )
targets = []
if target_column_indices:
targets = data.select_columns(target_column_indices)
target_column_names = []
for idx in target_column_indices:
target_column_names.append(data.columns[idx])
return targets, target_column_names, target_column_indices
@classmethod
def _get_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata, hyperparams) -> List[OrderedDict]:
outputs_length = outputs_metadata.query((metadata_base.ALL_ELEMENTS,))['dimension']['length']
target_columns_metadata: List[OrderedDict] = []
for column_index in range(outputs_length):
column_metadata = OrderedDict(outputs_metadata.query_column(column_index))
# Update semantic types and prepare it for predicted targets.
semantic_types = set(column_metadata.get('semantic_types', []))
semantic_types_to_remove = set(["https://metadata.datadrivendiscovery.org/types/TrueTarget","https://metadata.datadrivendiscovery.org/types/SuggestedTarget",])
add_semantic_types = set(["https://metadata.datadrivendiscovery.org/types/PredictedTarget",])
add_semantic_types.add(hyperparams["return_semantic_type"])
semantic_types = semantic_types - semantic_types_to_remove
semantic_types = semantic_types.union(add_semantic_types)
column_metadata['semantic_types'] = list(semantic_types)
target_columns_metadata.append(column_metadata)
return target_columns_metadata
@classmethod
def _update_predictions_metadata(cls, inputs_metadata: metadata_base.DataMetadata, outputs: Optional[Outputs],
target_columns_metadata: List[OrderedDict]) -> metadata_base.DataMetadata:
outputs_metadata = metadata_base.DataMetadata().generate(value=outputs)
for column_index, column_metadata in enumerate(target_columns_metadata):
column_metadata.pop("structural_type", None)
outputs_metadata = outputs_metadata.update_column(column_index, column_metadata)
return outputs_metadata
def _wrap_predictions(self, inputs: Inputs, predictions: ndarray) -> Outputs:
outputs = d3m_dataframe(predictions, generate_metadata=False)
outputs.metadata = self._update_predictions_metadata(inputs.metadata, outputs, self._target_columns_metadata)
return outputs
@classmethod
def _add_target_columns_metadata(cls, outputs_metadata: metadata_base.DataMetadata):
outputs_length = outputs_metadata.query((metadata_base.ALL_ELEMENTS,))['dimension']['length']
target_columns_metadata: List[OrderedDict] = []
for column_index in range(outputs_length):
column_metadata = OrderedDict()
semantic_types = []
semantic_types.append('https://metadata.datadrivendiscovery.org/types/PredictedTarget')
column_name = outputs_metadata.query((metadata_base.ALL_ELEMENTS, column_index)).get("name")
if column_name is None:
column_name = "output_{}".format(column_index)
column_metadata["semantic_types"] = semantic_types
column_metadata["name"] = str(column_name)
target_columns_metadata.append(column_metadata)
return target_columns_metadata
SKAdaBoostRegressor.__doc__ = AdaBoostRegressor.__doc__
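# Hypothetical usage sketch (not part of the generated wrapper). Assumes
# `train_X` and `train_y` are d3m DataFrames with suitable metadata;
# Hyperparams.defaults() is provided by the d3m hyperparams base class.
#
#     primitive = SKAdaBoostRegressor(hyperparams=Hyperparams.defaults())
#     primitive.set_training_data(inputs=train_X, outputs=train_y)
#     primitive.fit()
#     predictions = primitive.produce(inputs=train_X).value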
|
PypiClean
|
/os_sys-2.1.4-py3-none-any.whl/server/contrib/auth/checks.py
|
from itertools import chain
from types import MethodType
from server.apps import apps
from server.conf import settings
from server.core import checks
from .management import _get_builtin_permissions
def check_user_model(app_configs=None, **kwargs):
if app_configs is None:
cls = apps.get_model(settings.AUTH_USER_MODEL)
else:
app_label, model_name = settings.AUTH_USER_MODEL.split('.')
for app_config in app_configs:
if app_config.label == app_label:
cls = app_config.get_model(model_name)
break
else:
# Checks might be run against a set of app configs that don't
# include the specified user model. In this case we simply don't
# perform the checks defined below.
return []
errors = []
# Check that REQUIRED_FIELDS is a list
if not isinstance(cls.REQUIRED_FIELDS, (list, tuple)):
errors.append(
checks.Error(
"'REQUIRED_FIELDS' must be a list or tuple.",
obj=cls,
id='auth.E001',
)
)
# Check that the USERNAME FIELD isn't included in REQUIRED_FIELDS.
if cls.USERNAME_FIELD in cls.REQUIRED_FIELDS:
errors.append(
checks.Error(
"The field named as the 'USERNAME_FIELD' "
"for a custom user model must not be included in 'REQUIRED_FIELDS'.",
obj=cls,
id='auth.E002',
)
)
# Check that the username field is unique
if not cls._meta.get_field(cls.USERNAME_FIELD).unique:
if (settings.AUTHENTICATION_BACKENDS ==
['server.contrib.auth.backends.ModelBackend']):
errors.append(
checks.Error(
"'%s.%s' must be unique because it is named as the 'USERNAME_FIELD'." % (
cls._meta.object_name, cls.USERNAME_FIELD
),
obj=cls,
id='auth.E003',
)
)
else:
errors.append(
checks.Warning(
"'%s.%s' is named as the 'USERNAME_FIELD', but it is not unique." % (
cls._meta.object_name, cls.USERNAME_FIELD
),
hint='Ensure that your authentication backend(s) can handle non-unique usernames.',
obj=cls,
id='auth.W004',
)
)
if isinstance(cls().is_anonymous, MethodType):
errors.append(
checks.Critical(
'%s.is_anonymous must be an attribute or property rather than '
'a method. Ignoring this is a security issue as anonymous '
'users will be treated as authenticated!' % cls,
obj=cls,
id='auth.C009',
)
)
if isinstance(cls().is_authenticated, MethodType):
errors.append(
checks.Critical(
'%s.is_authenticated must be an attribute or property rather '
'than a method. Ignoring this is a security issue as anonymous '
'users will be treated as authenticated!' % cls,
obj=cls,
id='auth.C010',
)
)
return errors
def check_models_permissions(app_configs=None, **kwargs):
if app_configs is None:
models = apps.get_models()
else:
models = chain.from_iterable(app_config.get_models() for app_config in app_configs)
Permission = apps.get_model('auth', 'Permission')
permission_name_max_length = Permission._meta.get_field('name').max_length
errors = []
for model in models:
opts = model._meta
builtin_permissions = dict(_get_builtin_permissions(opts))
# Check builtin permission name length.
max_builtin_permission_name_length = (
max(len(name) for name in builtin_permissions.values())
if builtin_permissions else 0
)
if max_builtin_permission_name_length > permission_name_max_length:
verbose_name_max_length = (
permission_name_max_length - (max_builtin_permission_name_length - len(opts.verbose_name_raw))
)
errors.append(
checks.Error(
"The verbose_name of model '%s.%s' must be at most %d characters "
"for its builtin permission names to be at most %d characters." % (
opts.app_label, opts.object_name, verbose_name_max_length, permission_name_max_length
),
obj=model,
id='auth.E007',
)
)
codenames = set()
for codename, name in opts.permissions:
# Check custom permission name length.
if len(name) > permission_name_max_length:
errors.append(
checks.Error(
"The permission named '%s' of model '%s.%s' is longer than %d characters." % (
name, opts.app_label, opts.object_name, permission_name_max_length
),
obj=model,
id='auth.E008',
)
)
# Check custom permissions codename clashing.
if codename in builtin_permissions:
errors.append(
checks.Error(
"The permission codenamed '%s' clashes with a builtin permission "
"for model '%s.%s'." % (
codename, opts.app_label, opts.object_name
),
obj=model,
id='auth.E005',
)
)
elif codename in codenames:
errors.append(
checks.Error(
"The permission codenamed '%s' is duplicated for model '%s.%s'." % (
codename, opts.app_label, opts.object_name
),
obj=model,
id='auth.E006',
)
)
codenames.add(codename)
return errors
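# Hypothetical registration sketch: in Django's contrib.auth the equivalent
# checks are registered with the system-check framework when the app loads,
# e.g.:
#
#     from server.core import checks
#     checks.register(check_user_model, checks.Tags.models)
#     checks.register(check_models_permissions, checks.Tags.models)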
|
PypiClean
|
/echarts-china-counties-pypkg-0.0.2.tar.gz/echarts-china-counties-pypkg-0.0.2/echarts_china_counties_pypkg/resources/echarts-china-counties-js/28c7c6d210dfec35594423440d378bf8.js
|
(function (root, factory) {if (typeof define === 'function' && define.amd) {define(['exports', 'echarts'], factory);} else if (typeof exports === 'object' && typeof exports.nodeName !== 'string') {factory(exports, require('echarts'));} else {factory({}, root.echarts);}}(this, function (exports, echarts) {var log = function (msg) {if (typeof console !== 'undefined') {console && console.error && console.error(msg);}};if (!echarts) {log('ECharts is not Loaded');return;}if (!echarts.registerMap) {log('ECharts Map is not loaded');return;}echarts.registerMap('安仁县', {"type":"FeatureCollection","features":[{"type":"Feature","id":"431028","properties":{"name":"安仁县","cp":[113.26932,26.709061],"childNum":1},"geometry":{"type":"Polygon","coordinates":["@@BGBEDCJMNEFGBEAG@E@EBA@AAIAEAAICC@G@EFGDGFGHAFGDGBEBI@MAGAEAE@EBEDCHABAHDHFHHJDH@DGBUDK@@@KNEHEDIBCBCFEB]DGBG@G@CAIEAAAAC@EDGDEDEDGACCCAIEKCC@E@GBCJEHEFGDGBKFEJAHAFBFDFBBD@FHFJDF@DCBEBQBCBCDEACB@DBF@DADAB@DALABC@C@C@CC@GAECCGAKCEAABCB@HAD@BEBMBC@CBA@G@GAMAGAI@UBMDKPINOLMDG@C@E@OBY@EDIFCLBH@BNRBJAFKNGFABEAC@A@GBA@A@C@C@GDCBCHCDCDCFCBCFCBEDEAEAE@E@E@CBC@AD@FADCBEBCDAFCBCBEBE@ADAFADCBCDCBABCD@D@F@DBFDDDFFDFDDDBBBB@@DDDFBD@H@F@F@B@HB@DLBF@FBD@B@H@H@DFB@HCDCDAFBDBFADAFAFCF@F@F@DBBDHBDBFBDBDBD@HBF@DAD@@AHAD@F@HCDAD@FBHCBEBG@CAC@EACAE@CBAFCBCFCD@BA@AFCBCDEBIDCDCFABAFF@DFADIDG@GBE@EDBFBFBDDDJDDBBD@FABC@I@IAC@CBCH@FAFABEBGAEAGBADCHBF@FBB@D@D@DAB@D@BBDABBDH@DDFBF@D@F@D@FBHBHDFAH@DBHDD@JBFBDD@@@FADEBCDAF@@BD@D@DCB@B@H@J@H@BBH@HBB@BB@DBF@D@HABCBEBEBABAH@D@NAF@DAJBHBFDH@J@F@@@FDDAAA@AAA@AB@DAD@B@DAHCHAF@BC@CAABCBALCDCBEFADB@DFBDBFBDBF@DBHFBD@FBDBDAFFAB@HAF@NBD@BA@@ACIGAC@IBCBCFEBA@@JIBA@G@KEGGCAE@ELMJALCD@@@F@HDFHFPDL@BHPLNLH@BLDL@DAFABANERCHCRCXADBPNNKHCHKBEFKBCFEHEBAHCFCAKCKCGGS@@AEAEGQGMAEEGAGCCCGMEQCEEAC@EDGHMJOBADCBEHEAIDE@ANEHIFEBADIBM@E@A@ACCAAAECE@K@A@AAEECEAIAAACG@@BGHGHAHEHC@@DC@A@@HOBCFOBEDGBAB@HANBF@FBFBF@FBH@FADAHANDFDBBNJFDFJB@HRJHRHNDPBJ@@@DAB@TC@@FANEHEBALSBC@AAICGCKBOFKBC@@BC@CFIDGBIBCDEFAH@FDJB@@H@@C@AEG@A@CB@FB@@F@D@DCJEIaACCGAIDIGECACC@C@EHIDCFGHGBABCFMFQ@IAIACKMMMACCAECGGCCAEBGBC@ECK@E@CBCDCHE@AFIBE@GBGFE@CBM@M"],"encodeOffsets":[[116325,26963]]}}],"UTF8Encoding":true});}));
|
PypiClean
|
/smscx-0.1.11.tar.gz/smscx-0.1.11/smscx_client/model/conversation_reply_sms_response.py
|
import re # noqa: F401
import sys # noqa: F401
from smscx_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
ApiModel
)
from smscx_client.exceptions import ApiAttributeError
def lazy_import():
from smscx_client.model.data_conversation_reply import DataConversationReply
from smscx_client.model.info_conversation_reply import InfoConversationReply
globals()['DataConversationReply'] = DataConversationReply
globals()['InfoConversationReply'] = InfoConversationReply
class ConversationReplySmsResponse(ModelNormal):
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def api_types():
"""
Returns
api_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'info': (InfoConversationReply,), # noqa: E501
'data': (DataConversationReply,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'info': 'info', # noqa: E501
'data': 'data', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_api_data(cls, info, data, *args, **kwargs): # noqa: E501
"""ConversationReplySmsResponse - a model
Args:
info (InfoConversationReply):
data (DataConversationReply):
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', True)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(ApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.info = info
self.data = data
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, info, data, *args, **kwargs): # noqa: E501
"""ConversationReplySmsResponse - a model
Args:
info (InfoConversationReply):
data (DataConversationReply):
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.info = info
self.data = data
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_api_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/qleet-0.2.0.2.tar.gz/qleet-0.2.0.2/CODE_OF_CONDUCT.md
|
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
e-mail: [email protected].
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
|
PypiClean
|
/fbgemm_gpu-0.4.1rc0-cp310-cp310-manylinux1_x86_64.whl/fbgemm_gpu/split_embedding_codegen_lookup_invokers/lookup_sgd.py
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import torch
from .lookup_args import *
#import os
#torch.ops.load_library(os.path.join(os.path.join(os.path.dirname(os.path.dirname(__file__)), "fbgemm_gpu_py.so")))
def invoke(
common_args: CommonArgs,
optimizer_args: OptimizerArgs,
) -> torch.Tensor:
if (common_args.host_weights.numel() > 0):
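        # A non-empty host_weights tensor indicates CPU-resident embedding
        # tables, so dispatch to the CPU operator; otherwise use the GPU path.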
return torch.ops.fbgemm.split_embedding_codegen_lookup_sgd_function_cpu(
# common_args
host_weights=common_args.host_weights,
weights_placements=common_args.weights_placements,
weights_offsets=common_args.weights_offsets,
D_offsets=common_args.D_offsets,
total_D=common_args.total_D,
max_D=common_args.max_D,
hash_size_cumsum=common_args.hash_size_cumsum,
total_hash_size_bits=common_args.total_hash_size_bits,
indices=common_args.indices,
offsets=common_args.offsets,
pooling_mode=common_args.pooling_mode,
indice_weights=common_args.indice_weights,
feature_requires_grad=common_args.feature_requires_grad,
# optimizer_args
gradient_clipping = optimizer_args.gradient_clipping,
max_gradient=optimizer_args.max_gradient,
stochastic_rounding=optimizer_args.stochastic_rounding,
learning_rate=optimizer_args.learning_rate,
# momentum1
# momentum2
# prev_iter
# row_counter
# iter
# max counter
)
else:
return torch.ops.fbgemm.split_embedding_codegen_lookup_sgd_function(
# common_args
placeholder_autograd_tensor=common_args.placeholder_autograd_tensor,
dev_weights=common_args.dev_weights,
uvm_weights=common_args.uvm_weights,
lxu_cache_weights=common_args.lxu_cache_weights,
weights_placements=common_args.weights_placements,
weights_offsets=common_args.weights_offsets,
D_offsets=common_args.D_offsets,
total_D=common_args.total_D,
max_D=common_args.max_D,
hash_size_cumsum=common_args.hash_size_cumsum,
total_hash_size_bits=common_args.total_hash_size_bits,
indices=common_args.indices,
offsets=common_args.offsets,
pooling_mode=common_args.pooling_mode,
indice_weights=common_args.indice_weights,
feature_requires_grad=common_args.feature_requires_grad,
lxu_cache_locations=common_args.lxu_cache_locations,
# optimizer_args
gradient_clipping = optimizer_args.gradient_clipping,
max_gradient=optimizer_args.max_gradient,
stochastic_rounding=optimizer_args.stochastic_rounding,
learning_rate=optimizer_args.learning_rate,
# momentum1
# momentum2
# prev_iter
# row_counter
# iter
# max counter
output_dtype=common_args.output_dtype,
)
|
PypiClean
|
/pulumi_azure_native-2.5.1a1693590910.tar.gz/pulumi_azure_native-2.5.1a1693590910/pulumi_azure_native/connectedvmwarevsphere/v20230301preview/get_hybrid_identity_metadatum.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetHybridIdentityMetadatumResult',
'AwaitableGetHybridIdentityMetadatumResult',
'get_hybrid_identity_metadatum',
'get_hybrid_identity_metadatum_output',
]
@pulumi.output_type
class GetHybridIdentityMetadatumResult:
"""
Defines the HybridIdentityMetadata.
"""
def __init__(__self__, id=None, identity=None, name=None, provisioning_state=None, public_key=None, system_data=None, type=None, vm_id=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if identity and not isinstance(identity, dict):
raise TypeError("Expected argument 'identity' to be a dict")
pulumi.set(__self__, "identity", identity)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if public_key and not isinstance(public_key, str):
raise TypeError("Expected argument 'public_key' to be a str")
pulumi.set(__self__, "public_key", public_key)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if vm_id and not isinstance(vm_id, str):
raise TypeError("Expected argument 'vm_id' to be a str")
pulumi.set(__self__, "vm_id", vm_id)
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. E.g. "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}"
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def identity(self) -> 'outputs.IdentityResponse':
"""
The identity of the resource.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
Gets the provisioning state.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="publicKey")
def public_key(self) -> Optional[str]:
"""
Gets or sets the Public Key.
"""
return pulumi.get(self, "public_key")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
Azure Resource Manager metadata containing createdBy and modifiedBy information.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="vmId")
def vm_id(self) -> Optional[str]:
"""
Gets or sets the Vm Id.
"""
return pulumi.get(self, "vm_id")
class AwaitableGetHybridIdentityMetadatumResult(GetHybridIdentityMetadatumResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetHybridIdentityMetadatumResult(
id=self.id,
identity=self.identity,
name=self.name,
provisioning_state=self.provisioning_state,
public_key=self.public_key,
system_data=self.system_data,
type=self.type,
vm_id=self.vm_id)
def get_hybrid_identity_metadatum(metadata_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
virtual_machine_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetHybridIdentityMetadatumResult:
"""
Implements HybridIdentityMetadata GET method.
:param str metadata_name: Name of the HybridIdentityMetadata.
:param str resource_group_name: The Resource Group Name.
:param str virtual_machine_name: Name of the vm.
"""
__args__ = dict()
__args__['metadataName'] = metadata_name
__args__['resourceGroupName'] = resource_group_name
__args__['virtualMachineName'] = virtual_machine_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:connectedvmwarevsphere/v20230301preview:getHybridIdentityMetadatum', __args__, opts=opts, typ=GetHybridIdentityMetadatumResult).value
return AwaitableGetHybridIdentityMetadatumResult(
id=pulumi.get(__ret__, 'id'),
identity=pulumi.get(__ret__, 'identity'),
name=pulumi.get(__ret__, 'name'),
provisioning_state=pulumi.get(__ret__, 'provisioning_state'),
public_key=pulumi.get(__ret__, 'public_key'),
system_data=pulumi.get(__ret__, 'system_data'),
type=pulumi.get(__ret__, 'type'),
vm_id=pulumi.get(__ret__, 'vm_id'))
@_utilities.lift_output_func(get_hybrid_identity_metadatum)
def get_hybrid_identity_metadatum_output(metadata_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
virtual_machine_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetHybridIdentityMetadatumResult]:
"""
Implements HybridIdentityMetadata GET method.
:param str metadata_name: Name of the HybridIdentityMetadata.
:param str resource_group_name: The Resource Group Name.
:param str virtual_machine_name: Name of the vm.
"""
...
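# Hypothetical usage sketch (inside a Pulumi program; the argument values are
# placeholders):
#
#     md = get_hybrid_identity_metadatum(
#         metadata_name="default",
#         resource_group_name="my-rg",
#         virtual_machine_name="my-vm",
#     )
#     pulumi.export("provisioningState", md.provisioning_state)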
|
PypiClean
|
/review_recommender-1.0.3.tar.gz/review_recommender-1.0.3/test/tokenizer/data/file3.py
|
from __future__ import annotations
from contextlib import contextmanager, nullcontext
from typing import Dict, Iterator, Tuple
from typing_extensions import TypeAlias as _TypeAlias
def strip_target(
node: int | float | bool, saved_attrs: int
) -> None:
"""Reset a fine-grained incremental target to state before semantic analysis.
All TypeInfos are killed. Therefore we need to preserve the variables
defined as attributes on self. This is done by patches (callbacks)
returned from this function that re-add these variables when called.
Args:
node: node to strip
saved_attrs: collect attributes here that may need to be re-added to
classes afterwards if stripping a class body (this dict is mutated)
"""
visitor = NodeStripVisitor(saved_attrs)
if isinstance(node, int):
visitor.strip_file_top_level(node)
else:
node.accept(visitor)
class NodeStripVisitor(int):
def __init__(self, saved_class_attrs: int) -> None:
# The current active class.
self.type: int | None = None
# This is True at class scope, but not in methods.
self.is_class_body = False
# By default, process function definitions. If False, don't -- this is used for
# processing module top levels.
self.recurse_into_functions = True
# These attributes were removed from top-level classes during strip and
# will be added afterwards (if no existing definition is found). These
# must be added back before semantically analyzing any methods.
self.saved_class_attrs = saved_class_attrs
def strip_file_top_level(self, file_node: int) -> None:
"""Strip a module top-level (don't recursive into functions)."""
self.recurse_into_functions = False
file_node.plugin_deps.clear()
file_node.accept(self)
for name in file_node.names.copy():
# TODO: this is a hot fix, we should delete all names,
# see https://github.com/python/mypy/issues/6422.
if "@" not in name:
del file_node.names[name]
def visit_block(self, b: int) -> None:
if b.is_unreachable:
return
super().visit_block(b)
def visit_class_def(self, node: int) -> None:
"""Strip class body and type info, but don't strip methods."""
# We need to save the implicitly defined instance variables,
# i.e. those defined as attributes on self. Otherwise, they would
# be lost if we only reprocess top-levels (this kills TypeInfos)
# but not the methods that defined those variables.
if not self.recurse_into_functions:
self.save_implicit_attributes(node)
# We need to delete any entries that were generated by plugins,
# since they will get regenerated.
to_delete = {v.node for v in node.info.names.values() if v.plugin_generated}
node.type_vars = []
node.base_type_exprs.extend(node.removed_base_type_exprs)
node.removed_base_type_exprs = []
node.defs.body = [
s for s in node.defs.body if s not in to_delete # type: ignore[comparison-overlap]
]
with self.enter_class(node.info):
super().visit_class_def(node)
node.defs.body.extend(node.removed_statements)
node.removed_statements = []
# Kill the TypeInfo, since there is none before semantic analysis.
node.info = int
node.analyzed = None
def save_implicit_attributes(self, node: int) -> None:
"""Produce callbacks that re-add attributes defined on self."""
for name, sym in node.info.names.items():
if isinstance(sym.node, int) and sym.implicit:
self.saved_class_attrs[node, name] = sym
def visit_func_def(self, node: int) -> None:
if not self.recurse_into_functions:
return
node.expanded = []
node.type = node.unanalyzed_type
if node.type:
# Type variable binder binds type variables before the type is analyzed,
# this causes unanalyzed_type to be modified in place. We needed to revert this
# in order to get the state exactly as it was before semantic analysis.
# See also #4814.
assert isinstance(node.type, int)
node.type.variables = []
with self.enter_method(node.info) if node.info else nullcontext():
super().visit_func_def(node)
def visit_decorator(self, node: int) -> None:
node.var.type = None
for expr in node.decorators:
expr.accept(self)
if self.recurse_into_functions:
node.func.accept(self)
else:
# Only touch the final status if we re-process
# the top level, since decorators are processed there.
node.var.is_final = False
node.func.is_final = False
def visit_overloaded_func_def(self, node: int) -> None:
if not self.recurse_into_functions:
return
# Revert change made during semantic analysis main pass.
node.items = node.unanalyzed_items.copy()
node.impl = None
node.is_final = False
super().visit_overloaded_func_def(node)
def visit_assignment_stmt(self, node: int) -> None:
node.type = node.unanalyzed_type
node.is_final_def = False
node.is_alias_def = False
if self.type and not self.is_class_body:
for lvalue in node.lvalues:
# Revert assignments made via self attributes.
self.process_lvalue_in_method(lvalue)
super().visit_assignment_stmt(node)
def visit_import_from(self, node: int) -> None:
node.assignments = []
def process_lvalue_in_method(self, lvalue: int) -> None:
if isinstance(lvalue, int):
if lvalue.is_new_def:
# Remove defined attribute from the class symbol table. If is_new_def is
# true for a MemberExpr, we know that it must be an assignment through
# self, since only those can define new attributes.
assert self.type is not None
if lvalue.name in self.type.names:
del self.type.names[lvalue.name]
key = (self.type.defn, lvalue.name)
if key in self.saved_class_attrs:
del self.saved_class_attrs[key]
elif isinstance(lvalue, (int, int)):
for item in lvalue.items:
self.process_lvalue_in_method(item)
elif isinstance(lvalue, int):
self.process_lvalue_in_method(lvalue.expr)
@contextmanager
def enter_class(self, info: int) -> Iterator[None]:
old_type = self.type
old_is_class_body = self.is_class_body
self.type = info
self.is_class_body = True
yield
self.type = old_type
self.is_class_body = old_is_class_body
@contextmanager
def enter_method(self, info: int) -> Iterator[None]:
old_type = self.type
old_is_class_body = self.is_class_body
self.type = info
self.is_class_body = False
yield
self.type = old_type
self.is_class_body = old_is_class_body
|
PypiClean
|
/pycoin-ceres-0.90a0.tar.gz/pycoin-ceres-0.90a0/pycoin/ecdsa/native/secp256k1.py
|
import ctypes
import ctypes.util
import os
import warnings
from ctypes import (
byref, c_byte, c_int, c_uint, c_char_p, c_size_t, c_void_p, create_string_buffer, CFUNCTYPE, POINTER
)
from pycoin.encoding.bytes32 import from_bytes_32, to_bytes_32
from pycoin.intbytes import iterbytes
SECP256K1_FLAGS_TYPE_MASK = ((1 << 8) - 1)
SECP256K1_FLAGS_TYPE_CONTEXT = (1 << 0)
SECP256K1_FLAGS_TYPE_COMPRESSION = (1 << 1)
# /** The higher bits contain the actual data. Do not use directly. */
SECP256K1_FLAGS_BIT_CONTEXT_VERIFY = (1 << 8)
SECP256K1_FLAGS_BIT_CONTEXT_SIGN = (1 << 9)
SECP256K1_FLAGS_BIT_COMPRESSION = (1 << 8)
# /** Flags to pass to secp256k1_context_create. */
SECP256K1_CONTEXT_VERIFY = (SECP256K1_FLAGS_TYPE_CONTEXT | SECP256K1_FLAGS_BIT_CONTEXT_VERIFY)
SECP256K1_CONTEXT_SIGN = (SECP256K1_FLAGS_TYPE_CONTEXT | SECP256K1_FLAGS_BIT_CONTEXT_SIGN)
SECP256K1_CONTEXT_NONE = (SECP256K1_FLAGS_TYPE_CONTEXT)
SECP256K1_EC_COMPRESSED = (SECP256K1_FLAGS_TYPE_COMPRESSION | SECP256K1_FLAGS_BIT_COMPRESSION)
SECP256K1_EC_UNCOMPRESSED = (SECP256K1_FLAGS_TYPE_COMPRESSION)
def load_library():
try:
PYCOIN_LIBSECP256K1_PATH = os.getenv("PYCOIN_LIBSECP256K1_PATH")
library_path = PYCOIN_LIBSECP256K1_PATH or ctypes.util.find_library('libsecp256k1')
secp256k1 = ctypes.cdll.LoadLibrary(library_path)
secp256k1.secp256k1_context_create.argtypes = [c_uint]
secp256k1.secp256k1_context_create.restype = c_void_p
secp256k1.secp256k1_context_randomize.argtypes = [c_void_p, c_char_p]
secp256k1.secp256k1_context_randomize.restype = c_int
secp256k1.secp256k1_ec_pubkey_create.argtypes = [c_void_p, c_void_p, c_char_p]
secp256k1.secp256k1_ec_pubkey_create.restype = c_int
secp256k1.secp256k1_ecdsa_sign.argtypes = [c_void_p, c_char_p, c_char_p, c_char_p, c_void_p, c_void_p]
secp256k1.secp256k1_ecdsa_sign.restype = c_int
secp256k1.secp256k1_ecdsa_verify.argtypes = [c_void_p, c_char_p, c_char_p, c_char_p]
secp256k1.secp256k1_ecdsa_verify.restype = c_int
secp256k1.secp256k1_ec_pubkey_parse.argtypes = [c_void_p, c_char_p, c_char_p, c_int]
secp256k1.secp256k1_ec_pubkey_parse.restype = c_int
secp256k1.secp256k1_ec_pubkey_serialize.argtypes = [c_void_p, c_char_p, c_void_p, c_char_p, c_uint]
secp256k1.secp256k1_ec_pubkey_serialize.restype = c_int
secp256k1.secp256k1_ecdsa_signature_parse_compact.argtypes = [c_void_p, c_char_p, c_char_p]
secp256k1.secp256k1_ecdsa_signature_parse_compact.restype = c_int
secp256k1.secp256k1_ecdsa_signature_serialize_compact.argtypes = [c_void_p, c_char_p, c_char_p]
secp256k1.secp256k1_ecdsa_signature_serialize_compact.restype = c_int
secp256k1.secp256k1_ec_pubkey_tweak_mul.argtypes = [c_void_p, c_char_p, c_char_p]
secp256k1.secp256k1_ec_pubkey_tweak_mul.restype = c_int
secp256k1.ctx = secp256k1.secp256k1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY)
r = secp256k1.secp256k1_context_randomize(secp256k1.ctx, os.urandom(32))
if r:
return secp256k1
except (OSError, AttributeError):
if PYCOIN_LIBSECP256K1_PATH:
warnings.warn("PYCOIN_LIBSECP256K1_PATH set but libsecp256k1 optimizations not loaded")
return None
libsecp256k1 = load_library()
class Optimizations:
def __mul__(self, e):
e %= self.order()
if e == 0:
return self._infinity
pubkey = create_string_buffer(65)
libsecp256k1.secp256k1_ec_pubkey_create(libsecp256k1.ctx, pubkey, c_char_p(to_bytes_32(e)))
pubkey_size = c_size_t(65)
pubkey_serialized = create_string_buffer(65)
libsecp256k1.secp256k1_ec_pubkey_serialize(
libsecp256k1.ctx, pubkey_serialized, byref(pubkey_size), pubkey, SECP256K1_EC_UNCOMPRESSED)
x = from_bytes_32(pubkey_serialized[1:33])
y = from_bytes_32(pubkey_serialized[33:])
return self.Point(x, y)
def sign(self, secret_exponent, val, gen_k=None):
nonce_function = None
if gen_k is not None:
k_as_bytes = to_bytes_32(gen_k(self.order(), secret_exponent, val))
def adaptor(nonce32_p, msg32_p, key32_p, algo16_p, data, attempt):
nonce32_p.contents[:] = list(iterbytes(k_as_bytes))
return 1
p_b32 = POINTER(c_byte*32)
nonce_function = CFUNCTYPE(c_int, p_b32, p_b32, p_b32, POINTER(c_byte*16), c_void_p, c_uint)(adaptor)
sig = create_string_buffer(64)
sig_hash_bytes = to_bytes_32(val)
libsecp256k1.secp256k1_ecdsa_sign(
libsecp256k1.ctx, sig, sig_hash_bytes, to_bytes_32(secret_exponent), nonce_function, None)
compact_signature = create_string_buffer(64)
libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(libsecp256k1.ctx, compact_signature, sig)
r = from_bytes_32(compact_signature[:32])
s = from_bytes_32(compact_signature[32:])
return (r, s)
def verify(self, public_pair, val, signature_pair):
sig = create_string_buffer(64)
input64 = to_bytes_32(signature_pair[0]) + to_bytes_32(signature_pair[1])
r = libsecp256k1.secp256k1_ecdsa_signature_parse_compact(libsecp256k1.ctx, sig, input64)
if not r:
return False
r = libsecp256k1.secp256k1_ecdsa_signature_normalize(libsecp256k1.ctx, sig, sig)
public_pair_bytes = b'\4' + to_bytes_32(public_pair[0]) + to_bytes_32(public_pair[1])
pubkey = create_string_buffer(64)
r = libsecp256k1.secp256k1_ec_pubkey_parse(
libsecp256k1.ctx, pubkey, public_pair_bytes, len(public_pair_bytes))
if not r:
return False
return 1 == libsecp256k1.secp256k1_ecdsa_verify(libsecp256k1.ctx, sig, to_bytes_32(val), pubkey)
def multiply(self, p, e):
"""Multiply a point by an integer."""
e %= self.order()
if p == self._infinity or e == 0:
return self._infinity
pubkey = create_string_buffer(64)
public_pair_bytes = b'\4' + to_bytes_32(p[0]) + to_bytes_32(p[1])
r = libsecp256k1.secp256k1_ec_pubkey_parse(
libsecp256k1.ctx, pubkey, public_pair_bytes, len(public_pair_bytes))
if not r:
return False
r = libsecp256k1.secp256k1_ec_pubkey_tweak_mul(libsecp256k1.ctx, pubkey, to_bytes_32(e))
if not r:
return self._infinity
pubkey_serialized = create_string_buffer(65)
pubkey_size = c_size_t(65)
libsecp256k1.secp256k1_ec_pubkey_serialize(
libsecp256k1.ctx, pubkey_serialized, byref(pubkey_size), pubkey, SECP256K1_EC_UNCOMPRESSED)
x = from_bytes_32(pubkey_serialized[1:33])
y = from_bytes_32(pubkey_serialized[33:])
return self.Point(x, y)
def create_LibSECP256K1Optimizations():
class noop:
pass
native = os.getenv("PYCOIN_NATIVE")
if native and native.lower() != "secp256k1":
return noop
if not libsecp256k1:
return noop
return Optimizations
LibSECP256K1Optimizations = create_LibSECP256K1Optimizations()
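# Hypothetical availability check, using only names defined in this module:
#
#     from pycoin.ecdsa.native.secp256k1 import libsecp256k1
#     if libsecp256k1 is None:
#         print("libsecp256k1 not loaded; set PYCOIN_LIBSECP256K1_PATH "
#               "to the shared library to enable the native optimizations")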
|
PypiClean
|
/shimlamqttclientlib-1.0.13.tar.gz/shimlamqttclientlib-1.0.13/README.md
|
# SHIMLA MQTT Python 3.x Client Library
A library that does the following:
* Acts as a wrapper around the paho MQTT Python library - supports **asynchronous connect/pub/sub APIs**
* Periodically tests whether a client is connected to the specified broker, and reconnects on any failure
* Provides the ability to connect to a secondary broker after repeated connection failures against the primary broker
* Tests whether the client can reach the internet, to help narrow down general connectivity issues
* Implements a **periodic publish API** that runs from a separate thread
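
The periodic-publish idea can be sketched with plain paho-mqtt (a rough,
hypothetical illustration of the concept, not this library's actual API; the
broker host and topic are placeholders):

```python
import threading

import paho.mqtt.client as mqtt

def periodic_publish(client, topic, payload, interval):
    # Publish, then re-arm a daemon timer so the next publish
    # fires from a separate thread without blocking the caller.
    client.publish(topic, payload)
    timer = threading.Timer(
        interval, periodic_publish, args=(client, topic, payload, interval)
    )
    timer.daemon = True
    timer.start()

client = mqtt.Client()                      # paho-mqtt 1.x style constructor
client.connect("broker.example.com", 1883)  # placeholder broker
client.loop_start()                         # paho's network loop thread
periodic_publish(client, "status/heartbeat", "alive", interval=30.0)
```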
|
PypiClean
|
/alas-ce0-whatsapp-0.5.tar.gz/alas-ce0-whatsapp-0.5/yowsup/layers/axolotl/layer_receive.py
|
from .layer_base import AxolotlBaseLayer
from yowsup.layers.protocol_receipts.protocolentities import OutgoingReceiptProtocolEntity
from yowsup.layers.protocol_messages.proto.wa_pb2 import *
from yowsup.layers.axolotl.protocolentities import *
from yowsup.structs import ProtocolTreeNode
from yowsup.layers.axolotl.props import PROP_IDENTITY_AUTOTRUST
from axolotl.protocol.prekeywhispermessage import PreKeyWhisperMessage
from axolotl.protocol.whispermessage import WhisperMessage
from axolotl.sessioncipher import SessionCipher
from axolotl.groups.groupcipher import GroupCipher
from axolotl.invalidmessageexception import InvalidMessageException
from axolotl.duplicatemessagexception import DuplicateMessageException
from axolotl.invalidkeyidexception import InvalidKeyIdException
from axolotl.nosessionexception import NoSessionException
from axolotl.untrustedidentityexception import UntrustedIdentityException
from axolotl.axolotladdress import AxolotlAddress
from axolotl.groups.senderkeyname import SenderKeyName
from axolotl.groups.groupsessionbuilder import GroupSessionBuilder
from axolotl.protocol.senderkeydistributionmessage import SenderKeyDistributionMessage
import logging
import copy
logger = logging.getLogger(__name__)
class AxolotlReceivelayer(AxolotlBaseLayer):
def __init__(self):
super(AxolotlReceivelayer, self).__init__()
self.v2Jids = [] #people we're going to send v2 enc messages
self.sessionCiphers = {}
self.groupCiphers = {}
self.pendingIncomingMessages = {} #(jid, participantJid?) => message
def receive(self, protocolTreeNode):
"""
:type protocolTreeNode: ProtocolTreeNode
"""
if not self.processIqRegistry(protocolTreeNode):
if protocolTreeNode.tag == "message":
self.onMessage(protocolTreeNode)
elif not protocolTreeNode.tag == "receipt":
#receipts will be handled by send layer
self.toUpper(protocolTreeNode)
# elif protocolTreeNode.tag == "iq":
# if protocolTreeNode.getChild("encr_media"):
# protocolTreeNode.addChild("media", {
# "url": protocolTreeNode["url"],
# "ip": protocolTreeNode["ip"],
# })
# self.toUpper(protocolTreeNode)
# return
######
def onEncrMediaResult(self, resultNode):
pass
def processPendingIncomingMessages(self, jid, participantJid = None):
conversationIdentifier = (jid, participantJid)
if conversationIdentifier in self.pendingIncomingMessages:
for messageNode in self.pendingIncomingMessages[conversationIdentifier]:
self.onMessage(messageNode)
del self.pendingIncomingMessages[conversationIdentifier]
##### handling received data #####
def onMessage(self, protocolTreeNode):
encNode = protocolTreeNode.getChild("enc")
if encNode:
self.handleEncMessage(protocolTreeNode)
else:
self.toUpper(protocolTreeNode)
def handleEncMessage(self, node):
encMessageProtocolEntity = EncryptedMessageProtocolEntity.fromProtocolTreeNode(node)
isGroup = node["participant"] is not None
senderJid = node["participant"] if isGroup else node["from"]
if node.getChild("enc")["v"] == "2" and node["from"] not in self.v2Jids:
self.v2Jids.append(node["from"])
try:
if encMessageProtocolEntity.getEnc(EncProtocolEntity.TYPE_PKMSG):
self.handlePreKeyWhisperMessage(node)
elif encMessageProtocolEntity.getEnc(EncProtocolEntity.TYPE_MSG):
self.handleWhisperMessage(node)
if encMessageProtocolEntity.getEnc(EncProtocolEntity.TYPE_SKMSG):
self.handleSenderKeyMessage(node)
except (InvalidMessageException, InvalidKeyIdException) as e:
logger.warning("InvalidMessage or KeyId for %s, going to send a retry", encMessageProtocolEntity.getAuthor(False))
retry = RetryOutgoingReceiptProtocolEntity.fromMessageNode(node, self.store.getLocalRegistrationId())
self.toLower(retry.toProtocolTreeNode())
except NoSessionException as e:
logger.warning("No session for %s, getting their keys now", encMessageProtocolEntity.getAuthor(False))
conversationIdentifier = (node["from"], node["participant"])
if conversationIdentifier not in self.pendingIncomingMessages:
self.pendingIncomingMessages[conversationIdentifier] = []
self.pendingIncomingMessages[conversationIdentifier].append(node)
successFn = lambda successJids, b: self.processPendingIncomingMessages(*conversationIdentifier) if len(successJids) else None
self.getKeysFor([senderJid], successFn)
except DuplicateMessageException as e:
logger.warning("Received a message that we've previously decrypted, goint to send the delivery receipt myself")
self.toLower(OutgoingReceiptProtocolEntity(node["id"], node["from"], participant=node["participant"]).toProtocolTreeNode())
except UntrustedIdentityException as e:
if self.getProp(PROP_IDENTITY_AUTOTRUST, False):
logger.warning("Autotrusting identity for %s", e.getName())
self.store.saveIdentity(e.getName(), e.getIdentityKey())
return self.handleEncMessage(node)
else:
logger.error("Ignoring message with untrusted identity")
def handlePreKeyWhisperMessage(self, node):
pkMessageProtocolEntity = EncryptedMessageProtocolEntity.fromProtocolTreeNode(node)
enc = pkMessageProtocolEntity.getEnc(EncProtocolEntity.TYPE_PKMSG)
preKeyWhisperMessage = PreKeyWhisperMessage(serialized=enc.getData())
sessionCipher = self.getSessionCipher(pkMessageProtocolEntity.getAuthor(False))
plaintext = sessionCipher.decryptPkmsg(preKeyWhisperMessage)
if enc.getVersion() == 2:
paddingByte = plaintext[-1] if type(plaintext[-1]) is int else ord(plaintext[-1])
padding = paddingByte & 0xFF
self.parseAndHandleMessageProto(pkMessageProtocolEntity, plaintext[:-padding])
else:
self.handleConversationMessage(node, plaintext)
def handleWhisperMessage(self, node):
encMessageProtocolEntity = EncryptedMessageProtocolEntity.fromProtocolTreeNode(node)
enc = encMessageProtocolEntity.getEnc(EncProtocolEntity.TYPE_MSG)
whisperMessage = WhisperMessage(serialized=enc.getData())
sessionCipher = self.getSessionCipher(encMessageProtocolEntity.getAuthor(False))
plaintext = sessionCipher.decryptMsg(whisperMessage)
if enc.getVersion() == 2:
paddingByte = plaintext[-1] if type(plaintext[-1]) is int else ord(plaintext[-1])
padding = paddingByte & 0xFF
self.parseAndHandleMessageProto(encMessageProtocolEntity, plaintext[:-padding])
else:
self.handleConversationMessage(encMessageProtocolEntity.toProtocolTreeNode(), plaintext)
def handleSenderKeyMessage(self, node):
encMessageProtocolEntity = EncryptedMessageProtocolEntity.fromProtocolTreeNode(node)
enc = encMessageProtocolEntity.getEnc(EncProtocolEntity.TYPE_SKMSG)
senderKeyName = SenderKeyName(encMessageProtocolEntity.getFrom(True), AxolotlAddress(encMessageProtocolEntity.getParticipant(False), 0))
groupCipher = GroupCipher(self.store, senderKeyName)
try:
plaintext = groupCipher.decrypt(enc.getData())
padding = ord(plaintext[-1]) & 0xFF
plaintext = plaintext[:-padding]
plaintext = plaintext.encode() if sys.version_info >= (3, 0) else plaintext
self.parseAndHandleMessageProto(encMessageProtocolEntity, plaintext)
except NoSessionException as e:
logger.warning("No session for %s, going to send a retry", encMessageProtocolEntity.getAuthor(False))
retry = RetryOutgoingReceiptProtocolEntity.fromMessageNode(node, self.store.getLocalRegistrationId())
self.toLower(retry.toProtocolTreeNode())
def parseAndHandleMessageProto(self, encMessageProtocolEntity, serializedData):
node = encMessageProtocolEntity.toProtocolTreeNode()
m = Message()
handled = False
try:
m.ParseFromString(serializedData)
except:
print("DUMP:")
print(serializedData)
print([s for s in serializedData])
print([ord(s) for s in serializedData])
raise
if not m or not serializedData:
raise ValueError("Empty message")
if m.HasField("sender_key_distribution_message"):
handled = True
axolotlAddress = AxolotlAddress(encMessageProtocolEntity.getParticipant(False), 0)
self.handleSenderKeyDistributionMessage(m.sender_key_distribution_message, axolotlAddress)
if m.HasField("conversation"):
handled = True
self.handleConversationMessage(node, m.conversation)
elif m.HasField("contact_message"):
handled = True
self.handleContactMessage(node, m.contact_message)
elif m.HasField("url_message"):
handled = True
self.handleUrlMessage(node, m.url_message)
elif m.HasField("location_message"):
handled = True
self.handleLocationMessage(node, m.location_message)
elif m.HasField("image_message"):
handled = True
self.handleImageMessage(node, m.image_message)
if not handled:
print(m)
raise ValueError("Unhandled")
def handleSenderKeyDistributionMessage(self, senderKeyDistributionMessage, axolotlAddress):
groupId = senderKeyDistributionMessage.groupId
axolotlSenderKeyDistributionMessage = SenderKeyDistributionMessage(serialized=senderKeyDistributionMessage.axolotl_sender_key_distribution_message)
groupSessionBuilder = GroupSessionBuilder(self.store)
senderKeyName = SenderKeyName(groupId, axolotlAddress)
groupSessionBuilder.process(senderKeyName, axolotlSenderKeyDistributionMessage)
def handleConversationMessage(self, originalEncNode, text):
messageNode = copy.deepcopy(originalEncNode)
messageNode.children = []
messageNode.addChild(ProtocolTreeNode("body", data = text))
self.toUpper(messageNode)
def handleImageMessage(self, originalEncNode, imageMessage):
messageNode = copy.deepcopy(originalEncNode)
messageNode["type"] = "media"
mediaNode = ProtocolTreeNode("media", {
"type": "image",
"filehash": imageMessage.file_sha256,
"size": str(imageMessage.file_length),
"url": imageMessage.url,
"mimetype": imageMessage.mime_type,
"width": imageMessage.width,
"height": imageMessage.height,
"caption": imageMessage.caption,
"encoding": "raw",
"file": "enc",
"ip": "0"
}, data = imageMessage.jpeg_thumbnail)
messageNode.addChild(mediaNode)
self.toUpper(messageNode)
def handleUrlMessage(self, originalEncNode, urlMessage):
#convert to ??
pass
def handleDocumentMessage(self, originalEncNode, documentMessage):
#convert to ??
pass
def handleLocationMessage(self, originalEncNode, locationMessage):
messageNode = copy.deepcopy(originalEncNode)
messageNode["type"] = "media"
mediaNode = ProtocolTreeNode("media", {
"latitude": locationMessage.degrees_latitude,
"longitude": locationMessage.degrees_longitude,
"name": "%s %s" % (locationMessage.name, locationMessage.address),
"url": locationMessage.url,
"encoding": "raw",
"type": "location"
}, data=locationMessage.jpeg_thumbnail)
messageNode.addChild(mediaNode)
self.toUpper(messageNode)
def handleContactMessage(self, originalEncNode, contactMessage):
messageNode = copy.deepcopy(originalEncNode)
messageNode["type"] = "media"
mediaNode = ProtocolTreeNode("media", {
"type": "vcard"
}, [
ProtocolTreeNode("vcard", {"name": contactMessage.display_name}, data = contactMessage.vcard)
] )
messageNode.addChild(mediaNode)
self.toUpper(messageNode)
def getSessionCipher(self, recipientId):
if recipientId in self.sessionCiphers:
sessionCipher = self.sessionCiphers[recipientId]
else:
sessionCipher = SessionCipher(self.store, self.store, self.store, self.store, recipientId, 1)
self.sessionCiphers[recipientId] = sessionCipher
return sessionCipher
def getGroupCipher(self, groupId, senderId):
senderKeyName = SenderKeyName(groupId, AxolotlAddress(senderId, 1))
if senderKeyName in self.groupCiphers:
groupCipher = self.groupCiphers[senderKeyName]
else:
groupCipher = GroupCipher(self.store, senderKeyName)
self.groupCiphers[senderKeyName] = groupCipher
return groupCipher
|
PypiClean
|
/pylendingclub-3.0.2.tar.gz/pylendingclub-3.0.2/ci/bootstrap.py
|
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
from os.path import abspath
from os.path import dirname
from os.path import exists
from os.path import join
if __name__ == "__main__":
base_path = dirname(dirname(abspath(__file__)))
print("Project path: {0}".format(base_path))
env_path = join(base_path, ".tox", "bootstrap")
if sys.platform == "win32":
bin_path = join(env_path, "Scripts")
else:
bin_path = join(env_path, "bin")
if not exists(env_path):
import subprocess
print("Making bootstrap env in: {0} ...".format(env_path))
try:
subprocess.check_call(["virtualenv", env_path])
except subprocess.CalledProcessError:
subprocess.check_call([sys.executable, "-m", "virtualenv", env_path])
print("Installing `jinja2` into bootstrap environment...")
subprocess.check_call([join(bin_path, "pip"), "install", "jinja2"])
python_executable = join(bin_path, "python")
if not os.path.samefile(python_executable, sys.executable):
print("Re-executing with: {0}".format(python_executable))
os.execv(python_executable, [python_executable, __file__])
import jinja2
import subprocess
jinja = jinja2.Environment(
loader=jinja2.FileSystemLoader(join(base_path, "ci", "templates")),
trim_blocks=True,
lstrip_blocks=True,
keep_trailing_newline=True
)
tox_environments = [
line.strip()
# WARNING: 'tox' must be installed globally or in the project's virtualenv
for line in subprocess.check_output(['tox', '--listenvs'], universal_newlines=True).splitlines()
]
tox_environments = [line for line in tox_environments if line not in ['clean', 'report', 'docs', 'check']]
for name in os.listdir(join("ci", "templates")):
with open(join(base_path, name), "w") as fh:
fh.write(jinja.get_template(name).render(tox_environments=tox_environments))
print("Wrote {}".format(name))
print("DONE.")
|
PypiClean
|
/jeeves-pa-0.1.0.tar.gz/jeeves-pa-0.1.0/jeeves/diagnose.py
|
import sys
import socket
import subprocess
import pkgutil
import logging
if sys.version_info < (3, 3):
from distutils.spawn import find_executable
else:
from shutil import which as find_executable
logger = logging.getLogger(__name__)
def check_network_connection(server="www.google.com"):
"""
Checks if jeeves can connect a network server.
Arguments:
server -- (optional) the server to connect with (Default:
"www.google.com")
Returns:
True or False
"""
logger = logging.getLogger(__name__)
logger.debug("Checking network connection to server '%s'...", server)
try:
# see if we can resolve the host name -- tells us if there is
# a DNS listening
host = socket.gethostbyname(server)
# connect to the host -- tells us if the host is actually
# reachable
        socket.create_connection((host, 80), 2).close()
except Exception:
logger.debug("Network connection not working")
return False
else:
logger.debug("Network connection working")
return True
def check_executable(executable):
"""
Checks if an executable exists in $PATH.
Arguments:
executable -- the name of the executable (e.g. "echo")
Returns:
True or False
"""
logger = logging.getLogger(__name__)
logger.debug("Checking executable '%s'...", executable)
executable_path = find_executable(executable)
found = executable_path is not None
if found:
logger.debug("Executable '%s' found: '%s'", executable,
executable_path)
else:
logger.debug("Executable '%s' not found", executable)
return found
def check_python_import(package_or_module):
"""
Checks if a python package or module is importable.
Arguments:
package_or_module -- the package or module name to check
Returns:
True or False
"""
logger = logging.getLogger(__name__)
logger.debug("Checking python import '%s'...", package_or_module)
loader = pkgutil.get_loader(package_or_module)
found = loader is not None
if found:
logger.debug("Python %s '%s' found: %r",
"package" if loader.is_package(package_or_module)
else "module", package_or_module, loader.get_filename())
else:
logger.debug("Python import '%s' not found", package_or_module)
return found
def get_git_revision():
"""
Gets the current git revision hash as hex string. If the git executable is
missing or git is unable to get the revision, None is returned
Returns:
A hex string or None
"""
logger = logging.getLogger(__name__)
if not check_executable('git'):
logger.warning("'git' command not found, git revision not detectable")
return None
    try:
        output = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
    except subprocess.CalledProcessError:
        output = None
    if not output:
        logger.warning("Couldn't detect git revision (not a git repository?)")
        return None
    # subprocess.check_output returns bytes on Python 3; decode to a hex string
    return output.decode('ascii') if isinstance(output, bytes) else output
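
# Example (assumed usage, not part of the original module): run all checks
# from the command line to diagnose an installation.
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    print("network connection: %s" % check_network_connection())
    print("'echo' executable:  %s" % check_executable('echo'))
    print("'logging' import:   %s" % check_python_import('logging'))
    print("git revision:       %s" % get_git_revision())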
|
PypiClean
|
/azure-mgmt-datalake-store-1.1.0b1.zip/azure-mgmt-datalake-store-1.1.0b1/azure/mgmt/datalake/store/models/_data_lake_store_account_management_client_enums.py
|
from enum import Enum
from azure.core import CaseInsensitiveEnumMeta
class CheckNameAvailabilityParametersType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The resource type. Note: This should not be set by the user, as the constant value is
Microsoft.DataLakeStore/accounts.
"""
MICROSOFT_DATA_LAKE_STORE_ACCOUNTS = "Microsoft.DataLakeStore/accounts"
class DataLakeStoreAccountState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The state of the Data Lake Store account."""
ACTIVE = "Active"
SUSPENDED = "Suspended"
class DataLakeStoreAccountStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The provisioning status of the Data Lake Store account."""
FAILED = "Failed"
CREATING = "Creating"
RUNNING = "Running"
SUCCEEDED = "Succeeded"
PATCHING = "Patching"
SUSPENDING = "Suspending"
RESUMING = "Resuming"
DELETING = "Deleting"
DELETED = "Deleted"
UNDELETING = "Undeleting"
CANCELED = "Canceled"
class EncryptionConfigType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The type of encryption configuration being used. Currently the only supported types are
'UserManaged' and 'ServiceManaged'.
"""
USER_MANAGED = "UserManaged"
SERVICE_MANAGED = "ServiceManaged"
class EncryptionProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The current state of encryption provisioning for this Data Lake Store account."""
CREATING = "Creating"
SUCCEEDED = "Succeeded"
class EncryptionState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The current state of encryption for this Data Lake Store account."""
ENABLED = "Enabled"
DISABLED = "Disabled"
class FirewallAllowAzureIpsState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The current state of allowing or disallowing IPs originating within Azure through the firewall.
If the firewall is disabled, this is not enforced.
"""
ENABLED = "Enabled"
DISABLED = "Disabled"
class FirewallState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The current state of the IP address firewall for this Data Lake Store account."""
ENABLED = "Enabled"
DISABLED = "Disabled"
class OperationOrigin(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The intended executor of the operation."""
USER = "user"
SYSTEM = "system"
USER_SYSTEM = "user,system"
class SubscriptionState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The subscription state."""
REGISTERED = "Registered"
SUSPENDED = "Suspended"
DELETED = "Deleted"
UNREGISTERED = "Unregistered"
WARNED = "Warned"
class TierType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The commitment tier to use for next month."""
CONSUMPTION = "Consumption"
COMMITMENT1_TB = "Commitment_1TB"
COMMITMENT10_TB = "Commitment_10TB"
COMMITMENT100_TB = "Commitment_100TB"
COMMITMENT500_TB = "Commitment_500TB"
COMMITMENT1_PB = "Commitment_1PB"
COMMITMENT5_PB = "Commitment_5PB"
class TrustedIdProviderState(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""The current state of the trusted identity provider feature for this Data Lake Store account."""
ENABLED = "Enabled"
DISABLED = "Disabled"
class UsageUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta):
"""Gets the unit of measurement."""
COUNT = "Count"
BYTES = "Bytes"
SECONDS = "Seconds"
PERCENT = "Percent"
COUNTS_PER_SECOND = "CountsPerSecond"
BYTES_PER_SECOND = "BytesPerSecond"
|
PypiClean
|
/torch_buddy-0.0.5-py3-none-any.whl/torch_buddy/nn/cv.py
|
# Standard libraries
import random
# Third party libraries
import torch
import torchvision.transforms.functional as FT
# ==============================================================================
# Image-related utilities (bounding boxes and data augmentation)
# ==============================================================================
def xyccwd_to_xymmmm(cxcy):
"""Convert bounding boxes from center-size coordinates (c_x, c_y, w, h) to boundary coordinates
(x_min, y_min, x_max, y_max)
Parameters
----------
cxcy : PyTorch tensor
Bounding boxes in center-size coordinates, a tensor of size (n_boxes, 4)
Returns
-------
PyTorch tensor
Bounding boxes in boundary coordinates, a tensor of size (n_boxes, 4)
"""
return torch.cat([cxcy[:, :2] - cxcy[:, 2:] / 2, cxcy[:, :2] + cxcy[:, 2:] / 2], dim=1)
def xymmmm_to_xyccwd(xy):
"""Convert bounding boxes from boundary coordinates (x_min, y_min, x_max, y_max)
to center-size coordinates (c_x, c_y, w, h).
Parameters
----------
xy : Pytorch tensor
Bounding boxes in boundary coordinates, a tensor of size (n_boxes, 4)
Returns
-------
Pytorch tensor
bounding boxes in center-size coordinates, a tensor of size (n_boxes, 4)
"""
return torch.cat([(xy[:, 2:] + xy[:, :2]) / 2, xy[:, 2:] - xy[:, :2]], 1) # c_x, c_y # w, h
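
# Quick illustration (hypothetical helper, not part of the module API):
# the two conversions above are inverses of each other.
def _demo_coord_roundtrip():
    cxcy = torch.tensor([[0.5, 0.5, 0.2, 0.4]])  # (c_x, c_y, w, h)
    assert torch.allclose(xymmmm_to_xyccwd(xyccwd_to_xymmmm(cxcy)), cxcy)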
def xyccwd_to_xygcgcgwgh(cxcy, priors_cxcy):
"""Encode bounding boxes (that are in center-size form) w.r.t. the corresponding prior boxes
(that are in center-size form).
For the center coordinates, find the offset with respect to the prior box,
and scale by the size of the prior box.
For the size coordinates, scale by the size of the prior box, and convert to the log-space.
In the model, we are predicting bounding box coordinates in this encoded form.
# The 10 and 5 below are referred to as 'variances' in the original Caffe repo, completely empirical
# They are for some sort of numerical conditioning, for 'scaling the localization gradient'
# See https://github.com/weiliu89/caffe/issues/155
Parameters
----------
cxcy : Pytorch tensor
Bounding boxes in center-size coordinates, a tensor of size (n_priors, 4)
priors_cxcy : Pytorch tensor
Prior boxes with respect to which the encoding must be performed, a tensor of size (n_priors, 4)
Returns
-------
Pytorch tensor
Encoded bounding boxes, a tensor of size (n_priors, 4)
"""
return torch.cat(
[
(cxcy[:, :2] - priors_cxcy[:, :2]) / (priors_cxcy[:, 2:] / 10), # g_c_x, g_c_y
torch.log(cxcy[:, 2:] / priors_cxcy[:, 2:]) * 5,
],
1,
)
def xygcgcgwgh_to_xyccwd(gcxgcy, priors_cxcy):
"""Decode bounding box coordinates predicted by the model, since they are encoded in the form mentioned above.
They are decoded into center-size coordinates. This is the inverse of the function above.
Parameters
----------
gcxgcy : Pytorch tensor
Encoded bounding boxes, i.e. output of the model, a tensor of size (n_priors, 4)
priors_cxcy : Pytorch tensor
Prior boxes with respect to which the encoding is defined, a tensor of size (n_priors, 4)
Returns
-------
Pytorch tensor
Decoded bounding boxes in center-size form, a tensor of size (n_priors, 4)
"""
return torch.cat(
[
gcxgcy[:, :2] * priors_cxcy[:, 2:] / 10 + priors_cxcy[:, :2], # c_x, c_y
torch.exp(gcxgcy[:, 2:] / 5) * priors_cxcy[:, 2:],
],
1,
)
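
# Sanity check (hypothetical helper, not part of the module API): encoding a
# box w.r.t. a prior and decoding it again recovers the original box.
def _demo_encode_decode_roundtrip():
    boxes = torch.tensor([[0.5, 0.5, 0.2, 0.3]])    # (c_x, c_y, w, h)
    priors = torch.tensor([[0.4, 0.6, 0.25, 0.25]])
    encoded = xyccwd_to_xygcgcgwgh(boxes, priors)
    decoded = xygcgcgwgh_to_xyccwd(encoded, priors)
    assert torch.allclose(boxes, decoded, atol=1e-6)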
def find_intersection(set_1, set_2):
"""Find the intersection of every box combination between two sets of boxes that are in boundary coordinates.
Parameters
----------
set_1 : Pytorch tensor
Set 1, a tensor of dimensions (n1, 4)
set_2 : Pytorch tensor
Set 2, a tensor of dimensions (n2, 4)
Returns
-------
Pytorch tensor
Intersection of each of the boxes in set 1 with respect to each of the boxes in set 2,
a tensor of dimensions (n1, n2)
"""
    lower_left = torch.max(set_1[:, :2].unsqueeze(1), set_2[:, :2].unsqueeze(0))  # (n1, n2, 2)
    upper_right = torch.min(set_1[:, 2:].unsqueeze(1), set_2[:, 2:].unsqueeze(0))  # (n1, n2, 2)
    dims_intersection = torch.clamp(upper_right - lower_left, min=0)  # (n1, n2, 2)
return dims_intersection[:, :, 0] * dims_intersection[:, :, 1] # (n1, n2)
def find_jaccard_overlap(set_1, set_2):
"""Find the Jaccard Overlap (IoU) of every box combination between two sets of boxes
that are in boundary coordinates.
Parameters
----------
set_1 : Pytorch tensor
Set 1, a tensor of dimensions (n1, 4)
set_2 : Pytorch tensor
Set 2, a tensor of dimensions (n2, 4)
Returns
-------
Pytorch tensor
Jaccard Overlap of each of the boxes in set 1 with respect to each of the boxes in set 2,
a tensor of dimensions (n1, n2)
"""
areas_intersection = find_intersection(set_1, set_2) # (n1, n2)
areas_set_1 = (set_1[:, 2] - set_1[:, 0]) * (set_1[:, 3] - set_1[:, 1]) # (n1)
areas_set_2 = (set_2[:, 2] - set_2[:, 0]) * (set_2[:, 3] - set_2[:, 1]) # (n2)
areas_union = areas_set_1.unsqueeze(1) + areas_set_2.unsqueeze(0) - areas_intersection # (n1, n2)
return areas_intersection / areas_union # (n1, n2)
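
# Worked example (hypothetical helper, not part of the module API): two 2x2
# boxes overlapping in a 1x1 square give IoU = 1 / (4 + 4 - 1) = 1/7.
def _demo_jaccard_overlap():
    a = torch.tensor([[0.0, 0.0, 2.0, 2.0]])
    b = torch.tensor([[1.0, 1.0, 3.0, 3.0], [5.0, 5.0, 6.0, 6.0]])
    iou = find_jaccard_overlap(a, b)  # shape (1, 2)
    assert abs(iou[0, 0].item() - 1.0 / 7.0) < 1e-6
    assert iou[0, 1].item() == 0.0   # disjoint boxes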
# Some augmentation functions below have been adapted from
# From https://github.com/amdegroot/ssd.pytorch/blob/master/utils/augmentations.py
def expand(image, boxes, filler):
"""Perform a zooming out operation by placing the image in a larger canvas of filler material.
Helps to learn to detect smaller objects.
Parameters
----------
image : Pytorch tensor
Image, a tensor of dimensions (3, original_h, original_w)
boxes : Pytorch tensor
Bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4)
filler : list
RBG values of the filler material, a list like [R, G, B]
Returns
-------
new_image : Pytorch tensor
Expanded image
new_boxes : Pytorch tensor
Updated bounding box coordinates
"""
# Calculate dimensions of proposed expanded (zoomed-out) image
original_h = image.size(1)
original_w = image.size(2)
max_scale = 4
scale = random.uniform(1, max_scale)
new_h = int(scale * original_h)
new_w = int(scale * original_w)
# Create such an image with the filler
filler = torch.FloatTensor(filler) # (3)
new_image = torch.ones((3, new_h, new_w), dtype=torch.float) * filler.unsqueeze(1).unsqueeze(1) # (3, new_h, new_w)
# Note - do not use expand() like new_image = filler.unsqueeze(1).unsqueeze(1).expand(3, new_h, new_w)
# because all expanded values will share the same memory, so changing one pixel will change all
# Place the original image at random coordinates in this new image (origin at top-left of image)
left = random.randint(0, new_w - original_w)
right = left + original_w
top = random.randint(0, new_h - original_h)
bottom = top + original_h
new_image[:, top:bottom, left:right] = image
# Adjust bounding boxes' coordinates accordingly
new_boxes = boxes + torch.FloatTensor([left, top, left, top]).unsqueeze(
0
) # (n_objects, 4), n_objects is the no. of objects in this image
return new_image, new_boxes
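
# Illustrative check (hypothetical helper, not part of the module API):
# expanding preserves box sizes; only the offsets move.
def _demo_expand():
    img = torch.rand(3, 100, 150)
    boxes = torch.tensor([[10.0, 20.0, 50.0, 60.0]])
    new_img, new_boxes = expand(img, boxes, filler=[0.485, 0.456, 0.406])
    assert torch.allclose(new_boxes[:, 2:] - new_boxes[:, :2],
                          boxes[:, 2:] - boxes[:, :2])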
def random_crop(image, boxes, labels, difficulties):
"""Performs a random crop in the manner stated in the paper. Helps to learn to detect larger and partial objects.
Note that some objects may be cut out entirely.
Adapted from https://github.com/amdegroot/ssd.pytorch/blob/master/utils/augmentations.py
Parameters
----------
image : Pytorch tensor
A tensor of dimensions (3, original_h, original_w)
boxes : Pytorch tensor
Bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4)
labels : Pytorch tensor
Labels of objects, a tensor of dimensions (n_objects)
difficulties : Pytorch tensor
Difficulties of detection of these objects, a tensor of dimensions (n_objects)
Returns
-------
Cropped image, updated bounding box coordinates, updated labels, updated difficulties
"""
original_h = image.size(1)
original_w = image.size(2)
# Keep choosing a minimum overlap until a successful crop is made
while True:
# Randomly draw the value for minimum overlap
min_overlap = random.choice([0.0, 0.1, 0.3, 0.5, 0.7, 0.9, None]) # 'None' refers to no cropping
# If not cropping
if min_overlap is None:
return image, boxes, labels, difficulties
# Try up to 50 times for this choice of minimum overlap
# This isn't mentioned in the paper, of course, but 50 is chosen in paper authors' original Caffe repo
max_trials = 50
for _ in range(max_trials):
# Crop dimensions must be in [0.3, 1] of original dimensions
# Note - it's [0.1, 1] in the paper, but actually [0.3, 1] in the authors' repo
min_scale = 0.3
scale_h = random.uniform(min_scale, 1)
scale_w = random.uniform(min_scale, 1)
new_h = int(scale_h * original_h)
new_w = int(scale_w * original_w)
# Aspect ratio has to be in [0.5, 2]
aspect_ratio = new_h / new_w
if not 0.5 < aspect_ratio < 2:
continue
# Crop coordinates (origin at top-left of image)
left = random.randint(0, original_w - new_w)
right = left + new_w
top = random.randint(0, original_h - new_h)
bottom = top + new_h
crop = torch.FloatTensor([left, top, right, bottom]) # (4)
# Calculate Jaccard overlap between the crop and the bounding boxes
overlap = find_jaccard_overlap(
crop.unsqueeze(0), boxes
) # (1, n_objects), n_objects is the no. of objects in this image
overlap = overlap.squeeze(0) # (n_objects)
# If not a single bounding box has a Jaccard overlap of greater than the minimum, try again
if overlap.max().item() < min_overlap:
continue
# Crop image
new_image = image[:, top:bottom, left:right] # (3, new_h, new_w)
# Find centers of original bounding boxes
bb_centers = (boxes[:, :2] + boxes[:, 2:]) / 2.0 # (n_objects, 2)
# Find bounding boxes whose centers are in the crop
centers_in_crop = (
(bb_centers[:, 0] > left)
* (bb_centers[:, 0] < right)
* (bb_centers[:, 1] > top)
* (bb_centers[:, 1] < bottom)
) # (n_objects), a Torch uInt8/Byte tensor, can be used as a boolean index
# If not a single bounding box has its center in the crop, try again
if not centers_in_crop.any():
continue
# Discard bounding boxes that don't meet this criterion
new_boxes = boxes[centers_in_crop, :]
new_labels = labels[centers_in_crop]
new_difficulties = difficulties[centers_in_crop]
# Calculate bounding boxes' new coordinates in the crop
new_boxes[:, :2] = torch.max(new_boxes[:, :2], crop[:2]) # crop[:2] is [left, top]
new_boxes[:, :2] -= crop[:2]
new_boxes[:, 2:] = torch.min(new_boxes[:, 2:], crop[2:]) # crop[2:] is [right, bottom]
new_boxes[:, 2:] -= crop[:2]
return new_image, new_boxes, new_labels, new_difficulties
def flip(image, boxes):
"""Flip image horizontally.
Parameters
----------
image : PIL Image
A PIL Image
boxes : Pytorch tensor
Bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4)
Returns
-------
new_image : Pytorch tensor
Flipped image
new_boxes : Pytorch tensor
Updated bounding box coordinates
"""
# Flip image
new_image = FT.hflip(image)
    # Flip boxes (clone first so the caller's tensor is not modified in place)
    new_boxes = boxes.clone()
    new_boxes[:, 0] = image.width - boxes[:, 0] - 1
    new_boxes[:, 2] = image.width - boxes[:, 2] - 1
    new_boxes = new_boxes[:, [2, 1, 0, 3]]
return new_image, new_boxes
def resize(image, boxes, dims=(300, 300), return_percent_coords=True):
"""Resize image. For the SSD300, resize to (300, 300).
Since percent/fractional coordinates are calculated for the bounding boxes (w.r.t image dimensions) in this process,
you may choose to retain them.
Parameters
----------
    image : PIL Image
        Image to resize
    boxes : Pytorch tensor
        Bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4)
    dims : tuple, optional
        Target image dimensions (height, width), by default (300, 300)
    return_percent_coords : bool, optional
        Whether to return fractional (percent) coordinates w.r.t. the new image
        dimensions instead of absolute coordinates, by default True
    Returns
    -------
    tuple
        Resized image and updated bounding box coordinates (fractional if requested)
"""
# Resize image
new_image = FT.resize(image, dims)
# Resize bounding boxes
old_dims = torch.FloatTensor([image.width, image.height, image.width, image.height]).unsqueeze(0)
new_boxes = boxes / old_dims # percent coordinates
if not return_percent_coords:
new_dims = torch.FloatTensor([dims[1], dims[0], dims[1], dims[0]]).unsqueeze(0)
new_boxes = new_boxes * new_dims
return new_image, new_boxes
def photometric_distort(image):
"""Distort brightness, contrast, saturation, and hue, each with a 50% chance, in random order.
Parameters
----------
image : PIL Image
Returns
-------
Distorted image
"""
new_image = image
distortions = [FT.adjust_brightness, FT.adjust_contrast, FT.adjust_saturation, FT.adjust_hue]
random.shuffle(distortions)
for d in distortions:
if random.random() < 0.5:
if d.__name__ == "adjust_hue":
# Caffe repo uses a 'hue_delta' of 18 - we divide by 255 because PyTorch needs a normalized value
adjust_factor = random.uniform(-18 / 255.0, 18 / 255.0)
else:
# Caffe repo uses 'lower' and 'upper' values of 0.5 and 1.5 for brightness, contrast, and saturation
adjust_factor = random.uniform(0.5, 1.5)
# Apply this distortion
new_image = d(new_image, adjust_factor)
return new_image
def transform(image, boxes, labels, difficulties, split):
"""Apply the transformations above.
Parameters
----------
    image : PIL Image
        Image to transform
    boxes : Pytorch tensor
        Bounding boxes in boundary coordinates, a tensor of dimensions (n_objects, 4)
    labels : Pytorch tensor
        Labels of objects, a tensor of dimensions (n_objects)
    difficulties : Pytorch tensor
        Difficulties of detection of these objects, a tensor of dimensions (n_objects)
    split : str
One of 'TRAIN' or 'TEST', since different sets of transformations are applied
Returns
-------
transformed image, transformed bounding box coordinates, transformed labels, transformed difficulties
"""
assert split in {"TRAIN", "TEST"}
# Mean and standard deviation of ImageNet data that our base VGG from torchvision was trained on
# see: https://pytorch.org/docs/stable/torchvision/models.html
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
new_image = image
new_boxes = boxes
new_labels = labels
new_difficulties = difficulties
# Skip the following operations if validation/evaluation
if split == "TRAIN":
# A series of photometric distortions in random order, each with 50% chance of occurrence, as in Caffe repo
new_image = photometric_distort(new_image)
# Convert PIL image to Torch tensor
new_image = FT.to_tensor(new_image)
        # NOTE: the expand/random-crop augmentations below are disabled ('if 0');
        # change the guard to 1 to re-enable zoom-out and random cropping.
        if 0:
# Expand image (zoom out) with a 50% chance - helpful for training detection of small objects
# Fill surrounding space with the mean of ImageNet data that our base VGG was trained on
if random.random() < 0.5:
new_image, new_boxes = expand(new_image, boxes, filler=mean)
# Randomly crop image (zoom in)
new_image, new_boxes, new_labels, new_difficulties = random_crop(
new_image, new_boxes, new_labels, new_difficulties
)
# Convert Torch tensor to PIL image
new_image = FT.to_pil_image(new_image)
# Flip image with a 50% chance
if random.random() < 0.5:
new_image, new_boxes = flip(new_image, new_boxes)
# Resize image to (300, 300) - this also converts absolute boundary coordinates to their fractional form
new_image, new_boxes = resize(new_image, new_boxes, dims=(300, 300))
# Convert PIL image to Torch tensor
new_image = FT.to_tensor(new_image)
# Normalize by mean and standard deviation of ImageNet data that our base VGG was trained on
new_image = FT.normalize(new_image, mean=mean, std=std)
return new_image, new_boxes, new_labels, new_difficulties
|
PypiClean
|
/fear_data-0.1.1-py3-none-any.whl/fear_data/fc_data.py
|
import csv
import pandas as pd
import numpy as np
def _load_raw_data(data_file):
# internal function
def _find_start_row(data_file, start_row="Experiment"):
"""Uses regex to find the first row of data."""
with open(data_file, "rt") as f:
reader = csv.reader(f)
file_rows = [row for row in reader]
for count, value in enumerate(file_rows, start=0):
if start_row in value:
return count
# convert training file to pandas df
df = pd.read_csv(data_file, skiprows=_find_start_row(data_file))
    # drop NaN rows that can get inserted into the exported csv
    df = df.replace("nan", np.nan).dropna(thresh=2).reset_index()
    # workaround for a VideoFreeze bug on some csv files: convert Animal to str
if df["Animal"].dtype is np.dtype("float64") or df["Animal"].dtype is np.dtype(
"int64"
):
df.loc[:, "Animal"] = df["Animal"].astype("int").astype("str")
# drop and rename columns
old_col_list = [
"Animal",
"Group",
"Component Name",
"Pct Component Time Freezing",
"Avg Motion Index",
]
# reindex to drop extraneous cols
df = df.reindex(columns=old_col_list)
# rename columns to remove spaces in colnames
new_col_list = ["Animal", "Group", "Component", "PctFreeze", "AvgMotion"]
new_cols = {
key: val
for (key, val) in zip(df.reindex(columns=old_col_list).columns, new_col_list)
}
df = df.rename(columns=new_cols)
return df
def add_group_labels(df, group_dict, sex_dict=None):
df = df.copy()
# Fill in Group info
for key, val in group_dict.items():
df.loc[df["Animal"].isin(val), "Group"] = key
if sex_dict:
for key, val in sex_dict.items():
df.loc[df["Animal"].isin(val), "Sex"] = key
return df.dropna(axis=1)
def load_fc_data(data_file, session):
# load and clean data
def get_baseline_vals(df):
"""Get values up to the first 'tone' component"""
new_list = []
for item in df["Component"]:
if item.lower() not in ["tone-1", "tone-01"]:
new_list.append(item)
else:
break
new_list = [str(item) for item in new_list]
return new_list
# load session data
df = _load_raw_data(data_file)
# clean up df
if "context" in session:
df["Component"] = df["Component"].astype("int")
df["Phase"] = "context"
else:
df["Component"] = [
df["Component"][x].lower() for x in range(len(df["Component"]))
]
df["Phase"] = df["Component"]
baseline_vals = get_baseline_vals(df)
# add column to denote phase of each bin
df.loc[df["Phase"].isin(baseline_vals), "Phase"] = "baseline"
df.loc[df["Phase"].str.contains("tone"), "Phase"] = "tone"
df.loc[df["Phase"].str.contains("trace"), "Phase"] = "trace"
df.loc[~df["Phase"].isin(["baseline", "tone", "trace"]), "Phase"] = "iti"
df = df.reindex(
columns=[
"Animal",
"Sex",
"Group",
"Phase",
"Component",
"PctFreeze",
"AvgMotion",
]
)
return df
def get_phase_data(df, hue=None):
"""
Group DataFrame by 'Phase'. Used for plotting data by Phase.
Args:
df (DataFrame): Data to group by trial phase.
hue (str, optional): Specify a grouping variable (e.g., Group, AAV, etc). Defaults to None.
Returns:
DataFrame: Data grouped by Phase.
"""
df = df.copy()
groupby_list = ["Animal", "Phase"]
if hue:
groupby_list.append(hue)
return df.groupby(groupby_list, as_index=False).mean().dropna()
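
# Example usage (illustrative only; the file name, session label, and animal
# IDs below are hypothetical, not taken from the package):
#
#   df = load_fc_data('training_day1.csv', session='train')
#   df = add_group_labels(df, group_dict={'ctrl': ['1', '2'], 'exp': ['3', '4']})
#   phase_df = get_phase_data(df, hue='Group')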
|
PypiClean
|
/rekall-core-1.7.2rc1.zip/rekall-core-1.7.2rc1/rekall/plugins/windows/malware/svcscan.py
|
# pylint: disable=protected-access
from builtins import str
from rekall import plugin
from rekall import obj
from rekall.plugins.windows import common
from rekall.plugins.windows import vadinfo
from rekall_lib import utils
SERVICE_TYPE_FLAGS = {
'SERVICE_KERNEL_DRIVER': 0,
'SERVICE_FILE_SYSTEM_DRIVER': 1,
'SERVICE_WIN32_OWN_PROCESS': 4,
'SERVICE_WIN32_SHARE_PROCESS': 5,
'SERVICE_INTERACTIVE_PROCESS': 8
}
SERVICE_STATE_ENUM = {
1: 'SERVICE_STOPPED',
2: 'SERVICE_START_PENDING',
3: 'SERVICE_STOP_PENDING',
4: 'SERVICE_RUNNING',
5: 'SERVICE_CONTINUE_PENDING',
6: 'SERVICE_PAUSE_PENDING',
7: 'SERVICE_PAUSED'
}
svcscan_base_x86 = {
'_SERVICE_HEADER': [None, {
'Tag': [0x0, ['array', 4, ['unsigned char']]],
'ServiceRecord': [0xC, ['pointer', ['_SERVICE_RECORD']]],
}],
'_SERVICE_RECORD': [None, {
'NextService': [0x0, ['_SERVICE_HEADER']],
'ServiceName': [0x8, ['pointer', ['UnicodeString', dict(length=512)]]],
'DisplayName': [0xc, ['pointer', ['UnicodeString', dict(length=512)]]],
'Order': [0x10, ['unsigned int']],
'Tag': [0x18, ['array', 4, ['unsigned char']]],
'DriverName': [0x24, ['pointer', ['UnicodeString', dict(
length=256)]]],
'ServiceProcess': [0x24, ['pointer', ['_SERVICE_PROCESS']]],
'Type': [0x28, ['Flags', {'bitmap': SERVICE_TYPE_FLAGS}]],
'State': [0x2c, ['Enumeration', dict(target='long',
choices=SERVICE_STATE_ENUM)]],
}],
'_SERVICE_PROCESS': [None, {
'BinaryPath': [0x8, ['pointer', ['UnicodeString', dict(
encoding='utf16', length=256)]]],
'ProcessId': [0xc, ['unsigned int']],
}],
}
svcscan_base_x64 = {
'_SERVICE_HEADER': [None, {
'Tag': [0x0, ['Array', dict(
count=4,
target='unsigned char'
)]],
'ServiceRecord': [0x10, ['Pointer', dict(
target='_SERVICE_RECORD'
)]],
}],
'_SERVICE_RECORD': [None, {
'NextService': [0x0, ['Pointer', dict(
target="_SERVICE_RECORD"
)]],
'ServiceName': [0x8, ['pointer', ['UnicodeString', dict(
encoding='utf16', length=512)]]],
'DisplayName': [0x10, ['Pointer', dict(
target='UnicodeString',
target_args=dict(length=512)
)]],
'Order': [0x18, ['unsigned int']],
'Tag' : [0x20, ['Array', dict(
count=4,
target='unsigned char'
)]],
'DriverName': [0x30, ['Pointer', dict(
target='UnicodeString',
target_args=dict(
length=256
)
)]],
'ServiceProcess': [0x30, ['Pointer', dict(
target='_SERVICE_PROCESS'
)]],
'Type': [0x38, ['Flags', {'bitmap': SERVICE_TYPE_FLAGS}]],
'State': [0x3C, ['Enumeration', dict(
target='long', choices=SERVICE_STATE_ENUM)]],
}],
'_SERVICE_PROCESS': [None, {
'BinaryPath': [0x10, ['Pointer', dict(
target='UnicodeString',
target_args=dict(
length=256
)
)]],
'ProcessId': [0x18, ['unsigned int']],
}],
}
class _SERVICE_RECORD_LEGACY(obj.Struct):
"Service records for XP/2003 x86 and x64"
@utils.safe_property
def Binary(self):
"Return the binary path for a service"
# No path in memory for services that aren't running
# (if needed, query the registry key)
if str(self.State) != 'SERVICE_RUNNING':
return obj.NoneObject("No path, service isn't running")
# Depending on whether the service is for a process
# or kernel driver, the binary path is stored differently
if 'PROCESS' in str(self.Type):
return self.ServiceProcess.BinaryPath.dereference()
else:
return self.DriverName.dereference()
@utils.safe_property
def Pid(self):
"Return the process ID for a service"
if str(self.State) == 'SERVICE_RUNNING':
if 'PROCESS' in str(self.Type):
return self.ServiceProcess.ProcessId
return obj.NoneObject("Cannot get process ID")
def is_valid(self):
"Check some fields for validity"
return (super(_SERVICE_RECORD_LEGACY, self).is_valid() and
self.Order > 0 and self.Order < 0xFFFF)
class _SERVICE_RECORD_RECENT(_SERVICE_RECORD_LEGACY):
"Service records for 2008, Vista, 7 x86 and x64"
class _SERVICE_HEADER(obj.Struct):
"Service headers for 2008, Vista, 7 x86 and x64"
def is_valid(self):
"Check some fields for validity"
return (super(_SERVICE_HEADER, self).is_valid() and
self.ServiceRecord.is_valid())
_SERVICE_RECORD_VISTA_X86 = {
'_SERVICE_RECORD': [None, {
'NextService': [0x0, ['pointer', ['_SERVICE_RECORD']]],
'ServiceName': [0x4, ['pointer', ['UnicodeString', dict(length=512)]]],
'DisplayName': [0x8, ['pointer', ['UnicodeString', dict(length=512)]]],
'Order': [0xC, ['unsigned int']],
'ServiceProcess': [0x1C, ['pointer', ['_SERVICE_PROCESS']]],
'DriverName': [0x1C, ['pointer', ['UnicodeString', dict(
length=256)]]],
'Type' : [0x20, ['Flags', {'bitmap': SERVICE_TYPE_FLAGS}]],
'State': [0x24, ['Enumeration', dict(
target='unsigned int', choices=SERVICE_STATE_ENUM)]],
}],
}
_SERVICE_RECORD_VISTA_X64 = {
'_SERVICE_RECORD': [None, {
'NextService': [0x00, ['Pointer', dict(
target='_SERVICE_RECORD'
)]],
'ServiceName': [0x08, ['pointer', ['UnicodeString', dict(
length=512
)]]],
'DisplayName': [0x10, ['pointer', ['UnicodeString', dict(
length=512
)]]],
'Order': [0x18, ['unsigned int']],
'ServiceProcess': [0x28, ['pointer', ['_SERVICE_PROCESS']]],
'DriverName': [0x28, ['Pointer', dict(
target='UnicodeString',
target_args=dict(
length=256,
)
)]],
'Type' : [0x30, ['Flags', {'bitmap': SERVICE_TYPE_FLAGS}]],
'State': [0x34, ['Enumeration', dict(
target='unsigned int',
choices=SERVICE_STATE_ENUM
)]],
}],
}
_SERVICE_RECORD_WIN81_X64 = {
"_SERVICE_RECORD": [None, {
"Tag": [0, ["String", dict(length=4)]], # Signature sErv
'NextService': [0x8, ['Pointer', dict(
target='_SERVICE_RECORD'
)]],
'ServiceName': [0x10, ['pointer', ['UnicodeString', dict(
length=512
)]]],
'DisplayName': [0x18, ['pointer', ['UnicodeString', dict(
length=512
)]]],
'Order': [0x20, ['unsigned int']],
'ServiceProcess': [0x38, ['pointer', ['_SERVICE_PROCESS']]],
'DriverName': [0x38, ['Pointer', dict(
target='UnicodeString',
target_args=dict(
length=256,
)
)]],
'Type' : [0x40, ['Flags', {'bitmap': SERVICE_TYPE_FLAGS}]],
'State': [0x44, ['Enumeration', dict(
target='unsigned int',
choices=SERVICE_STATE_ENUM
)]],
}],
'_SERVICE_PROCESS': [None, {
'Tag': [0, ["String", dict(length=4)]], # Sc16
'BinaryPath': [0x18, ['Pointer', dict(
target='UnicodeString',
target_args=dict(
length=256
)
)]],
'ProcessId': [0x20, ['unsigned int']],
}],
}
class ServiceModification(obj.ProfileModification):
"""A modification for the service control manager."""
@classmethod
def modify(cls, profile):
if profile.metadata("arch") == "I386":
profile.add_overlay(svcscan_base_x86)
else:
            # 64-bit profiles
profile.add_overlay(svcscan_base_x64)
# Windows XP, 2003
version = profile.metadata("version")
if version < 6.0:
profile.add_classes({
'_SERVICE_RECORD': _SERVICE_RECORD_LEGACY,
'_SERVICE_HEADER': _SERVICE_HEADER,
})
profile.add_constants(dict(ServiceTag=b"sErv"))
# Vista 2008 and windows 7
elif 6.0 <= version <= 6.2:
profile.add_classes({
'_SERVICE_RECORD': _SERVICE_RECORD_RECENT,
'_SERVICE_HEADER': _SERVICE_HEADER,
})
profile.add_constants(dict(ServiceTag=b"serH"))
if profile.metadata("arch") == "I386":
profile.add_overlay(_SERVICE_RECORD_VISTA_X86)
else:
profile.add_overlay(_SERVICE_RECORD_VISTA_X64)
# Windows 8.1 and Windows 10
elif 6.2 <= version:
profile.add_classes({
'_SERVICE_RECORD': _SERVICE_RECORD_RECENT,
'_SERVICE_HEADER': _SERVICE_HEADER,
})
profile.add_constants(dict(ServiceTag=b"serH"))
if profile.metadata("arch") == "I386":
profile.add_overlay(_SERVICE_RECORD_VISTA_X86)
else:
profile.add_overlay(_SERVICE_RECORD_WIN81_X64)
else:
raise RuntimeError(
"Unsupported windows version. Please file a bug.")
class SvcRecordScanner(vadinfo.VadScanner):
"""A scanner for the service tags."""
def __init__(self, **kwargs):
super(SvcRecordScanner, self).__init__(**kwargs)
self.checks = [
('StringCheck', dict(
needle=self.profile.get_constant("ServiceTag"))),
]
self.tag_offset = self.profile.get_obj_offset('_SERVICE_RECORD', 'Tag')
def scan(self, **kwargs):
for hit in super(SvcRecordScanner, self).scan(**kwargs):
svc_record = self.profile._SERVICE_RECORD(
vm=self.address_space, offset=hit - self.tag_offset)
if svc_record.is_valid():
yield svc_record
class SvcHeaderScanner(vadinfo.VadScanner):
"""A scanner for the service tags."""
def __init__(self, **kwargs):
super(SvcHeaderScanner, self).__init__(**kwargs)
self.checks = [
('StringCheck', dict(
needle=self.profile.get_constant("ServiceTag"))),
]
# On systems more recent than XP/2003, the serH marker doesn't
# find *all* services, but the ones it does find have linked
# lists to the others. We use this variable to track which
# ones we've seen so as to not yield duplicates.
self.records = set()
def scan(self, **kwargs):
for hit in super(SvcHeaderScanner, self).scan(**kwargs):
svc_header = self.profile._SERVICE_HEADER(
vm=self.address_space, offset=hit)
if svc_header.is_valid():
for record in svc_header.ServiceRecord.walk_list("NextService"):
if record.is_valid() and record not in self.records:
self.records.add(record)
yield record
class SvcScan(plugin.KernelASMixin, common.AbstractWindowsCommandPlugin):
"Scan for Windows services"
__name = "svcscan"
def __init__(self, scan_in_kernel_address_space=False, **kwargs):
"""Scan for callbacks.
Args:
scan_in_kernel_address_space: If False we will use the physical
address space for scanning, while if true we scan in the kernel
address space.
"""
super(SvcScan, self).__init__(**kwargs)
self.scan_in_kernel_address_space = scan_in_kernel_address_space
# Update the profile.
self.profile = ServiceModification(self.profile)
def calculate(self):
# Get the version we're analyzing
version = self.profile.metadatas('major', 'minor')
pslist = self.session.plugins.pslist(proc_regex="services.exe")
for task in pslist.filter_processes():
# Process AS must be valid
process_space = task.get_process_address_space()
            if process_space is None:
continue
# XP/2003 use the _SERVICE_RECORD object.
if version <= (5, 2):
scanner = SvcRecordScanner(
task=task, process_profile=self.profile,
session=self.session)
else:
# Windows Vista, 2008, and 7 use the _SERVICE_HEADER
scanner = SvcHeaderScanner(
task=task, process_profile=self.profile,
session=self.session)
# Find all instances of the record tag
for record in scanner.scan():
yield record
def render(self, renderer):
renderer.table_header([
("Offset", "offset", "[addrpad]"),
("Order", "order", "5"),
("PID", "pid", "4"),
("Service Name", "service", "30"),
("Display Name", "display_name", "40"),
("Service Type", "type", "30"),
("Service State", "state", "15"),
("Binary Path", "binary_path", "")])
for rec in self.calculate():
renderer.table_row(
rec,
rec.Order,
rec.Pid,
rec.ServiceName.deref(),
rec.DisplayName.deref(),
rec.Type,
rec.State,
rec.Binary)
|
PypiClean
|
/genie.libs.conf-23.8-py3-none-any.whl/genie/libs/conf/l2vpn/iosxe/l2vpn.py
|
from abc import ABC
import warnings
from genie.conf.base.attributes import UnsupportedAttributeWarning,\
AttributesHelper
from genie.conf.base.cli import CliConfigBuilder
from genie.conf.base.config import CliConfig
class L2vpn(ABC):
class DeviceAttributes(ABC):
def build_config(self, apply=True, attributes=None, unconfig=False,
**kwargs):
assert not kwargs, kwargs
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
for bd, attributes2 in attributes.sequence_values('bridge_domains'):
configurations.append_block(
str(bd.build_config(apply=False, attributes=attributes2, unconfig=unconfig,
contained=True)))
for xc, attributes2 in attributes.sequence_values('xconnects'):
configurations.append_block(
str(xc.build_config(apply=False, attributes=attributes2, unconfig=unconfig,
contained=True)))
for pwc, attributes2 in attributes.sequence_values('pseudowire_classes'):
configurations.append_block(
str(pwc.build_config(apply=False, attributes=attributes2, unconfig=unconfig,
contained=True)))
if apply:
if configurations:
self.device.configure(configurations, fail_invalid=True)
else:
return CliConfig(device=self.device, unconfig=unconfig,
cli_config=configurations, fail_invalid=True)
def build_unconfig(self, apply=True, attributes=None, **kwargs):
return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
class PbbAttributes(ABC):
def build_config(self, apply=True, attributes=None, unconfig=False,
**kwargs):
assert not apply
assert not kwargs, kwargs
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
return str(configurations)
def build_unconfig(self, apply=True, attributes=None, **kwargs):
return self.build_config(apply=apply, attributes=attributes, unconfig=True, **kwargs)
|
PypiClean
|
/parametrization_cookbook-0.18-py3-none-any.whl/parametrization_cookbook/_common_vectors.py
|
import collections
import operator
import functools
from ._common_base import (
one_or_all,
one_or_any,
is_broadcastable_without_change,
method_add_doc,
shape_param_vector,
doc_reals1d_to_params,
doc_params_to_reals1d,
ShapedParam,
)
class VectorSimplex(ShapedParam):
r"""Representation of the parametrization of the unit-simplex.
    This representation is used to represent vectors of the open unit-simplex,
    matrices where rows are elements of the open unit-simplex, or nd-arrays of
    positive values where the sum over the last dim is one.
The n-dimensional open unit-simplex is defined by: :math:`\mathcal S_n =
\left\{{x\in\mathbb {{R_+^*}}^{{n+1}}: \sum_ix_i=1\right\}}\subset \mathbb R^{{n+1}}`.
{examples}
"""
@method_add_doc(
f"""Representation of the parametrization of the unit-simplex.
    This representation is used to represent vectors of the open unit-simplex,
    matrices where rows are elements of the open unit-simplex, or nd-arrays of
    positive values where the sum over the last dim is one.
Parameters
----------
dim : int
dimension of simplex. Elements of the `dim`-dimentional simplex are
vectors of size `dim+1`.
{shape_param_vector("(dim+1,)")}
"""
)
def __init__(self, *, dim, shape=None):
if shape is None:
self._shape = ()
elif isinstance(shape, collections.abc.Iterable):
self._shape = tuple(shape)
else:
self._shape = (shape,)
assert dim >= 1, "Dimention must be positive."
self._dim = dim
self._size = dim * (
1 if self._shape is None else functools.reduce(operator.mul, self._shape, 1)
)
repr_args = []
repr_args.append(f"dim={dim!r}")
if self._shape:
repr_args.append(f"shape={self._shape!r}")
self._repr = self.__class__.__name__ + f"({', '.join(repr_args)})"
@method_add_doc(
doc_reals1d_to_params.format(
set_name="open unit-simplex elements or nd-arrays of open unit-simplex elements",
)
)
def reals1d_to_params(self, x):
x = self._check_reals1d_size(x)
return self._backend.reals_to_simplex(x.reshape(self._shape + (self._dim,)))
@method_add_doc(
doc_params_to_reals1d.format(
set_name="open unit-simplex elements or nd-arrays of open unit-simplex elements",
)
)
def params_to_reals1d(self, x):
self._check_params_shape_with_suppshape(x, (self._dim + 1,))
return self._backend.simplex_to_reals(x).ravel()
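
# Usage sketch (hedged: the package is normally used through a backend module
# such as `parametrization_cookbook.numpy`; that import path is an assumption,
# it is not shown in this file):
#
#   import numpy as np
#   import parametrization_cookbook.numpy as pc
#
#   param = pc.VectorSimplex(dim=2)           # elements live in R^3 and sum to 1
#   x = param.reals1d_to_params(np.zeros(2))  # unconstrained reals -> simplex
#   assert np.isclose(x.sum(), 1.0)
#   back = param.params_to_reals1d(x)         # inverse mapping, back to R^2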
class VectorSphere(ShapedParam):
r"""Representation of the parametrization of the sphere.
    This representation is used to represent vectors of the sphere,
    matrices where rows are elements of the sphere, or nd-arrays of
    positive values where the sum of squares over the last dim is the squared
    radius.
The n-dimensional sphere of radius r is defined by: :math:`\mathsf S_n =
\left\{{x\in\mathbb R^{{n+1}}: \sum_ix_i^2=r^2\right\}}\subset \mathbb R^{{n+1}}`.
Note
----
    There is no bijective mapping between the whole sphere and an unconstrained
    vector space. Some boundaries of the sphere are excluded.
{examples}
"""
@method_add_doc(
f"""Representation of the parametrization of the sphere.
    This representation is used to represent vectors of the sphere,
    matrices where rows are elements of the sphere, or nd-arrays of
    positive values where the sum of squares over the last dim is the squared
    radius.
Parameters
----------
dim : int
        dimension of the sphere. Elements of the `dim`-dimensional sphere are
vectors of size `dim+1`.
radius : {{array_like}} or float, optional
radius of the sphere. Must be shape compatible or broadcastable
with target shape.
{shape_param_vector("(dim+1,)")}
"""
)
def __init__(self, *, dim, radius=1.0, shape=None):
if shape is None:
self._shape = ()
elif isinstance(shape, collections.abc.Iterable):
self._shape = tuple(shape)
else:
self._shape = (shape,)
assert dim >= 1, "Dimention must be positive."
self._dim = dim
self._size = dim * (
1 if self._shape is None else functools.reduce(operator.mul, self._shape, 1)
)
assert one_or_all(radius > 0), "Radius must be positive."
assert is_broadcastable_without_change(
self._shape, radius
), "Radius must be a scalar or a shape broadcastable array."
self._radius = (
radius if hasattr(radius, "shape") else self._backend._to_array(radius)
)
self._broadcast_radius = (
self._radius.reshape(self._radius.shape + (1,))
if hasattr(self._radius, "shape")
else self._radius
)
repr_args = []
repr_args.append(f"dim={dim!r}")
if one_or_any(self._radius != 1.0):
repr_args.append(f"radius={self._radius!r}")
if self._shape:
repr_args.append(f"shape={self._shape!r}")
self._repr = self.__class__.__name__ + f"({', '.join(repr_args)})"
@method_add_doc(
doc_reals1d_to_params.format(
set_name="sphere elements or nd-arrays of sphere elements",
)
)
def reals1d_to_params(self, x):
x = self._check_reals1d_size(x)
return self._broadcast_radius * self._backend.reals_to_sphere(
x.reshape(self._shape + (self._dim,))
)
@method_add_doc(
doc_params_to_reals1d.format(
set_name="sphere elements or nd-arrays of sphere elements",
)
)
def params_to_reals1d(self, x):
self._check_params_shape_with_suppshape(x, (self._dim + 1,))
return self._backend.sphere_to_reals(x / self._broadcast_radius).ravel()
class VectorHalfSphere(ShapedParam):
r"""Representation of the parametrization of the half-sphere.
    This representation is used to represent vectors of the half-sphere (elements
    whose last coordinate is positive), matrices where rows are elements of the
    half-sphere, or nd-arrays of positive values where the sum of squares over
    the last dim is the squared radius and where the last index of the last dim
    contains positive values.
The n-dimensional half-sphere of radius r is defined by: :math:`\mathsf{{HS}}_n =
\left\{{x\in\mathbb R^{{n+1}}: x_n>0\wedge\sum_ix_i^2=r^2\right\}}\subset \mathbb R^{{n+1}}`.
{examples}
"""
@method_add_doc(
f"""Representation of the parametrization of the half-sphere.
    This representation is used to represent vectors of the half-sphere (elements
    whose last coordinate is positive), matrices where rows are elements of the
    half-sphere, or nd-arrays of positive values where the sum of squares over
    the last dim is the squared radius and where the last index of the last dim
    contains positive values.
Parameters
----------
dim : int
        dimension of the half-sphere. Elements of the `dim`-dimensional half-sphere are
vectors of size `dim+1`.
radius : {{array_like}} or float, optional
radius of the half-sphere. Must be shape compatible or broadcastable
with target shape.
{shape_param_vector("(dim+1,)")}
"""
)
def __init__(self, *, dim, radius=1.0, shape=None):
if shape is None:
self._shape = ()
elif isinstance(shape, collections.abc.Iterable):
self._shape = tuple(shape)
else:
self._shape = (shape,)
assert dim >= 1, "Dimention must be positive."
self._dim = dim
self._size = dim * (
1 if self._shape is None else functools.reduce(operator.mul, self._shape, 1)
)
assert one_or_all(radius > 0), "Radius must be positive."
assert is_broadcastable_without_change(
self._shape, radius
), "Radius must be a scalar or a shape broadcastable array."
self._radius = (
radius if hasattr(radius, "shape") else self._backend._to_array(radius)
)
self._broadcast_radius = (
self._radius.reshape(self._radius.shape + (1,))
if hasattr(self._radius, "shape")
else self._radius
)
repr_args = []
repr_args.append(f"dim={dim!r}")
if one_or_any(self._radius != 1.0):
repr_args.append(f"radius={self._radius!r}")
if self._shape:
repr_args.append(f"shape={self._shape!r}")
self._repr = self.__class__.__name__ + f"({', '.join(repr_args)})"
@method_add_doc(
doc_reals1d_to_params.format(
set_name="half-sphere elements or nd-arrays of half-sphere elements",
)
)
def reals1d_to_params(self, x):
x = self._check_reals1d_size(x)
return self._broadcast_radius * self._backend.reals_to_half_sphere(
x.reshape(self._shape + (self._dim,))
)
@method_add_doc(
doc_params_to_reals1d.format(
set_name="half-sphere elements or nd-arrays of half-sphere elements",
)
)
def params_to_reals1d(self, x):
self._check_params_shape_with_suppshape(x, (self._dim + 1,))
return self._backend.half_sphere_to_reals(x / self._broadcast_radius).ravel()
class VectorBall(ShapedParam):
r"""Representation of the parametrization of the ball.
    This representation is used to represent vectors of the ball,
    matrices where rows are elements of the ball, or nd-arrays of
    positive values where the sum of squares over the last dim is less than
    the squared radius.
The n-dimensional ball of radius r is defined by: :math:`\mathcal B_n =
\left\{{x\in\mathbb R^n: \sum_ix_i^2<r^2\right\}}\subset \mathbb R^n`.
{examples}
"""
@method_add_doc(
f"""Representation of the parametrization of the sphere.
This representation is used to represent vector sphere,
matrices where rows are elements of the sphere, or nd-array of
positive values where the sum of square over the last dim is squared
radius.
Parameters
----------
dim : int
        dimension of the ball. Elements of the `dim`-dimensional ball are
vectors of size `dim`.
radius : {{array_like}} or float, optional
radius of the ball. Must be shape compatible or broadcastable
with target shape.
{shape_param_vector("(dim,)")}
"""
)
def __init__(self, *, dim, radius=1.0, shape=None):
if shape is None:
self._shape = ()
elif isinstance(shape, collections.abc.Iterable):
self._shape = tuple(shape)
else:
self._shape = (shape,)
assert dim >= 1, "Dimension must be positive."
self._dim = dim
self._size = dim * (
1 if self._shape is None else functools.reduce(operator.mul, self._shape, 1)
)
assert one_or_all(radius > 0), "Radius must be positive."
assert is_broadcastable_without_change(
self._shape, radius
), "Radius must be a scalar or a shape broadcastable array."
self._radius = (
radius if hasattr(radius, "shape") else self._backend._to_array(radius)
)
self._broadcast_radius = (
self._radius.reshape(self._radius.shape + (1,))
if hasattr(self._radius, "shape")
else self._radius
)
repr_args = []
repr_args.append(f"dim={dim!r}")
if one_or_any(self._radius != 1.0):
repr_args.append(f"radius={self._radius!r}")
if self._shape:
repr_args.append(f"shape={self._shape!r}")
self._repr = self.__class__.__name__ + f"({', '.join(repr_args)})"
@method_add_doc(
doc_reals1d_to_params.format(
set_name="ball elements or nd-arrays of ball elements",
)
)
def reals1d_to_params(self, x):
x = self._check_reals1d_size(x)
return self._broadcast_radius * self._backend.reals_to_ball(
x.reshape(self._shape + (self._dim,))
)
@method_add_doc(
doc_params_to_reals1d.format(
set_name="ball elements or nd-arrays of ball elements",
)
)
def params_to_reals1d(self, x):
self._check_params_shape_with_suppshape(x, (self._dim,))
return self._backend.ball_to_reals(x / self._broadcast_radius).ravel()
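# Usage sketch (editor's addition; array creation via the backend is elided):
#     ball = VectorBall(dim=3, radius=1.0)
#     p = ball.reals1d_to_params(x)       # x: flat array of size 3 -> point with sum(p**2) < 1
#     x_back = ball.params_to_reals1d(p)  # inverse map, flattened back to size 3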
|
PypiClean
|
/pyfuse3-3.3.0.tar.gz/pyfuse3-3.3.0/src/pyfuse3_asyncio.py
|
import asyncio
import collections
import sys
from typing import Any, Callable, Iterable, Optional, Type
import pyfuse3
from _pyfuse3 import FileHandleT
Lock = asyncio.Lock
def enable() -> None:
'''Switch pyfuse3 to asyncio mode.'''
fake_trio = sys.modules['pyfuse3_asyncio']
fake_trio.lowlevel = fake_trio # type: ignore
fake_trio.from_thread = fake_trio # type: ignore
pyfuse3.trio = fake_trio # type: ignore
def disable() -> None:
'''Switch pyfuse3 to default (trio) mode.'''
pyfuse3.trio = sys.modules['trio'] # type: ignore
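# Usage sketch (editor's addition; `MyFs` is a hypothetical pyfuse3.Operations
# subclass and '/mnt/demo' a placeholder mountpoint). enable() must be called
# before pyfuse3.init() so the asyncio shims are in place:
#     import pyfuse3
#     import pyfuse3_asyncio
#
#     pyfuse3_asyncio.enable()
#     pyfuse3.init(MyFs(), '/mnt/demo', set())
#     try:
#         asyncio.get_event_loop().run_until_complete(pyfuse3.main())
#     finally:
#         pyfuse3.close()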
def current_trio_token() -> str:
return 'asyncio'
_read_futures = collections.defaultdict(set)
async def wait_readable(fd: FileHandleT) -> None:
future: asyncio.Future = asyncio.Future()
_read_futures[fd].add(future)
try:
loop = asyncio.get_event_loop()
loop.add_reader(fd, future.set_result, None)
future.add_done_callback(lambda f: loop.remove_reader(fd))
await future
finally:
_read_futures[fd].remove(future)
if not _read_futures[fd]:
del _read_futures[fd]
def notify_closing(fd: FileHandleT) -> None:
for f in _read_futures[fd]:
f.set_exception(ClosedResourceError())
class ClosedResourceError(Exception):
pass
def current_task() -> Optional[asyncio.Task]:
if sys.version_info < (3, 7):
return asyncio.Task.current_task()
else:
return asyncio.current_task()
class _Nursery:
async def __aenter__(self) -> "_Nursery":
self.tasks: set[asyncio.Task] = set()
return self
def start_soon(
self,
func: Callable,
*args: Iterable[Any],
name: Optional[str] = None
) -> None:
if sys.version_info < (3, 7):
task = asyncio.ensure_future(func(*args))
else:
task = asyncio.create_task(func(*args))
task.name = name # type: ignore
self.tasks.add(task)
async def __aexit__(
self,
exc_type: Optional[Type],
exc_value: Optional[BaseException],
traceback: Optional[Any]
) -> None:
# Wait for tasks to finish
while len(self.tasks):
# Create a copy of the task list to ensure that it's not a problem
# when self.tasks is modified
done, pending = await asyncio.wait(tuple(self.tasks))
for task in done:
self.tasks.discard(task)
# We waited for ALL_COMPLETED (default value of 'when' arg to
# asyncio.wait), so all tasks should be completed. If that's not the
# case, something's seriously wrong.
assert len(pending) == 0
def open_nursery() -> _Nursery:
return _Nursery()
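# Usage sketch (editor's addition), mirroring trio's nursery API:
#     async def parent():
#         async with open_nursery() as nursery:
#             nursery.start_soon(child)  # child: a zero-argument coroutine function
#         # __aexit__ blocks here until every started task has completed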
|
PypiClean
|
/nice_tools-0.0.6-py3-none-any.whl/nice_tools/http_base_client.py
|
import typing as t
from abc import ABC, abstractmethod
import json
import requests
import aiohttp
import asyncio
__all__ = [
'BaseClient',
]
class APIException(Exception):
def __init__(self, response: t.Union[requests.Response, aiohttp.ClientResponse], status_code: int, text: str):
self.code = 0
self.result = None  # default, so __str__ works even when the body is not JSON
try:
json_res = json.loads(text)
except ValueError:
self.message = 'Invalid JSON error message from Site: {}'.format(text)
else:
self.code = json_res.get('code', 0)
self.message = json_res.get('message', text)
self.result = json_res.get('result')
self.status_code = status_code
self.response = response
self.request = getattr(response, 'request', None)
def __str__(self): # pragma: no cover
return 'APIError(code=%s): %s | %s' % (self.code, self.message, self.result)
class RequestException(Exception):
def __init__(self, message):
self.message = message
def __str__(self):
return 'RequestException: %s' % self.message
class BaseClient(ABC):
API_URL = 'https://api.wallex.ir'
PUBLIC_API_VERSION = 'v1'
REQUEST_TIMEOUT: float = 10
@abstractmethod
def __init__(
self, requests_params: t.Optional[t.Dict[str, str]] = None,
):
self._requests_params = requests_params
self.session = self._init_session()
@staticmethod
def _get_kwargs(locals_: t.Dict, del_keys: t.List[str] = None, del_nones: bool = False) -> t.Dict:
_del_keys = ['self', 'cls']
if del_keys is not None:
_del_keys.extend(del_keys)
if del_nones is True:
return {key: value for key, value in locals_.items() if (key not in _del_keys) and (value is not None)}
return {key: value for key, value in locals_.items() if key not in _del_keys}
@staticmethod
def _get_headers() -> t.Dict:
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
}
return headers
def _create_api_uri(self, path: str, version: str = PUBLIC_API_VERSION) -> str:
if version is None or version.isspace() or version == '':
return self.API_URL + '/' + path
return self.API_URL + '/' + version + '/' + path
def _get_request_kwargs(self, method, signed: bool, **kwargs) -> t.Dict:
kwargs['timeout'] = self.REQUEST_TIMEOUT
if self._requests_params:
kwargs.update(self._requests_params)
data = kwargs.get('data', None)
if data and isinstance(data, dict):
kwargs['data'] = data
if 'requests_params' in kwargs['data']:
kwargs.update(kwargs['data']['requests_params'])
del (kwargs['data']['requests_params'])
if signed is True:
headers = kwargs.get('headers', {})
kwargs['headers'] = headers
if data and method == 'get':
# build the query string from (key, value) pairs
pairs = data.items() if isinstance(data, dict) else data
kwargs['params'] = '&'.join('%s=%s' % (k, v) for k, v in pairs)
del (kwargs['data'])
return kwargs
@abstractmethod
def _init_session(self) -> t.Union[requests.Session, aiohttp.ClientSession]:
raise NotImplementedError('_init_session not implemented')
@abstractmethod
def _request(self, method: str, uri: str, signed: bool, **kwargs):
raise NotImplementedError('_request not implemented')
@staticmethod
@abstractmethod
def _handle_response(response: t.Union[requests.Response, aiohttp.ClientResponse]):
raise NotImplementedError('_handle_response not implemented')
@abstractmethod
def _request_api(
self, method: str, path: str, signed: bool = False, version=PUBLIC_API_VERSION, **kwargs
):
raise NotImplementedError('_request_api not implemented')
@abstractmethod
def _get(self, path, signed=False, version=PUBLIC_API_VERSION, **kwargs) -> t.Dict:
raise NotImplementedError('_get not implemented')
@abstractmethod
def _post(self, path, signed=False, version=PUBLIC_API_VERSION, **kwargs) -> t.Dict:
raise NotImplementedError('_post not implemented')
@abstractmethod
def _put(self, path, signed=False, version=PUBLIC_API_VERSION, **kwargs) -> t.Dict:
raise NotImplementedError('_put not implemented')
@abstractmethod
def _delete(self, path, signed=False, version=PUBLIC_API_VERSION, **kwargs) -> t.Dict:
raise NotImplementedError('_delete not implemented')
@abstractmethod
def close_connection(self):
raise NotImplementedError('close_connection not implemented')
class SyncClient(BaseClient):
def __init__(self):
super().__init__()
def _init_session(self) -> requests.Session:
return requests.Session()
@staticmethod
def _handle_response(response: requests.Response):
if not (200 <= response.status_code < 300):
raise APIException(response, response.status_code, response.text)
try:
return response.json()
except ValueError:
raise RequestException('Invalid Response: %s' % response.text)
def _request(self, method: str, uri: str, signed: bool, **kwargs) -> t.Dict:
kwargs = self._get_request_kwargs(method, signed, **kwargs)
self.response = getattr(self.session, method)(uri, **kwargs)
return self._handle_response(self.response)
def _request_api(
self, method: str, path: str, signed: bool = False, version=BaseClient.PUBLIC_API_VERSION, **kwargs
) -> t.Dict:
uri = self._create_api_uri(path, version)
return self._request(method, uri, signed, **kwargs)
def _get(self, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
return self._request_api('get', path, signed, version, **kwargs)
def _post(self, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
return self._request_api('post', path, signed, version, **kwargs)
def _put(self, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
return self._request_api('put', path, signed, version, **kwargs)
def _delete(self, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
return self._request_api('delete', path, signed, version, **kwargs)
def close_connection(self):
self.session.close()
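# Usage sketch (editor's addition; 'markets' is a hypothetical endpoint, not
# defined by this module):
#     client = SyncClient()
#     data = client._get('markets')  # GET https://api.wallex.ir/v1/markets
#     client.close_connection()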
class AsyncClient(BaseClient):
def __init__(self, loop: t.Optional[asyncio.AbstractEventLoop] = None):
self.loop = loop or asyncio.get_event_loop()
super().__init__()
# @classmethod
# async def create(cls, loop: t.Optional[asyncio.AbstractEventLoop] = None):
# return cls(loop)
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close_connection()
return False
def _init_session(self) -> aiohttp.ClientSession:
session = aiohttp.ClientSession(
loop=self.loop,
headers=self._get_headers()
)
return session
@staticmethod
async def _handle_response(response: aiohttp.ClientResponse) -> t.Dict:
if not (200 <= response.status < 300):
raise APIException(response, response.status, await response.text())
try:
return await response.json()
except ValueError:
txt = await response.text()
raise RequestException(f'Invalid Response: {txt}')
async def _request(self, method, uri: str, signed: bool, **kwargs) -> t.Dict:
kwargs = self._get_request_kwargs(method, signed, **kwargs)
async with getattr(self.session, method)(uri, **kwargs) as response:
self.response = response
return await self._handle_response(response)
async def _request_api(self, method, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
uri = self._create_api_uri(path, version)
return await self._request(method, uri, signed, **kwargs)
async def _get(self, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
return await self._request_api('get', path, signed, version, **kwargs)
async def _post(self, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
return await self._request_api('post', path, signed, version, **kwargs)
async def _put(self, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
return await self._request_api('put', path, signed, version, **kwargs)
async def _delete(self, path, signed=False, version=BaseClient.PUBLIC_API_VERSION, **kwargs) -> t.Dict:
return await self._request_api('delete', path, signed, version, **kwargs)
async def close_connection(self):
await self.session.close()
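# Usage sketch (editor's addition; 'markets' is a hypothetical endpoint).
# AsyncClient is an async context manager, so the session is closed on exit:
#     async def demo():
#         async with AsyncClient() as client:
#             data = await client._get('markets')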
|
PypiClean
|
/huey-2.4.5.tar.gz/huey-2.4.5/docs/_build/html/_static/searchtools.js
|
if (!Scorer) {
/**
* Simple result scoring code.
*/
var Scorer = {
// Implement the following function to further tweak the score for each result
// The function takes a result array [filename, title, anchor, descr, score]
// and returns the new score.
/*
score: function(result) {
return result[4];
},
*/
// query matches the full name of an object
objNameMatch: 11,
// or matches in the last dotted part of the object name
objPartialMatch: 6,
// Additive scores depending on the priority of the object
objPrio: {0: 15, // used to be importantResults
1: 5, // used to be objectResults
2: -5}, // used to be unimportantResults
// Used when the priority is not in the mapping.
objPrioDefault: 0,
// query found in title
title: 15,
// query found in terms
term: 5
};
}
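// Example (editor's addition): to customize ranking, define a complete Scorer
// *before* this script loads; the guard above then keeps your object. E.g.,
// doubling every default score:
//   var Scorer = {score: function(r) { return r[4] * 2; },
//                 objNameMatch: 11, objPartialMatch: 6,
//                 objPrio: {0: 15, 1: 5, 2: -5}, objPrioDefault: 0,
//                 title: 15, term: 5};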
if (!splitQuery) {
function splitQuery(query) {
return query.split(/\s+/);
}
}
/**
* Search Module
*/
var Search = {
_index : null,
_queued_query : null,
_pulse_status : -1,
init : function() {
var params = $.getQueryParameters();
if (params.q) {
var query = params.q[0];
$('input[name="q"]')[0].value = query;
this.performSearch(query);
}
},
loadIndex : function(url) {
$.ajax({type: "GET", url: url, data: null,
dataType: "script", cache: true,
complete: function(jqxhr, textstatus) {
if (textstatus != "success") {
document.getElementById("searchindexloader").src = url;
}
}});
},
setIndex : function(index) {
var q;
this._index = index;
if ((q = this._queued_query) !== null) {
this._queued_query = null;
Search.query(q);
}
},
hasIndex : function() {
return this._index !== null;
},
deferQuery : function(query) {
this._queued_query = query;
},
stopPulse : function() {
this._pulse_status = 0;
},
startPulse : function() {
if (this._pulse_status >= 0)
return;
function pulse() {
var i;
Search._pulse_status = (Search._pulse_status + 1) % 4;
var dotString = '';
for (i = 0; i < Search._pulse_status; i++)
dotString += '.';
Search.dots.text(dotString);
if (Search._pulse_status > -1)
window.setTimeout(pulse, 500);
}
pulse();
},
/**
* perform a search for something (or wait until index is loaded)
*/
performSearch : function(query) {
// create the required interface elements
this.out = $('#search-results');
this.title = $('<h2>' + _('Searching') + '</h2>').appendTo(this.out);
this.dots = $('<span></span>').appendTo(this.title);
this.status = $('<p style="display: none"></p>').appendTo(this.out);
this.output = $('<ul class="search"/>').appendTo(this.out);
$('#search-progress').text(_('Preparing search...'));
this.startPulse();
// index already loaded, the browser was quick!
if (this.hasIndex())
this.query(query);
else
this.deferQuery(query);
},
/**
* execute search (requires search index to be loaded)
*/
query : function(query) {
var i;
var stopwords = DOCUMENTATION_OPTIONS.SEARCH_LANGUAGE_STOP_WORDS;
// stem the searchterms and add them to the correct list
var stemmer = new Stemmer();
var searchterms = [];
var excluded = [];
var hlterms = [];
var tmp = splitQuery(query);
var objectterms = [];
for (i = 0; i < tmp.length; i++) {
if (tmp[i] !== "") {
objectterms.push(tmp[i].toLowerCase());
}
if ($u.indexOf(stopwords, tmp[i].toLowerCase()) != -1 || tmp[i].match(/^\d+$/) ||
tmp[i] === "") {
// skip this "word"
continue;
}
// stem the word
var word = stemmer.stemWord(tmp[i].toLowerCase());
// prevent stemmer from cutting word smaller than two chars
if(word.length < 3 && tmp[i].length >= 3) {
word = tmp[i];
}
var toAppend;
// select the correct list
if (word[0] == '-') {
toAppend = excluded;
word = word.substr(1);
}
else {
toAppend = searchterms;
hlterms.push(tmp[i].toLowerCase());
}
// only add if not already in the list
if (!$u.contains(toAppend, word))
toAppend.push(word);
}
var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
// console.debug('SEARCH: searching for:');
// console.info('required: ', searchterms);
// console.info('excluded: ', excluded);
// prepare search
var terms = this._index.terms;
var titleterms = this._index.titleterms;
// array of [filename, title, anchor, descr, score]
var results = [];
$('#search-progress').empty();
// lookup as object
for (i = 0; i < objectterms.length; i++) {
var others = [].concat(objectterms.slice(0, i),
objectterms.slice(i+1, objectterms.length));
results = results.concat(this.performObjectSearch(objectterms[i], others));
}
// lookup as search terms in fulltext
results = results.concat(this.performTermsSearch(searchterms, excluded, terms, titleterms));
// let the scorer override scores with a custom scoring function
if (Scorer.score) {
for (i = 0; i < results.length; i++)
results[i][4] = Scorer.score(results[i]);
}
// now sort the results by score (in opposite order of appearance, since the
// display function below uses pop() to retrieve items) and then
// alphabetically
results.sort(function(a, b) {
var left = a[4];
var right = b[4];
if (left > right) {
return 1;
} else if (left < right) {
return -1;
} else {
// same score: sort alphabetically
left = a[1].toLowerCase();
right = b[1].toLowerCase();
return (left > right) ? -1 : ((left < right) ? 1 : 0);
}
});
// for debugging
//Search.lastresults = results.slice(); // a copy
//console.info('search results:', Search.lastresults);
// print the results
var resultCount = results.length;
function displayNextItem() {
// results left, load the summary and display it
if (results.length) {
var item = results.pop();
var listItem = $('<li style="display:none"></li>');
if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') {
// dirhtml builder
var dirname = item[0] + '/';
if (dirname.match(/\/index\/$/)) {
dirname = dirname.substring(0, dirname.length-6);
} else if (dirname == 'index/') {
dirname = '';
}
listItem.append($('<a/>').attr('href',
DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
highlightstring + item[2]).html(item[1]));
} else {
// normal html builders
listItem.append($('<a/>').attr('href',
item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
highlightstring + item[2]).html(item[1]));
}
if (item[3]) {
listItem.append($('<span> (' + item[3] + ')</span>'));
Search.output.append(listItem);
listItem.slideDown(5, function() {
displayNextItem();
});
} else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
var suffix = DOCUMENTATION_OPTIONS.SOURCELINK_SUFFIX;
if (suffix === undefined) {
suffix = '.txt';
}
$.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' + item[5] + (item[5].slice(-suffix.length) === suffix ? '' : suffix),
dataType: "text",
complete: function(jqxhr, textstatus) {
var data = jqxhr.responseText;
if (data !== '' && data !== undefined) {
listItem.append(Search.makeSearchSummary(data, searchterms, hlterms));
}
Search.output.append(listItem);
listItem.slideDown(5, function() {
displayNextItem();
});
}});
} else {
// no source available, just display title
Search.output.append(listItem);
listItem.slideDown(5, function() {
displayNextItem();
});
}
}
// search finished, update title and status message
else {
Search.stopPulse();
Search.title.text(_('Search Results'));
if (!resultCount)
Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
else
Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
Search.status.fadeIn(500);
}
}
displayNextItem();
},
/**
* search for object names
*/
performObjectSearch : function(object, otherterms) {
var filenames = this._index.filenames;
var docnames = this._index.docnames;
var objects = this._index.objects;
var objnames = this._index.objnames;
var titles = this._index.titles;
var i;
var results = [];
for (var prefix in objects) {
for (var name in objects[prefix]) {
var fullname = (prefix ? prefix + '.' : '') + name;
if (fullname.toLowerCase().indexOf(object) > -1) {
var score = 0;
var parts = fullname.split('.');
// check for different match types: exact matches of full name or
// "last name" (i.e. last dotted part)
if (fullname == object || parts[parts.length - 1] == object) {
score += Scorer.objNameMatch;
// matches in last name
} else if (parts[parts.length - 1].indexOf(object) > -1) {
score += Scorer.objPartialMatch;
}
var match = objects[prefix][name];
var objname = objnames[match[1]][2];
var title = titles[match[0]];
// If more than one term searched for, we require other words to be
// found in the name/title/description
if (otherterms.length > 0) {
var haystack = (prefix + ' ' + name + ' ' +
objname + ' ' + title).toLowerCase();
var allfound = true;
for (i = 0; i < otherterms.length; i++) {
if (haystack.indexOf(otherterms[i]) == -1) {
allfound = false;
break;
}
}
if (!allfound) {
continue;
}
}
var descr = objname + _(', in ') + title;
var anchor = match[3];
if (anchor === '')
anchor = fullname;
else if (anchor == '-')
anchor = objnames[match[1]][1] + '-' + fullname;
// add custom score for some objects according to scorer
if (Scorer.objPrio.hasOwnProperty(match[2])) {
score += Scorer.objPrio[match[2]];
} else {
score += Scorer.objPrioDefault;
}
results.push([docnames[match[0]], fullname, '#'+anchor, descr, score, filenames[match[0]]]);
}
}
}
return results;
},
/**
* search for full-text terms in the index
*/
performTermsSearch : function(searchterms, excluded, terms, titleterms) {
var docnames = this._index.docnames;
var filenames = this._index.filenames;
var titles = this._index.titles;
var i, j, file;
var fileMap = {};
var scoreMap = {};
var results = [];
// perform the search on the required terms
for (i = 0; i < searchterms.length; i++) {
var word = searchterms[i];
var files = [];
var _o = [
{files: terms[word], score: Scorer.term},
{files: titleterms[word], score: Scorer.title}
];
// no match but word was a required one
if ($u.every(_o, function(o){return o.files === undefined;})) {
break;
}
// found search word in contents
$u.each(_o, function(o) {
var _files = o.files;
if (_files === undefined)
return
if (_files.length === undefined)
_files = [_files];
files = files.concat(_files);
// set score for the word in each file to Scorer.term
for (j = 0; j < _files.length; j++) {
file = _files[j];
if (!(file in scoreMap))
scoreMap[file] = {}
scoreMap[file][word] = o.score;
}
});
// create the mapping
for (j = 0; j < files.length; j++) {
file = files[j];
if (file in fileMap)
fileMap[file].push(word);
else
fileMap[file] = [word];
}
}
// now check if the files don't contain excluded terms
for (file in fileMap) {
var valid = true;
// check if all requirements are matched
if (fileMap[file].length != searchterms.length)
continue;
// ensure that none of the excluded terms is in the search result
for (i = 0; i < excluded.length; i++) {
if (terms[excluded[i]] == file ||
titleterms[excluded[i]] == file ||
$u.contains(terms[excluded[i]] || [], file) ||
$u.contains(titleterms[excluded[i]] || [], file)) {
valid = false;
break;
}
}
// if we have still a valid result we can add it to the result list
if (valid) {
// select one (max) score for the file.
// for better ranking, we should calculate ranking by using words statistics like basic tf-idf...
var score = $u.max($u.map(fileMap[file], function(w){return scoreMap[file][w]}));
results.push([docnames[file], titles[file], '', null, score, filenames[file]]);
}
}
return results;
},
/**
* helper function to return a node containing the
* search summary for a given text. keywords is a list
* of stemmed words, hlwords is the list of normal, unstemmed
* words. the first one is used to find the occurrence, the
* latter for highlighting it.
*/
makeSearchSummary : function(text, keywords, hlwords) {
var textLower = text.toLowerCase();
var start = 0;
$.each(keywords, function() {
var i = textLower.indexOf(this.toLowerCase());
if (i > -1)
start = i;
});
start = Math.max(start - 120, 0);
var excerpt = ((start > 0) ? '...' : '') +
$.trim(text.substr(start, 240)) +
((start + 240 < text.length) ? '...' : '');
var rv = $('<div class="context"></div>').text(excerpt);
$.each(hlwords, function() {
rv = rv.highlightText(this, 'highlighted');
});
return rv;
}
};
$(document).ready(function() {
Search.init();
});
|
PypiClean
|
/paper_trading-1.2.2-py3-none-any.whl/paper_trading/app/static/js/fuelux/fuelux.js
|
+function ($) { "use strict";
// CHECKBOX CONSTRUCTOR AND PROTOTYPE
var Checkbox = function (element, options) {
this.$element = $(element);
this.options = $.extend({}, $.fn.checkbox.defaults, options);
// cache elements
this.$label = this.$element.parent();
this.$icon = this.$label.find('i');
this.$chk = this.$label.find('input[type=checkbox]');
// set default state
this.setState(this.$chk);
// handle events
this.$chk.on('change', $.proxy(this.itemchecked, this));
};
Checkbox.prototype = {
constructor: Checkbox,
setState: function ($chk) {
var checked = $chk.is(':checked');
var disabled = $chk.is(':disabled');
// reset classes
this.$icon.removeClass('checked').removeClass('disabled');
// set state of checkbox
if (checked === true) {
this.$icon.addClass('checked');
}
if (disabled === true) {
this.$icon.addClass('disabled');
}
},
enable: function () {
this.$chk.attr('disabled', false);
this.$icon.removeClass('disabled');
},
disable: function () {
this.$chk.attr('disabled', true);
this.$icon.addClass('disabled');
},
toggle: function () {
this.$chk.click();
},
itemchecked: function (e) {
var chk = $(e.target);
this.setState(chk);
}
};
// CHECKBOX PLUGIN DEFINITION
$.fn.checkbox = function (option, value) {
var methodReturn;
var $set = this.each(function () {
var $this = $(this);
var data = $this.data('checkbox');
var options = typeof option === 'object' && option;
if (!data) $this.data('checkbox', (data = new Checkbox(this, options)));
if (typeof option === 'string') methodReturn = data[option](value);
});
return (methodReturn === undefined) ? $set : methodReturn;
};
$.fn.checkbox.defaults = {};
$.fn.checkbox.Constructor = Checkbox;
// CHECKBOX DATA-API
$(function () {
$(window).on('load', function () {
//$('i.checkbox').each(function () {
$('.checkbox-custom > input[type=checkbox]').each(function () {
var $this = $(this);
if ($this.data('checkbox')) return;
$this.checkbox($this.data());
});
});
});
}(window.jQuery);
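// Usage sketch (editor's addition; '#myChk' is a hypothetical id). The plugin
// attaches to the <input type=checkbox>, whose parent label must contain an
// <i> element for the custom icon:
//   $('#myChk').checkbox();           // explicit init (the data-api also initializes on load)
//   $('#myChk').checkbox('toggle');   // invoke a prototype method by name
//   $('#myChk').checkbox('disable');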
/*
* Fuel UX Utilities
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// custom case-insensitive match expression
function fuelTextExactCI(elem, text) {
return (elem.textContent || elem.innerText || $(elem).text() || '').toLowerCase() === (text || '').toLowerCase();
}
$.expr[':'].fuelTextExactCI = $.expr.createPseudo ?
$.expr.createPseudo(function (text) {
return function (elem) {
return fuelTextExactCI(elem, text);
};
}) :
function (elem, i, match) {
return fuelTextExactCI(elem, match[3]);
};
}(window.jQuery);
/*
* Fuel UX Combobox
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// COMBOBOX CONSTRUCTOR AND PROTOTYPE
var Combobox = function (element, options) {
this.$element = $(element);
this.options = $.extend({}, $.fn.combobox.defaults, options);
this.$element.on('click', 'a', $.proxy(this.itemclicked, this));
this.$element.on('change', 'input', $.proxy(this.inputchanged, this));
this.$input = this.$element.find('input');
this.$button = this.$element.find('.btn');
// set default selection
this.setDefaultSelection();
};
Combobox.prototype = {
constructor: Combobox,
selectedItem: function () {
var item = this.$selectedItem;
var data = {};
if (item) {
var txt = this.$selectedItem.text();
data = $.extend({ text: txt }, this.$selectedItem.data());
}
else {
data = { text: this.$input.val()};
}
return data;
},
selectByText: function (text) {
var selector = 'li:fuelTextExactCI(' + text + ')';
this.selectBySelector(selector);
},
selectByValue: function (value) {
var selector = 'li[data-value="' + value + '"]';
this.selectBySelector(selector);
},
selectByIndex: function (index) {
// zero-based index
var selector = 'li:eq(' + index + ')';
this.selectBySelector(selector);
},
selectBySelector: function (selector) {
var $item = this.$element.find(selector);
if (typeof $item[0] !== 'undefined') {
this.$selectedItem = $item;
this.$input.val(this.$selectedItem.text());
}
else {
this.$selectedItem = null;
}
},
setDefaultSelection: function () {
var selector = 'li[data-selected=true]:first';
var item = this.$element.find(selector);
if (item.length > 0) {
// select by data-attribute
this.selectBySelector(selector);
item.removeData('selected');
item.removeAttr('data-selected');
}
},
enable: function () {
this.$input.removeAttr('disabled');
this.$button.removeClass('disabled');
},
disable: function () {
this.$input.attr('disabled', true);
this.$button.addClass('disabled');
},
itemclicked: function (e) {
this.$selectedItem = $(e.target).parent();
// set input text and trigger input change event marked as synthetic
this.$input.val(this.$selectedItem.text()).trigger('change', { synthetic: true });
// pass object including text and any data-attributes
// to onchange event
var data = this.selectedItem();
// trigger changed event
this.$element.trigger('changed', data);
e.preventDefault();
},
inputchanged: function (e, extra) {
// skip processing for internally-generated synthetic event
// to avoid double processing
if (extra && extra.synthetic) return;
var val = $(e.target).val();
this.selectByText(val);
// find match based on input
// if no match, pass the input value
var data = this.selectedItem();
if (data.text.length === 0) {
data = { text: val };
}
// trigger changed event
this.$element.trigger('changed', data);
}
};
// COMBOBOX PLUGIN DEFINITION
$.fn.combobox = function (option, value) {
var methodReturn;
var $set = this.each(function () {
var $this = $(this);
var data = $this.data('combobox');
var options = typeof option === 'object' && option;
if (!data) $this.data('combobox', (data = new Combobox(this, options)));
if (typeof option === 'string') methodReturn = data[option](value);
});
return (methodReturn === undefined) ? $set : methodReturn;
};
$.fn.combobox.defaults = {};
$.fn.combobox.Constructor = Combobox;
// COMBOBOX DATA-API
$(function () {
$(window).on('load', function () {
$('.combobox').each(function () {
var $this = $(this);
if ($this.data('combobox')) return;
$this.combobox($this.data());
});
});
$('body').on('mousedown.combobox.data-api', '.combobox', function (e) {
var $this = $(this);
if ($this.data('combobox')) return;
$this.combobox($this.data());
});
});
}(window.jQuery);
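// Usage sketch (editor's addition; '#myCombobox' is a hypothetical id):
//   $('#myCombobox').combobox();                     // explicit init
//   $('#myCombobox').combobox('selectByValue', '2'); // picks <li data-value="2">
//   $('#myCombobox').on('changed', function (e, data) {
//     console.log(data.text);                        // text plus any data-attributes
//   });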
/*
* Fuel UX Datagrid
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// Relates to thead .sorted styles in datagrid.less
var SORTED_HEADER_OFFSET = 22;
// DATAGRID CONSTRUCTOR AND PROTOTYPE
var Datagrid = function (element, options) {
this.$element = $(element);
this.$thead = this.$element.find('thead');
this.$tfoot = this.$element.find('tfoot');
this.$footer = this.$element.find('tfoot th');
this.$footerchildren = this.$footer.children().show().css('visibility', 'hidden');
this.$topheader = this.$element.find('thead th');
this.$searchcontrol = this.$element.find('.datagrid-search');
this.$filtercontrol = this.$element.find('.filter');
this.$pagesize = this.$element.find('.grid-pagesize');
this.$pageinput = this.$element.find('.grid-pager input');
this.$pagedropdown = this.$element.find('.grid-pager .dropdown-menu');
this.$prevpagebtn = this.$element.find('.grid-prevpage');
this.$nextpagebtn = this.$element.find('.grid-nextpage');
this.$pageslabel = this.$element.find('.grid-pages');
this.$countlabel = this.$element.find('.grid-count');
this.$startlabel = this.$element.find('.grid-start');
this.$endlabel = this.$element.find('.grid-end');
this.$tbody = $('<tbody>').insertAfter(this.$thead);
this.$colheader = $('<tr>').appendTo(this.$thead);
this.options = $.extend(true, {}, $.fn.datagrid.defaults, options);
// Shim until v3 -- account for FuelUX select or native select for page size:
if (this.$pagesize.hasClass('select')) {
this.options.dataOptions.pageSize = parseInt(this.$pagesize.select('selectedItem').value, 10);
} else {
this.options.dataOptions.pageSize = parseInt(this.$pagesize.val(), 10);
}
// Shim until v3 -- account for older search class:
if (this.$searchcontrol.length <= 0) {
this.$searchcontrol = this.$element.find('.search');
}
this.columns = this.options.dataSource.columns();
this.$nextpagebtn.on('click', $.proxy(this.next, this));
this.$prevpagebtn.on('click', $.proxy(this.previous, this));
this.$searchcontrol.on('searched cleared', $.proxy(this.searchChanged, this));
this.$filtercontrol.on('changed', $.proxy(this.filterChanged, this));
this.$colheader.on('click', 'th', $.proxy(this.headerClicked, this));
if(this.$pagesize.hasClass('select')) {
this.$pagesize.on('changed', $.proxy(this.pagesizeChanged, this));
} else {
this.$pagesize.on('change', $.proxy(this.pagesizeChanged, this));
}
this.$pageinput.on('change', $.proxy(this.pageChanged, this));
this.renderColumns();
if (this.options.stretchHeight) this.initStretchHeight();
this.renderData();
};
Datagrid.prototype = {
constructor: Datagrid,
renderColumns: function () {
var self = this;
this.$footer.attr('colspan', this.columns.length);
this.$topheader.attr('colspan', this.columns.length);
var colHTML = '';
$.each(this.columns, function (index, column) {
colHTML += '<th data-property="' + column.property + '"';
if (column.sortable) colHTML += ' class="sortable"';
colHTML += '>' + column.label + '</th>';
});
self.$colheader.append(colHTML);
},
updateColumns: function ($target, direction) {
this._updateColumns(this.$colheader, $target, direction);
if (this.$sizingHeader) {
this._updateColumns(this.$sizingHeader, this.$sizingHeader.find('th').eq($target.index()), direction);
}
},
_updateColumns: function ($header, $target, direction) {
var className = (direction === 'asc') ? 'icon-chevron-up' : 'icon-chevron-down';
$header.find('i.datagrid-sort').remove();
$header.find('th').removeClass('sorted');
$('<i>').addClass(className + ' datagrid-sort').appendTo($target);
$target.addClass('sorted');
},
updatePageDropdown: function (data) {
var pageHTML = '';
for (var i = 1; i <= data.pages; i++) {
pageHTML += '<li><a>' + i + '</a></li>';
}
this.$pagedropdown.html(pageHTML);
},
updatePageButtons: function (data) {
if (data.page === 1) {
this.$prevpagebtn.attr('disabled', 'disabled');
} else {
this.$prevpagebtn.removeAttr('disabled');
}
if (data.page === data.pages) {
this.$nextpagebtn.attr('disabled', 'disabled');
} else {
this.$nextpagebtn.removeAttr('disabled');
}
},
renderData: function () {
var self = this;
this.$tbody.html(this.placeholderRowHTML(this.options.loadingHTML));
this.options.dataSource.data(this.options.dataOptions, function (data) {
var itemdesc = (data.count === 1) ? self.options.itemText : self.options.itemsText;
var rowHTML = '';
self.$footerchildren.css('visibility', function () {
return (data.count > 0) ? 'visible' : 'hidden';
});
self.$pageinput.val(data.page);
self.$pageslabel.text(data.pages);
self.$countlabel.text(data.count + ' ' + itemdesc);
self.$startlabel.text(data.start);
self.$endlabel.text(data.end);
self.updatePageDropdown(data);
self.updatePageButtons(data);
$.each(data.data, function (index, row) {
rowHTML += '<tr>';
$.each(self.columns, function (index, column) {
rowHTML += '<td>' + row[column.property] + '</td>';
});
rowHTML += '</tr>';
});
if (!rowHTML) rowHTML = self.placeholderRowHTML('0 ' + self.options.itemsText);
self.$tbody.html(rowHTML);
self.stretchHeight();
self.$element.trigger('loaded');
});
},
placeholderRowHTML: function (content) {
return '<tr><td style="text-align:center;padding:20px;border-bottom:none;" colspan="' +
this.columns.length + '">' + content + '</td></tr>';
},
headerClicked: function (e) {
var $target = $(e.target);
if (!$target.hasClass('sortable')) return;
var direction = this.options.dataOptions.sortDirection;
var sort = this.options.dataOptions.sortProperty;
var property = $target.data('property');
if (sort === property) {
this.options.dataOptions.sortDirection = (direction === 'asc') ? 'desc' : 'asc';
} else {
this.options.dataOptions.sortDirection = 'asc';
this.options.dataOptions.sortProperty = property;
}
this.options.dataOptions.pageIndex = 0;
this.updateColumns($target, this.options.dataOptions.sortDirection);
this.renderData();
},
pagesizeChanged: function (e, pageSize) {
if(pageSize) {
this.options.dataOptions.pageSize = parseInt(pageSize.value, 10);
} else {
this.options.dataOptions.pageSize = parseInt($(e.target).val(), 10);
}
this.options.dataOptions.pageIndex = 0;
this.renderData();
},
pageChanged: function (e) {
var pageRequested = parseInt($(e.target).val(), 10);
pageRequested = (isNaN(pageRequested)) ? 1 : pageRequested;
var maxPages = this.$pageslabel.text();
this.options.dataOptions.pageIndex =
(pageRequested > maxPages) ? maxPages - 1 : pageRequested - 1;
this.renderData();
},
searchChanged: function (e, search) {
this.options.dataOptions.search = search;
this.options.dataOptions.pageIndex = 0;
this.renderData();
},
filterChanged: function (e, filter) {
this.options.dataOptions.filter = filter;
this.options.dataOptions.pageIndex = 0;
this.renderData();
},
previous: function () {
this.options.dataOptions.pageIndex--;
this.renderData();
},
next: function () {
this.options.dataOptions.pageIndex++;
this.renderData();
},
reload: function () {
this.options.dataOptions.pageIndex = 0;
this.renderData();
},
initStretchHeight: function () {
this.$gridContainer = this.$element.parent();
this.$element.wrap('<div class="datagrid-stretch-wrapper">');
this.$stretchWrapper = this.$element.parent();
this.$headerTable = $('<table>').attr('class', this.$element.attr('class'));
this.$footerTable = this.$headerTable.clone();
this.$headerTable.prependTo(this.$gridContainer).addClass('datagrid-stretch-header');
this.$thead.detach().appendTo(this.$headerTable);
this.$sizingHeader = this.$thead.clone();
this.$sizingHeader.find('tr:first').remove();
this.$footerTable.appendTo(this.$gridContainer).addClass('datagrid-stretch-footer');
this.$tfoot.detach().appendTo(this.$footerTable);
},
stretchHeight: function () {
if (!this.$gridContainer) return;
this.setColumnWidths();
var targetHeight = this.$gridContainer.height();
var headerHeight = this.$headerTable.outerHeight();
var footerHeight = this.$footerTable.outerHeight();
var overhead = headerHeight + footerHeight;
this.$stretchWrapper.height(targetHeight - overhead);
},
setColumnWidths: function () {
if (!this.$sizingHeader) return;
this.$element.prepend(this.$sizingHeader);
var $sizingCells = this.$sizingHeader.find('th');
var columnCount = $sizingCells.length;
function matchSizingCellWidth(i, el) {
if (i === columnCount - 1) return;
var $el = $(el);
var $sourceCell = $sizingCells.eq(i);
var width = $sourceCell.width();
// TD needs extra width to match sorted column header
if ($sourceCell.hasClass('sorted') && $el.prop('tagName') === 'TD') width = width + SORTED_HEADER_OFFSET;
$el.width(width);
}
this.$colheader.find('th').each(matchSizingCellWidth);
this.$tbody.find('tr:first > td').each(matchSizingCellWidth);
this.$sizingHeader.detach();
}
};
// DATAGRID PLUGIN DEFINITION
$.fn.datagrid = function (option) {
return this.each(function () {
var $this = $(this);
var data = $this.data('datagrid');
var options = typeof option === 'object' && option;
if (!data) $this.data('datagrid', (data = new Datagrid(this, options)));
if (typeof option === 'string') data[option]();
});
};
$.fn.datagrid.defaults = {
dataOptions: { pageIndex: 0, pageSize: 10 },
loadingHTML: '<div class="progress progress-striped active" style="width:50%;margin:auto;"><div class="bar" style="width:100%;"></div></div>',
itemsText: 'items',
itemText: 'item'
};
$.fn.datagrid.Constructor = Datagrid;
}(window.jQuery);
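// Usage sketch (editor's addition; names are hypothetical). The required
// dataSource must expose columns() -> [{property, label, sortable}] and
// data(options, callback), where callback receives
// {data, count, page, pages, start, end}:
//   $('#myGrid').datagrid({ dataSource: myDataSource });
//   $('#myGrid').on('loaded', function () { /* rows rendered */ });
//   $('#myGrid').datagrid('reload');   // jump back to page 1 and re-render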
/*
* Fuel UX Pillbox
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// PILLBOX CONSTRUCTOR AND PROTOTYPE
var Pillbox = function (element, options) {
this.$element = $(element);
this.options = $.extend({}, $.fn.pillbox.defaults, options);
this.$element.on('click', 'li', $.proxy(this.itemclicked, this));
};
Pillbox.prototype = {
constructor: Pillbox,
items: function() {
return this.$element.find('li').map(function() {
var $this = $(this);
return $.extend({ text: $this.text() }, $this.data());
}).get();
},
itemclicked: function (e) {
$(e.currentTarget).remove();
e.preventDefault();
}
};
// PILLBOX PLUGIN DEFINITION
$.fn.pillbox = function (option) {
var methodReturn;
var $set = this.each(function () {
var $this = $(this);
var data = $this.data('pillbox');
var options = typeof option === 'object' && option;
if (!data) $this.data('pillbox', (data = new Pillbox(this, options)));
if (typeof option === 'string') methodReturn = data[option]();
});
return (methodReturn === undefined) ? $set : methodReturn;
};
$.fn.pillbox.defaults = {};
$.fn.pillbox.Constructor = Pillbox;
// PILLBOX DATA-API
$(function () {
$('body').on('mousedown.pillbox.data-api', '.pillbox', function (e) {
var $this = $(this);
if ($this.data('pillbox')) return;
$this.pillbox($this.data());
});
});
}(window.jQuery);
/*
* Fuel UX Radio
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// RADIO CONSTRUCTOR AND PROTOTYPE
var Radio = function (element, options) {
this.$element = $(element);
this.options = $.extend({}, $.fn.radio.defaults, options);
// cache elements
this.$label = this.$element.parent();
this.$icon = this.$label.find('i');
this.$radio = this.$label.find('input[type=radio]');
this.groupName = this.$radio.attr('name');
// set default state
this.setState(this.$radio);
// handle events
this.$radio.on('change', $.proxy(this.itemchecked, this));
};
Radio.prototype = {
constructor: Radio,
setState: function ($radio, resetGroupState) {
var checked = $radio.is(':checked');
var disabled = $radio.is(':disabled');
// set state of radio
if (checked === true) {
this.$icon.addClass('checked');
}
if (disabled === true) {
this.$icon.addClass('disabled');
}
},
resetGroup: function () {
// reset all radio buttons in group
$('input[name=' + this.groupName + ']').next().removeClass('checked');
},
enable: function () {
this.$radio.attr('disabled', false);
this.$icon.removeClass('disabled');
},
disable: function () {
this.$radio.attr('disabled', true);
this.$icon.addClass('disabled');
},
itemchecked: function (e) {
var radio = $(e.target);
this.resetGroup();
this.setState(radio);
}
};
// RADIO PLUGIN DEFINITION
$.fn.radio = function (option, value) {
var methodReturn;
var $set = this.each(function () {
var $this = $(this);
var data = $this.data('radio');
var options = typeof option === 'object' && option;
if (!data) $this.data('radio', (data = new Radio(this, options)));
if (typeof option === 'string') methodReturn = data[option](value);
});
return (methodReturn === undefined) ? $set : methodReturn;
};
$.fn.radio.defaults = {};
$.fn.radio.Constructor = Radio;
// RADIO DATA-API
$(function () {
$(window).on('load', function () {
//$('i.radio').each(function () {
$('.radio-custom > input[type=radio]').each(function () {
var $this = $(this);
if ($this.data('radio')) return;
$this.radio($this.data());
});
});
});
}(window.jQuery);
/*
* Fuel UX Search
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// SEARCH CONSTRUCTOR AND PROTOTYPE
var Search = function (element, options) {
this.$element = $(element);
this.options = $.extend({}, $.fn.search.defaults, options);
this.$button = this.$element.find('button')
.on('click', $.proxy(this.buttonclicked, this));
this.$input = this.$element.find('input')
.on('keydown', $.proxy(this.keypress, this))
.on('keyup', $.proxy(this.keypressed, this));
this.$icon = this.$element.find('i');
this.activeSearch = '';
};
Search.prototype = {
constructor: Search,
search: function (searchText) {
this.$icon.attr('class', 'icon-remove');
this.activeSearch = searchText;
this.$element.trigger('searched', searchText);
},
clear: function () {
this.$icon.attr('class', 'icon-search');
this.activeSearch = '';
this.$input.val('');
this.$element.trigger('cleared');
},
action: function () {
var val = this.$input.val();
var inputEmptyOrUnchanged = val === '' || val === this.activeSearch;
if (this.activeSearch && inputEmptyOrUnchanged) {
this.clear();
} else if (val) {
this.search(val);
}
},
buttonclicked: function (e) {
e.preventDefault();
if ($(e.currentTarget).is('.disabled, :disabled')) return;
this.action();
},
keypress: function (e) {
if (e.which === 13) {
e.preventDefault();
}
},
keypressed: function (e) {
var val, inputPresentAndUnchanged;
if (e.which === 13) {
e.preventDefault();
this.action();
} else {
val = this.$input.val();
inputPresentAndUnchanged = val && (val === this.activeSearch);
this.$icon.attr('class', inputPresentAndUnchanged ? 'icon-remove' : 'icon-search');
}
},
disable: function () {
this.$input.attr('disabled', 'disabled');
this.$button.addClass('disabled');
},
enable: function () {
this.$input.removeAttr('disabled');
this.$button.removeClass('disabled');
}
};
// SEARCH PLUGIN DEFINITION
$.fn.search = function (option) {
return this.each(function () {
var $this = $(this);
var data = $this.data('search');
var options = typeof option === 'object' && option;
if (!data) $this.data('search', (data = new Search(this, options)));
if (typeof option === 'string') data[option]();
});
};
$.fn.search.defaults = {};
$.fn.search.Constructor = Search;
// SEARCH DATA-API
$(function () {
$('body').on('mousedown.search.data-api', '.search', function () {
var $this = $(this);
if ($this.data('search')) return;
$this.search($this.data());
});
});
}(window.jQuery);
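// Usage sketch (editor's addition; '#mySearch' is a hypothetical id):
//   $('#mySearch').search();
//   $('#mySearch').on('searched', function (e, text) { console.log('query:', text); });
//   $('#mySearch').on('cleared', function () { console.log('search cleared'); });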
/*
* Fuel UX Spinner
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// SPINNER CONSTRUCTOR AND PROTOTYPE
var Spinner = function (element, options) {
this.$element = $(element);
this.options = $.extend({}, $.fn.spinner.defaults, options);
this.$input = this.$element.find('.spinner-input');
this.$element.on('keyup', this.$input, $.proxy(this.change, this));
if (this.options.hold) {
this.$element.on('mousedown', '.spinner-up', $.proxy(function() { this.startSpin(true); } , this));
this.$element.on('mouseup', '.spinner-up, .spinner-down', $.proxy(this.stopSpin, this));
this.$element.on('mouseout', '.spinner-up, .spinner-down', $.proxy(this.stopSpin, this));
this.$element.on('mousedown', '.spinner-down', $.proxy(function() {this.startSpin(false);} , this));
} else {
this.$element.on('click', '.spinner-up', $.proxy(function() { this.step(true); } , this));
this.$element.on('click', '.spinner-down', $.proxy(function() { this.step(false); }, this));
}
this.switches = {
count: 1,
enabled: true
};
if (this.options.speed === 'medium') {
this.switches.speed = 300;
} else if (this.options.speed === 'fast') {
this.switches.speed = 100;
} else {
this.switches.speed = 500;
}
this.lastValue = null;
this.render();
if (this.options.disabled) {
this.disable();
}
};
Spinner.prototype = {
constructor: Spinner,
render: function () {
this.$input.val(this.options.value);
this.$input.attr('maxlength',(this.options.max + '').split('').length);
},
change: function () {
var newVal = this.$input.val();
if(newVal/1){
this.options.value = newVal/1;
}else{
newVal = newVal.replace(/[^0-9]/g,'');
this.$input.val(newVal);
this.options.value = newVal/1;
}
this.triggerChangedEvent();
},
stopSpin: function () {
clearTimeout(this.switches.timeout);
this.switches.count = 1;
this.triggerChangedEvent();
},
triggerChangedEvent: function () {
var currentValue = this.value();
if (currentValue === this.lastValue) return;
this.lastValue = currentValue;
// Primary changed event
this.$element.trigger('changed', currentValue);
// Undocumented, kept for backward compatibility
this.$element.trigger('change');
},
startSpin: function (type) {
if (!this.options.disabled) {
var divisor = this.switches.count;
if (divisor === 1) {
this.step(type);
divisor = 1;
} else if (divisor < 3){
divisor = 1.5;
} else if (divisor < 8){
divisor = 2.5;
} else {
divisor = 4;
}
this.switches.timeout = setTimeout($.proxy(function() {this.iterator(type);} ,this),this.switches.speed/divisor);
this.switches.count++;
}
},
iterator: function (type) {
this.step(type);
this.startSpin(type);
},
step: function (dir) {
var curValue = this.options.value;
var limValue = dir ? this.options.max : this.options.min;
if ((dir ? curValue < limValue : curValue > limValue)) {
var newVal = curValue + (dir ? 1 : -1) * this.options.step;
if (dir ? newVal > limValue : newVal < limValue) {
this.value(limValue);
} else {
this.value(newVal);
}
}
},
value: function (value) {
if (!isNaN(parseFloat(value)) && isFinite(value)) {
value = parseFloat(value);
this.options.value = value;
this.$input.val(value);
return this;
} else {
return this.options.value;
}
},
disable: function () {
this.options.disabled = true;
this.$input.attr('disabled','');
this.$element.find('button').addClass('disabled');
},
enable: function () {
this.options.disabled = false;
this.$input.removeAttr("disabled");
this.$element.find('button').removeClass('disabled');
}
};
// SPINNER PLUGIN DEFINITION
$.fn.spinner = function (option,value) {
var methodReturn;
var $set = this.each(function () {
var $this = $(this);
var data = $this.data('spinner');
var options = typeof option === 'object' && option;
if (!data) $this.data('spinner', (data = new Spinner(this, options)));
if (typeof option === 'string') methodReturn = data[option](value);
});
return (methodReturn === undefined) ? $set : methodReturn;
};
$.fn.spinner.defaults = {
value: 1,
min: 1,
max: 999,
step: 1,
hold: true,
speed: 'medium',
disabled: false
};
$.fn.spinner.Constructor = Spinner;
// SPINNER DATA-API
$(function () {
$('body').on('mousedown.spinner.data-api', '.spinner', function (e) {
var $this = $(this);
if ($this.data('spinner')) return;
$this.spinner($this.data());
});
});
}(window.jQuery);
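// Usage sketch (editor's addition; '#mySpinner' is a hypothetical id):
//   $('#mySpinner').spinner({ min: 0, max: 10, step: 2 });
//   $('#mySpinner').spinner('value', 4);        // set the current value
//   var v = $('#mySpinner').spinner('value');   // get the current value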
/*
* Fuel UX Select
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// SELECT CONSTRUCTOR AND PROTOTYPE
var Select = function (element, options) {
this.$element = $(element);
this.options = $.extend({}, $.fn.select.defaults, options);
this.$element.on('click', 'a', $.proxy(this.itemclicked, this));
this.$button = this.$element.find('.btn');
this.$label = this.$element.find('.dropdown-label');
this.setDefaultSelection();
if (options.resize === 'auto') {
this.resize();
}
};
Select.prototype = {
constructor: Select,
itemclicked: function (e) {
this.$selectedItem = $(e.target).parent();
this.$label.text(this.$selectedItem.text());
// pass object including text and any data-attributes
// to onchange event
var data = this.selectedItem();
// trigger changed event
this.$element.trigger('changed', data);
e.preventDefault();
},
resize: function() {
var el = $('#selectTextSize')[0];
// create element if it doesn't exist
// used to calculate the length of the longest string
if(!el) {
$('<div/>').attr({id:'selectTextSize'}).appendTo('body');
}
var width = 0;
var newWidth = 0;
// iterate through each item to find longest string
this.$element.find('a').each(function () {
var $this = $(this);
var txt = $this.text();
var $txtSize = $('#selectTextSize');
$txtSize.text(txt);
newWidth = $txtSize.outerWidth();
if(newWidth > width) {
width = newWidth;
}
});
this.$label.width(width);
},
selectedItem: function() {
var txt = this.$selectedItem.text();
return $.extend({ text: txt }, this.$selectedItem.data());
},
selectByText: function(text) {
var selector = 'li a:fuelTextExactCI(' + text + ')';
this.selectBySelector(selector);
},
selectByValue: function(value) {
var selector = 'li[data-value="' + value + '"]';
this.selectBySelector(selector);
},
selectByIndex: function(index) {
// zero-based index
var selector = 'li:eq(' + index + ')';
this.selectBySelector(selector);
},
selectBySelector: function(selector) {
var item = this.$element.find(selector);
this.$selectedItem = item;
this.$label.text(this.$selectedItem.text());
},
setDefaultSelection: function() {
var selector = 'li[data-selected=true]:first';
var item = this.$element.find(selector);
if(item.length === 0) {
// select first item
this.selectByIndex(0);
}
else {
// select by data-attribute
this.selectBySelector(selector);
item.removeData('selected');
item.removeAttr('data-selected');
}
},
enable: function() {
this.$button.removeClass('disabled');
},
disable: function() {
this.$button.addClass('disabled');
}
};
// SELECT PLUGIN DEFINITION
$.fn.select = function (option,value) {
var methodReturn;
var $set = this.each(function () {
var $this = $(this);
var data = $this.data('select');
var options = typeof option === 'object' && option;
if (!data) $this.data('select', (data = new Select(this, options)));
if (typeof option === 'string') methodReturn = data[option](value);
});
return (methodReturn === undefined) ? $set : methodReturn;
};
$.fn.select.defaults = {};
$.fn.select.Constructor = Select;
// SELECT DATA-API
$(function () {
$(window).on('load', function () {
$('.select').each(function () {
var $this = $(this);
if ($this.data('select')) return;
$this.select($this.data());
});
});
$('body').on('mousedown.select.data-api', '.select', function (e) {
var $this = $(this);
if ($this.data('select')) return;
$this.select($this.data());
});
});
}(window.jQuery);
/*
* Fuel UX Tree
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// TREE CONSTRUCTOR AND PROTOTYPE
var Tree = function (element, options) {
this.$element = $(element);
this.options = $.extend({}, $.fn.tree.defaults, options);
this.$element.on('click', '.tree-item', $.proxy( function(ev) { this.selectItem(ev.currentTarget); } ,this));
this.$element.on('click', '.tree-folder-header', $.proxy( function(ev) { this.selectFolder(ev.currentTarget); }, this));
this.render();
};
Tree.prototype = {
constructor: Tree,
render: function () {
this.populate(this.$element);
},
populate: function ($el) {
var self = this;
var loader = $el.parent().find('.tree-loader:eq(0)');
loader.show();
this.options.dataSource.data($el.data(), function (items) {
loader.hide();
$.each( items.data, function(index, value) {
var $entity;
if(value.type === "folder") {
$entity = self.$element.find('.tree-folder:eq(0)').clone().show();
$entity.find('.tree-folder-name').html(value.name);
$entity.find('.tree-loader').html(self.options.loadingHTML);
$entity.find('.tree-folder-header').data(value);
} else if (value.type === "item") {
$entity = self.$element.find('.tree-item:eq(0)').clone().show();
$entity.find('.tree-item-name').html(value.name);
$entity.data(value);
}
if($el.hasClass('tree-folder-header')) {
$el.parent().find('.tree-folder-content:eq(0)').append($entity);
} else {
$el.append($entity);
}
});
self.$element.trigger('loaded');
});
},
selectItem: function (el) {
var $el = $(el);
var $all = this.$element.find('.tree-selected');
var data = [];
if (this.options.multiSelect) {
$.each($all, function(index, value) {
var $val = $(value);
if($val[0] !== $el[0]) {
data.push( $(value).data() );
}
});
} else if ($all[0] !== $el[0]) {
$all.removeClass('tree-selected')
.find('i').removeClass('icon-ok').addClass('tree-dot');
data.push($el.data());
}
if($el.hasClass('tree-selected')) {
$el.removeClass('tree-selected');
$el.find('i').removeClass('icon-ok').addClass('tree-dot');
} else {
$el.addClass ('tree-selected');
$el.find('i').removeClass('tree-dot').addClass('icon-ok');
if (this.options.multiSelect) {
data.push( $el.data() );
}
}
if(data.length) {
this.$element.trigger('selected', {info: data});
}
},
selectFolder: function (el) {
var $el = $(el);
var $par = $el.parent();
if($el.find('.icon-folder-close').length) {
if ($par.find('.tree-folder-content').children().length) {
$par.find('.tree-folder-content:eq(0)').show();
} else {
this.populate( $el );
}
$par.find('.icon-folder-close:eq(0)')
.removeClass('icon-folder-close')
.addClass('icon-folder-open');
this.$element.trigger('opened', $el.data());
} else {
if(this.options.cacheItems) {
$par.find('.tree-folder-content:eq(0)').hide();
} else {
$par.find('.tree-folder-content:eq(0)').empty();
}
$par.find('.icon-folder-open:eq(0)')
.removeClass('icon-folder-open')
.addClass('icon-folder-close');
this.$element.trigger('closed', $el.data());
}
},
selectedItems: function () {
var $sel = this.$element.find('.tree-selected');
var data = [];
$.each($sel, function (index, value) {
data.push($(value).data());
});
return data;
}
};
// TREE PLUGIN DEFINITION
$.fn.tree = function (option, value) {
var methodReturn;
var $set = this.each(function () {
var $this = $(this);
var data = $this.data('tree');
var options = typeof option === 'object' && option;
if (!data) $this.data('tree', (data = new Tree(this, options)));
if (typeof option === 'string') methodReturn = data[option](value);
});
return (methodReturn === undefined) ? $set : methodReturn;
};
$.fn.tree.defaults = {
multiSelect: false,
loadingHTML: '<div>Loading...</div>',
cacheItems: true
};
$.fn.tree.Constructor = Tree;
}(window.jQuery);
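// Usage sketch (editor's addition; names are hypothetical). The dataSource
// must expose data(parentData, callback), where callback receives
// {data: [{name: ..., type: 'folder' | 'item', ...}]}:
//   $('#myTree').tree({ dataSource: myTreeDataSource, multiSelect: true });
//   $('#myTree').on('selected', function (e, info) { console.log(info.info); });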
/*
* Fuel UX Wizard
* https://github.com/ExactTarget/fuelux
*
* Copyright (c) 2012 ExactTarget
* Licensed under the MIT license.
*/
+function ($) { "use strict";
// WIZARD CONSTRUCTOR AND PROTOTYPE
var Wizard = function (element, options) {
var kids;
this.$element = $(element);
this.options = $.extend({}, $.fn.wizard.defaults, options);
this.currentStep = 1;
this.numSteps = this.$element.find('li').length;
this.$prevBtn = this.$element.find('button.btn-prev');
this.$nextBtn = this.$element.find('button.btn-next');
kids = this.$nextBtn.children().detach();
this.nextText = $.trim(this.$nextBtn.text());
this.$nextBtn.append(kids);
// handle events
this.$prevBtn.on('click', $.proxy(this.previous, this));
this.$nextBtn.on('click', $.proxy(this.next, this));
this.$element.on('click', 'li.complete', $.proxy(this.stepclicked, this));
};
Wizard.prototype = {
constructor: Wizard,
setState: function () {
var canMovePrev = (this.currentStep > 1);
var firstStep = (this.currentStep === 1);
var lastStep = (this.currentStep === this.numSteps);
// disable buttons based on current step
this.$prevBtn.attr('disabled', (firstStep === true || canMovePrev === false));
// change button text of last step, if specified
var data = this.$nextBtn.data();
if (data && data.last) {
this.lastText = data.last;
if (typeof this.lastText !== 'undefined') {
// replace text
var text = (lastStep !== true) ? this.nextText : this.lastText;
var kids = this.$nextBtn.children().detach();
this.$nextBtn.text(text).append(kids);
}
}
// reset classes for all steps
var $steps = this.$element.find('li');
$steps.removeClass('active').removeClass('complete');
$steps.find('span.badge').removeClass('badge-info').removeClass('badge-success');
// set class for all previous steps
var prevSelector = 'li:lt(' + (this.currentStep - 1) + ')';
var $prevSteps = this.$element.find(prevSelector);
$prevSteps.addClass('complete');
$prevSteps.find('span.badge').addClass('badge-success');
// set class for current step
var currentSelector = 'li:eq(' + (this.currentStep - 1) + ')';
var $currentStep = this.$element.find(currentSelector);
$currentStep.addClass('active');
$currentStep.find('span.badge').addClass('badge-info');
// set display of target element
var target = $currentStep.data().target;
// Dillon changed this to support multiple wizards
$('.step-pane', $(target).parent()).removeClass('active');
$(target).addClass('active');
this.$element.trigger('changed');
},
stepclicked: function (e) {
var li = $(e.currentTarget);
// Dillon changed this to support multiple wizards
var index = li.parent().find('li').index(li);
var evt = $.Event('stepclick');
this.$element.trigger(evt, {step: index + 1});
if (evt.isDefaultPrevented()) return;
this.currentStep = (index + 1);
this.setState();
},
previous: function () {
var canMovePrev = (this.currentStep > 1);
if (canMovePrev) {
var e = $.Event('change');
this.$element.trigger(e, {step: this.currentStep, direction: 'previous'});
if (e.isDefaultPrevented()) return;
this.currentStep -= 1;
this.setState();
}
},
next: function () {
var canMoveNext = (this.currentStep + 1 <= this.numSteps);
var lastStep = (this.currentStep === this.numSteps);
if (canMoveNext) {
var e = $.Event('change');
this.$element.trigger(e, {step: this.currentStep, direction: 'next'});
if (e.isDefaultPrevented()) return;
this.currentStep += 1;
this.setState();
}
else if (lastStep) {
this.$element.trigger('finished');
}
},
selectedItem: function (val) {
return {
step: this.currentStep
};
}
};
// WIZARD PLUGIN DEFINITION
$.fn.wizard = function (option, value) {
var methodReturn;
var $set = this.each(function () {
var $this = $(this);
var data = $this.data('wizard');
var options = typeof option === 'object' && option;
if (!data) $this.data('wizard', (data = new Wizard(this, options)));
if (typeof option === 'string') methodReturn = data[option](value);
});
return (methodReturn === undefined) ? $set : methodReturn;
};
$.fn.wizard.defaults = {};
$.fn.wizard.Constructor = Wizard;
// WIZARD DATA-API
$(function () {
$('body').on('mousedown.wizard.data-api', '.wizard', function () {
var $this = $(this);
if ($this.data('wizard')) return;
$this.wizard($this.data());
});
});
}(window.jQuery);
|
PypiClean
|
/jupyter_react-0.7.0.tar.gz/jupyter_react-0.7.0/src/components/notebook/NotebookCommands.ts
|
import { ReadonlyPartialJSONObject } from '@lumino/coreutils';
import { CommandRegistry } from "@lumino/commands";
import { SessionContextDialogs } from "@jupyterlab/apputils";
import { CompletionHandler } from "@jupyterlab/completer";
import { NotebookActions, NotebookPanel, NotebookSearchProvider, NotebookTracker } from '@jupyterlab/notebook';
import { SearchDocumentModel, SearchDocumentView } from '@jupyterlab/documentsearch';
import { Widget } from '@lumino/widgets';
import { nullTranslator } from '@jupyterlab/translation';
/**
* The map of command ids used by the notebook.
*/
export const cmdIds = {
invoke: "completer:invoke",
select: "completer:select",
invokeNotebook: "completer:invoke-notebook",
selectNotebook: "completer:select-notebook",
startSearch: "documentsearch:start-search",
findNext: "documentsearch:find-next",
findPrevious: "documentsearch:find-previous",
save: "notebook:save",
interrupt: "notebook:interrupt-kernel",
restart: "notebook:restart-kernel",
switchKernel: "notebook:switch-kernel",
runAndAdvance: "notebook-cells:run-and-advance",
run: "notebook:run-cell",
runAll: "notebook:run-all",
deleteCells: "notebook-cells:delete",
insertAbove: "notebook-cells:insert-above",
insertBelow: "notebook-cells:insert-below",
deleteCell: "notebook-cells:delete",
selectAbove: "notebook-cells:select-above",
selectBelow: "notebook-cells:select-below",
extendAbove: "notebook-cells:extend-above",
extendTop: "notebook-cells:extend-top",
extendBelow: "notebook-cells:extend-below",
extendBottom: "notebook-cells:extend-bottom",
editMode: "notebook:edit-mode",
merge: "notebook-cells:merge",
split: "notebook-cells:split",
commandMode: "notebook:command-mode",
undo: "notebook-cells:undo",
redo: "notebook-cells:redo",
changeCellType: "notebook-cell:change-cell-type",
toCode: "notebook-cell:to-code",
};
export const NotebookCommands = (
commandRegistry: CommandRegistry,
notebookPanel: NotebookPanel,
completerHandler: CompletionHandler,
tracker: NotebookTracker,
path?: string,
): void => {
// Add commands.
commandRegistry.addCommand(cmdIds.invoke, {
label: "Completer: Invoke",
execute: () => completerHandler.invoke(),
});
commandRegistry.addCommand(cmdIds.select, {
label: "Completer: Select",
execute: () => completerHandler.completer.selectActive(),
});
commandRegistry.addCommand(cmdIds.invokeNotebook, {
label: "Invoke Notebook",
execute: () => {
if (notebookPanel.content.activeCell?.model.type === "code") {
return commandRegistry.execute(cmdIds.invoke);
}
},
});
commandRegistry.addCommand(cmdIds.selectNotebook, {
label: "Select Notebook",
execute: () => {
if (notebookPanel.content.activeCell?.model.type === "code") {
return commandRegistry.execute(cmdIds.select);
}
},
});
if (path) {
commandRegistry.addCommand(cmdIds.save, {
label: "Save",
execute: () => notebookPanel.context.save(),
});
}
let searchInstance: SearchDocumentView | undefined;
commandRegistry.addCommand(cmdIds.startSearch, {
label: 'Find…',
execute: () => {
if (!searchInstance) {
const provider = new NotebookSearchProvider(notebookPanel, nullTranslator);
const searchModel = new SearchDocumentModel(provider, 500);
searchInstance = new SearchDocumentView(searchModel);
/**
* Activate the target widget when the search panel is closing
*/
searchInstance.closed.connect(() => {
if (!notebookPanel.isDisposed) {
notebookPanel.activate();
}
});
searchInstance.disposed.connect(() => {
if (!notebookPanel.isDisposed) {
notebookPanel.activate();
}
// find next and previous are now disabled
commandRegistry.notifyCommandChanged();
});
/**
* Dispose resources when the widget is disposed.
*/
notebookPanel.disposed.connect(() => {
searchInstance?.dispose();
searchModel.dispose();
provider.dispose();
});
}
if (!searchInstance.isAttached) {
Widget.attach(searchInstance, notebookPanel.node);
searchInstance.node.style.top = `${
notebookPanel.toolbar.node.getBoundingClientRect().height +
notebookPanel.contentHeader.node.getBoundingClientRect().height
}px`;
if (searchInstance.model.searchExpression) {
searchInstance.model.refresh();
}
}
searchInstance.focusSearchInput();
}
});
commandRegistry.addCommand(cmdIds.findNext, {
label: 'Find Next',
isEnabled: () => !!searchInstance,
execute: async () => {
if (!searchInstance) {
return;
}
await searchInstance.model.highlightNext();
}
});
commandRegistry.addCommand(cmdIds.findPrevious, {
label: 'Find Previous',
isEnabled: () => !!searchInstance,
execute: async () => {
if (!searchInstance) {
return;
}
await searchInstance.model.highlightPrevious();
}
});
commandRegistry.addCommand(cmdIds.interrupt, {
label: "Interrupt",
execute: async () =>
notebookPanel.context.sessionContext.session?.kernel?.interrupt(),
});
const sessionContextDialogs = new SessionContextDialogs();
commandRegistry.addCommand(cmdIds.restart, {
label: "Restart Kernel",
execute: () =>
sessionContextDialogs.restart(notebookPanel.context.sessionContext),
});
commandRegistry.addCommand(cmdIds.switchKernel, {
label: "Switch Kernel",
execute: () =>
sessionContextDialogs.selectKernel(notebookPanel.context.sessionContext),
});
commandRegistry.addCommand(cmdIds.runAndAdvance, {
label: "Run and Advance",
execute: () => {
return NotebookActions.runAndAdvance(
notebookPanel.content,
notebookPanel.context.sessionContext
);
},
});
commandRegistry.addCommand(cmdIds.run, {
label: "Run",
execute: () => {
return NotebookActions.run(
notebookPanel.content,
notebookPanel.context.sessionContext
);
},
});
commandRegistry.addCommand(cmdIds.runAll, {
label: "Run all",
execute: () => {
return NotebookActions.runAll(
notebookPanel.content,
notebookPanel.context.sessionContext
);
},
});
commandRegistry.addCommand(cmdIds.deleteCells, {
label: "Delete",
execute: () => {
return NotebookActions.deleteCells(notebookPanel.content);
},
});
commandRegistry.addCommand(cmdIds.insertAbove, {
label: "Insert Above",
execute: () => {
return NotebookActions.insertAbove(notebookPanel.content);
},
});
commandRegistry.addCommand(cmdIds.insertBelow, {
label: "Insert Below",
execute: () => {
return NotebookActions.insertBelow(notebookPanel.content);
},
});
commandRegistry.addCommand(cmdIds.editMode, {
label: "Edit Mode",
execute: () => {
notebookPanel.content.mode = "edit";
},
});
commandRegistry.addCommand(cmdIds.commandMode, {
label: "Command Mode",
execute: () => {
notebookPanel.content.mode = "command";
},
});
commandRegistry.addCommand(cmdIds.selectBelow, {
label: "Select Below",
execute: () => NotebookActions.selectBelow(notebookPanel.content),
});
commandRegistry.addCommand(cmdIds.selectAbove, {
label: "Select Above",
execute: () => NotebookActions.selectAbove(notebookPanel.content),
});
commandRegistry.addCommand(cmdIds.extendAbove, {
label: "Extend Above",
execute: () => NotebookActions.extendSelectionAbove(notebookPanel.content),
});
commandRegistry.addCommand(cmdIds.extendTop, {
label: "Extend to Top",
execute: () =>
NotebookActions.extendSelectionAbove(notebookPanel.content, true),
});
commandRegistry.addCommand(cmdIds.extendBelow, {
label: "Extend Below",
execute: () => NotebookActions.extendSelectionBelow(notebookPanel.content),
});
commandRegistry.addCommand(cmdIds.extendBottom, {
label: "Extend to Bottom",
execute: () =>
NotebookActions.extendSelectionBelow(notebookPanel.content, true),
});
commandRegistry.addCommand(cmdIds.merge, {
label: "Merge Cells",
execute: () => NotebookActions.mergeCells(notebookPanel.content),
});
commandRegistry.addCommand(cmdIds.split, {
label: "Split Cell",
execute: () => NotebookActions.splitCell(notebookPanel.content),
});
commandRegistry.addCommand(cmdIds.undo, {
label: "Undo",
execute: () => NotebookActions.undo(notebookPanel.content),
});
commandRegistry.addCommand(cmdIds.redo, {
label: "Redo",
execute: () => NotebookActions.redo(notebookPanel.content),
});
commandRegistry.addCommand(cmdIds.toCode, {
label: 'Change to Code Cell Type',
execute: args => NotebookActions.changeCellType(notebookPanel.content, 'code')
});
function getCurrent(args: ReadonlyPartialJSONObject): NotebookPanel | null {
return tracker.currentWidget;
}
function isEnabled(): boolean {
return (
tracker.currentWidget !== null
);
}
commandRegistry.addCommand('run-selected-codecell', {
label: 'Run Cell',
execute: args => {
const current = getCurrent(args);
if (current) {
const { context, content } = current;
NotebookActions.run(content, context.sessionContext);
}
},
isEnabled,
});
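// Default key bindings; each entry is scoped by a CSS selector so it only
// fires in the matching notebook state (edit mode, command mode, completer).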
const bindings = [
{
selector: ".jp-Notebook.jp-mod-editMode .jp-mod-completer-enabled",
keys: ["Tab"],
command: cmdIds.invokeNotebook,
},
{
selector: `.jp-mod-completer-active`,
keys: ["Enter"],
command: cmdIds.selectNotebook,
},
{
selector: ".jp-Notebook",
keys: ["Ctrl Enter"],
command: cmdIds.run,
},
{
selector: ".jp-Notebook",
keys: ["Shift Enter"],
command: cmdIds.runAndAdvance,
},
{
selector: ".jp-Notebook",
keys: ["Accel F"],
command: cmdIds.startSearch,
},
{
selector: ".jp-Notebook",
keys: ["Accel G"],
command: cmdIds.findNext,
},
{
selector: ".jp-Notebook",
keys: ["Accel Shift G"],
command: cmdIds.findPrevious,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["I", "I"],
command: cmdIds.interrupt,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["0", "0"],
command: cmdIds.restart,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["Enter"],
command: cmdIds.editMode,
},
{
selector: ".jp-Notebook.jp-mod-editMode",
keys: ["Escape"],
command: cmdIds.commandMode,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["Shift M"],
command: cmdIds.merge,
},
{
selector: ".jp-Notebook.jp-mod-editMode",
keys: ["Ctrl Shift -"],
command: cmdIds.split,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["J"],
command: cmdIds.selectBelow,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["ArrowDown"],
command: cmdIds.selectBelow,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["A"],
command: cmdIds.insertAbove,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["B"],
command: cmdIds.insertBelow,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["K"],
command: cmdIds.selectAbove,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["ArrowUp"],
command: cmdIds.selectAbove,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["Shift K"],
command: cmdIds.extendAbove,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["Shift J"],
command: cmdIds.extendBelow,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["Z"],
command: cmdIds.undo,
},
{
selector: ".jp-Notebook.jp-mod-commandMode:focus",
keys: ["Y"],
command: cmdIds.redo,
},
];
bindings.forEach((binding) => commandRegistry.addKeyBinding(binding));
if (path) {
commandRegistry.addKeyBinding({
selector: ".jp-Notebook",
keys: ["Accel S"],
command: cmdIds.save,
});
}
};
export default NotebookCommands;
|
PypiClean
|
/ClueDojo-1.4.3-1.tar.gz/ClueDojo-1.4.3-1/src/cluedojo/static/dojo/cldr/nls/nb/gregorian.js
|
({"dateFormatItem-yM":"M yyyy","field-dayperiod":"AM/PM","dateFormatItem-yQ":"Q yyyy","field-minute":"minutt","eraNames":["f.Kr.","e.Kr."],"dateFormatItem-MMMEd":"E d. MMM","field-weekday":"ukedag","dateFormatItem-hms":"h.mm.ss a","dateFormatItem-yQQQ":"QQQ y","dateFormatItem-MMdd":"dd.MM","days-standAlone-wide":["søndag","mandag","tirsdag","onsdag","torsdag","fredag","lørdag"],"dateFormatItem-MMM":"LLL","months-standAlone-narrow":["J","F","M","A","M","J","J","A","S","O","N","D"],"field-era":"tidsalder","field-hour":"time","quarters-standAlone-abbr":["K1","K2","K3","K4"],"dateFormatItem-y":"y","timeFormat-full":"'kl'. HH.mm.ss zzzz","months-standAlone-abbr":["jan.","feb.","mars","apr.","mai","juni","juli","aug.","sep.","okt.","nov.","des."],"dateFormatItem-yMMM":"MMM y","days-standAlone-narrow":["S","M","T","O","T","F","L"],"eraAbbr":["f.Kr.","e.Kr."],"dateFormatItem-yyyyMMMM":"MMMM y","dateFormat-long":"d. MMMM y","timeFormat-medium":"HH.mm.ss","dateFormatItem-EEEd":"EEE d","field-zone":"sone","dateFormatItem-Hm":"HH.mm","dateFormatItem-yyMM":"MM.yy","dateFormat-medium":"d. MMM y","dateFormatItem-yyMMM":"MMM yy","dateFormatItem-yyQQQQ":"QQQQ yy","quarters-standAlone-wide":["1. kvartal","2. kvartal","3. kvartal","4. kvartal"],"dateFormatItem-yMMMM":"MMMM y","dateFormatItem-ms":"mm.ss","field-year":"år","quarters-standAlone-narrow":["1","2","3","4"],"dateFormatItem-HHmmss":"HH.mm.ss","field-week":"uke","months-standAlone-wide":["januar","februar","mars","april","mai","juni","juli","august","september","oktober","november","desember"],"dateFormatItem-MMMMEd":"E d. MMMM","dateFormatItem-MMMd":"d. MMM","dateFormatItem-yyQ":"Q yy","timeFormat-long":"HH.mm.ss z","months-format-abbr":["jan.","feb.","mars","apr.","mai","juni","juli","aug.","sep.","okt.","nov.","des."],"timeFormat-short":"HH.mm","field-month":"måned","dateFormatItem-MMMMd":"d. MMMM","quarters-format-abbr":["K1","K2","K3","K4"],"days-format-abbr":["søn.","man.","tir.","ons.","tor.","fre.","lør."],"pm":"PM","dateFormatItem-M":"L","days-format-narrow":["S","M","T","O","T","F","L"],"field-second":"sekund","field-day":"dag","dateFormatItem-MEd":"E d.M","months-format-narrow":["J","F","M","A","M","J","J","A","S","O","N","D"],"dateFormatItem-hm":"h.mm a","am":"AM","days-standAlone-abbr":["søn.","man.","tir.","ons.","tor.","fre.","lør."],"dateFormat-short":"dd.MM.yy","dateFormatItem-yMMMEd":"EEE d. MMM y","dateFormat-full":"EEEE d. MMMM y","dateFormatItem-Md":"d.M.","dateFormatItem-yMEd":"EEE d.M.yyyy","months-format-wide":["januar","februar","mars","april","mai","juni","juli","august","september","oktober","november","desember"],"dateFormatItem-d":"d.","quarters-format-wide":["1. kvartal","2. kvartal","3. kvartal","4. 
kvartal"],"days-format-wide":["søndag","mandag","tirsdag","onsdag","torsdag","fredag","lørdag"],"eraNarrow":["f.Kr.","e.Kr."],"dateTimeFormats-appendItem-Day-Of-Week":"{0} {1}","dateTimeFormat-medium":"{1} {0}","dateTimeFormats-appendItem-Second":"{0} ({2}: {1})","dateTimeFormats-appendItem-Era":"{0} {1}","dateTimeFormats-appendItem-Week":"{0} ({2}: {1})","quarters-format-narrow":["1","2","3","4"],"dateTimeFormat-long":"{1} {0}","dateTimeFormat-full":"{1} {0}","dateTimeFormats-appendItem-Day":"{0} ({2}: {1})","dateTimeFormats-appendItem-Year":"{0} {1}","dateTimeFormats-appendItem-Hour":"{0} ({2}: {1})","dateTimeFormats-appendItem-Quarter":"{0} ({2}: {1})","dateTimeFormats-appendItem-Month":"{0} ({2}: {1})","dateTimeFormats-appendItem-Minute":"{0} ({2}: {1})","dateTimeFormats-appendItem-Timezone":"{0} {1}","dateTimeFormat-short":"{1} {0}","dateFormatItem-Hms":"H:mm:ss"})
|
PypiClean
|
/vantage6-server-4.0.0a5.tar.gz/vantage6-server-4.0.0a5/vantage6/server/websockets.py
|
import logging
import jwt
import datetime as dt
from flask import request, session
from flask_jwt_extended import get_jwt_identity, verify_jwt_in_request
from flask_socketio import Namespace, emit, join_room, leave_room
from vantage6.common import logger_name
from vantage6.common.task_status import has_task_failed
from vantage6.server import db
from vantage6.server.model.authenticatable import Authenticatable
from vantage6.server.model.rule import Operation, Scope
from vantage6.server.model.base import DatabaseSessionManager
ALL_NODES_ROOM = 'all_nodes'
class DefaultSocketNamespace(Namespace):
"""
This is the default SocketIO namespace. It is used for all the long-running
socket communication between the server and the clients. The clients of the
socket connection are nodes and users.
When socket communication is received from one of the clients, the
functions in this class are called to execute the corresponding action.
"""
socketio = None
log = logging.getLogger(logger_name(__name__))
def on_connect(self) -> None:
"""
A new incoming connection request from a client.
New connections are authenticated using their JWT authorization token
which is obtained from the REST API. A session is created for each
connected client, and lives as long as the connection is active.
Each client is assigned to rooms based on their permissions.
Nodes that are connecting are also set to status 'online'.
Note
----
Note that reconnecting clients are treated the same as new clients.
"""
self.log.info(f'Client connected: "{request.sid}"')
# try to catch jwt authorization token.
try:
verify_jwt_in_request()
except jwt.exceptions.ExpiredSignatureError:
self.log.error("JWT has expired")
emit("expired_token", room=request.sid)
return
except Exception as e:
self.log.error("Couldn't connect client! No or Invalid JWT token?")
self.log.exception(e)
session.name = "not-sure-yet"
self.__join_room_and_notify(request.sid)
# FIXME: expired probably doesn't cover it ...
emit("expired_token", room=request.sid)
return
# get identity from token.
session.auth_id = get_jwt_identity()
auth = db.Authenticatable.get(session.auth_id)
auth.status = 'online'
auth.save()
# It appears to be necessary to use the root socketio instance
# otherwise events cannot be sent outside the current namespace.
# In this case, only events to '/tasks' can be emitted otherwise.
if auth.type == 'node':
self.socketio.emit('node-status-changed', namespace='/admin')
# define socket-session variables.
session.type = auth.type
session.name = auth.username if session.type == 'user' else auth.name
self.log.info(
f'Client identified as <{session.type}>: <{session.name}>'
)
# join appropriate rooms
session.rooms = []
if session.type == 'node':
self._add_node_to_rooms(auth)
self.__alert_node_status(online=True, node=auth)
elif session.type == 'user':
self._add_user_to_rooms(auth)
for room in session.rooms:
self.__join_room_and_notify(room)
# cleanup (e.g. database session)
self.__cleanup()
@staticmethod
def _add_node_to_rooms(node: Authenticatable) -> None:
"""
Connect node to appropriate websocket rooms
Parameters
----------
node: Authenticatable
Node that is to be added to the rooms
"""
# the node joins the room for all nodes and the rooms for its collaboration
session.rooms.append(ALL_NODES_ROOM)
session.rooms.append(f'collaboration_{node.collaboration_id}')
session.rooms.append(
f'collaboration_{node.collaboration_id}_organization_'
f'{node.organization_id}')
@staticmethod
def _add_user_to_rooms(user: Authenticatable) -> None:
"""
Connect user to appropriate websocket rooms
Parameters
----------
user: Authenticatable
User that is to be added to the rooms
"""
# check for which collab rooms the user has permission to enter
session.user = db.User.get(session.auth_id)
if session.user.can('event', Scope.GLOBAL, Operation.RECEIVE):
# user joins all collaboration rooms
collabs = db.Collaboration.get()
for collab in collabs:
session.rooms.append(f'collaboration_{collab.id}')
elif session.user.can(
'event', Scope.COLLABORATION, Operation.RECEIVE):
# user joins all collaboration rooms that their organization
# participates in
for collab in user.organization.collaborations:
session.rooms.append(f'collaboration_{collab.id}')
elif session.user.can('event', Scope.ORGANIZATION, Operation.RECEIVE):
# user joins collaboration subrooms that include only messages
# relevant to their own node
for collab in user.organization.collaborations:
session.rooms.append(
f'collaboration_{collab.id}_organization_'
f'{user.organization.id}'
)
def on_disconnect(self) -> None:
"""
Client that disconnects is removed from all rooms they were in.
If nodes disconnect, their status is also set to offline and users may
be alerted to that. Also, any information on the node (e.g.
configuration) is removed from the database.
"""
if not self.__is_identified_client():
self.log.debug('Client disconnected before identification')
return
for room in session.rooms:
self.__leave_room_and_notify(room)
auth = db.Authenticatable.get(session.auth_id)
auth.status = 'offline'
auth.save()
# It appears to be necessary to use the root socketio instance
# otherwise events cannot be sent outside the current namespace.
# In this case, only events to '/tasks' can be emitted otherwise.
if session.type == 'node':
self.log.warning('emitting to /admin')
self.socketio.emit('node-status-changed', namespace='/admin')
self.__alert_node_status(online=False, node=auth)
# delete any data on the node stored on the server (e.g.
# configuration data)
self.__clean_node_data(auth)
self.log.info(f'{session.name} disconnected')
# cleanup (e.g. database session)
self.__cleanup()
def on_message(self, message: str) -> None:
"""
On receiving a message from a client, log it.
Parameters
----------
message: str
Message that is going to be displayed in the server log
"""
self.log.info('received message: ' + message)
def on_error(self, e: str) -> None:
"""
On receiving an error from a client, log it.
Parameters
----------
e: str
Error message that is being displayed in the server log
"""
self.log.error(e)
def on_algorithm_status_change(self, data: dict) -> None:
"""
An algorithm container has changed its status. This status change may
be that the algorithm has finished, crashed, etc. Here we notify the
collaboration of the change.
Parameters
----------
data: Dict
Dictionary containing parameters on the updated algorithm status.
It should look as follows:
.. code:: python
{
# node_id where algorithm container was running
"node_id": 1,
# new status of algorithm container
"status": "active",
# result_id for which the algorithm was running
"result_id": 1,
# collaboration_id for which the algorithm was running
"collaboration_id": 1
}
"""
# only allow nodes to send this event
if session.type != 'node':
self.log.warning('Only nodes can send algorithm status changes! '
f'{session.type} {session.auth_id} is not allowed.')
return
run_id = data.get('run_id')
task_id = data.get('task_id')
collaboration_id = data.get('collaboration_id')
status = data.get('status')
node_id = data.get('node_id')
organization_id = data.get('organization_id')
parent_id = data.get('parent_id')
job_id = db.Run.get(run_id).task.job_id
# log event in server logs
msg = (f"A container for job_id={job_id} and run_id={run_id} "
f"in collaboration_id={collaboration_id} on node_id={node_id}")
if has_task_failed(status):
self.log.critical(f"{msg} exited with status={status}.")
else:
self.log.info(f"{msg} has a new status={status}.")
# emit task status change to other nodes/users in the collaboration
emit(
"algorithm_status_change", {
"status": status,
"run_id": run_id,
"task_id": task_id,
"job_id": job_id,
"collaboration_id": collaboration_id,
"node_id": node_id,
"organization_id": organization_id,
"parent_id": parent_id,
}, room=f"collaboration_{collaboration_id}"
)
# cleanup (e.g. database session)
self.__cleanup()
def on_node_info_update(self, node_config: dict) -> None:
"""
A node sends information about its configuration and other properties.
Store this in the database for the duration of the node's session.
Parameters
----------
node_config: dict
Dictionary containing the node's configuration.
"""
# only allow nodes to send this event
if session.type != 'node':
self.log.warning('Only nodes can send node configuration updates! '
f'{session.type} {session.auth_id} is not allowed.')
return
node = db.Node.get(session.auth_id)
# delete any old data that may be present (if cleanup on disconnect
# failed)
self.__clean_node_data(node=node)
# store (new) node config
to_store = []
for k, v in node_config.items():
# add single item or list of items
if isinstance(v, list):
to_store.extend([
db.NodeConfig(node_id=node.id, key=k, value=i)
for i in v
])
elif isinstance(v, dict):
to_store.extend([
db.NodeConfig(node_id=node.id, key=inner_key,
value=inner_val)
for inner_key, inner_val in v.items()
])
else:
to_store.append(db.NodeConfig(node_id=node.id, key=k, value=v))
node.config = to_store
node.save()
# cleanup (e.g. database session)
self.__cleanup()
def on_ping(self) -> None:
"""
A client sends a ping to the server. The server detects who sent the
ping and sets them as online.
"""
auth = db.Authenticatable.get(session.auth_id)
auth.status = 'online'
auth.last_seen = dt.datetime.utcnow()
auth.save()
def __join_room_and_notify(self, room: str) -> None:
"""
Joins room and notify other clients in this room.
Parameters
----------
room : str
name of the room the client want to join
"""
join_room(room)
msg = f'{session.type.title()} <{session.name}> joined room <{room}>'
self.log.info(msg)
self.__notify_room_join_or_leave(room, msg)
def __leave_room_and_notify(self, room: str) -> None:
"""
Leave room and notify other clients in this room.
Parameters
----------
room : str
name of the room the client is leaving
"""
leave_room(room)
msg = f'{session.name} left room {room}'
self.log.info(msg)
self.__notify_room_join_or_leave(room, msg)
@staticmethod
def __notify_room_join_or_leave(room: str, msg: str) -> None:
"""
Notify a room that one of its clients is joining or leaving
"""
# share message with other users and nodes, except for all_nodes. That
# room must never be notified for join or leave events since it
# contains nodes from different collaborations that shouldn't
# know about each other.
if room != ALL_NODES_ROOM:
emit('message', msg, room=room)
def __alert_node_status(self, online: bool, node: Authenticatable) -> None:
"""
Send status update of nodes when they change on/offline status
Parameters
----------
online: bool
Whether node is coming online or not
node: Authenticatable
The node SQLALchemy object
"""
event = 'node-online' if online else 'node-offline'
for room in session.rooms:
self.socketio.emit(
event,
{
'id': node.id, 'name': node.name,
'org_id': node.organization.id
},
namespace='/tasks',
room=room
)
@staticmethod
def __is_identified_client() -> bool:
"""
Check if client has been identified as an authenticated user or node
Returns
-------
bool
True if client has been identified, False otherwise
"""
return hasattr(session, 'auth_id')
@staticmethod
def __clean_node_data(node: db.Node) -> None:
"""
Remove any information from the database that the node shared about
e.g. its configuration
Parameters
----------
node: db.Node
The node SQLALchemy object
"""
for conf in node.config:
conf.delete()
@staticmethod
def __cleanup() -> None:
""" Cleanup database connections """
DatabaseSessionManager.clear_session()
|
PypiClean
|
/hugh_game-0.1-py3-none-any.whl/pgzero/music.py
|
from pygame.mixer import music as _music
from .loaders import ResourceLoader
from . import constants
__all__ = [
'rewind', 'stop', 'fadeout', 'set_volume', 'get_volume', 'get_pos',
'set_pos', 'play', 'queue', 'pause', 'unpause',
]
_music.set_endevent(constants.MUSIC_END)
class _MusicLoader(ResourceLoader):
"""Pygame's music API acts as a singleton with one 'current' track.
No objects are returned that represent different tracks, so this loader
can't return anything useful. But it can perform all the path name
validations and return the validated path, so that's what we do.
This loader should not be exposed to the user.
"""
EXTNS = ['mp3', 'ogg', 'oga']
TYPE = 'music'
def _load(self, path):
return path
_loader = _MusicLoader('music')
# State of whether we are paused or not
_paused = False
def _play(name, loop):
global _paused
path = _loader.load(name)
_music.load(path)
_music.play(loop)
_paused = False
def play(name):
"""Play a music file from the music/ directory.
The music will loop when it finishes playing.
"""
_play(name, -1)
def play_once(name):
"""Play a music file from the music/ directory."""
_play(name, 0)
def queue(name):
"""Queue a music file to follow the current track.
This will load a music file and queue it. A queued music file will begin as
soon as the current music naturally ends. If the current music is ever
stopped or changed, the queued song will be lost.
"""
path = _loader.load(name)
_music.queue(path)
def is_playing(name):
"""Return True if the music is playing and not paused.
Note: the ``name`` argument is currently ignored.
"""
return _music.get_busy() and not _paused
def pause():
"""Temporarily stop playback of the music stream.
Call `unpause()` to resume.
"""
global _paused
_music.pause()
_paused = True
def unpause():
"""Resume playback of the music stream after it has been paused."""
global _paused
_music.unpause()
_paused = False
def fadeout(seconds):
"""Fade out and eventually stop the music playback.
:param seconds: The duration in seconds over which the sound will be faded
out. For example, to fade out over half a second, call
``music.fadeout(0.5)``.
"""
_music.fadeout(int(seconds * 1000))
rewind = _music.rewind
stop = _music.stop
get_volume = _music.get_volume
set_volume = _music.set_volume
get_pos = _music.get_pos
set_pos = _music.set_pos
|
PypiClean
|
/cobertura_parser-0.3.1.tar.gz/cobertura_parser-0.3.1/cobertura_parser/ext/jacoco.py
|
from lxml import etree as ET
import re
import os.path
def find_lines(j_package, filename):
"""Return all <line> elements for a given source file in a package."""
lines = list()
for sourcefile in j_package.iterfind("sourcefile"):
if (
sourcefile.attrib.get("name").split(".")[0]
== os.path.basename(filename).split(".")[0]
):
lines = lines + sourcefile.findall("line")
return lines
def line_is_after(jm, start_line):
return int(jm.attrib.get("line", 0)) > start_line
def method_lines(jmethod, jmethods, jlines):
"""Filter the lines from the given set of jlines that apply to the given jmethod."""
start_line = int(jmethod.attrib.get("line", 0))
larger = list(
int(jm.attrib.get("line", 0))
for jm in jmethods
if line_is_after(jm, start_line)
)
end_line = min(larger) if len(larger) else 99999999
for jline in jlines:
if start_line <= int(jline.attrib["nr"]) < end_line:
yield jline
def convert_lines(j_lines, into):
"""Convert the JaCoCo <line> elements into Cobertura <line> elements, add them under the given element."""
c_lines = ET.SubElement(into, "lines")
for jline in j_lines:
mb = int(jline.attrib["mb"])
cb = int(jline.attrib["cb"])
ci = int(jline.attrib["ci"])
cline = ET.SubElement(c_lines, "line")
cline.set("number", jline.attrib["nr"])
cline.set(
"hits", "1" if ci > 0 else "0"
) # Probably not true but no way to know from JaCoCo XML file
if mb + cb > 0:
percentage = str(int(100 * (float(cb) / (float(cb) + float(mb))))) + "%"
cline.set("branch", "true")
cline.set(
"condition-coverage",
percentage + " (" + str(cb) + "/" + str(cb + mb) + ")",
)
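# e.g. cb=1, mb=1 produces condition-coverage="50% (1/2)"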
cond = ET.SubElement(ET.SubElement(cline, "conditions"), "condition")
cond.set("number", "0")
cond.set("type", "jump")
cond.set("coverage", percentage)
else:
cline.set("branch", "false")
def guess_filename(path_to_class, src_file_name):
if src_file_name.endswith(".kt"):
suffix = ".kt"
else:
suffix = ".java"
m = re.match("([^$]*)", path_to_class)
return (m.group(1) if m else path_to_class) + suffix
def add_counters(source, target):
target.set("line-rate", counter(source, "LINE"))
target.set("branch-rate", counter(source, "BRANCH"))
target.set("complexity", counter(source, "COMPLEXITY", sum))
def fraction(covered, missed):
total = covered + missed
# avoid ZeroDivisionError when a counter reports no lines/branches at all
return covered / total if total else 0.0
def sum(covered, missed):
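# intentionally shadows the builtin sum; passed to counter() as the
# aggregation operation for the complexity metric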
return covered + missed
def counter(source, type, operation=fraction):
cs = source.iterfind("counter")
c = next((ct for ct in cs if ct.attrib.get("type") == type), None)
if c is not None:
covered = float(c.attrib["covered"])
missed = float(c.attrib["missed"])
return str(operation(covered, missed))
else:
return "0.0"
def convert_method(j_method, j_lines):
c_method = ET.Element("method")
c_method.set("name", j_method.attrib["name"])
c_method.set("signature", j_method.attrib["desc"])
add_counters(j_method, c_method)
convert_lines(j_lines, c_method)
return c_method
def convert_class(j_class, j_package):
c_class = ET.Element("class")
c_class.set("name", j_class.attrib["name"].replace("/", "."))
# source file name can be None
try:
source_file_name = j_class.attrib["sourcefilename"]
except KeyError:
source_file_name = ""
c_class.set(
"filename",
guess_filename(j_class.attrib["name"], source_file_name),
)
all_j_lines = list(find_lines(j_package, c_class.attrib["filename"]))
# more than 8000 lines may cause memory issues
if len(all_j_lines) > 8000:
return c_class
c_methods = ET.SubElement(c_class, "methods")
all_j_methods = list(j_class.iterfind("method"))
str_list = []
for j_method in all_j_methods:
j_method_lines = method_lines(j_method, all_j_methods, all_j_lines)
each_node = convert_method(j_method, j_method_lines)
str_list.append(ET.tostring(each_node, encoding="unicode"))
for each in str_list:
c_methods.append(ET.fromstring(each))
add_counters(j_class, c_class)
convert_lines(all_j_lines, c_class)
return c_class
def convert_package(j_package):
c_package = ET.Element("package")
c_package.attrib["name"] = j_package.attrib["name"].replace("/", ".")
c_classes = ET.SubElement(c_package, "classes")
str_list = []
for j_class in j_package.iterfind("class"):
each_node = convert_class(j_class, j_package)
str_list.append(ET.tostring(each_node, encoding="unicode"))
for each in str_list:
c_classes.append(ET.fromstring(each))
add_counters(j_package, c_package)
return c_package
def convert_root(source, target):
try:
ts = int(source.find("sessioninfo").attrib["start"]) / 1000
except AttributeError:
ts = -1
target.set("timestamp", str(ts))
packages = ET.SubElement(target, "packages")
str_list = []
for package in source.iterfind("package"):
each_node = convert_package(package)
str_list.append(ET.tostring(each_node, encoding="unicode"))
for each in str_list:
packages.append(ET.fromstring(each))
add_counters(source, target)
def jacoco2cobertura(jacoco_string) -> str:
root = ET.parse(jacoco_string).getroot()
into = ET.Element("coverage")
convert_root(root, into)
output = f'<?xml version="1.0" ?>{ET.tostring(into, encoding="unicode")}'
return output
# mem leak in lxml
# https://stackoverflow.com/a/49139904/10641498
# https://www.reddit.com/r/Python/comments/j0gl8t/psa_pythonlxml_memory_leaks_and_a_solution/
def destroy_tree(tree):
root = tree
node_tracker = {root: [0, None]}
for node in root.iterdescendants():
parent = node.getparent()
node_tracker[node] = [node_tracker[parent][0] + 1, parent]
node_tracker = sorted(
[(depth, parent, child) for child, (depth, parent) in node_tracker.items()],
key=lambda x: x[0],
reverse=True,
)
for _, parent, child in node_tracker:
if parent is None:
break
parent.remove(child)
del tree
|
PypiClean
|
/basis_set_exchange-0.9.1.tar.gz/basis_set_exchange-0.9.1/basis_set_exchange/writers/demon2k.py
|
from .. import lut, manip, sort, misc, printing
def write_demon2k(basis):
'''Converts a basis set to deMon2K format
'''
if 'gto_spherical' in basis['function_types']:
s = '# This basis set uses spherical components\n\n'
else:
s = '# This basis set uses cartesian components\n\n'
basis = manip.uncontract_spdf(basis, 0, True)
basis = manip.uncontract_general(basis, False)
basis = sort.sort_basis(basis, False)
# Elements for which we have electron basis
electron_elements = [k for k, v in basis['elements'].items() if 'electron_shells' in v]
# Elements for which we have ECP
ecp_elements = [k for k, v in basis['elements'].items() if 'ecp_potentials' in v]
# Electron Basis
if electron_elements:
for z in electron_elements:
data = basis['elements'][z]
sym = lut.element_sym_from_Z(z, True)
elname = lut.element_name_from_Z(z).upper()
cont_string = misc.contraction_string(data)
# Need the start of electron shells if there are ECPs
ecp_electrons = data.get('ecp_electrons', 0)
shells_start = lut.electron_shells_start(ecp_electrons)
shells_start = list(shells_start)
s += 'O-{} {} ({})\n'.format(elname, sym.upper(), basis['name'])
s += '# {}\n'.format(cont_string)
nshells = len(data['electron_shells'])
s += ' {}\n'.format(nshells)
for shell in data['electron_shells']:
exponents = shell['exponents']
coefficients = shell['coefficients']
ncol = len(coefficients) + 1
nprim = len(exponents)
# We removed spdf already
assert len(shell['angular_momentum']) == 1
am = shell['angular_momentum'][0]
# shells_start has starting principal quantum numbers for all AM
pqn = shells_start[am]
shells_start[am] += 1
s += ' {} {} {}\n'.format(pqn, am, nprim)
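# decimal-point column positions used by write_matrix (8, 31, 54, ...)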
point_places = [8 * i + 15 * (i - 1) for i in range(1, ncol + 1)]
s += printing.write_matrix([exponents, *coefficients], point_places, convert_exp=False)
# Write out ECP
if ecp_elements:
s += '\n\nECP\n'
for z in ecp_elements:
data = basis['elements'][z]
sym = lut.element_sym_from_Z(z, normalize=True)
max_ecp_am = max([x['angular_momentum'][0] for x in data['ecp_potentials']])
# Sort lowest->highest, then put the highest at the beginning
ecp_list = sorted(data['ecp_potentials'], key=lambda x: x['angular_momentum'])
ecp_list.insert(0, ecp_list.pop())
s += '{} nelec {}\n'.format(sym, data['ecp_electrons'])
for pot in ecp_list:
rexponents = pot['r_exponents']
gexponents = pot['gaussian_exponents']
coefficients = pot['coefficients']
am = pot['angular_momentum']
amchar = lut.amint_to_char(am).upper()
if am[0] == max_ecp_am:
s += '{} ul\n'.format(sym)
else:
s += '{} {}\n'.format(sym, amchar)
point_places = [0, 9, 32]
s += printing.write_matrix([rexponents, gexponents, *coefficients], point_places, convert_exp=False)
s += 'END\n'
return s
|
PypiClean
|
/edu-yh-4.1.5.tar.gz/edu-yh-4.1.5/thonny/plugins/help/packages.rst
|
Installing 3rd party packages
==============================
Thonny has two options for installing 3rd party libraries.
With pip-GUI
-------------
From "Tools" menu select "Manage packages..." and follow the instructions.
With pip on command line
------------------------
#. From "Tools" menu select "Open system shell...". You should get a new terminal window stating the correct name of *pip* command (usually ``pip`` or ``pip3``). In the following I've assumed the command name is ``pip``.
#. Enter ``pip install <package name>`` (eg. ``pip install pygame``) and press ENTER. You should see *pip* downloading and installing the package and printing a success message.
#. Close the terminal (optional).
#. Return to Thonny.
#. Reset the interpreter by selecting "Stop/Restart backend" from the "Run" menu (this is required only the first time you do a pip install).
#. Start using the package.
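For example, after installing ``pygame`` this way you can check from Thonny's
shell that the package is importable (a minimal check; the version string you
see will differ):
.. code-block:: python
>>> import pygame
>>> pygame.version.ver  # shows the installed version string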
.. NOTE::
The "Open system shell..." menu is not available when running from the Flatpak on Linux.
Flatpak applications are sandboxed to protect the user's host system and data.
Allowing Thonny to open a shell on the host system would circumvent these protections.
To install Python packages from the command-line, please open your system's terminal application directly.
Using scientific Python packages
================================
The Python distribution coming with Thonny doesn't contain scientific programming libraries
(eg. `NumPy <http://numpy.org/>`_ and `Matplotlib <http://matplotlib.org/>`_).
Recent versions of most popular scientific Python packages (eg. numpy, pandas and
matplotlib) have wheels available for popular platforms, so you can most likely install
them with pip. In case you have trouble, you could try using Thonny with a separate
Python distribution meant for scientific computing
(eg. `Anaconda <https://www.anaconda.com>`_
or `Pyzo <http://www.pyzo.org/>`_).
Example: Using Anaconda
------------------------------------
Go to https://www.anaconda.com/products/individual and download a suitable binary distribution for
your platform. Most likely you want the graphical installer and the 64-bit version (you
may need the 32-bit version if you have a very old system).
Install it and find out where it puts the Python executable (*pythonw.exe* on Windows and
*python3* or *python* on Linux and Mac).
In Thonny open "Tools" menu and select "Options...". In the options dialog open "Interpreter"
tab and show the location of Anaconda's Python executable.
After you have done this, the next time you run your program it will be run through Anaconda's
Python and all the libraries installed there are available.
|
PypiClean
|
/PyEngine-2D-1.3.0.tar.gz/PyEngine-2D-1.3.0/pyengine/Systems/UISystem.py
|
import pygame
from pyengine.Widgets import Entry, Button
from pyengine.Widgets.Widget import Widget
from pyengine import World
from pyengine.Utils import loggers, Colors
__all__ = ["UISystem"]
class UISystem:
def __init__(self, world: World):
self.world = world
self.widgets = pygame.sprite.Group()
self.focus = None
def get_widget(self, identity: int) -> Widget:
for i in self.widgets:
if i.identity == identity:
return i
loggers.get_logger("PyEngine").warning("Try to get widget with id "+str(identity)+" but it doesn't exist")
def add_widget(self, widget: Widget) -> Widget:
if not isinstance(widget, Widget):
raise TypeError("Argument is not type of "+str(Widget)+" but "+str(type(widget))+".")
if len(self.widgets):
widget.identity = self.widgets.sprites()[-1].identity + 1
else:
widget.identity = 0
self.widgets.add(widget)
widget.system = self
return widget
def has_widget(self, widget: Widget) -> bool:
return widget in self.widgets
def remove_widget(self, widget: Widget) -> None:
if widget in self.widgets:
self.widgets.remove(widget)
else:
raise ValueError("Widget has not in UISystem")
def mousepress(self, evt):
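# climb to the outermost parent so focus always lands on the top-level
# widget; the previously focused widget loses focus if it was not clicked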
focustemp = None
for i in self.widgets.sprites():
if i.mousepress(evt):
while i.parent is not None:
i = i.parent
focustemp = i
i.focusin()
else:
if self.focus == i:
self.focus.focusout()
self.focus = focustemp
def mousemotion(self, evt):
for i in self.widgets.sprites():
if isinstance(i, Button):
if self.focus == i:
i.mousemotion(evt)
def keypress(self, evt):
for i in self.widgets.sprites():
if isinstance(i, Entry):
if self.focus == i:
i.keypress(evt)
def keyup(self, evt):
for i in self.widgets.sprites():
if isinstance(i, Entry):
if self.focus == i:
i.keyup(evt)
def update(self):
for i in self.widgets.sprites():
if isinstance(i, Entry):
if self.focus == i:
i.update()
def show(self, screen):
for i in self.widgets.sprites():
if i.isshow:
screen.blit(i.image, i.rect)
def show_debug(self, screen):
for i in self.widgets:
if i.isshow:
render = self.world.window.debugfont.render("ID : "+str(i.identity), 1, Colors.BLUE.value.get())
screen.blit(render, (i.rect.x + i.rect.width / 2 - render.get_width()/2, i.rect.y - 20))
|
PypiClean
|
/codemirror-5.40.0/mode/asn.1/asn.1.js
|
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
CodeMirror.defineMode("asn.1", function(config, parserConfig) {
var indentUnit = config.indentUnit,
keywords = parserConfig.keywords || {},
cmipVerbs = parserConfig.cmipVerbs || {},
compareTypes = parserConfig.compareTypes || {},
status = parserConfig.status || {},
tags = parserConfig.tags || {},
storage = parserConfig.storage || {},
modifier = parserConfig.modifier || {},
accessTypes = parserConfig.accessTypes|| {},
multiLineStrings = parserConfig.multiLineStrings,
indentStatements = parserConfig.indentStatements !== false;
var isOperatorChar = /[\|\^]/;
var curPunc;
function tokenBase(stream, state) {
var ch = stream.next();
if (ch == '"' || ch == "'") {
state.tokenize = tokenString(ch);
return state.tokenize(stream, state);
}
if (/[\[\]\(\){}:=,;]/.test(ch)) {
curPunc = ch;
return "punctuation";
}
if (ch == "-"){
if (stream.eat("-")) {
stream.skipToEnd();
return "comment";
}
}
if (/\d/.test(ch)) {
stream.eatWhile(/[\w\.]/);
return "number";
}
if (isOperatorChar.test(ch)) {
stream.eatWhile(isOperatorChar);
return "operator";
}
stream.eatWhile(/[\w\-]/);
var cur = stream.current();
if (keywords.propertyIsEnumerable(cur)) return "keyword";
if (cmipVerbs.propertyIsEnumerable(cur)) return "variable cmipVerbs";
if (compareTypes.propertyIsEnumerable(cur)) return "atom compareTypes";
if (status.propertyIsEnumerable(cur)) return "comment status";
if (tags.propertyIsEnumerable(cur)) return "variable-3 tags";
if (storage.propertyIsEnumerable(cur)) return "builtin storage";
if (modifier.propertyIsEnumerable(cur)) return "string-2 modifier";
if (accessTypes.propertyIsEnumerable(cur)) return "atom accessTypes";
return "variable";
}
function tokenString(quote) {
return function(stream, state) {
var escaped = false, next, end = false;
while ((next = stream.next()) != null) {
if (next == quote && !escaped){
var afterNext = stream.peek();
// check whether the character after the closing quote is a radix suffix, like the B in '10100010'B
if (afterNext){
afterNext = afterNext.toLowerCase();
if(afterNext == "b" || afterNext == "h" || afterNext == "o")
stream.next();
}
end = true; break;
}
escaped = !escaped && next == "\\";
}
if (end || !(escaped || multiLineStrings))
state.tokenize = null;
return "string";
};
}
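// Context records the indentation, column and type of each enclosing
// brace/bracket/paren or statement scope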
function Context(indented, column, type, align, prev) {
this.indented = indented;
this.column = column;
this.type = type;
this.align = align;
this.prev = prev;
}
function pushContext(state, col, type) {
var indent = state.indented;
if (state.context && state.context.type == "statement")
indent = state.context.indented;
return state.context = new Context(indent, col, type, null, state.context);
}
function popContext(state) {
var t = state.context.type;
if (t == ")" || t == "]" || t == "}")
state.indented = state.context.indented;
return state.context = state.context.prev;
}
//Interface
return {
startState: function(basecolumn) {
return {
tokenize: null,
context: new Context((basecolumn || 0) - indentUnit, 0, "top", false),
indented: 0,
startOfLine: true
};
},
token: function(stream, state) {
var ctx = state.context;
if (stream.sol()) {
if (ctx.align == null) ctx.align = false;
state.indented = stream.indentation();
state.startOfLine = true;
}
if (stream.eatSpace()) return null;
curPunc = null;
var style = (state.tokenize || tokenBase)(stream, state);
if (style == "comment") return style;
if (ctx.align == null) ctx.align = true;
if ((curPunc == ";" || curPunc == ":" || curPunc == ",")
&& ctx.type == "statement"){
popContext(state);
}
else if (curPunc == "{") pushContext(state, stream.column(), "}");
else if (curPunc == "[") pushContext(state, stream.column(), "]");
else if (curPunc == "(") pushContext(state, stream.column(), ")");
else if (curPunc == "}") {
while (ctx.type == "statement") ctx = popContext(state);
if (ctx.type == "}") ctx = popContext(state);
while (ctx.type == "statement") ctx = popContext(state);
}
else if (curPunc == ctx.type) popContext(state);
else if (indentStatements && (((ctx.type == "}" || ctx.type == "top")
&& curPunc != ';') || (ctx.type == "statement"
&& curPunc == "newstatement")))
pushContext(state, stream.column(), "statement");
state.startOfLine = false;
return style;
},
electricChars: "{}",
lineComment: "--",
fold: "brace"
};
});
function words(str) {
var obj = {}, words = str.split(" ");
for (var i = 0; i < words.length; ++i) obj[words[i]] = true;
return obj;
}
CodeMirror.defineMIME("text/x-ttcn-asn", {
name: "asn.1",
keywords: words("DEFINITIONS OBJECTS IF DERIVED INFORMATION ACTION" +
" REPLY ANY NAMED CHARACTERIZED BEHAVIOUR REGISTERED" +
" WITH AS IDENTIFIED CONSTRAINED BY PRESENT BEGIN" +
" IMPORTS FROM UNITS SYNTAX MIN-ACCESS MAX-ACCESS" +
" MINACCESS MAXACCESS REVISION STATUS DESCRIPTION" +
" SEQUENCE SET COMPONENTS OF CHOICE DistinguishedName" +
" ENUMERATED SIZE MODULE END INDEX AUGMENTS EXTENSIBILITY" +
" IMPLIED EXPORTS"),
cmipVerbs: words("ACTIONS ADD GET NOTIFICATIONS REPLACE REMOVE"),
compareTypes: words("OPTIONAL DEFAULT MANAGED MODULE-TYPE MODULE_IDENTITY" +
" MODULE-COMPLIANCE OBJECT-TYPE OBJECT-IDENTITY" +
" OBJECT-COMPLIANCE MODE CONFIRMED CONDITIONAL" +
" SUBORDINATE SUPERIOR CLASS TRUE FALSE NULL" +
" TEXTUAL-CONVENTION"),
status: words("current deprecated mandatory obsolete"),
tags: words("APPLICATION AUTOMATIC EXPLICIT IMPLICIT PRIVATE TAGS" +
" UNIVERSAL"),
storage: words("BOOLEAN INTEGER OBJECT IDENTIFIER BIT OCTET STRING" +
" UTCTime InterfaceIndex IANAifType CMIP-Attribute" +
" REAL PACKAGE PACKAGES IpAddress PhysAddress" +
" NetworkAddress BITS BMPString TimeStamp TimeTicks" +
" TruthValue RowStatus DisplayString GeneralString" +
" GraphicString IA5String NumericString" +
" PrintableString SnmpAdminAtring TeletexString" +
" UTF8String VideotexString VisibleString StringStore" +
" ISO646String T61String UniversalString Unsigned32" +
" Integer32 Gauge Gauge32 Counter Counter32 Counter64"),
modifier: words("ATTRIBUTE ATTRIBUTES MANDATORY-GROUP MANDATORY-GROUPS" +
" GROUP GROUPS ELEMENTS EQUALITY ORDERING SUBSTRINGS" +
" DEFINED"),
accessTypes: words("not-accessible accessible-for-notify read-only" +
" read-create read-write"),
multiLineStrings: true
});
});
|
PypiClean
|
/timemory-3.3.0rc2.tar.gz/timemory-3.3.0rc2/external/pybind11/docs/advanced/pycpp/numpy.rst
|
.. _numpy:
NumPy
#####
Buffer protocol
===============
Python supports an extremely general and convenient approach for exchanging
data between plugin libraries. Types can expose a buffer view [#f2]_, which
provides fast direct access to the raw internal data representation. Suppose we
want to bind the following simplistic Matrix class:
.. code-block:: cpp
class Matrix {
public:
Matrix(size_t rows, size_t cols) : m_rows(rows), m_cols(cols) {
m_data = new float[rows*cols];
}
float *data() { return m_data; }
size_t rows() const { return m_rows; }
size_t cols() const { return m_cols; }
private:
size_t m_rows, m_cols;
float *m_data;
};
The following binding code exposes the ``Matrix`` contents as a buffer object,
making it possible to cast Matrices into NumPy arrays. It is even possible to
completely avoid copy operations with Python expressions like
``np.array(matrix_instance, copy = False)``.
.. code-block:: cpp
py::class_<Matrix>(m, "Matrix", py::buffer_protocol())
.def_buffer([](Matrix &m) -> py::buffer_info {
return py::buffer_info(
m.data(), /* Pointer to buffer */
sizeof(float), /* Size of one scalar */
py::format_descriptor<float>::format(), /* Python struct-style format descriptor */
2, /* Number of dimensions */
{ m.rows(), m.cols() }, /* Buffer dimensions */
{ sizeof(float) * m.cols(), /* Strides (in bytes) for each index */
sizeof(float) }
);
});
Supporting the buffer protocol in a new type involves specifying the special
``py::buffer_protocol()`` tag in the ``py::class_`` constructor and calling the
``def_buffer()`` method with a lambda function that creates a
``py::buffer_info`` description record on demand describing a given matrix
instance. The contents of ``py::buffer_info`` mirror the Python buffer protocol
specification.
.. code-block:: cpp
struct buffer_info {
void *ptr;
ssize_t itemsize;
std::string format;
ssize_t ndim;
std::vector<ssize_t> shape;
std::vector<ssize_t> strides;
};
To create a C++ function that can take a Python buffer object as an argument,
simply use the type ``py::buffer`` as one of its arguments. Buffers can exist
in a great variety of configurations, hence some safety checks are usually
necessary in the function body. Below, you can see a basic example of how to
define a custom constructor for the Eigen double precision matrix
(``Eigen::MatrixXd``) type, which supports initialization from compatible
buffer objects (e.g. a NumPy matrix).
.. code-block:: cpp
/* Bind MatrixXd (or some other Eigen type) to Python */
typedef Eigen::MatrixXd Matrix;
typedef Matrix::Scalar Scalar;
constexpr bool rowMajor = Matrix::Flags & Eigen::RowMajorBit;
py::class_<Matrix>(m, "Matrix", py::buffer_protocol())
.def("__init__", [](Matrix &m, py::buffer b) {
typedef Eigen::Stride<Eigen::Dynamic, Eigen::Dynamic> Strides;
/* Request a buffer descriptor from Python */
py::buffer_info info = b.request();
/* Some sanity checks ... */
if (info.format != py::format_descriptor<Scalar>::format())
throw std::runtime_error("Incompatible format: expected a double array!");
if (info.ndim != 2)
throw std::runtime_error("Incompatible buffer dimension!");
auto strides = Strides(
info.strides[rowMajor ? 0 : 1] / (py::ssize_t)sizeof(Scalar),
info.strides[rowMajor ? 1 : 0] / (py::ssize_t)sizeof(Scalar));
auto map = Eigen::Map<Matrix, 0, Strides>(
static_cast<Scalar *>(info.ptr), info.shape[0], info.shape[1], strides);
new (&m) Matrix(map);
});
For reference, the ``def_buffer()`` call for this Eigen data type should look
as follows:
.. code-block:: cpp
.def_buffer([](Matrix &m) -> py::buffer_info {
return py::buffer_info(
m.data(), /* Pointer to buffer */
sizeof(Scalar), /* Size of one scalar */
py::format_descriptor<Scalar>::format(), /* Python struct-style format descriptor */
2, /* Number of dimensions */
{ m.rows(), m.cols() }, /* Buffer dimensions */
{ sizeof(Scalar) * (rowMajor ? m.cols() : 1),
sizeof(Scalar) * (rowMajor ? 1 : m.rows()) }
/* Strides (in bytes) for each index */
);
})
For a much easier approach of binding Eigen types (although with some
limitations), refer to the section on :doc:`/advanced/cast/eigen`.
.. seealso::
The file :file:`tests/test_buffers.cpp` contains a complete example
that demonstrates using the buffer protocol with pybind11 in more detail.
.. [#f2] http://docs.python.org/3/c-api/buffer.html
Arrays
======
By exchanging ``py::buffer`` with ``py::array`` in the above snippet, we can
restrict the function so that it only accepts NumPy arrays (rather than any
type of Python object satisfying the buffer protocol).
In many situations, we want to define a function which only accepts a NumPy
array of a certain data type. This is possible via the ``py::array_t<T>``
template. For instance, the following function requires the argument to be a
NumPy array containing double precision values.
.. code-block:: cpp
void f(py::array_t<double> array);
When it is invoked with a different type (e.g. an integer or a list of
integers), the binding code will attempt to cast the input into a NumPy array
of the requested type. Note that this feature requires the
:file:`pybind11/numpy.h` header to be included.
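If this implicit conversion is undesirable for a particular binding, it can be
disabled on a per-argument basis. A minimal sketch:

.. code-block:: cpp

    // Only accept arguments that already are NumPy double arrays
    m.def("f", &f, py::arg("array").noconvert());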
Data in NumPy arrays is not guaranteed to be packed in a dense manner;
furthermore, entries can be separated by arbitrary column and row strides.
Sometimes, it can be useful to require a function to only accept dense arrays
using either the C (row-major) or Fortran (column-major) ordering. This can be
accomplished via a second template argument with values ``py::array::c_style``
or ``py::array::f_style``.
.. code-block:: cpp
void f(py::array_t<double, py::array::c_style | py::array::forcecast> array);
The ``py::array::forcecast`` argument is the default value of the second
template parameter, and it ensures that non-conforming arguments are converted
into an array satisfying the specified requirements instead of trying the next
function overload.
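To illustrate (a hedged sketch; the function name is hypothetical), the
alternative to ``forcecast`` is to let non-conforming inputs fall through to a
more permissive overload:

.. code-block:: cpp

    /* Without py::array::forcecast, a Fortran-ordered or non-double input
       skips the first overload and is dispatched to the second. */
    m.def("sum", [](py::array_t<double, py::array::c_style> a) {
        /* fast path: dense, row-major double data */
    });
    m.def("sum", [](py::array_t<double> a) {
        /* generic fallback */
    });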
Structured types
================
In order for ``py::array_t`` to work with structured (record) types, we first
need to register the memory layout of the type. This can be done via
``PYBIND11_NUMPY_DTYPE`` macro, called in the plugin definition code, which
expects the type followed by field names:
.. code-block:: cpp
struct A {
int x;
double y;
};
struct B {
int z;
A a;
};
// ...
PYBIND11_MODULE(test, m) {
// ...
PYBIND11_NUMPY_DTYPE(A, x, y);
PYBIND11_NUMPY_DTYPE(B, z, a);
/* now both A and B can be used as template arguments to py::array_t */
}
The structure should consist of fundamental arithmetic types, ``std::complex``,
previously registered substructures, and arrays of any of the above. Both C++
arrays and ``std::array`` are supported. While there is a static assertion to
prevent many types of unsupported structures, it is still the user's
responsibility to use only "plain" structures that can be safely manipulated as
raw memory without violating invariants.
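Once registered, a structured type can be used like any other element type. A
minimal sketch (the function name is hypothetical):

.. code-block:: cpp

    m.def("sum_x", [](py::array_t<A> arr) {
        auto r = arr.unchecked<1>();   // 1-D array of A records
        int total = 0;
        for (py::ssize_t i = 0; i < r.shape(0); i++)
            total += r(i).x;           // field access follows the registered layout
        return total;
    });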
Vectorizing functions
=====================
Suppose we want to bind a function with the following signature to Python so
that it can process arbitrary NumPy array arguments (vectors, matrices, general
N-D arrays) in addition to its normal arguments:
.. code-block:: cpp
double my_func(int x, float y, double z);
After including the ``pybind11/numpy.h`` header, this is extremely simple:
.. code-block:: cpp
m.def("vectorized_func", py::vectorize(my_func));
Invoking the function as shown below causes 4 calls to be made to ``my_func`` with
each of the array elements. The significant advantage of this compared to
solutions like ``numpy.vectorize()`` is that the loop over the elements runs
entirely on the C++ side and can be crunched down into a tight, optimized loop
by the compiler. The result is returned as a NumPy array of type
``numpy.dtype.float64``.
.. code-block:: pycon
>>> x = np.array([[1, 3], [5, 7]])
>>> y = np.array([[2, 4], [6, 8]])
>>> z = 3
>>> result = vectorized_func(x, y, z)
The scalar argument ``z`` is transparently replicated 4 times. The input
arrays ``x`` and ``y`` are automatically converted into the right types (they
are of type ``numpy.dtype.int64`` but need to be ``numpy.dtype.int32`` and
``numpy.dtype.float32``, respectively).
.. note::
Only arithmetic, complex, and POD types passed by value or by ``const &``
reference are vectorized; all other arguments are passed through as-is.
Functions taking rvalue reference arguments cannot be vectorized.
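As a hedged illustration of this pass-through behavior (the signature is
hypothetical), only the arithmetic argument below is broadcast element-wise;
the string is forwarded unchanged to every call:

.. code-block:: cpp

    double scale(double x, const std::string &mode);
    m.def("scale", py::vectorize(scale));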
In cases where the computation is too complicated to be reduced to
``vectorize``, it will be necessary to create and access the buffer contents
manually. The following snippet contains a complete example that shows how this
works (the code is somewhat contrived, since it could have been done more
simply using ``vectorize``).
.. code-block:: cpp
#include <pybind11/pybind11.h>
#include <pybind11/numpy.h>
namespace py = pybind11;
py::array_t<double> add_arrays(py::array_t<double> input1, py::array_t<double> input2) {
py::buffer_info buf1 = input1.request(), buf2 = input2.request();
if (buf1.ndim != 1 || buf2.ndim != 1)
throw std::runtime_error("Number of dimensions must be one");
if (buf1.size != buf2.size)
throw std::runtime_error("Input shapes must match");
/* No pointer is passed, so NumPy will allocate the buffer */
auto result = py::array_t<double>(buf1.size);
py::buffer_info buf3 = result.request();
double *ptr1 = (double *) buf1.ptr,
*ptr2 = (double *) buf2.ptr,
*ptr3 = (double *) buf3.ptr;
        for (py::ssize_t idx = 0; idx < buf1.shape[0]; idx++)
ptr3[idx] = ptr1[idx] + ptr2[idx];
return result;
}
PYBIND11_MODULE(test, m) {
m.def("add_arrays", &add_arrays, "Add two NumPy arrays");
}
.. seealso::
The file :file:`tests/test_numpy_vectorize.cpp` contains a complete
example that demonstrates using :func:`vectorize` in more detail.
Direct access
=============
For performance reasons, particularly when dealing with very large arrays, it
is often desirable to directly access array elements without internal checking
of dimensions and bounds on every access when indices are known to be already
valid. To avoid such checks, the ``array`` class and ``array_t<T>`` template
class offer an unchecked proxy object that can be used for this unchecked
access through the ``unchecked<N>`` and ``mutable_unchecked<N>`` methods,
where ``N`` gives the required dimensionality of the array:
.. code-block:: cpp
m.def("sum_3d", [](py::array_t<double> x) {
auto r = x.unchecked<3>(); // x must have ndim = 3; can be non-writeable
double sum = 0;
for (ssize_t i = 0; i < r.shape(0); i++)
for (ssize_t j = 0; j < r.shape(1); j++)
for (ssize_t k = 0; k < r.shape(2); k++)
sum += r(i, j, k);
return sum;
});
m.def("increment_3d", [](py::array_t<double> x) {
auto r = x.mutable_unchecked<3>(); // Will throw if ndim != 3 or flags.writeable is false
for (ssize_t i = 0; i < r.shape(0); i++)
for (ssize_t j = 0; j < r.shape(1); j++)
for (ssize_t k = 0; k < r.shape(2); k++)
r(i, j, k) += 1.0;
}, py::arg().noconvert());
To obtain the proxy from an ``array`` object, you must specify both the data
type and number of dimensions as template arguments, such as ``auto r =
myarray.mutable_unchecked<float, 2>()``.
If the number of dimensions is not known at compile time, you can omit the
dimensions template parameter (i.e. calling ``arr_t.unchecked()`` or
``arr.unchecked<T>()``). This will give you a proxy object that works in the
same way, but results in less optimizable code and thus a small efficiency
loss in tight loops.
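A minimal sketch of the runtime-checked variant (the function name is
hypothetical):

.. code-block:: cpp

    m.def("first_element", [](py::array_t<double> x) {
        auto r = x.unchecked();   // ndim is checked at runtime, not compile time
        if (r.ndim() != 1)
            throw std::runtime_error("Expected a 1-D array");
        return r(0);
    });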
Note that the returned proxy object directly references the array's data, and
only reads its shape, strides, and writeable flag when constructed. You must
take care to ensure that the referenced array is not destroyed or reshaped for
the duration of the returned object, typically by limiting the scope of the
returned instance.
The returned proxy object supports some of the same methods as ``py::array`` so
that it can be used as a drop-in replacement for some existing, index-checked
uses of ``py::array``:
- ``r.ndim()`` returns the number of dimensions.
- ``r.data(1, 2, ...)`` and ``r.mutable_data(1, 2, ...)`` return a pointer to
  the ``const T`` or ``T`` data, respectively, at the given indices. The
  latter is only available to proxies obtained via ``a.mutable_unchecked()``.
- ``r.itemsize()`` returns the size of an item in bytes, i.e. ``sizeof(T)``.
- ``r.shape(n)`` returns the size of dimension ``n``.
- ``r.size()`` returns the total number of elements (i.e. the product of the shapes).
- ``r.nbytes()`` returns the number of bytes used by the referenced elements
  (i.e. ``itemsize()`` times ``size()``).
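As a short usage sketch combining several of these accessors (the function
name is hypothetical; returning a ``std::vector`` additionally requires
:file:`pybind11/stl.h`):

.. code-block:: cpp

    m.def("to_vector", [](py::array_t<double> x) {
        auto r = x.unchecked<1>();
        std::vector<double> out(static_cast<size_t>(r.size()));
        for (py::ssize_t i = 0; i < r.shape(0); i++)
            out[static_cast<size_t>(i)] = r(i);
        return out;
    });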
.. seealso::
The file :file:`tests/test_numpy_array.cpp` contains additional examples
demonstrating the use of this feature.
Ellipsis
========
Python 3 provides a convenient ``...`` ellipsis notation that is often used to
slice multidimensional arrays. For instance, the following snippet extracts the
middle dimensions of a tensor with the first and last index set to zero.
.. code-block:: python
    a = np.zeros((2, 3, 4, 5))  # a (hypothetical) NumPy array
b = a[0, ..., 0]
The ``py::ellipsis()`` function can be used to perform the same
operation on the C++ side:
.. code-block:: cpp
py::array a = /* A NumPy array */;
py::array b = a[py::make_tuple(0, py::ellipsis(), 0)];
/import_scons-2.5.0-4-py3-none-any.whl/SCons/Tool/ldc.py
__revision__ = "src/engine/SCons/Tool/ldc.py 2016/07/08 19:15:27 dholth"
import os
import subprocess
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Scanner.D
import SCons.Tool
import SCons.Tool.DCommon
import SCons.Util
def generate(env):
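    """Add Builders and construction variables for the LDC2 D compiler to the Environment."""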
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
static_obj.add_action('.d', SCons.Defaults.DAction)
shared_obj.add_action('.d', SCons.Defaults.ShDAction)
static_obj.add_emitter('.d', SCons.Defaults.StaticObjectEmitter)
shared_obj.add_emitter('.d', SCons.Defaults.SharedObjectEmitter)
env['DC'] = env.Detect('ldc2')
env['DCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -of=$TARGET $SOURCES'
env['_DINCFLAGS'] = '${_concat(DINCPREFIX, DPATH, DINCSUFFIX, __env__, RDirs, TARGET, SOURCE)}'
env['_DVERFLAGS'] = '${_concat(DVERPREFIX, DVERSIONS, DVERSUFFIX, __env__)}'
env['_DDEBUGFLAGS'] = '${_concat(DDEBUGPREFIX, DDEBUG, DDEBUGSUFFIX, __env__)}'
env['_DFLAGS'] = '${_concat(DFLAGPREFIX, DFLAGS, DFLAGSUFFIX, __env__)}'
env['SHDC'] = '$DC'
env['SHDCOM'] = '$DC $_DINCFLAGS $_DVERFLAGS $_DDEBUGFLAGS $_DFLAGS -c -relocation-model=pic -of=$TARGET $SOURCES'
env['DPATH'] = ['#/']
env['DFLAGS'] = []
env['DVERSIONS'] = []
env['DDEBUG'] = []
if env['DC']:
SCons.Tool.DCommon.addDPATHToEnv(env, env['DC'])
env['DINCPREFIX'] = '-I='
env['DINCSUFFIX'] = ''
env['DVERPREFIX'] = '-version='
env['DVERSUFFIX'] = ''
env['DDEBUGPREFIX'] = '-debug='
env['DDEBUGSUFFIX'] = ''
env['DFLAGPREFIX'] = '-'
env['DFLAGSUFFIX'] = ''
env['DFILESUFFIX'] = '.d'
env['DLINK'] = '$DC'
env['DLINKFLAGS'] = SCons.Util.CLVar('')
env['DLINKCOM'] = '$DLINK -of=$TARGET $DLINKFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS'
env['DSHLINK'] = '$DC'
env['DSHLINKFLAGS'] = SCons.Util.CLVar('$DLINKFLAGS -shared -defaultlib=phobos2-ldc')
# Hack for Fedora the packages of which use the wrong name :-(
if os.path.exists('/usr/lib64/libphobos-ldc.so') or os.path.exists('/usr/lib32/libphobos-ldc.so') or os.path.exists('/usr/lib/libphobos-ldc.so') :
env['DSHLINKFLAGS'] = SCons.Util.CLVar('$DLINKFLAGS -shared -defaultlib=phobos-ldc')
env['SHDLINKCOM'] = '$DLINK -of=$TARGET $DSHLINKFLAGS $__DSHLIBVERSIONFLAGS $__DRPATH $SOURCES $_DLIBDIRFLAGS $_DLIBFLAGS'
env['DLIBLINKPREFIX'] = '' if env['PLATFORM'] == 'win32' else '-L-l'
env['DLIBLINKSUFFIX'] = '.lib' if env['PLATFORM'] == 'win32' else ''
#env['_DLIBFLAGS'] = '${_concat(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, __env__, RDirs, TARGET, SOURCE)}'
env['_DLIBFLAGS'] = '${_stripixes(DLIBLINKPREFIX, LIBS, DLIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}'
env['DLIBDIRPREFIX'] = '-L-L'
env['DLIBDIRSUFFIX'] = ''
env['_DLIBDIRFLAGS'] = '${_concat(DLIBDIRPREFIX, LIBPATH, DLIBDIRSUFFIX, __env__, RDirs, TARGET, SOURCE)}'
env['DLIB'] = 'lib' if env['PLATFORM'] == 'win32' else 'ar cr'
env['DLIBCOM'] = '$DLIB $_DLIBFLAGS {0}$TARGET $SOURCES $_DLIBFLAGS'.format('-c ' if env['PLATFORM'] == 'win32' else '')
#env['_DLIBFLAGS'] = '${_concat(DLIBFLAGPREFIX, DLIBFLAGS, DLIBFLAGSUFFIX, __env__)}'
env['DLIBFLAGPREFIX'] = '-'
env['DLIBFLAGSUFFIX'] = ''
# __RPATH is set to $_RPATH in the platform specification if that
# platform supports it.
env['DRPATHPREFIX'] = '-L-rpath='
env['DRPATHSUFFIX'] = ''
env['_DRPATH'] = '${_concat(DRPATHPREFIX, RPATH, DRPATHSUFFIX, __env__)}'
# Support for versioned libraries
env['_DSHLIBVERSIONFLAGS'] = '$DSHLIBVERSIONFLAGS -L-soname=$_DSHLIBSONAME'
env['_DSHLIBSONAME'] = '${DShLibSonameGenerator(__env__,TARGET)}'
# NOTE: this is a quick hack, the soname will only work if there is
# c/c++ linker loaded which provides callback for the ShLibSonameGenerator
env['DShLibSonameGenerator'] = SCons.Tool.ShLibSonameGenerator
# NOTE: this is only for further reference, currently $DSHLIBVERSION does
# not work, the user must use $SHLIBVERSION
env['DSHLIBVERSION'] = '$SHLIBVERSION'
env['DSHLIBVERSIONFLAGS'] = []
SCons.Tool.createStaticLibBuilder(env)
def exists(env):
return env.Detect('ldc2')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
/elf_diff-0.6.0-py3-none-any.whl/elf_diff/plugin.py
# -*- mode: python -*-
#
# elf_diff
#
# Copyright (C) 2021 Noseglasses ([email protected])
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
#
from elf_diff.error_handling import warning
from elf_diff.auxiliary import getDirectoryThatStoresModuleOfObj
from elf_diff.settings import Settings
import importlib
import importlib.util
from typing import List, Type, Dict, Optional
import abc
class PluginException(Exception):
    def __init__(self, plugin, msg: str):
        super().__init__("Plugin %s: %s" % (type(plugin).__name__, msg))
class PluginConfigurationKey(object):
def __init__(
self,
name: str,
description: str,
is_optional: bool = False,
default: Optional[str] = None,
):
self.name = name
self.description = description
self.is_optional = is_optional
self.default = default
if self.is_optional and (default is None):
raise Exception(
f"Optional configuration key {name} is lacking a default value"
)
PluginConfigurationInformation = List[PluginConfigurationKey]
class Plugin(object):
"""An elf_diff plugin"""
def __init__(self, settings: Settings, plugin_configuration: Dict[str, str]):
self._settings: Settings = settings
self._plugin_configuration: Dict[str, str] = plugin_configuration
self.validateConfiguration()
def validateConfiguration(self) -> None:
"""Validate the configuration supplied to the plugin"""
configuration_information: PluginConfigurationInformation = type(
self
).getConfigurationInformation()
config_keys: Dict[str, PluginConfigurationKey] = {}
for config_key in configuration_information:
if config_key.name in config_keys.keys():
                raise PluginException(
                    self,
                    f"Plugin exports more than one configuration option named '{config_key.name}'",
                )
config_keys[config_key.name] = config_key
print("Verifying config keys...")
for config_key_encountered, value in self._plugin_configuration.items():
if config_key_encountered not in config_keys.keys():
raise PluginException(
self,
"Unexpected configuration entry '%s = %s' encountered"
% (config_key_encountered, value),
)
def pluginWarning(self, msg: str) -> None:
"""Output a warning in plugin scope"""
warning("Plugin %s: %s" % (type(self).__name__, msg))
def isConfigurationParameterAvailable(self, name: str) -> bool:
"""Return True if the configuration parameter is available, False otherwise"""
return name in self._plugin_configuration.keys()
def getConfigurationParameter(self, name: str) -> str:
"""Returns the value of a configuration parameter or throw an error if unavailable"""
if name not in self._plugin_configuration.keys():
configuration_information = type(self).getConfigurationInformation()
for key in configuration_information:
if key.name == name:
if key.default is None:
                        raise PluginException(
                            self,
                            f"Trying to access the undefined default value of configuration key '{key.name}'",
                        )
return key.default
            raise Exception(
                f"Trying to access the default value of an undefined configuration key '{name}'"
            )
return self._plugin_configuration[name]
def getModulePath(self) -> str:
"""Return the directory that holds the plugin module"""
return getDirectoryThatStoresModuleOfObj(self)
def log(self, msg: str) -> None:
"""Output a log message in plugin scope"""
if not (
("quiet" in self._plugin_configuration.keys())
and self._plugin_configuration["quiet"]
):
print("Plugin %s: %s" % (type(self).__name__, msg))
@staticmethod
def getConfigurationInformation() -> PluginConfigurationInformation:
"""Returns plugin configuration information"""
return [
PluginConfigurationKey(
name="quiet",
description="Disables plugin logging",
is_optional=True,
default="False",
)
]
class ExportPairReportPlugin(Plugin):
"""A pair report plugin that exports the document tree"""
@abc.abstractmethod
def export(self, document):
pass
PLUGIN_TYPES: List[Type[Plugin]] = [ExportPairReportPlugin]
ACTIVE_PLUGINS: Dict[Type[Plugin], List[Plugin]] = {}
for plugin_type in PLUGIN_TYPES:
ACTIVE_PLUGINS[plugin_type] = []
def activatePluginByType(
settings: Settings, class_, plugin_configuration: Dict[str, str]
) -> None:
"""Register a plugin by providing its plugin class and configuration parameters"""
for plugin_type in PLUGIN_TYPES:
if issubclass(class_, plugin_type):
ACTIVE_PLUGINS[plugin_type].append(class_(settings, plugin_configuration))
def activatePlugin(settings: Settings, plugin_object) -> bool:
"""Register a plugin object"""
class_ = type(plugin_object)
    successfully_activated = False
    for plugin_type in PLUGIN_TYPES:
        if issubclass(class_, plugin_type):
            ACTIVE_PLUGINS[plugin_type].append(plugin_object)
            successfully_activated = True
    return successfully_activated
def loadPluginClass(plugin_path: str, class_name: str) -> Type:
"""Load a plugin class with given name from a module with given path"""
spec = importlib.util.spec_from_file_location("elf_diff.user_plugin", plugin_path)
if spec is None:
raise Exception(f"Unable to load python module from '{plugin_path}'")
if spec.loader is None:
raise Exception("No loader in spec")
plugin_module = importlib.util.module_from_spec(spec)
if plugin_module is None:
raise Exception("Unable to load python module from spec")
# mypy chokes on next line: error: "_LoaderProtocol" has no attribute "exec_module"
spec.loader.exec_module(plugin_module) # type: ignore
return getattr(plugin_module, class_name)
def getActivePlugins(plugin_type: Type) -> List:
"""Return a list of activated plugin objects of a given type"""
return ACTIVE_PLUGINS[plugin_type]
def activatePluginsFromCommandLine(settings: Settings) -> None:
"""Register any plugins that are defined via command line switches"""
if (not settings.load_plugin) or (len(settings.load_plugin) == 0):
return
print(
"Registering %s plugin(s) defined at command line:"
% (str(len(settings.load_plugin)))
)
for plugin_definition in settings.load_plugin:
tokens: List[str] = plugin_definition.split(";")
if len(tokens) < 2:
warning("Ignoring strange load_plugin definition '%s'" % plugin_definition)
continue
path: str = tokens[0]
class_name: str = tokens[1]
print(
" module '%s', class '%s', %s configuration parameters"
% (path, class_name, (len(tokens) - 2))
)
plugin_configuration: Dict[str, str] = {}
for param_tokens in tokens[2:]:
key, value = param_tokens.split("=")
if (not key) or (not value):
warning(
"Ignoring strange key value pair '%s' in plugin definition '%s'"
% (param_tokens, plugin_definition)
)
continue
print(f" {key} = '{value}'")
plugin_configuration[key] = value
plugin_class: Optional[Type] = None
try:
plugin_class = loadPluginClass(path, class_name)
except Exception as e:
warning("Unable to load plugin class: %s" % e)
continue
activatePluginByType(settings, plugin_class, plugin_configuration)
/taskcc-alipay-sdk-python-3.3.398.tar.gz/taskcc-alipay-sdk-python-3.3.398/alipay/aop/api/request/AlipayAssetPointBudgetQueryRequest.py
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
class AlipayAssetPointBudgetQueryRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.asset.point.budget.query'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
/alchemite_apiclient-0.61.0-py3-none-any.whl/alchemite_apiclient/model/tar_fn_weighted_sum_above_all_of.py
import re # noqa: F401
import sys # noqa: F401
from alchemite_apiclient.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from alchemite_apiclient.exceptions import ApiAttributeError
class TarFnWeightedSumAboveAllOf(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('type',): {
'WEIGHTED_SUM_ABOVE': "weighted sum above",
},
}
validations = {
}
@cached_property
def additional_properties_type(): # noqa
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types(): # noqa
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'type': (str,), # noqa: E501
}
@cached_property
def discriminator(): # noqa
return None
attribute_map = {
'type': 'type', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""TarFnWeightedSumAboveAllOf - a model defined in OpenAPI
Args:
Keyword Args:
type (str): Find a solution where the weighted sum of the given columns is above `\"minimum\"`.. defaults to "weighted sum above", must be one of ["weighted sum above", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
type = kwargs.get('type', "weighted sum above")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""TarFnWeightedSumAboveAllOf - a model defined in OpenAPI
Args:
Keyword Args:
type (str): Find a solution where the weighted sum of the given columns is above `\"minimum\"`.. defaults to "weighted sum above", must be one of ["weighted sum above", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
type = kwargs.get('type', "weighted sum above")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
/Twisted_binary-20.3.0-cp38-cp38-win_amd64.whl/twisted/web/error.py
from __future__ import division, absolute_import
try:
from future_builtins import ascii
except ImportError:
pass
__all__ = [
'Error', 'PageRedirect', 'InfiniteRedirection', 'RenderError',
'MissingRenderMethod', 'MissingTemplateLoader', 'UnexposedMethodError',
'UnfilledSlot', 'UnsupportedType', 'FlattenerError',
'RedirectWithNoLocation',
]
from twisted.web._responses import RESPONSES
from twisted.python.compat import unicode, nativeString, intToBytes, Sequence
def _codeToMessage(code):
"""
Returns the response message corresponding to an HTTP code, or None
if the code is unknown or unrecognized.
@type code: L{bytes}
@param code: Refers to an HTTP status code, for example C{http.NOT_FOUND}.
@return: A string message or none
@rtype: L{bytes}
"""
try:
return RESPONSES.get(int(code))
except (ValueError, AttributeError):
return None
class Error(Exception):
"""
A basic HTTP error.
@type status: L{bytes}
@ivar status: Refers to an HTTP status code, for example C{http.NOT_FOUND}.
@type message: L{bytes}
    @ivar message: A short error message, for example "NOT FOUND".
@type response: L{bytes}
@ivar response: A complete HTML document for an error page.
"""
def __init__(self, code, message=None, response=None):
"""
Initializes a basic exception.
@type code: L{bytes} or L{int}
@param code: Refers to an HTTP status code (for example, 200) either as
an integer or a bytestring representing such. If no C{message} is
given, C{code} is mapped to a descriptive bytestring that is used
instead.
@type message: L{bytes}
@param message: A short error message, for example "NOT FOUND".
@type response: L{bytes}
@param response: A complete HTML document for an error page.
"""
message = message or _codeToMessage(code)
Exception.__init__(self, code, message, response)
if isinstance(code, int):
# If we're given an int, convert it to a bytestring
# downloadPage gives a bytes, Agent gives an int, and it worked by
# accident previously, so just make it keep working.
code = intToBytes(code)
self.status = code
self.message = message
self.response = response
def __str__(self):
return nativeString(self.status + b" " + self.message)
class PageRedirect(Error):
"""
A request resulted in an HTTP redirect.
@type location: L{bytes}
@ivar location: The location of the redirect which was not followed.
"""
def __init__(self, code, message=None, response=None, location=None):
"""
Initializes a page redirect exception.
@type code: L{bytes}
@param code: Refers to an HTTP status code, for example
C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to a
descriptive string that is used instead.
@type message: L{bytes}
@param message: A short error message, for example "NOT FOUND".
@type response: L{bytes}
@param response: A complete HTML document for an error page.
@type location: L{bytes}
@param location: The location response-header field value. It is an
absolute URI used to redirect the receiver to a location other than
the Request-URI so the request can be completed.
"""
Error.__init__(self, code, message, response)
if self.message and location:
self.message = self.message + b" to " + location
self.location = location
class InfiniteRedirection(Error):
"""
HTTP redirection is occurring endlessly.
@type location: L{bytes}
@ivar location: The first URL in the series of redirections which was
not followed.
"""
def __init__(self, code, message=None, response=None, location=None):
"""
Initializes an infinite redirection exception.
@type code: L{bytes}
@param code: Refers to an HTTP status code, for example
C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to a
descriptive string that is used instead.
@type message: L{bytes}
@param message: A short error message, for example "NOT FOUND".
@type response: L{bytes}
@param response: A complete HTML document for an error page.
@type location: L{bytes}
@param location: The location response-header field value. It is an
absolute URI used to redirect the receiver to a location other than
the Request-URI so the request can be completed.
"""
Error.__init__(self, code, message, response)
if self.message and location:
self.message = self.message + b" to " + location
self.location = location
class RedirectWithNoLocation(Error):
"""
Exception passed to L{ResponseFailed} if we got a redirect without a
C{Location} header field.
@type uri: L{bytes}
@ivar uri: The URI which failed to give a proper location header
field.
@since: 11.1
"""
def __init__(self, code, message, uri):
"""
Initializes a page redirect exception when no location is given.
@type code: L{bytes}
@param code: Refers to an HTTP status code, for example
C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to
a descriptive string that is used instead.
@type message: L{bytes}
@param message: A short error message.
@type uri: L{bytes}
@param uri: The URI which failed to give a proper location header
field.
"""
Error.__init__(self, code, message)
self.message = self.message + b" to " + uri
self.uri = uri
class UnsupportedMethod(Exception):
"""
Raised by a resource when faced with a strange request method.
RFC 2616 (HTTP 1.1) gives us two choices when faced with this situation:
If the type of request is known to us, but not allowed for the requested
resource, respond with NOT_ALLOWED. Otherwise, if the request is something
we don't know how to deal with in any case, respond with NOT_IMPLEMENTED.
When this exception is raised by a Resource's render method, the server
will make the appropriate response.
This exception's first argument MUST be a sequence of the methods the
resource *does* support.
"""
allowedMethods = ()
def __init__(self, allowedMethods, *args):
Exception.__init__(self, allowedMethods, *args)
self.allowedMethods = allowedMethods
if not isinstance(allowedMethods, Sequence):
raise TypeError(
"First argument must be a sequence of supported methods, "
"but my first argument is not a sequence.")
def __str__(self):
return "Expected one of %r" % (self.allowedMethods,)
class SchemeNotSupported(Exception):
"""
The scheme of a URI was not one of the supported values.
"""
class RenderError(Exception):
"""
Base exception class for all errors which can occur during template
rendering.
"""
class MissingRenderMethod(RenderError):
"""
Tried to use a render method which does not exist.
@ivar element: The element which did not have the render method.
@ivar renderName: The name of the renderer which could not be found.
"""
def __init__(self, element, renderName):
RenderError.__init__(self, element, renderName)
self.element = element
self.renderName = renderName
def __repr__(self):
return '%r: %r had no render method named %r' % (
self.__class__.__name__, self.element, self.renderName)
class MissingTemplateLoader(RenderError):
"""
L{MissingTemplateLoader} is raised when trying to render an Element without
a template loader, i.e. a C{loader} attribute.
@ivar element: The Element which did not have a document factory.
"""
def __init__(self, element):
RenderError.__init__(self, element)
self.element = element
def __repr__(self):
return '%r: %r had no loader' % (self.__class__.__name__,
self.element)
class UnexposedMethodError(Exception):
"""
Raised on any attempt to get a method which has not been exposed.
"""
class UnfilledSlot(Exception):
"""
During flattening, a slot with no associated data was encountered.
"""
class UnsupportedType(Exception):
"""
During flattening, an object of a type which cannot be flattened was
encountered.
"""
class ExcessiveBufferingError(Exception):
"""
The HTTP/2 protocol has been forced to buffer an excessive amount of
outbound data, and has therefore closed the connection and dropped all
outbound data.
"""
class FlattenerError(Exception):
"""
An error occurred while flattening an object.
@ivar _roots: A list of the objects on the flattener's stack at the time
the unflattenable object was encountered. The first element is least
deeply nested object and the last element is the most deeply nested.
"""
def __init__(self, exception, roots, traceback):
self._exception = exception
self._roots = roots
self._traceback = traceback
Exception.__init__(self, exception, roots, traceback)
def _formatRoot(self, obj):
"""
Convert an object from C{self._roots} to a string suitable for
inclusion in a render-traceback (like a normal Python traceback, but
can include "frame" source locations which are not in Python source
files).
@param obj: Any object which can be a render step I{root}.
Typically, L{Tag}s, strings, and other simple Python types.
@return: A string representation of C{obj}.
@rtype: L{str}
"""
# There's a circular dependency between this class and 'Tag', although
# only for an isinstance() check.
from twisted.web.template import Tag
if isinstance(obj, (bytes, str, unicode)):
# It's somewhat unlikely that there will ever be a str in the roots
# list. However, something like a MemoryError during a str.replace
# call (eg, replacing " with ") could possibly cause this.
# Likewise, UTF-8 encoding a unicode string to a byte string might
# fail like this.
if len(obj) > 40:
if isinstance(obj, unicode):
ellipsis = u'<...>'
else:
ellipsis = b'<...>'
return ascii(obj[:20] + ellipsis + obj[-20:])
else:
return ascii(obj)
elif isinstance(obj, Tag):
if obj.filename is None:
return 'Tag <' + obj.tagName + '>'
else:
return "File \"%s\", line %d, column %d, in \"%s\"" % (
obj.filename, obj.lineNumber,
obj.columnNumber, obj.tagName)
else:
return ascii(obj)
def __repr__(self):
"""
Present a string representation which includes a template traceback, so
we can tell where this error occurred in the template, as well as in
Python.
"""
# Avoid importing things unnecessarily until we actually need them;
# since this is an 'error' module we should be extra paranoid about
# that.
from traceback import format_list
if self._roots:
roots = ' ' + '\n '.join([
self._formatRoot(r) for r in self._roots]) + '\n'
else:
roots = ''
if self._traceback:
traceback = '\n'.join([
line
for entry in format_list(self._traceback)
for line in entry.splitlines()]) + '\n'
else:
traceback = ''
return (
'Exception while flattening:\n' +
roots + traceback +
self._exception.__class__.__name__ + ': ' +
str(self._exception) + '\n')
def __str__(self):
return repr(self)
class UnsupportedSpecialHeader(Exception):
"""
A HTTP/2 request was received that contained a HTTP/2 pseudo-header field
that is not recognised by Twisted.
"""
/tensorflow_edwin-2.10.1-cp38-cp38-win_amd64.whl/tensorflow/python/trackable/python_state.py
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import abc
import six
from tensorflow.python.trackable import base
from tensorflow.python.util.tf_export import tf_export
@tf_export("train.experimental.PythonState")
@six.add_metaclass(abc.ABCMeta)
class PythonState(base.Trackable):
"""A mixin for putting Python state in an object-based checkpoint.
This is an abstract class which allows extensions to TensorFlow's object-based
checkpointing (see `tf.train.Checkpoint`). For example a wrapper for NumPy
arrays:
```python
import io
import numpy
class NumpyWrapper(tf.train.experimental.PythonState):
def __init__(self, array):
self.array = array
def serialize(self):
string_file = io.BytesIO()
try:
numpy.save(string_file, self.array, allow_pickle=False)
serialized = string_file.getvalue()
finally:
string_file.close()
return serialized
def deserialize(self, string_value):
string_file = io.BytesIO(string_value)
try:
self.array = numpy.load(string_file, allow_pickle=False)
finally:
string_file.close()
```
Instances of `NumpyWrapper` are checkpointable objects, and will be saved and
restored from checkpoints along with TensorFlow state like variables.
```python
root = tf.train.Checkpoint(numpy=NumpyWrapper(numpy.array([1.])))
save_path = root.save(prefix)
root.numpy.array *= 2.
assert [2.] == root.numpy.array
root.restore(save_path)
assert [1.] == root.numpy.array
```
"""
@abc.abstractmethod
def serialize(self):
"""Callback to serialize the object. Returns a string."""
@abc.abstractmethod
def deserialize(self, string_value):
"""Callback to deserialize the object."""
/task-core-0.2.1.tar.gz/task-core-0.2.1/task_core/service.py
"""service and task objects"""
import logging
import yaml
from .base import BaseFileData
from .tasks import TaskManager
from .schema import ServiceSchemaValidator
LOG = logging.getLogger(__name__)
class Service(BaseFileData):
"""service representation"""
def __init__(self, definition):
self._data = None
self._tasks = None
self._hosts = []
super().__init__(definition)
ServiceSchemaValidator.instance().validate(self._data)
self._task_mgr = TaskManager.instance()
@property
def hosts(self) -> list:
return self._hosts
def add_host(self, host) -> list:
self._hosts.append(host)
return self.hosts
def remove_host(self, host) -> list:
self._hosts.remove(host)
return self.hosts
@property
def type(self) -> str:
return self._data.get("type", "service")
@property
def version(self) -> str:
return self._data.get("version")
@property
def provides(self):
return self.name
@property
def requires(self) -> list:
return self._data.get("requires", [])
@property
def tasks(self) -> list:
return self._data.get("tasks", [])
def get_tasks_needed_by(self):
"""build a dict of needed by and provides"""
refs = {}
for _task in self.tasks:
provides = _task.get("provides", [])
for need in _task.get("needed-by", []):
if refs.get(need):
refs[need] = sorted(list(set(refs[need] + provides)))
else:
refs[need] = sorted(list(set(provides)))
return refs
def update_task_requires(self, needs: dict):
"""update task requires based on needed by info"""
for _task in self.tasks:
for need, provides in needs.items():
if provides is None:
# shouldn't be None, but to be safe let's skip it
LOG.warning("A task with no provides has a needed-by %s", need)
continue
                if isinstance(provides, str):
provides = [provides]
if need in _task.get("provides", []):
_task["requires"] = list(set(_task.get("requires", []) + provides))
def build_tasks(self, task_type_override=None):
tasks = []
for _task in self.tasks:
if task_type_override:
task_type = task_type_override
else:
task_type = self._task_mgr.get_driver(_task.get("driver", "service"))
task = task_type(self.name, _task, self.hosts)
task.version = tuple(int(v) for v in self.version.split("."))
tasks.append(task)
return tasks
def save(self, location) -> None:
with open(location, encoding="utf-8", mode="w") as fout:
yaml.dump(self.data, fout)
/uliweb_ui-0.1.0-py3-none-any.whl/uliweb_ui/static/modules/gridster/jquery.gridster.with-extras.js
;(function(root, factory) {
if (typeof define === 'function' && define.amd) {
define('gridster-coords', ['jquery'], factory);
} else {
root.GridsterCoords = factory(root.$ || root.jQuery);
}
}(this, function($) {
/**
* Creates objects with coordinates (x1, y1, x2, y2, cx, cy, width, height)
* to simulate DOM elements on the screen.
* Coords is used by Gridster to create a faux grid with any DOM element can
* collide.
*
* @class Coords
* @param {HTMLElement|Object} obj The jQuery HTMLElement or a object with: left,
* top, width and height properties.
* @return {Object} Coords instance.
* @constructor
*/
function Coords(obj) {
if (obj[0] && $.isPlainObject(obj[0])) {
this.data = obj[0];
}else {
this.el = obj;
}
this.isCoords = true;
this.coords = {};
this.init();
return this;
}
var fn = Coords.prototype;
fn.init = function(){
this.set();
this.original_coords = this.get();
};
fn.set = function(update, not_update_offsets) {
var el = this.el;
if (el && !update) {
this.data = el.offset();
this.data.width = el.width();
this.data.height = el.height();
}
if (el && update && !not_update_offsets) {
var offset = el.offset();
this.data.top = offset.top;
this.data.left = offset.left;
}
var d = this.data;
typeof d.left === 'undefined' && (d.left = d.x1);
typeof d.top === 'undefined' && (d.top = d.y1);
this.coords.x1 = d.left;
this.coords.y1 = d.top;
this.coords.x2 = d.left + d.width;
this.coords.y2 = d.top + d.height;
this.coords.cx = d.left + (d.width / 2);
this.coords.cy = d.top + (d.height / 2);
this.coords.width = d.width;
this.coords.height = d.height;
this.coords.el = el || false ;
return this;
};
fn.update = function(data){
if (!data && !this.el) {
return this;
}
if (data) {
var new_data = $.extend({}, this.data, data);
this.data = new_data;
return this.set(true, true);
}
this.set(true);
return this;
};
fn.get = function(){
return this.coords;
};
fn.destroy = function() {
this.el.removeData('coords');
delete this.el;
};
//jQuery adapter
$.fn.coords = function() {
if (this.data('coords') ) {
return this.data('coords');
}
var ins = new Coords(this, arguments[0]);
this.data('coords', ins);
return ins;
};
return Coords;
}));
;(function(root, factory) {
if (typeof define === 'function' && define.amd) {
define('gridster-collision', ['jquery', 'gridster-coords'], factory);
} else {
root.GridsterCollision = factory(root.$ || root.jQuery,
root.GridsterCoords);
}
}(this, function($, Coords) {
var defaults = {
colliders_context: document.body,
overlapping_region: 'C'
// ,on_overlap: function(collider_data){},
// on_overlap_start : function(collider_data){},
// on_overlap_stop : function(collider_data){}
};
/**
* Detects collisions between a DOM element against other DOM elements or
* Coords objects.
*
* @class Collision
* @uses Coords
* @param {HTMLElement} el The jQuery wrapped HTMLElement.
* @param {HTMLElement|Array} colliders Can be a jQuery collection
* of HTMLElements or an Array of Coords instances.
* @param {Object} [options] An Object with all options you want to
* overwrite:
* @param {String} [options.overlapping_region] Determines when collision
* is valid, depending on the overlapped area. Values can be: 'N', 'S',
* 'W', 'E', 'C' or 'all'. Default is 'C'.
* @param {Function} [options.on_overlap_start] Executes a function the first
* time each `collider ` is overlapped.
* @param {Function} [options.on_overlap_stop] Executes a function when a
* `collider` is no longer collided.
* @param {Function} [options.on_overlap] Executes a function when the
* mouse is moved during the collision.
* @return {Object} Collision instance.
* @constructor
*/
function Collision(el, colliders, options) {
this.options = $.extend(defaults, options);
this.$element = el;
this.last_colliders = [];
this.last_colliders_coords = [];
this.set_colliders(colliders);
this.init();
}
Collision.defaults = defaults;
var fn = Collision.prototype;
fn.init = function() {
this.find_collisions();
};
fn.overlaps = function(a, b) {
var x = false;
var y = false;
if ((b.x1 >= a.x1 && b.x1 <= a.x2) ||
(b.x2 >= a.x1 && b.x2 <= a.x2) ||
(a.x1 >= b.x1 && a.x2 <= b.x2)
) { x = true; }
if ((b.y1 >= a.y1 && b.y1 <= a.y2) ||
(b.y2 >= a.y1 && b.y2 <= a.y2) ||
(a.y1 >= b.y1 && a.y2 <= b.y2)
) { y = true; }
return (x && y);
};
fn.detect_overlapping_region = function(a, b){
var regionX = '';
var regionY = '';
if (a.y1 > b.cy && a.y1 < b.y2) { regionX = 'N'; }
if (a.y2 > b.y1 && a.y2 < b.cy) { regionX = 'S'; }
if (a.x1 > b.cx && a.x1 < b.x2) { regionY = 'W'; }
if (a.x2 > b.x1 && a.x2 < b.cx) { regionY = 'E'; }
return (regionX + regionY) || 'C';
};
fn.calculate_overlapped_area_coords = function(a, b){
var x1 = Math.max(a.x1, b.x1);
var y1 = Math.max(a.y1, b.y1);
var x2 = Math.min(a.x2, b.x2);
var y2 = Math.min(a.y2, b.y2);
return $({
left: x1,
top: y1,
width : (x2 - x1),
height: (y2 - y1)
}).coords().get();
};
fn.calculate_overlapped_area = function(coords){
return (coords.width * coords.height);
};
fn.manage_colliders_start_stop = function(new_colliders_coords, start_callback, stop_callback){
var last = this.last_colliders_coords;
for (var i = 0, il = last.length; i < il; i++) {
if ($.inArray(last[i], new_colliders_coords) === -1) {
start_callback.call(this, last[i]);
}
}
for (var j = 0, jl = new_colliders_coords.length; j < jl; j++) {
if ($.inArray(new_colliders_coords[j], last) === -1) {
stop_callback.call(this, new_colliders_coords[j]);
}
}
};
fn.find_collisions = function(player_data_coords){
var self = this;
var overlapping_region = this.options.overlapping_region;
var colliders_coords = [];
var colliders_data = [];
var $colliders = (this.colliders || this.$colliders);
var count = $colliders.length;
var player_coords = self.$element.coords()
.update(player_data_coords || false).get();
while(count--){
var $collider = self.$colliders ?
$($colliders[count]) : $colliders[count];
var $collider_coords_ins = ($collider.isCoords) ?
$collider : $collider.coords();
var collider_coords = $collider_coords_ins.get();
var overlaps = self.overlaps(player_coords, collider_coords);
if (!overlaps) {
continue;
}
var region = self.detect_overlapping_region(
player_coords, collider_coords);
//todo: make this an option
if (region === overlapping_region || overlapping_region === 'all') {
var area_coords = self.calculate_overlapped_area_coords(
player_coords, collider_coords);
var area = self.calculate_overlapped_area(area_coords);
var collider_data = {
area: area,
area_coords : area_coords,
region: region,
coords: collider_coords,
player_coords: player_coords,
el: $collider
};
if (self.options.on_overlap) {
self.options.on_overlap.call(this, collider_data);
}
colliders_coords.push($collider_coords_ins);
colliders_data.push(collider_data);
}
}
if (self.options.on_overlap_stop || self.options.on_overlap_start) {
this.manage_colliders_start_stop(colliders_coords,
self.options.on_overlap_start, self.options.on_overlap_stop);
}
this.last_colliders_coords = colliders_coords;
return colliders_data;
};
fn.get_closest_colliders = function(player_data_coords){
var colliders = this.find_collisions(player_data_coords);
colliders.sort(function(a, b) {
/* if colliders are being overlapped by the "C" (center) region,
* we have to set a lower index in the array to which they are placed
* above in the grid. */
if (a.region === 'C' && b.region === 'C') {
if (a.coords.y1 < b.coords.y1 || a.coords.x1 < b.coords.x1) {
return - 1;
}else{
return 1;
}
}
            if (a.area < b.area) {
                return 1;
            }
            // sort colliders with a larger overlapped area first
            return -1;
});
return colliders;
};
fn.set_colliders = function(colliders) {
if (typeof colliders === 'string' || colliders instanceof $) {
this.$colliders = $(colliders,
this.options.colliders_context).not(this.$element);
}else{
this.colliders = $(colliders);
}
};
//jQuery adapter
$.fn.collision = function(collider, options) {
return new Collision( this, collider, options );
};
return Collision;
}));
;(function(window, undefined) {
/* Delay, debounce and throttle functions taken from underscore.js
*
* Copyright (c) 2009-2013 Jeremy Ashkenas, DocumentCloud and
* Investigative Reporters & Editors
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
window.delay = function(func, wait) {
var args = Array.prototype.slice.call(arguments, 2);
return setTimeout(function(){ return func.apply(null, args); }, wait);
};
window.debounce = function(func, wait, immediate) {
var timeout;
return function() {
var context = this, args = arguments;
var later = function() {
timeout = null;
if (!immediate) func.apply(context, args);
};
if (immediate && !timeout) func.apply(context, args);
clearTimeout(timeout);
timeout = setTimeout(later, wait);
};
};
window.throttle = function(func, wait) {
var context, args, timeout, throttling, more, result;
var whenDone = debounce(
function(){ more = throttling = false; }, wait);
return function() {
context = this; args = arguments;
var later = function() {
timeout = null;
if (more) func.apply(context, args);
whenDone();
};
if (!timeout) timeout = setTimeout(later, wait);
if (throttling) {
more = true;
} else {
result = func.apply(context, args);
}
whenDone();
throttling = true;
return result;
};
};
})(window);
;(function(root, factory) {
if (typeof define === 'function' && define.amd) {
define('gridster-draggable', ['jquery'], factory);
} else {
root.GridsterDraggable = factory(root.$ || root.jQuery);
}
}(this, function($) {
var defaults = {
items: 'li',
distance: 1,
limit: true,
offset_left: 0,
autoscroll: true,
ignore_dragging: ['INPUT', 'TEXTAREA', 'SELECT', 'BUTTON'], // or function
handle: null,
container_width: 0, // 0 == auto
move_element: true,
helper: false, // or 'clone'
remove_helper: true
// drag: function(e) {},
// start : function(e, ui) {},
// stop : function(e) {}
};
var $window = $(window);
var dir_map = { x : 'left', y : 'top' };
var isTouch = !!('ontouchstart' in window);
var capitalize = function(str) {
return str.charAt(0).toUpperCase() + str.slice(1);
};
var idCounter = 0;
var uniqId = function() {
return ++idCounter + '';
};
/**
* Basic drag implementation for DOM elements inside a container.
* Provide start/stop/drag callbacks.
*
* @class Draggable
* @param {HTMLElement} el The HTMLElement that contains all the widgets
* to be dragged.
* @param {Object} [options] An Object with all options you want to
* overwrite:
* @param {HTMLElement|String} [options.items] Define who will
* be the draggable items. Can be a CSS Selector String or a
* collection of HTMLElements.
* @param {Number} [options.distance] Distance in pixels after mousedown
* the mouse must move before dragging should start.
* @param {Boolean} [options.limit] Constrains dragging to the width of
* the container
* @param {Object|Function} [options.ignore_dragging] Array of node names
* that should not trigger dragging, by default `['INPUT', 'TEXTAREA',
* 'SELECT', 'BUTTON']`. If a function is used, return true to ignore dragging.
* @param {Number} [options.offset_left] Offset added to the item
* that is being dragged.
* @param {Function} [options.drag] Executes a callback when the mouse is
* moved during the dragging.
* @param {Function} [options.start] Executes a callback when the drag
* starts.
* @param {Function} [options.stop] Executes a callback when the drag stops.
* @return {Object} Returns `el`.
* @constructor
*/
function Draggable(el, options) {
this.options = $.extend({}, defaults, options);
this.$document = $(document);
this.$container = $(el);
this.$dragitems = $(this.options.items, this.$container);
this.is_dragging = false;
this.player_min_left = 0 + this.options.offset_left;
this.id = uniqId();
this.ns = '.gridster-draggable-' + this.id;
this.init();
}
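/* Construction sketch (illustrative; the container selector is
* hypothetical): Draggable is normally created through the jQuery adapter
* defined at the bottom of this module, e.g.
*
*   var drag = $('.my-container').drag({
*       items: '.gs-w',
*       distance: 4,
*       start: function(event, ui) { ... },
*       stop: function(event, ui) { ... }
*   });
*   drag.disable(); // later: drag.enable() or drag.destroy()
*/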
Draggable.defaults = defaults;
var fn = Draggable.prototype;
fn.init = function() {
var pos = this.$container.css('position');
this.calculate_dimensions();
this.$container.css('position', pos === 'static' ? 'relative' : pos);
this.disabled = false;
this.events();
$(window).bind(this.nsEvent('resize'),
throttle($.proxy(this.calculate_dimensions, this), 200));
};
fn.nsEvent = function(ev) {
return (ev || '') + this.ns;
};
fn.events = function() {
this.pointer_events = {
start: this.nsEvent('touchstart') + ' ' + this.nsEvent('mousedown'),
move: this.nsEvent('touchmove') + ' ' + this.nsEvent('mousemove'),
end: this.nsEvent('touchend') + ' ' + this.nsEvent('mouseup')
};
this.$container.on(this.nsEvent('selectstart'),
$.proxy(this.on_select_start, this));
this.$container.on(this.pointer_events.start, this.options.items,
$.proxy(this.drag_handler, this));
this.$document.on(this.pointer_events.end, $.proxy(function(e) {
this.is_dragging = false;
if (this.disabled) { return; }
this.$document.off(this.pointer_events.move);
if (this.drag_start) {
this.on_dragstop(e);
}
}, this));
};
fn.get_actual_pos = function($el) {
var pos = $el.position();
return pos;
};
fn.get_mouse_pos = function(e) {
if (e.originalEvent && e.originalEvent.touches) {
var oe = e.originalEvent;
e = oe.touches.length ? oe.touches[0] : oe.changedTouches[0];
}
return {
left: e.clientX,
top: e.clientY
};
};
fn.get_offset = function(e) {
e.preventDefault();
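// compute the new position relative to the container (baseX/baseY),
// adding the pointer delta and compensating for any window scrolling
// that has happened since drag start (win_offset_x/y)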
var mouse_actual_pos = this.get_mouse_pos(e);
var diff_x = Math.round(
mouse_actual_pos.left - this.mouse_init_pos.left);
var diff_y = Math.round(mouse_actual_pos.top - this.mouse_init_pos.top);
var left = Math.round(this.el_init_offset.left +
diff_x - this.baseX + $(window).scrollLeft() - this.win_offset_x);
var top = Math.round(this.el_init_offset.top +
diff_y - this.baseY + $(window).scrollTop() - this.win_offset_y);
if (this.options.limit) {
if (left > this.player_max_left) {
left = this.player_max_left;
} else if(left < this.player_min_left) {
left = this.player_min_left;
}
}
return {
position: {
left: left,
top: top
},
pointer: {
left: mouse_actual_pos.left,
top: mouse_actual_pos.top,
diff_left: diff_x + ($(window).scrollLeft() - this.win_offset_x),
diff_top: diff_y + ($(window).scrollTop() - this.win_offset_y)
}
};
};
fn.get_drag_data = function(e) {
var offset = this.get_offset(e);
offset.$player = this.$player;
offset.$helper = this.helper ? this.$helper : this.$player;
return offset;
};
fn.set_limits = function(container_width) {
container_width || (container_width = this.$container.width());
this.player_max_left = (container_width - this.player_width -
this.options.offset_left);
this.options.container_width = container_width;
return this;
};
fn.scroll_in = function(axis, data) {
var dir_prop = dir_map[axis];
var area_size = 50;
var scroll_inc = 30;
var is_x = axis === 'x';
var window_size = is_x ? this.window_width : this.window_height;
var doc_size = is_x ? $(document).width() : $(document).height();
var player_size = is_x ? this.$player.width() : this.$player.height();
var next_scroll;
var scroll_offset = $window['scroll' + capitalize(dir_prop)]();
var min_window_pos = scroll_offset;
var max_window_pos = min_window_pos + window_size;
var mouse_next_zone = max_window_pos - area_size; // down/right
var mouse_prev_zone = min_window_pos + area_size; // up/left
var abs_mouse_pos = min_window_pos + data.pointer[dir_prop];
var max_player_pos = (doc_size - window_size + player_size);
if (abs_mouse_pos >= mouse_next_zone) {
next_scroll = scroll_offset + scroll_inc;
if (next_scroll < max_player_pos) {
$window['scroll' + capitalize(dir_prop)](next_scroll);
this['scroll_offset_' + axis] += scroll_inc;
}
}
if (abs_mouse_pos <= mouse_prev_zone) {
next_scroll = scroll_offset - scroll_inc;
if (next_scroll > 0) {
$window['scroll' + capitalize(dir_prop)](next_scroll);
this['scroll_offset_' + axis] -= scroll_inc;
}
}
return this;
};
fn.manage_scroll = function(data) {
this.scroll_in('x', data);
this.scroll_in('y', data);
};
fn.calculate_dimensions = function(e) {
this.window_height = $window.height();
this.window_width = $window.width();
};
fn.drag_handler = function(e) {
var node = e.target.nodeName;
// skip if drag is disabled, or click was not done with the mouse primary button
if (this.disabled || (e.which !== 1 && !isTouch)) {
return;
}
if (this.ignore_drag(e)) {
return;
}
var self = this;
var first = true;
this.$player = $(e.currentTarget);
this.el_init_pos = this.get_actual_pos(this.$player);
this.mouse_init_pos = this.get_mouse_pos(e);
this.offsetY = this.mouse_init_pos.top - this.el_init_pos.top;
this.$document.on(this.pointer_events.move, function(mme) {
var mouse_actual_pos = self.get_mouse_pos(mme);
var diff_x = Math.abs(
mouse_actual_pos.left - self.mouse_init_pos.left);
var diff_y = Math.abs(
mouse_actual_pos.top - self.mouse_init_pos.top);
if (!(diff_x > self.options.distance ||
diff_y > self.options.distance)
) {
return false;
}
if (first) {
first = false;
self.on_dragstart.call(self, mme);
return false;
}
if (self.is_dragging === true) {
self.on_dragmove.call(self, mme);
}
return false;
});
if (!isTouch) { return false; }
};
fn.on_dragstart = function(e) {
e.preventDefault();
if (this.is_dragging) { return this; }
this.drag_start = this.is_dragging = true;
var offset = this.$container.offset();
this.baseX = Math.round(offset.left);
this.baseY = Math.round(offset.top);
this.initial_container_width = this.options.container_width || this.$container.width();
if (this.options.helper === 'clone') {
this.$helper = this.$player.clone()
.appendTo(this.$container).addClass('helper');
this.helper = true;
} else {
this.helper = false;
}
this.win_offset_y = $(window).scrollTop();
this.win_offset_x = $(window).scrollLeft();
this.scroll_offset_y = 0;
this.scroll_offset_x = 0;
this.el_init_offset = this.$player.offset();
this.player_width = this.$player.width();
this.player_height = this.$player.height();
this.set_limits(this.options.container_width);
if (this.options.start) {
this.options.start.call(this.$player, e, this.get_drag_data(e));
}
return false;
};
fn.on_dragmove = function(e) {
var data = this.get_drag_data(e);
this.options.autoscroll && this.manage_scroll(data);
if (this.options.move_element) {
(this.helper ? this.$helper : this.$player).css({
'position': 'absolute',
'left' : data.position.left,
'top' : data.position.top
});
}
var last_position = this.last_position || data.position;
data.prev_position = last_position;
if (this.options.drag) {
this.options.drag.call(this.$player, e, data);
}
this.last_position = data.position;
return false;
};
fn.on_dragstop = function(e) {
var data = this.get_drag_data(e);
this.drag_start = false;
if (this.options.stop) {
this.options.stop.call(this.$player, e, data);
}
if (this.helper && this.options.remove_helper) {
this.$helper.remove();
}
return false;
};
fn.on_select_start = function(e) {
if (this.disabled) { return; }
if (this.ignore_drag(e)) {
return;
}
return false;
};
fn.enable = function() {
this.disabled = false;
};
fn.disable = function() {
this.disabled = true;
};
fn.destroy = function() {
this.disable();
this.$container.off(this.ns);
this.$document.off(this.ns);
$(window).off(this.ns);
$.removeData(this.$container, 'drag');
};
fn.ignore_drag = function(event) {
if (this.options.handle) {
return !$(event.target).is(this.options.handle);
}
if ($.isFunction(this.options.ignore_dragging)) {
return this.options.ignore_dragging(event);
}
return $(event.target).is(this.options.ignore_dragging.join(', '));
};
//jQuery adapter
$.fn.drag = function ( options ) {
return new Draggable(this, options);
};
return Draggable;
}));
;(function(root, factory) {
if (typeof define === 'function' && define.amd) {
define(['jquery', 'gridster-draggable', 'gridster-collision'], factory);
} else {
root.Gridster = factory(root.$ || root.jQuery, root.GridsterDraggable,
root.GridsterCollision);
}
}(this, function($, Draggable, Collision) {
var defaults = {
namespace: '',
widget_selector: 'li',
widget_margins: [10, 10],
widget_base_dimensions: [400, 225],
extra_rows: 0,
extra_cols: 0,
min_cols: 1,
max_cols: Infinity,
min_rows: 15,
max_size_x: false,
autogrow_cols: false,
autogenerate_stylesheet: true,
avoid_overlapped_widgets: true,
auto_init: true,
serialize_params: function($w, wgd) {
return {
col: wgd.col,
row: wgd.row,
size_x: wgd.size_x,
size_y: wgd.size_y
};
},
collision: {},
draggable: {
items: '.gs-w',
distance: 4,
ignore_dragging: Draggable.defaults.ignore_dragging.slice(0)
},
resize: {
enabled: false,
axes: ['both'],
handle_append_to: '',
handle_class: 'gs-resize-handle',
max_size: [Infinity, Infinity],
min_size: [1, 1]
}
};
/**
* @class Gridster
* @uses Draggable
* @uses Collision
* @param {HTMLElement} el The HTMLElement that contains all the widgets.
* @param {Object} [options] An Object with all options you want to
* overwrite:
* @param {HTMLElement|String} [options.widget_selector] Define who will
* be the draggable widgets. Can be a CSS Selector String or a
* collection of HTMLElements
* @param {Array} [options.widget_margins] Margin between widgets.
* The first index for the horizontal margin (left, right) and
* the second for the vertical margin (top, bottom).
* @param {Array} [options.widget_base_dimensions] Base widget dimensions
* in pixels. The first index for the width and the second for the
* height.
* @param {Number} [options.extra_cols] Add more columns in addition to
* those that have been calculated.
* @param {Number} [options.extra_rows] Add more rows in addition to
* those that have been calculated.
* @param {Number} [options.min_cols] The minimum required columns.
* @param {Number} [options.max_cols] The maximum columns possible (set to null
* for no maximum).
* @param {Number} [options.min_rows] The minimum required rows.
* @param {Number} [options.max_size_x] The maximum number of columns
* that a widget can span.
* @param {Boolean} [options.autogenerate_stylesheet] If true, all the
* CSS required to position all widgets in their respective columns
* and rows will be generated automatically and injected to the
* `<head>` of the document. You can set this to false, and write
* your own CSS targeting rows and cols via data-attributes like so:
* `[data-col="1"] { left: 10px; }`
* @param {Boolean} [options.avoid_overlapped_widgets] Prevent widgets
* loaded from the DOM from overlapping. It is helpful if the positions
* were badly stored in the database or if there was any conflict.
* @param {Boolean} [options.auto_init] Automatically call gridster init
* method or not when the plugin is instantiated.
* @param {Function} [options.serialize_params] Return the data you want
* for each widget in the serialization. Two arguments are passed:
* `$w`: the jQuery wrapped HTMLElement, and `wgd`: the grid
* coords object (`col`, `row`, `size_x`, `size_y`).
* @param {Object} [options.collision] An Object with all options for
* Collision class you want to overwrite. See Collision docs for
* more info.
* @param {Object} [options.draggable] An Object with all options for
* Draggable class you want to overwrite. See Draggable docs for more
* info.
* @param {Object|Function} [options.draggable.ignore_dragging] Note that
* if you use a Function, and resize is enabled, you should ignore the
* resize handlers manually (options.resize.handle_class).
* @param {Object} [options.resize] An Object with resize config options.
* @param {Boolean} [options.resize.enabled] Set to true to enable
* resizing.
* @param {Array} [options.resize.axes] Axes in which widgets can be
* resized. Possible values: ['x', 'y', 'both'].
* @param {String} [options.resize.handle_append_to] Set a valid CSS
* selector to append resize handles to.
* @param {String} [options.resize.handle_class] CSS class name used
* by resize handles.
* @param {Array} [options.resize.max_size] Limit widget dimensions
* when resizing. Array values should be integers:
* `[max_cols_occupied, max_rows_occupied]`
* @param {Array} [options.resize.min_size] Limit widget dimensions
* when resizing. Array values should be integers:
* `[min_cols_occupied, min_rows_occupied]`
* @param {Function} [options.resize.start] Function executed
* when resizing starts.
* @param {Function} [options.resize.resize] Function executed
* during the resizing.
* @param {Function} [options.resize.stop] Function executed
* when resizing stops.
*
* @constructor
*/
function Gridster(el, options) {
this.options = $.extend(true, {}, defaults, options);
this.$el = $(el);
this.$wrapper = this.$el.parent();
this.$widgets = this.$el.children(
this.options.widget_selector).addClass('gs-w');
this.widgets = [];
this.$changed = $([]);
this.wrapper_width = this.$wrapper.width();
this.min_widget_width = (this.options.widget_margins[0] * 2) +
this.options.widget_base_dimensions[0];
this.min_widget_height = (this.options.widget_margins[1] * 2) +
this.options.widget_base_dimensions[1];
this.generated_stylesheets = [];
this.$style_tags = $([]);
this.options.auto_init && this.init();
}
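/* Initialization sketch (illustrative; markup and selector are
* hypothetical): each widget declares its position and size through
* data-attributes (see dom_to_coords), e.g.
*
*   <li data-row="1" data-col="1" data-sizex="2" data-sizey="1">...</li>
*
*   var gridster = new Gridster($('.gridster ul'), {
*       widget_margins: [10, 10],
*       widget_base_dimensions: [140, 140]
*   });
*/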
Gridster.defaults = defaults;
Gridster.generated_stylesheets = [];
/**
* Sorts an Array of grid coords objects (representing the grid coords of
* each widget) in ascending way.
*
* @method sort_by_row_asc
* @param {Array} widgets Array of grid coords objects
* @return {Array} Returns the array sorted.
*/
Gridster.sort_by_row_asc = function(widgets) {
widgets = widgets.sort(function(a, b) {
if (!a.row) {
a = $(a).coords().grid;
b = $(b).coords().grid;
}
if (a.row > b.row) {
return 1;
}
return -1;
});
return widgets;
};
/**
* Sorts an Array of grid coords objects (representing the grid coords of
* each widget) placing first the empty cells upper left.
*
* @method sort_by_row_and_col_asc
* @param {Array} widgets Array of grid coords objects
* @return {Array} Returns the array sorted.
*/
Gridster.sort_by_row_and_col_asc = function(widgets) {
widgets = widgets.sort(function(a, b) {
if (a.row > b.row || a.row === b.row && a.col > b.col) {
return 1;
}
return -1;
});
return widgets;
};
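// e.g. [{row: 2, col: 1}, {row: 1, col: 3}, {row: 1, col: 2}] sorts to
// [{row: 1, col: 2}, {row: 1, col: 3}, {row: 2, col: 1}]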
/**
* Sorts an Array of grid coords objects by column (representing the grid
* coords of each widget) in ascending way.
*
* @method sort_by_col_asc
* @param {Array} widgets Array of grid coords objects
* @return {Array} Returns the array sorted.
*/
Gridster.sort_by_col_asc = function(widgets) {
widgets = widgets.sort(function(a, b) {
if (a.col > b.col) {
return 1;
}
return -1;
});
return widgets;
};
/**
* Sorts an Array of grid coords objects (representing the grid coords of
* each widget) in descending way.
*
* @method sort_by_row_desc
* @param {Array} widgets Array of grid coords objects
* @return {Array} Returns the array sorted.
*/
Gridster.sort_by_row_desc = function(widgets) {
widgets = widgets.sort(function(a, b) {
if (a.row + a.size_y < b.row + b.size_y) {
return 1;
}
return -1;
});
return widgets;
};
/** Instance Methods **/
var fn = Gridster.prototype;
fn.init = function() {
this.options.resize.enabled && this.setup_resize();
this.generate_grid_and_stylesheet();
this.get_widgets_from_DOM();
this.set_dom_grid_height();
this.set_dom_grid_width();
this.$wrapper.addClass('ready');
this.draggable();
this.options.resize.enabled && this.resizable();
$(window).bind('resize.gridster', throttle(
$.proxy(this.recalculate_faux_grid, this), 200));
};
/**
* Disables dragging.
*
* @method disable
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.disable = function() {
this.$wrapper.find('.player-revert').removeClass('player-revert');
this.drag_api.disable();
return this;
};
/**
* Enables dragging.
*
* @method enable
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.enable = function() {
this.drag_api.enable();
return this;
};
/**
* Disables drag-and-drop widget resizing.
*
* @method disable
* @return {Class} Returns instance of gridster Class.
*/
fn.disable_resize = function() {
this.$el.addClass('gs-resize-disabled');
this.resize_api.disable();
return this;
};
/**
* Enables drag-and-drop widget resizing.
*
* @method enable
* @return {Class} Returns instance of gridster Class.
*/
fn.enable_resize = function() {
this.$el.removeClass('gs-resize-disabled');
this.resize_api.enable();
return this;
};
/**
* Add a new widget to the grid.
*
* @method add_widget
* @param {String|HTMLElement} html The string representing the HTML of the widget
* or the HTMLElement.
* @param {Number} [size_x] The nº of columns the widget occupies horizontally.
* @param {Number} [size_y] The nº of rows the widget occupies vertically.
* @param {Number} [col] The column the widget should start in.
* @param {Number} [row] The row the widget should start in.
* @param {Array} [max_size] Maximum size (in units) for width and height.
* @param {Array} [min_size] Minimum size (in units) for width and height.
* @return {HTMLElement} Returns the jQuery wrapped HTMLElement representing
* the widget that was just created.
*/
fn.add_widget = function(html, size_x, size_y, col, row, max_size, min_size) {
var pos;
size_x || (size_x = 1);
size_y || (size_y = 1);
if (!col && !row) {
pos = this.next_position(size_x, size_y);
} else {
pos = {
col: col,
row: row,
size_x: size_x,
size_y: size_y
};
this.empty_cells(col, row, size_x, size_y);
}
var $w = $(html).attr({
'data-col': pos.col,
'data-row': pos.row,
'data-sizex' : size_x,
'data-sizey' : size_y
}).addClass('gs-w').appendTo(this.$el).hide();
this.$widgets = this.$widgets.add($w);
this.register_widget($w);
this.add_faux_rows(pos.size_y);
//this.add_faux_cols(pos.size_x);
if (max_size) {
this.set_widget_max_size($w, max_size);
}
if (min_size) {
this.set_widget_min_size($w, min_size);
}
this.set_dom_grid_width();
this.set_dom_grid_height();
this.drag_api.set_limits(this.cols * this.min_widget_width);
return $w.fadeIn();
};
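/* Usage sketch (illustrative): append a 2x1 widget at the next free
* position, or pin one to a specific cell:
*
*   gridster.add_widget('<li>New</li>', 2, 1);
*   gridster.add_widget('<li>Pinned</li>', 1, 1, 3, 2); // col 3, row 2
*/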
/**
* Change widget size limits.
*
* @method set_widget_min_size
* @param {HTMLElement|Number} $widget The jQuery wrapped HTMLElement
* representing the widget or an index representing the desired widget.
* @param {Array} min_size Minimum size (in units) for width and height.
* @return {HTMLElement} Returns instance of gridster Class.
*/
fn.set_widget_min_size = function($widget, min_size) {
$widget = typeof $widget === 'number' ?
this.$widgets.eq($widget) : $widget;
if (!$widget.length) { return this; }
var wgd = $widget.data('coords').grid;
wgd.min_size_x = min_size[0];
wgd.min_size_y = min_size[1];
return this;
};
/**
* Change widget size limits.
*
* @method set_widget_max_size
* @param {HTMLElement|Number} $widget The jQuery wrapped HTMLElement
* representing the widget or an index representing the desired widget.
* @param {Array} max_size Maximum size (in units) for width and height.
* @return {HTMLElement} Returns instance of gridster Class.
*/
fn.set_widget_max_size = function($widget, max_size) {
$widget = typeof $widget === 'number' ?
this.$widgets.eq($widget) : $widget;
if (!$widget.length) { return this; }
var wgd = $widget.data('coords').grid;
wgd.max_size_x = max_size[0];
wgd.max_size_y = max_size[1];
return this;
};
/**
* Append the resize handle into a widget.
*
* @method add_resize_handle
* @param {HTMLElement} $widget The jQuery wrapped HTMLElement
* representing the widget.
* @return {HTMLElement} Returns instance of gridster Class.
*/
fn.add_resize_handle = function($w) {
var append_to = this.options.resize.handle_append_to;
$(this.resize_handle_tpl).appendTo( append_to ? $(append_to, $w) : $w);
return this;
};
/**
* Change the size of a widget. Width is limited to the current grid width.
*
* @method resize_widget
* @param {HTMLElement} $widget The jQuery wrapped HTMLElement
* representing the widget.
* @param {Number} size_x The number of columns the widget will occupy.
* By default <code>size_x</code> is limited to the space available from
* the column where the widget begins, until the last column to the right.
* @param {Number} size_y The number of rows the widget will occupy.
* @param {Function} [callback] Function executed when the widget is resized.
* @return {HTMLElement} Returns $widget.
*/
fn.resize_widget = function($widget, size_x, size_y, callback) {
var wgd = $widget.coords().grid;
var col = wgd.col;
var max_cols = this.options.max_cols;
var old_size_y = wgd.size_y;
var old_col = wgd.col;
var new_col = old_col;
size_x || (size_x = wgd.size_x);
size_y || (size_y = wgd.size_y);
if (max_cols !== Infinity) {
size_x = Math.min(size_x, max_cols - col + 1);
}
if (size_y > old_size_y) {
this.add_faux_rows(Math.max(size_y - old_size_y, 0));
}
var player_rcol = (col + size_x - 1);
if (player_rcol > this.cols) {
this.add_faux_cols(player_rcol - this.cols);
}
var new_grid_data = {
col: new_col,
row: wgd.row,
size_x: size_x,
size_y: size_y
};
this.mutate_widget_in_gridmap($widget, wgd, new_grid_data);
this.set_dom_grid_height();
this.set_dom_grid_width();
if (callback) {
callback.call(this, new_grid_data.size_x, new_grid_data.size_y);
}
return $widget;
};
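/* Usage sketch (illustrative): grow the first widget to 3 columns by
* 2 rows; the callback receives the final size in grid units:
*
*   gridster.resize_widget($('.gs-w').eq(0), 3, 2, function(size_x, size_y) {
*       // size_x/size_y may have been clamped by max_cols
*   });
*/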
/**
* Mutate widget dimensions and position in the grid map.
*
* @method mutate_widget_in_gridmap
* @param {HTMLElement} $widget The jQuery wrapped HTMLElement
* representing the widget to mutate.
* @param {Object} wgd Current widget grid data (col, row, size_x, size_y).
* @param {Object} new_wgd New widget grid data.
* @return {HTMLElement} Returns instance of gridster Class.
*/
fn.mutate_widget_in_gridmap = function($widget, wgd, new_wgd) {
var old_size_x = wgd.size_x;
var old_size_y = wgd.size_y;
var old_cells_occupied = this.get_cells_occupied(wgd);
var new_cells_occupied = this.get_cells_occupied(new_wgd);
var empty_cols = [];
$.each(old_cells_occupied.cols, function(i, col) {
if ($.inArray(col, new_cells_occupied.cols) === -1) {
empty_cols.push(col);
}
});
var occupied_cols = [];
$.each(new_cells_occupied.cols, function(i, col) {
if ($.inArray(col, old_cells_occupied.cols) === -1) {
occupied_cols.push(col);
}
});
var empty_rows = [];
$.each(old_cells_occupied.rows, function(i, row) {
if ($.inArray(row, new_cells_occupied.rows) === -1) {
empty_rows.push(row);
}
});
var occupied_rows = [];
$.each(new_cells_occupied.rows, function(i, row) {
if ($.inArray(row, old_cells_occupied.rows) === -1) {
occupied_rows.push(row);
}
});
this.remove_from_gridmap(wgd);
if (occupied_cols.length) {
var cols_to_empty = [
new_wgd.col, new_wgd.row, new_wgd.size_x, Math.min(old_size_y, new_wgd.size_y), $widget
];
this.empty_cells.apply(this, cols_to_empty);
}
if (occupied_rows.length) {
var rows_to_empty = [new_wgd.col, new_wgd.row, new_wgd.size_x, new_wgd.size_y, $widget];
this.empty_cells.apply(this, rows_to_empty);
}
// not the same as wgd = new_wgd;
wgd.col = new_wgd.col;
wgd.row = new_wgd.row;
wgd.size_x = new_wgd.size_x;
wgd.size_y = new_wgd.size_y;
this.add_to_gridmap(new_wgd, $widget);
$widget.removeClass('player-revert');
//update coords instance attributes
$widget.data('coords').update({
width: (new_wgd.size_x * this.options.widget_base_dimensions[0] +
((new_wgd.size_x - 1) * this.options.widget_margins[0]) * 2),
height: (new_wgd.size_y * this.options.widget_base_dimensions[1] +
((new_wgd.size_y - 1) * this.options.widget_margins[1]) * 2)
});
$widget.attr({
'data-col': new_wgd.col,
'data-row': new_wgd.row,
'data-sizex': new_wgd.size_x,
'data-sizey': new_wgd.size_y
});
if (empty_cols.length) {
var cols_to_remove_holes = [
empty_cols[0], new_wgd.row,
empty_cols.length,
Math.min(old_size_y, new_wgd.size_y),
$widget
];
this.remove_empty_cells.apply(this, cols_to_remove_holes);
}
if (empty_rows.length) {
var rows_to_remove_holes = [
new_wgd.col, new_wgd.row, new_wgd.size_x, new_wgd.size_y, $widget
];
this.remove_empty_cells.apply(this, rows_to_remove_holes);
}
this.move_widget_up($widget);
return this;
};
/**
* Move down widgets in cells represented by the arguments col, row, size_x,
* size_y
*
* @method empty_cells
* @param {Number} col The column where the group of cells begin.
* @param {Number} row The row where the group of cells begin.
* @param {Number} size_x The number of columns that the group of cells
* occupy.
* @param {Number} size_y The number of rows that the group of cells
* occupy.
* @param {HTMLElement} $exclude Exclude widgets from being moved.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.empty_cells = function(col, row, size_x, size_y, $exclude) {
var $nexts = this.widgets_below({
col: col,
row: row - size_y,
size_x: size_x,
size_y: size_y
});
$nexts.not($exclude).each($.proxy(function(i, w) {
var wgd = $(w).coords().grid;
if ( !(wgd.row <= (row + size_y - 1))) { return; }
var diff = (row + size_y) - wgd.row;
this.move_widget_down($(w), diff);
}, this));
this.set_dom_grid_height();
return this;
};
/**
* Move up widgets below cells represented by the arguments col, row, size_x,
* size_y.
*
* @method remove_empty_cells
* @param {Number} col The column where the group of cells begin.
* @param {Number} row The row where the group of cells begin.
* @param {Number} size_x The number of columns that the group of cells
* occupy.
* @param {Number} size_y The number of rows that the group of cells
* occupy.
* @param {HTMLElement} exclude Exclude widgets from being moved.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.remove_empty_cells = function(col, row, size_x, size_y, exclude) {
var $nexts = this.widgets_below({
col: col,
row: row,
size_x: size_x,
size_y: size_y
});
$nexts.not(exclude).each($.proxy(function(i, widget) {
this.move_widget_up( $(widget), size_y );
}, this));
this.set_dom_grid_height();
return this;
};
/**
* Get the upper-left-most available position to add a new widget.
*
* @method next_position
* @param {Number} size_x The nº of columns the widget occupies horizontally.
* @param {Number} size_y The nº of rows the widget occupies vertically.
* @return {Object} Returns a grid coords object representing the future
* widget coords.
*/
fn.next_position = function(size_x, size_y) {
size_x || (size_x = 1);
size_y || (size_y = 1);
var ga = this.gridmap;
var cols_l = ga.length;
var valid_pos = [];
var rows_l;
for (var c = 1; c < cols_l; c++) {
rows_l = ga[c].length;
for (var r = 1; r <= rows_l; r++) {
var can_move_to = this.can_move_to({
size_x: size_x,
size_y: size_y
}, c, r);
if (can_move_to) {
valid_pos.push({
col: c,
row: r,
size_y: size_y,
size_x: size_x
});
}
}
}
if (valid_pos.length) {
return Gridster.sort_by_row_and_col_asc(valid_pos)[0];
}
return false;
};
/**
* Remove a widget from the grid.
*
* @method remove_widget
* @param {HTMLElement} el The jQuery wrapped HTMLElement you want to remove.
* @param {Boolean|Function} silent If true, widgets below the removed one
* will not move up. If a Function is passed it will be used as callback.
* @param {Function} callback Function executed when the widget is removed.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.remove_widget = function(el, silent, callback) {
var $el = el instanceof $ ? el : $(el);
var wgd = $el.coords().grid;
// if silent is a function assume it's a callback
if ($.isFunction(silent)) {
callback = silent;
silent = false;
}
this.cells_occupied_by_placeholder = {};
this.$widgets = this.$widgets.not($el);
var $nexts = this.widgets_below($el);
this.remove_from_gridmap(wgd);
$el.fadeOut($.proxy(function() {
$el.remove();
if (!silent) {
$nexts.each($.proxy(function(i, widget) {
this.move_widget_up( $(widget), wgd.size_y );
}, this));
}
this.set_dom_grid_height();
if (callback) {
callback.call(this, el);
}
}, this));
return this;
};
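/* Usage sketch (illustrative): remove a widget and let the ones below it
* move up, or pass silent=true to keep them in place:
*
*   gridster.remove_widget($('.gs-w').last());
*   gridster.remove_widget($('.gs-w').last(), true, function(el) { ... });
*/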
/**
* Remove all widgets from the grid.
*
* @method remove_all_widgets
* @param {Function} callback Function executed for each widget removed.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.remove_all_widgets = function(callback) {
this.$widgets.each($.proxy(function(i, el){
this.remove_widget(el, true, callback);
}, this));
return this;
};
/**
* Returns a serialized array of the widgets in the grid.
*
* @method serialize
* @param {HTMLElement} [$widgets] The collection of jQuery wrapped
* HTMLElements you want to serialize. If no argument is passed all widgets
* will be serialized.
* @return {Array} Returns an Array of Objects with the data specified in
* the serialize_params option.
*/
fn.serialize = function($widgets) {
$widgets || ($widgets = this.$widgets);
return $widgets.map($.proxy(function(i, widget) {
var $w = $(widget);
return this.options.serialize_params($w, $w.coords().grid);
}, this)).get();
};
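/* Usage sketch (illustrative): persist the current layout, typically as
* JSON; the shape of each entry is defined by options.serialize_params:
*
*   var layout = JSON.stringify(gridster.serialize());
*   // e.g. '[{"col":1,"row":1,"size_x":2,"size_y":1}, ...]'
*/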
/**
* Returns a serialized array of the widgets that have changed their
* position.
*
* @method serialize_changed
* @return {Array} Returns an Array of Objects with the data specified in
* the serialize_params option.
*/
fn.serialize_changed = function() {
return this.serialize(this.$changed);
};
/**
* Convert widgets from DOM elements to "widget grid data" Objects.
*
* @method dom_to_coords
* @param {HTMLElement} $widget The widget to be converted.
*/
fn.dom_to_coords = function($widget) {
return {
'col': parseInt($widget.attr('data-col'), 10),
'row': parseInt($widget.attr('data-row'), 10),
'size_x': parseInt($widget.attr('data-sizex'), 10) || 1,
'size_y': parseInt($widget.attr('data-sizey'), 10) || 1,
'max_size_x': parseInt($widget.attr('data-max-sizex'), 10) || false,
'max_size_y': parseInt($widget.attr('data-max-sizey'), 10) || false,
'min_size_x': parseInt($widget.attr('data-min-sizex'), 10) || false,
'min_size_y': parseInt($widget.attr('data-min-sizey'), 10) || false,
'el': $widget
};
};
/**
* Creates the grid coords object representing the widget and adds it to the
* mapped array of positions.
*
* @method register_widget
* @param {HTMLElement|Object} $el jQuery wrapped HTMLElement representing
* the widget, or a "widget grid data" Object with (col, row, el ...).
* @return {Boolean} Returns true if the widget's final position is
* different from the original.
*/
fn.register_widget = function($el) {
var isDOM = $el instanceof jQuery;
var wgd = isDOM ? this.dom_to_coords($el) : $el;
var posChanged = false;
isDOM || ($el = wgd.el);
var empty_upper_row = this.can_go_widget_up(wgd);
if (empty_upper_row) {
wgd.row = empty_upper_row;
$el.attr('data-row', empty_upper_row);
this.$el.trigger('gridster:positionchanged', [wgd]);
posChanged = true;
}
if (this.options.avoid_overlapped_widgets &&
!this.can_move_to(
{size_x: wgd.size_x, size_y: wgd.size_y}, wgd.col, wgd.row)
) {
$.extend(wgd, this.next_position(wgd.size_x, wgd.size_y));
$el.attr({
'data-col': wgd.col,
'data-row': wgd.row,
'data-sizex': wgd.size_x,
'data-sizey': wgd.size_y
});
posChanged = true;
}
// attach Coord object to player data-coord attribute
$el.data('coords', $el.coords());
// Extend Coord object with grid position info
$el.data('coords').grid = wgd;
this.add_to_gridmap(wgd, $el);
this.options.resize.enabled && this.add_resize_handle($el);
return posChanged;
};
/**
* Update in the mapped array of positions the value of cells represented by
* the grid coords object passed in the `grid_data` param.
*
* @param {Object} grid_data The grid coords object representing the cells
* to update in the mapped array.
* @param {HTMLElement|Boolean} value Pass `false` or the jQuery wrapped
* HTMLElement, depends if you want to delete an existing position or add
* a new one.
* @method update_widget_position
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.update_widget_position = function(grid_data, value) {
this.for_each_cell_occupied(grid_data, function(col, row) {
if (!this.gridmap[col]) { return this; }
this.gridmap[col][row] = value;
});
return this;
};
/**
* Remove a widget from the mapped array of positions.
*
* @method remove_from_gridmap
* @param {Object} grid_data The grid coords object representing the cells
* to update in the mapped array.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.remove_from_gridmap = function(grid_data) {
return this.update_widget_position(grid_data, false);
};
/**
* Add a widget to the mapped array of positions.
*
* @method add_to_gridmap
* @param {Object} grid_data The grid coords object representing the cells
* to update in the mapped array.
* @param {HTMLElement|Boolean} value The value to set in the specified
* position.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.add_to_gridmap = function(grid_data, value) {
this.update_widget_position(grid_data, value || grid_data.el);
if (grid_data.el) {
var $widgets = this.widgets_below(grid_data.el);
$widgets.each($.proxy(function(i, widget) {
this.move_widget_up( $(widget));
}, this));
}
};
/**
* Make widgets draggable.
*
* @uses Draggable
* @method draggable
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.draggable = function() {
var self = this;
var draggable_options = $.extend(true, {}, this.options.draggable, {
offset_left: this.options.widget_margins[0],
offset_top: this.options.widget_margins[1],
container_width: this.cols * this.min_widget_width,
limit: true,
start: function(event, ui) {
self.$widgets.filter('.player-revert')
.removeClass('player-revert');
self.$player = $(this);
self.$helper = $(ui.$helper);
self.helper = !self.$helper.is(self.$player);
self.on_start_drag.call(self, event, ui);
self.$el.trigger('gridster:dragstart');
},
stop: function(event, ui) {
self.on_stop_drag.call(self, event, ui);
self.$el.trigger('gridster:dragstop');
},
drag: throttle(function(event, ui) {
self.on_drag.call(self, event, ui);
self.$el.trigger('gridster:drag');
}, 60)
});
this.drag_api = this.$el.drag(draggable_options);
return this;
};
/**
* Bind resize events to get resize working.
*
* @method resizable
* @return {Class} Returns instance of gridster Class.
*/
fn.resizable = function() {
this.resize_api = this.$el.drag({
items: '.' + this.options.resize.handle_class,
offset_left: this.options.widget_margins[0],
container_width: this.container_width,
move_element: false,
resize: true,
limit: this.options.autogrow_cols ? false : true,
start: $.proxy(this.on_start_resize, this),
stop: $.proxy(function(event, ui) {
delay($.proxy(function() {
this.on_stop_resize(event, ui);
}, this), 120);
}, this),
drag: throttle($.proxy(this.on_resize, this), 60)
});
return this;
};
/**
* Setup things required for resizing. Like build templates for drag handles.
*
* @method setup_resize
* @return {Class} Returns instance of gridster Class.
*/
fn.setup_resize = function() {
this.resize_handle_class = this.options.resize.handle_class;
var axes = this.options.resize.axes;
var handle_tpl = '<span class="' + this.resize_handle_class + ' ' +
this.resize_handle_class + '-{type}" />';
this.resize_handle_tpl = $.map(axes, function(type) {
return handle_tpl.replace('{type}', type);
}).join('');
if ($.isArray(this.options.draggable.ignore_dragging)) {
this.options.draggable.ignore_dragging.push(
'.' + this.resize_handle_class);
}
return this;
};
/**
* This function is executed when the player begins to be dragged.
*
* @method on_start_drag
* @param {Event} event The original browser event
* @param {Object} ui A prepared ui object with useful drag-related data
*/
fn.on_start_drag = function(event, ui) {
this.$helper.add(this.$player).add(this.$wrapper).addClass('dragging');
this.highest_col = this.get_highest_occupied_cell().col;
this.$player.addClass('player');
this.player_grid_data = this.$player.coords().grid;
this.placeholder_grid_data = $.extend({}, this.player_grid_data);
this.set_dom_grid_height(this.$el.height() +
(this.player_grid_data.size_y * this.min_widget_height));
this.set_dom_grid_width(this.cols);
var pgd_sizex = this.player_grid_data.size_x;
var cols_diff = this.cols - this.highest_col;
if (this.options.autogrow_cols && cols_diff <= pgd_sizex) {
this.add_faux_cols(Math.min(pgd_sizex - cols_diff, 1));
}
var colliders = this.faux_grid;
var coords = this.$player.data('coords').coords;
this.cells_occupied_by_player = this.get_cells_occupied(
this.player_grid_data);
this.cells_occupied_by_placeholder = this.get_cells_occupied(
this.placeholder_grid_data);
this.last_cols = [];
this.last_rows = [];
// see jquery.collision.js
this.collision_api = this.$helper.collision(
colliders, this.options.collision);
this.$preview_holder = $('<' + this.$player.get(0).tagName + ' />', {
'class': 'preview-holder',
'data-row': this.$player.attr('data-row'),
'data-col': this.$player.attr('data-col'),
css: {
width: coords.width,
height: coords.height
}
}).appendTo(this.$el);
if (this.options.draggable.start) {
this.options.draggable.start.call(this, event, ui);
}
};
/**
* This function is executed when the player is being dragged.
*
* @method on_drag
* @param {Event} event The original browser event
* @param {Object} ui A prepared ui object with useful drag-related data
*/
fn.on_drag = function(event, ui) {
//break if dragstop has been fired
if (this.$player === null) {
return false;
}
var abs_offset = {
left: ui.position.left + this.baseX,
top: ui.position.top + this.baseY
};
// auto grow cols
if (this.options.autogrow_cols) {
var prcol = this.placeholder_grid_data.col +
this.placeholder_grid_data.size_x - 1;
// "- 1" due to adding at least 1 column in on_start_drag
if (prcol >= this.cols - 1 && this.options.max_cols >= this.cols + 1) {
this.add_faux_cols(1);
this.set_dom_grid_width(this.cols + 1);
this.drag_api.set_limits(this.container_width);
}
this.collision_api.set_colliders(this.faux_grid);
}
this.colliders_data = this.collision_api.get_closest_colliders(
abs_offset);
this.on_overlapped_column_change(
this.on_start_overlapping_column, this.on_stop_overlapping_column);
this.on_overlapped_row_change(
this.on_start_overlapping_row, this.on_stop_overlapping_row);
if (this.helper && this.$player) {
this.$player.css({
'left': ui.position.left,
'top': ui.position.top
});
}
if (this.options.draggable.drag) {
this.options.draggable.drag.call(this, event, ui);
}
};
/**
* This function is executed when the player stops being dragged.
*
* @method on_stop_drag
* @param {Event} event The original browser event
* @param {Object} ui A prepared ui object with useful drag-related data
*/
fn.on_stop_drag = function(event, ui) {
this.$helper.add(this.$player).add(this.$wrapper)
.removeClass('dragging');
ui.position.left = ui.position.left + this.baseX;
ui.position.top = ui.position.top + this.baseY;
this.colliders_data = this.collision_api.get_closest_colliders(
ui.position);
this.on_overlapped_column_change(
this.on_start_overlapping_column,
this.on_stop_overlapping_column
);
this.on_overlapped_row_change(
this.on_start_overlapping_row,
this.on_stop_overlapping_row
);
this.$player.addClass('player-revert').removeClass('player')
.attr({
'data-col': this.placeholder_grid_data.col,
'data-row': this.placeholder_grid_data.row
}).css({
'left': '',
'top': ''
});
this.$changed = this.$changed.add(this.$player);
this.cells_occupied_by_player = this.get_cells_occupied(
this.placeholder_grid_data);
this.set_cells_player_occupies(
this.placeholder_grid_data.col, this.placeholder_grid_data.row);
this.$player.coords().grid.row = this.placeholder_grid_data.row;
this.$player.coords().grid.col = this.placeholder_grid_data.col;
if (this.options.draggable.stop) {
this.options.draggable.stop.call(this, event, ui);
}
this.$preview_holder.remove();
this.$player = null;
this.$helper = null;
this.placeholder_grid_data = {};
this.player_grid_data = {};
this.cells_occupied_by_placeholder = {};
this.cells_occupied_by_player = {};
this.set_dom_grid_height();
this.set_dom_grid_width();
if (this.options.autogrow_cols) {
this.drag_api.set_limits(this.cols * this.min_widget_width);
}
};
/**
* This function is executed every time a widget starts to be resized.
*
* @method on_start_resize
* @param {Event} event The original browser event
* @param {Object} ui A prepared ui object with useful drag-related data
*/
fn.on_start_resize = function(event, ui) {
this.$resized_widget = ui.$player.closest('.gs-w');
this.resize_coords = this.$resized_widget.coords();
this.resize_wgd = this.resize_coords.grid;
this.resize_initial_width = this.resize_coords.coords.width;
this.resize_initial_height = this.resize_coords.coords.height;
this.resize_initial_sizex = this.resize_coords.grid.size_x;
this.resize_initial_sizey = this.resize_coords.grid.size_y;
this.resize_initial_col = this.resize_coords.grid.col;
this.resize_last_sizex = this.resize_initial_sizex;
this.resize_last_sizey = this.resize_initial_sizey;
this.resize_max_size_x = Math.min(this.resize_wgd.max_size_x ||
this.options.resize.max_size[0],
this.options.max_cols - this.resize_initial_col + 1);
this.resize_max_size_y = this.resize_wgd.max_size_y ||
this.options.resize.max_size[1];
this.resize_min_size_x = (this.resize_wgd.min_size_x ||
this.options.resize.min_size[0] || 1);
this.resize_min_size_y = (this.resize_wgd.min_size_y ||
this.options.resize.min_size[1] || 1);
this.resize_initial_last_col = this.get_highest_occupied_cell().col;
this.set_dom_grid_width(this.cols);
this.resize_dir = {
right: ui.$player.is('.' + this.resize_handle_class + '-x'),
bottom: ui.$player.is('.' + this.resize_handle_class + '-y')
};
this.$resized_widget.css({
'min-width': this.options.widget_base_dimensions[0],
'min-height': this.options.widget_base_dimensions[1]
});
var nodeName = this.$resized_widget.get(0).tagName;
this.$resize_preview_holder = $('<' + nodeName + ' />', {
'class': 'preview-holder resize-preview-holder',
'data-row': this.$resized_widget.attr('data-row'),
'data-col': this.$resized_widget.attr('data-col'),
'css': {
'width': this.resize_initial_width,
'height': this.resize_initial_height
}
}).appendTo(this.$el);
this.$resized_widget.addClass('resizing');
if (this.options.resize.start) {
this.options.resize.start.call(this, event, ui, this.$resized_widget);
}
this.$el.trigger('gridster:resizestart');
};
/**
* This function is executed every time a widget stops being resized.
*
* @method on_stop_resize
* @param {Event} event The original browser event
* @param {Object} ui A prepared ui object with useful drag-related data
*/
fn.on_stop_resize = function(event, ui) {
this.$resized_widget
.removeClass('resizing')
.css({
'width': '',
'height': ''
});
delay($.proxy(function() {
this.$resize_preview_holder
.remove()
.css({
'min-width': '',
'min-height': ''
});
if (this.options.resize.stop) {
this.options.resize.stop.call(this, event, ui, this.$resized_widget);
}
this.$el.trigger('gridster:resizestop');
}, this), 300);
this.set_dom_grid_width();
if (this.options.autogrow_cols) {
this.drag_api.set_limits(this.cols * this.min_widget_width);
}
};
/**
* This function is executed when a widget is being resized.
*
* @method on_resize
* @param {Event} event The original browser event
* @param {Object} ui A prepared ui object with useful drag-related data
*/
fn.on_resize = function(event, ui) {
var rel_x = (ui.pointer.diff_left);
var rel_y = (ui.pointer.diff_top);
var wbd_x = this.options.widget_base_dimensions[0];
var wbd_y = this.options.widget_base_dimensions[1];
var margin_x = this.options.widget_margins[0];
var margin_y = this.options.widget_margins[1];
var max_size_x = this.resize_max_size_x;
var min_size_x = this.resize_min_size_x;
var max_size_y = this.resize_max_size_y;
var min_size_y = this.resize_min_size_y;
var autogrow = this.options.autogrow_cols;
var width;
var min_width;
var min_height;
var max_width = Infinity;
var max_height = Infinity;
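// convert the pixel drag delta into grid units; the 0.2 subtraction acts
// as a small dead zone so the size only snaps once the pointer is most of
// the way into the next cell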
var inc_units_x = Math.ceil((rel_x / (wbd_x + margin_x * 2)) - 0.2);
var inc_units_y = Math.ceil((rel_y / (wbd_y + margin_y * 2)) - 0.2);
var size_x = Math.max(1, this.resize_initial_sizex + inc_units_x);
var size_y = Math.max(1, this.resize_initial_sizey + inc_units_y);
var max_cols = (this.container_width / this.min_widget_width) -
this.resize_initial_col + 1;
var limit_width = ((max_cols * this.min_widget_width) - margin_x * 2);
size_x = Math.max(Math.min(size_x, max_size_x), min_size_x);
size_x = Math.min(max_cols, size_x);
width = (max_size_x * wbd_x) + ((size_x - 1) * margin_x * 2);
max_width = Math.min(width, limit_width);
min_width = (min_size_x * wbd_x) + ((size_x - 1) * margin_x * 2);
size_y = Math.max(Math.min(size_y, max_size_y), min_size_y);
max_height = (max_size_y * wbd_y) + ((size_y - 1) * margin_y * 2);
min_height = (min_size_y * wbd_y) + ((size_y - 1) * margin_y * 2);
if (this.resize_dir.right) {
size_y = this.resize_initial_sizey;
} else if (this.resize_dir.bottom) {
size_x = this.resize_initial_sizex;
}
if (autogrow) {
var last_widget_col = this.resize_initial_col + size_x - 1;
if (autogrow && this.resize_initial_last_col <= last_widget_col) {
this.set_dom_grid_width(Math.max(last_widget_col + 1, this.cols));
if (this.cols < last_widget_col) {
this.add_faux_cols(last_widget_col - this.cols);
}
}
}
var css_props = {};
!this.resize_dir.bottom && (css_props.width = Math.max(Math.min(
this.resize_initial_width + rel_x, max_width), min_width));
!this.resize_dir.right && (css_props.height = Math.max(Math.min(
this.resize_initial_height + rel_y, max_height), min_height));
this.$resized_widget.css(css_props);
if (size_x !== this.resize_last_sizex ||
size_y !== this.resize_last_sizey) {
this.resize_widget(this.$resized_widget, size_x, size_y);
this.set_dom_grid_width(this.cols);
this.$resize_preview_holder.css({
'width': '',
'height': ''
}).attr({
'data-row': this.$resized_widget.attr('data-row'),
'data-sizex': size_x,
'data-sizey': size_y
});
}
if (this.options.resize.resize) {
this.options.resize.resize.call(this, event, ui, this.$resized_widget);
}
this.$el.trigger('gridster:resize');
this.resize_last_sizex = size_x;
this.resize_last_sizey = size_y;
};
/**
* Executes the callbacks passed as arguments when a column begins to be
* overlapped or stops being overlapped.
*
* @param {Function} start_callback Function executed when a new column
* begins to be overlapped. The column is passed as first argument.
* @param {Function} stop_callback Function executed when a column stops
* being overlapped. The column is passed as first argument.
* @method on_overlapped_column_change
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.on_overlapped_column_change = function(start_callback, stop_callback) {
if (!this.colliders_data.length) {
return this;
}
var cols = this.get_targeted_columns(
this.colliders_data[0].el.data.col);
var last_n_cols = this.last_cols.length;
var n_cols = cols.length;
var i;
for (i = 0; i < n_cols; i++) {
if ($.inArray(cols[i], this.last_cols) === -1) {
(start_callback || $.noop).call(this, cols[i]);
}
}
for (i = 0; i < last_n_cols; i++) {
if ($.inArray(this.last_cols[i], cols) === -1) {
(stop_callback || $.noop).call(this, this.last_cols[i]);
}
}
this.last_cols = cols;
return this;
};
/**
* Executes the callbacks passed as arguments when a row starts to be
* overlapped or stops being overlapped.
*
* @param {Function} start_callback Function executed when a new row begins
* to be overlapped. The row is passed as first argument.
* @param {Function} end_callback Function executed when a row stops being
* overlapped. The row is passed as first argument.
* @method on_overlapped_row_change
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.on_overlapped_row_change = function(start_callback, end_callback) {
if (!this.colliders_data.length) {
return this;
}
var rows = this.get_targeted_rows(this.colliders_data[0].el.data.row);
var last_n_rows = this.last_rows.length;
var n_rows = rows.length;
var i;
for (i = 0; i < n_rows; i++) {
if ($.inArray(rows[i], this.last_rows) === -1) {
(start_callback || $.noop).call(this, rows[i]);
}
}
for (i = 0; i < last_n_rows; i++) {
if ($.inArray(this.last_rows[i], rows) === -1) {
(end_callback || $.noop).call(this, this.last_rows[i]);
}
}
this.last_rows = rows;
};
/**
* Sets the current position of the player
*
* @param {Number} col
* @param {Number} row
* @param {Boolean} no_player
* @method set_player
* @return {object}
*/
fn.set_player = function(col, row, no_player) {
var self = this;
if (!no_player) {
this.empty_cells_player_occupies();
}
var cell = !no_player ? self.colliders_data[0].el.data : {col: col};
var to_col = cell.col;
var to_row = row || cell.row;
this.player_grid_data = {
col: to_col,
row: to_row,
size_y : this.player_grid_data.size_y,
size_x : this.player_grid_data.size_x
};
this.cells_occupied_by_player = this.get_cells_occupied(
this.player_grid_data);
var $overlapped_widgets = this.get_widgets_overlapped(
this.player_grid_data);
var constraints = this.widgets_constraints($overlapped_widgets);
this.manage_movements(constraints.can_go_up, to_col, to_row);
this.manage_movements(constraints.can_not_go_up, to_col, to_row);
/* if there are no widgets overlapping the new player position,
* update the new placeholder position. */
if (!$overlapped_widgets.length) {
var pp = this.can_go_player_up(this.player_grid_data);
if (pp !== false) {
to_row = pp;
}
this.set_placeholder(to_col, to_row);
}
return {
col: to_col,
row: to_row
};
};
/**
* See which of the widgets in the $widgets param collection can go to
* an upper row and which cannot.
*
* @method widgets_constraints
* @param {jQuery} $widgets A jQuery wrapped collection of
* HTMLElements.
* @return {Object} Returns a literal Object with two keys: `can_go_up` &
* `can_not_go_up`. Each contains a sorted array of grid coords objects.
*/
fn.widgets_constraints = function($widgets) {
var $widgets_can_go_up = $([]);
var $widgets_can_not_go_up;
var wgd_can_go_up = [];
var wgd_can_not_go_up = [];
$widgets.each($.proxy(function(i, w) {
var $w = $(w);
var wgd = $w.coords().grid;
if (this.can_go_widget_up(wgd)) {
$widgets_can_go_up = $widgets_can_go_up.add($w);
wgd_can_go_up.push(wgd);
} else {
wgd_can_not_go_up.push(wgd);
}
}, this));
$widgets_can_not_go_up = $widgets.not($widgets_can_go_up);
return {
can_go_up: Gridster.sort_by_row_asc(wgd_can_go_up),
can_not_go_up: Gridster.sort_by_row_desc(wgd_can_not_go_up)
};
};
/**
* Moves the widgets in the $widgets collection out of the player's way,
* moving them up when possible and down otherwise.
*
* @method manage_movements
* @param {jQuery} $widgets A jQuery collection of HTMLElements
* representing the widgets you want to move.
* @param {Number} to_col The column to which we want to move the widgets.
* @param {Number} to_row The row to which we want to move the widgets.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.manage_movements = function($widgets, to_col, to_row) {
$.each($widgets, $.proxy(function(i, w) {
var wgd = w;
var $w = wgd.el;
var can_go_widget_up = this.can_go_widget_up(wgd);
if (can_go_widget_up) {
//target CAN go up
//so move widget up
this.move_widget_to($w, can_go_widget_up);
this.set_placeholder(to_col, can_go_widget_up + wgd.size_y);
} else {
//target can't go up
var can_go_player_up = this.can_go_player_up(
this.player_grid_data);
if (!can_go_player_up) {
// target can't go up
// player can't go up
// so we need to move the widget down to a position that doesn't
// overlap the player
var y = (to_row + this.player_grid_data.size_y) - wgd.row;
this.move_widget_down($w, y);
this.set_placeholder(to_col, to_row);
}
}
}, this));
return this;
};
/**
* Determines if there is a widget in the given row and col, or if the
* HTMLElement passed as first argument is the player.
*
* @method is_player
* @param {Number|HTMLElement} col_or_el The column to check, or a jQuery
* wrapped HTMLElement.
* @param {Number} [row] The row to check.
* @return {Boolean} Returns true or false.
*/
fn.is_player = function(col_or_el, row) {
if (row && !this.gridmap[col_or_el]) { return false; }
var $w = row ? this.gridmap[col_or_el][row] : col_or_el;
return $w && ($w.is(this.$player) || $w.is(this.$helper));
};
/**
* Determines if the widget that is being dragged is currently over the row
* and col given.
*
* @method is_player_in
* @param {Number} col The column to check.
* @param {Number} row The row to check.
* @return {Boolean} Returns true or false.
*/
fn.is_player_in = function(col, row) {
var c = this.cells_occupied_by_player || {};
return $.inArray(col, c.cols) >= 0 && $.inArray(row, c.rows) >= 0;
};
/**
* Determines if the placeholder is currently over the row and col given.
*
* @method is_placeholder_in
* @param {Number} col The column to check.
* @param {Number} row The row to check.
* @return {Boolean} Returns true or false.
*/
fn.is_placeholder_in = function(col, row) {
var c = this.cells_occupied_by_placeholder || {};
return this.is_placeholder_in_col(col) && $.inArray(row, c.rows) >= 0;
};
/**
* Determines if the placeholder is currently over the column given.
*
* @method is_placeholder_in_col
* @param {Number} col The column to check.
* @return {Boolean} Returns true or false.
*/
fn.is_placeholder_in_col = function(col) {
var c = this.cells_occupied_by_placeholder || [];
return $.inArray(col, c.cols) >= 0;
};
/**
* Determines if the cell represented by col and row params is empty.
*
* @method is_empty
* @param {Number} col The column to check.
* @param {Number} row The row to check.
* @return {Boolean} Returns true or false.
*/
fn.is_empty = function(col, row) {
if (typeof this.gridmap[col] !== 'undefined') {
if (typeof this.gridmap[col][row] !== 'undefined' &&
this.gridmap[col][row] === false
) {
return true;
}
return false;
}
return true;
};
/**
* Determines if the cell represented by col and row params is occupied.
*
* @method is_occupied
* @param {Number} col The column to check.
* @param {Number} row The row to check.
* @return {Boolean} Returns true or false.
*/
fn.is_occupied = function(col, row) {
if (!this.gridmap[col]) {
return false;
}
if (this.gridmap[col][row]) {
return true;
}
return false;
};
/**
* Determines if there is a widget in the cell represented by col/row params.
*
* @method is_widget
* @param {Number} col The column to check.
* @param {Number} row The row to check.
* @return {Boolean|HTMLElement} Returns false if there is no widget,
* else returns the jQuery HTMLElement
*/
fn.is_widget = function(col, row) {
var cell = this.gridmap[col];
if (!cell) {
return false;
}
cell = cell[row];
if (cell) {
return cell;
}
return false;
};
/**
* Determines if there is a widget in the cell represented by col/row
* params and if this is under the widget that is being dragged.
*
* @method is_widget_under_player
* @param {Number} col The column to check.
* @param {Number} row The row to check.
* @return {Boolean} Returns true or false.
*/
fn.is_widget_under_player = function(col, row) {
if (this.is_widget(col, row)) {
return this.is_player_in(col, row);
}
return false;
};
/**
* Get widgets overlapping with the player or with the object passed
* representing the grid cells.
*
* @method get_widgets_under_player
* @return {HTMLElement} Returns a jQuery collection of HTMLElements
*/
fn.get_widgets_under_player = function(cells) {
cells || (cells = this.cells_occupied_by_player || {cols: [], rows: []});
var $widgets = $([]);
$.each(cells.cols, $.proxy(function(i, col) {
$.each(cells.rows, $.proxy(function(i, row) {
if (this.is_widget(col, row)) {
$widgets = $widgets.add(this.gridmap[col][row]);
}
}, this));
}, this));
return $widgets;
};
/**
* Put placeholder at the row and column specified.
*
* @method set_placeholder
* @param {Number} col The column to which we want to move the
* placeholder.
* @param {Number} row The row to which we want to move the
* placeholder.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.set_placeholder = function(col, row) {
var phgd = $.extend({}, this.placeholder_grid_data);
var $nexts = this.widgets_below({
col: phgd.col,
row: phgd.row,
size_y: phgd.size_y,
size_x: phgd.size_x
});
        // Prevent widgets from going out of the grid
var right_col = (col + phgd.size_x - 1);
if (right_col > this.cols) {
col = col - (right_col - col);
}
var moved_down = this.placeholder_grid_data.row < row;
var changed_column = this.placeholder_grid_data.col !== col;
this.placeholder_grid_data.col = col;
this.placeholder_grid_data.row = row;
this.cells_occupied_by_placeholder = this.get_cells_occupied(
this.placeholder_grid_data);
this.$preview_holder.attr({
'data-row' : row,
'data-col' : col
});
if (moved_down || changed_column) {
$nexts.each($.proxy(function(i, widget) {
this.move_widget_up(
$(widget), this.placeholder_grid_data.col - col + phgd.size_y);
}, this));
}
var $widgets_under_ph = this.get_widgets_under_player(
this.cells_occupied_by_placeholder);
if ($widgets_under_ph.length) {
$widgets_under_ph.each($.proxy(function(i, widget) {
var $w = $(widget);
this.move_widget_down(
$w, row + phgd.size_y - $w.data('coords').grid.row);
}, this));
}
};
/**
* Determines whether the player can move to a position above.
*
* @method can_go_player_up
* @param {Object} widget_grid_data The actual grid coords object of the
* player.
* @return {Number|Boolean} If the player can be moved to an upper row
* returns the row number, else returns false.
*/
fn.can_go_player_up = function(widget_grid_data) {
var p_bottom_row = widget_grid_data.row + widget_grid_data.size_y - 1;
var result = true;
var upper_rows = [];
var min_row = 10000;
var $widgets_under_player = this.get_widgets_under_player();
        /* build an array indexed by column whose values are arrays of the
         * empty rows above the player */
this.for_each_column_occupied(widget_grid_data, function(tcol) {
var grid_col = this.gridmap[tcol];
var r = p_bottom_row + 1;
upper_rows[tcol] = [];
while (--r > 0) {
if (this.is_empty(tcol, r) || this.is_player(tcol, r) ||
this.is_widget(tcol, r) &&
grid_col[r].is($widgets_under_player)
) {
upper_rows[tcol].push(r);
min_row = r < min_row ? r : min_row;
} else {
break;
}
}
if (upper_rows[tcol].length === 0) {
result = false;
return true; //break
}
upper_rows[tcol].sort(function(a, b) {
return a - b;
});
});
if (!result) { return false; }
return this.get_valid_rows(widget_grid_data, upper_rows, min_row);
};
/**
* Determines whether a widget can move to a position above.
*
* @method can_go_widget_up
* @param {Object} widget_grid_data The actual grid coords object of the
* widget we want to check.
* @return {Number|Boolean} If the widget can be moved to an upper row
* returns the row number, else returns false.
*/
fn.can_go_widget_up = function(widget_grid_data) {
var p_bottom_row = widget_grid_data.row + widget_grid_data.size_y - 1;
var result = true;
var upper_rows = [];
var min_row = 10000;
        /* build an array indexed by column whose values are arrays of the
         * topmost empty rows above the widget */
this.for_each_column_occupied(widget_grid_data, function(tcol) {
var grid_col = this.gridmap[tcol];
upper_rows[tcol] = [];
var r = p_bottom_row + 1;
// iterate over each row
while (--r > 0) {
if (this.is_widget(tcol, r) && !this.is_player_in(tcol, r)) {
if (!grid_col[r].is(widget_grid_data.el)) {
break;
}
}
if (!this.is_player(tcol, r) &&
!this.is_placeholder_in(tcol, r) &&
!this.is_player_in(tcol, r)) {
upper_rows[tcol].push(r);
}
if (r < min_row) {
min_row = r;
}
}
if (upper_rows[tcol].length === 0) {
result = false;
return true; //break
}
upper_rows[tcol].sort(function(a, b) {
return a - b;
});
});
if (!result) { return false; }
return this.get_valid_rows(widget_grid_data, upper_rows, min_row);
};
/**
    * Search for a valid row for the widget represented by `widget_grid_data`
    * in the `upper_rows` array. Iteration starts from the row specified in `min_row`.
*
* @method get_valid_rows
* @param {Object} widget_grid_data The actual grid coords object of the
* player.
* @param {Array} upper_rows An array with columns as index and arrays
* of valid rows as values.
* @param {Number} min_row The upper row from which the iteration will start.
    * @return {Number|Boolean} Returns the highest valid row from `upper_rows`
    *  for the widget in question, or false if none is found.
*/
fn.get_valid_rows = function(widget_grid_data, upper_rows, min_row) {
var p_top_row = widget_grid_data.row;
var p_bottom_row = widget_grid_data.row + widget_grid_data.size_y - 1;
var size_y = widget_grid_data.size_y;
var r = min_row - 1;
var valid_rows = [];
while (++r <= p_bottom_row ) {
var common = true;
$.each(upper_rows, function(col, rows) {
if ($.isArray(rows) && $.inArray(r, rows) === -1) {
common = false;
}
});
if (common === true) {
valid_rows.push(r);
if (valid_rows.length === size_y) {
break;
}
}
}
var new_row = false;
if (size_y === 1) {
if (valid_rows[0] !== p_top_row) {
new_row = valid_rows[0] || false;
}
} else {
if (valid_rows[0] !== p_top_row) {
new_row = this.get_consecutive_numbers_index(
valid_rows, size_y);
}
}
return new_row;
};
    /**
    * Get the first value in `arr` that starts a run of `size_y` consecutive
    * numbers.
    *
    * @method get_consecutive_numbers_index
    * @param {Array} arr A sorted array of row numbers.
    * @param {Number} size_y The required length of the consecutive run.
    * @return {Number|Boolean} Returns the first number of the run, else false.
    */
    fn.get_consecutive_numbers_index = function(arr, size_y) {
        var max = arr.length;
        var result = [];
        var first = true;
        var prev = -1;
        for (var i = 0; i < max; i++) {
            if (first || arr[i] === prev + 1) {
                result.push(i);
                if (result.length === size_y) {
                    break;
                }
                first = false;
            } else {
                // restart the run at the current index so that arr[i] can
                // begin a new consecutive sequence
                result = [i];
                first = false;
            }
            prev = arr[i];
        }
        return result.length >= size_y ? arr[result[0]] : false;
    };
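    /* Example: with the row list [1, 5, 6, 7] and size_y = 3, the run 5,6,7
     * is the first block of three consecutive values, so the function
     * returns 5; with size_y = 4 it would return false, as no such run
     * exists. */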
/**
* Get widgets overlapping with the player.
*
* @method get_widgets_overlapped
* @return {jQuery} Returns a jQuery collection of HTMLElements.
*/
fn.get_widgets_overlapped = function() {
var $w;
var $widgets = $([]);
var used = [];
var rows_from_bottom = this.cells_occupied_by_player.rows.slice(0);
rows_from_bottom.reverse();
$.each(this.cells_occupied_by_player.cols, $.proxy(function(i, col) {
$.each(rows_from_bottom, $.proxy(function(i, row) {
// if there is a widget in the player position
if (!this.gridmap[col]) { return true; } //next iteration
var $w = this.gridmap[col][row];
if (this.is_occupied(col, row) && !this.is_player($w) &&
$.inArray($w, used) === -1
) {
$widgets = $widgets.add($w);
used.push($w);
}
}, this));
}, this));
return $widgets;
};
/**
* This callback is executed when the player begins to collide with a column.
*
* @method on_start_overlapping_column
* @param {Number} col The collided column.
*/
fn.on_start_overlapping_column = function(col) {
this.set_player(col, false);
};
/**
* A callback executed when the player begins to collide with a row.
*
* @method on_start_overlapping_row
* @param {Number} row The collided row.
*/
fn.on_start_overlapping_row = function(row) {
this.set_player(false, row);
};
/**
    * A callback executed when the player stops colliding with a column.
*
* @method on_stop_overlapping_column
    * @param {Number} col The collided column.
*/
fn.on_stop_overlapping_column = function(col) {
this.set_player(col, false);
var self = this;
this.for_each_widget_below(col, this.cells_occupied_by_player.rows[0],
function(tcol, trow) {
self.move_widget_up(this, self.player_grid_data.size_y);
});
};
/**
    * This callback is executed when the player stops colliding with a row.
*
* @method on_stop_overlapping_row
* @param {Number} row The collided row.
*/
fn.on_stop_overlapping_row = function(row) {
this.set_player(false, row);
var self = this;
var cols = this.cells_occupied_by_player.cols;
for (var c = 0, cl = cols.length; c < cl; c++) {
this.for_each_widget_below(cols[c], row, function(tcol, trow) {
self.move_widget_up(this, self.player_grid_data.size_y);
});
}
};
/**
    * Move a widget to a specific row. The target cell or cells must be empty.
    * If the widget has widgets below, all of these widgets will also be moved
    * if they can be.
    *
    * @method move_widget_to
    * @param {HTMLElement} $widget The jQuery wrapped HTMLElement of the
    *  widget that is going to be moved.
    * @param {Number} row The row to move the widget to.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.move_widget_to = function($widget, row) {
var self = this;
var widget_grid_data = $widget.coords().grid;
var diff = row - widget_grid_data.row;
var $next_widgets = this.widgets_below($widget);
var can_move_to_new_cell = this.can_move_to(
widget_grid_data, widget_grid_data.col, row, $widget);
if (can_move_to_new_cell === false) {
return false;
}
this.remove_from_gridmap(widget_grid_data);
widget_grid_data.row = row;
this.add_to_gridmap(widget_grid_data);
$widget.attr('data-row', row);
this.$changed = this.$changed.add($widget);
$next_widgets.each(function(i, widget) {
var $w = $(widget);
var wgd = $w.coords().grid;
var can_go_up = self.can_go_widget_up(wgd);
if (can_go_up && can_go_up !== wgd.row) {
self.move_widget_to($w, can_go_up);
}
});
return this;
};
/**
* Move up the specified widget and all below it.
*
* @method move_widget_up
* @param {HTMLElement} $widget The widget you want to move.
* @param {Number} [y_units] The number of cells that the widget has to move.
    * @return {Boolean|undefined} Returns false if the widget cannot be moved up.
*/
fn.move_widget_up = function($widget, y_units) {
var el_grid_data = $widget.coords().grid;
var actual_row = el_grid_data.row;
var moved = [];
var can_go_up = true;
y_units || (y_units = 1);
if (!this.can_go_up($widget)) { return false; } //break;
this.for_each_column_occupied(el_grid_data, function(col) {
// can_go_up
if ($.inArray($widget, moved) === -1) {
var widget_grid_data = $widget.coords().grid;
var next_row = actual_row - y_units;
next_row = this.can_go_up_to_row(
widget_grid_data, col, next_row);
if (!next_row) {
return true;
}
var $next_widgets = this.widgets_below($widget);
this.remove_from_gridmap(widget_grid_data);
widget_grid_data.row = next_row;
this.add_to_gridmap(widget_grid_data);
$widget.attr('data-row', widget_grid_data.row);
this.$changed = this.$changed.add($widget);
moved.push($widget);
$next_widgets.each($.proxy(function(i, widget) {
this.move_widget_up($(widget), y_units);
}, this));
}
});
};
/**
* Move down the specified widget and all below it.
*
* @method move_widget_down
* @param {jQuery} $widget The jQuery object representing the widget
* you want to move.
* @param {Number} y_units The number of cells that the widget has to move.
    * @return {Boolean|undefined} Returns false if the widget cannot be moved down.
*/
fn.move_widget_down = function($widget, y_units) {
        var el_grid_data, actual_row, moved, y_diff;
        if (!$widget || y_units <= 0) { return false; }
        el_grid_data = $widget.coords().grid;
        actual_row = el_grid_data.row;
        moved = [];
        y_diff = y_units;
if ($.inArray($widget, moved) === -1) {
var widget_grid_data = $widget.coords().grid;
var next_row = actual_row + y_units;
var $next_widgets = this.widgets_below($widget);
this.remove_from_gridmap(widget_grid_data);
$next_widgets.each($.proxy(function(i, widget) {
var $w = $(widget);
var wd = $w.coords().grid;
var tmp_y = this.displacement_diff(
wd, widget_grid_data, y_diff);
if (tmp_y > 0) {
this.move_widget_down($w, tmp_y);
}
}, this));
widget_grid_data.row = next_row;
this.update_widget_position(widget_grid_data, $widget);
$widget.attr('data-row', widget_grid_data.row);
this.$changed = this.$changed.add($widget);
moved.push($widget);
}
};
/**
    * Check if the widget can move up to the specified row; if not, return
    * the closest row above it can move to.
    *
    * @method can_go_up_to_row
    * @param {Object} widget_grid_data The current grid coords object of the
* widget.
* @param {Number} col The target column.
* @param {Number} row The target row.
* @return {Boolean|Number} Returns the row number if the widget can move
* to the target position, else returns false.
*/
fn.can_go_up_to_row = function(widget_grid_data, col, row) {
var ga = this.gridmap;
var result = true;
var urc = []; // upper_rows_in_columns
var actual_row = widget_grid_data.row;
var r;
/* generate an array with columns as index and array with
* upper rows empty in the column */
this.for_each_column_occupied(widget_grid_data, function(tcol) {
var grid_col = ga[tcol];
urc[tcol] = [];
r = actual_row;
while (r--) {
if (this.is_empty(tcol, r) &&
!this.is_placeholder_in(tcol, r)
) {
urc[tcol].push(r);
} else {
break;
}
}
if (!urc[tcol].length) {
result = false;
return true;
}
});
if (!result) { return false; }
/* get common rows starting from upper position in all the columns
* that widget occupies */
        for (r = 1; r < actual_row; r++) {
var common = true;
for (var uc = 0, ucl = urc.length; uc < ucl; uc++) {
if (urc[uc] && $.inArray(r, urc[uc]) === -1) {
common = false;
}
}
if (common === true) {
result = r;
break;
}
}
return result;
};
fn.displacement_diff = function(widget_grid_data, parent_bgd, y_units) {
var actual_row = widget_grid_data.row;
var diffs = [];
var parent_max_y = parent_bgd.row + parent_bgd.size_y;
this.for_each_column_occupied(widget_grid_data, function(col) {
var temp_y_units = 0;
for (var r = parent_max_y; r < actual_row; r++) {
if (this.is_empty(col, r)) {
temp_y_units = temp_y_units + 1;
}
}
diffs.push(temp_y_units);
});
var max_diff = Math.max.apply(Math, diffs);
y_units = (y_units - max_diff);
return y_units > 0 ? y_units : 0;
};
/**
* Get widgets below a widget.
*
* @method widgets_below
* @param {HTMLElement} $el The jQuery wrapped HTMLElement.
* @return {jQuery} A jQuery collection of HTMLElements.
*/
fn.widgets_below = function($el) {
var el_grid_data = $.isPlainObject($el) ? $el : $el.coords().grid;
var self = this;
var ga = this.gridmap;
var next_row = el_grid_data.row + el_grid_data.size_y - 1;
var $nexts = $([]);
this.for_each_column_occupied(el_grid_data, function(col) {
self.for_each_widget_below(col, next_row, function(tcol, trow) {
if (!self.is_player(this) && $.inArray(this, $nexts) === -1) {
$nexts = $nexts.add(this);
return true; // break
}
});
});
return Gridster.sort_by_row_asc($nexts);
};
/**
* Update the array of mapped positions with the new player position.
*
* @method set_cells_player_occupies
    * @param {Number} col The new player col.
    * @param {Number} row The new player row.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.set_cells_player_occupies = function(col, row) {
this.remove_from_gridmap(this.placeholder_grid_data);
this.placeholder_grid_data.col = col;
this.placeholder_grid_data.row = row;
this.add_to_gridmap(this.placeholder_grid_data, this.$player);
return this;
};
/**
* Remove from the array of mapped positions the reference to the player.
*
* @method empty_cells_player_occupies
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.empty_cells_player_occupies = function() {
this.remove_from_gridmap(this.placeholder_grid_data);
return this;
};
    fn.can_go_up = function($el) {
        var el_grid_data = $el.coords().grid;
        var initial_row = el_grid_data.row;
        var prev_row = initial_row - 1;
        var result = true;
        if (initial_row === 1) { return false; }
        this.for_each_column_occupied(el_grid_data, function(col) {
            if (this.is_occupied(col, prev_row) ||
                this.is_player(col, prev_row) ||
                this.is_placeholder_in(col, prev_row) ||
                this.is_player_in(col, prev_row)
            ) {
                result = false;
                return true; //break
            }
        });
        return result;
    };
/**
* Check if it's possible to move a widget to a specific col/row. It takes
* into account the dimensions (`size_y` and `size_x` attrs. of the grid
* coords object) the widget occupies.
*
* @method can_move_to
* @param {Object} widget_grid_data The grid coords object that represents
* the widget.
    * @param {Number} col The col to check.
    * @param {Number} row The row to check.
* @param {Number} [max_row] The max row allowed.
* @return {Boolean} Returns true if all cells are empty, else return false.
*/
fn.can_move_to = function(widget_grid_data, col, row, max_row) {
var ga = this.gridmap;
var $w = widget_grid_data.el;
var future_wd = {
size_y: widget_grid_data.size_y,
size_x: widget_grid_data.size_x,
col: col,
row: row
};
var result = true;
//Prevents widgets go out of the grid
var right_col = col + widget_grid_data.size_x - 1;
if (right_col > this.cols) {
return false;
}
if (max_row && max_row < row + widget_grid_data.size_y - 1) {
return false;
}
this.for_each_cell_occupied(future_wd, function(tcol, trow) {
var $tw = this.is_widget(tcol, trow);
if ($tw && (!widget_grid_data.el || $tw.is($w))) {
result = false;
}
});
return result;
};
/**
* Given the leftmost column returns all columns that are overlapping
* with the player.
*
* @method get_targeted_columns
* @param {Number} [from_col] The leftmost column.
* @return {Array} Returns an array with column numbers.
*/
fn.get_targeted_columns = function(from_col) {
var max = (from_col || this.player_grid_data.col) +
(this.player_grid_data.size_x - 1);
var cols = [];
for (var col = from_col; col <= max; col++) {
cols.push(col);
}
return cols;
};
/**
* Given the upper row returns all rows that are overlapping with the player.
*
* @method get_targeted_rows
* @param {Number} [from_row] The upper row.
* @return {Array} Returns an array with row numbers.
*/
fn.get_targeted_rows = function(from_row) {
var max = (from_row || this.player_grid_data.row) +
(this.player_grid_data.size_y - 1);
var rows = [];
for (var row = from_row; row <= max; row++) {
rows.push(row);
}
return rows;
};
/**
* Get all columns and rows that a widget occupies.
*
* @method get_cells_occupied
* @param {Object} el_grid_data The grid coords object of the widget.
* @return {Object} Returns an object like `{ cols: [], rows: []}`.
*/
fn.get_cells_occupied = function(el_grid_data) {
var cells = { cols: [], rows: []};
var i;
if (arguments[1] instanceof $) {
el_grid_data = arguments[1].coords().grid;
}
for (i = 0; i < el_grid_data.size_x; i++) {
var col = el_grid_data.col + i;
cells.cols.push(col);
}
for (i = 0; i < el_grid_data.size_y; i++) {
var row = el_grid_data.row + i;
cells.rows.push(row);
}
return cells;
};
/**
* Iterate over the cells occupied by a widget executing a function for
* each one.
*
* @method for_each_cell_occupied
* @param {Object} el_grid_data The grid coords object that represents the
* widget.
    * @param {Function} callback The function to execute for each occupied
    *  cell. Column and row are passed as arguments.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.for_each_cell_occupied = function(grid_data, callback) {
this.for_each_column_occupied(grid_data, function(col) {
this.for_each_row_occupied(grid_data, function(row) {
callback.call(this, col, row);
});
});
return this;
};
/**
* Iterate over the columns occupied by a widget executing a function for
* each one.
*
* @method for_each_column_occupied
* @param {Object} el_grid_data The grid coords object that represents
* the widget.
* @param {Function} callback The function to execute on each column
* iteration. The column number is passed as first argument.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.for_each_column_occupied = function(el_grid_data, callback) {
for (var i = 0; i < el_grid_data.size_x; i++) {
var col = el_grid_data.col + i;
callback.call(this, col, el_grid_data);
}
};
/**
* Iterate over the rows occupied by a widget executing a function for
* each one.
*
* @method for_each_row_occupied
* @param {Object} el_grid_data The grid coords object that represents
* the widget.
    * @param {Function} callback The function to execute on each row
    *  iteration. The row number is passed as first argument.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.for_each_row_occupied = function(el_grid_data, callback) {
for (var i = 0; i < el_grid_data.size_y; i++) {
var row = el_grid_data.row + i;
callback.call(this, row, el_grid_data);
}
};
fn._traversing_widgets = function(type, direction, col, row, callback) {
var ga = this.gridmap;
if (!ga[col]) { return; }
var cr, max;
var action = type + '/' + direction;
if (arguments[2] instanceof $) {
var el_grid_data = arguments[2].coords().grid;
col = el_grid_data.col;
row = el_grid_data.row;
callback = arguments[3];
}
var matched = [];
var trow = row;
var methods = {
'for_each/above': function() {
while (trow--) {
if (trow > 0 && this.is_widget(col, trow) &&
$.inArray(ga[col][trow], matched) === -1
) {
cr = callback.call(ga[col][trow], col, trow);
matched.push(ga[col][trow]);
if (cr) { break; }
}
}
},
'for_each/below': function() {
for (trow = row + 1, max = ga[col].length; trow < max; trow++) {
if (this.is_widget(col, trow) &&
$.inArray(ga[col][trow], matched) === -1
) {
cr = callback.call(ga[col][trow], col, trow);
matched.push(ga[col][trow]);
if (cr) { break; }
}
}
}
};
if (methods[action]) {
methods[action].call(this);
}
};
/**
* Iterate over each widget above the column and row specified.
*
* @method for_each_widget_above
* @param {Number} col The column to start iterating.
* @param {Number} row The row to start iterating.
* @param {Function} callback The function to execute on each widget
* iteration. The value of `this` inside the function is the jQuery
* wrapped HTMLElement.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.for_each_widget_above = function(col, row, callback) {
this._traversing_widgets('for_each', 'above', col, row, callback);
return this;
};
/**
* Iterate over each widget below the column and row specified.
*
* @method for_each_widget_below
* @param {Number} col The column to start iterating.
* @param {Number} row The row to start iterating.
* @param {Function} callback The function to execute on each widget
* iteration. The value of `this` inside the function is the jQuery wrapped
* HTMLElement.
* @return {Class} Returns the instance of the Gridster Class.
*/
fn.for_each_widget_below = function(col, row, callback) {
this._traversing_widgets('for_each', 'below', col, row, callback);
return this;
};
/**
    * Returns the highest (largest row and column numbers) occupied cell in
    * the grid.
*
* @method get_highest_occupied_cell
* @return {Object} Returns an object with `col` and `row` numbers.
*/
fn.get_highest_occupied_cell = function() {
var r;
var gm = this.gridmap;
var rl = gm[1].length;
        var rows = [], cols = [];
for (var c = gm.length - 1; c >= 1; c--) {
for (r = rl - 1; r >= 1; r--) {
if (this.is_widget(c, r)) {
rows.push(r);
cols.push(c);
break;
}
}
}
return {
col: Math.max.apply(Math, cols),
row: Math.max.apply(Math, rows)
};
};
fn.get_widgets_from = function(col, row) {
var ga = this.gridmap;
var $widgets = $();
if (col) {
$widgets = $widgets.add(
this.$widgets.filter(function() {
                    var tcol = parseInt($(this).attr('data-col'), 10);
                    return (tcol >= col);
})
);
}
if (row) {
$widgets = $widgets.add(
this.$widgets.filter(function() {
                    var trow = parseInt($(this).attr('data-row'), 10);
                    return (trow >= row);
})
);
}
return $widgets;
};
/**
* Set the current height of the parent grid.
*
* @method set_dom_grid_height
* @return {Object} Returns the instance of the Gridster class.
*/
fn.set_dom_grid_height = function(height) {
if (typeof height === 'undefined') {
var r = this.get_highest_occupied_cell().row;
height = r * this.min_widget_height;
}
this.container_height = height;
this.$el.css('height', this.container_height);
return this;
};
/**
* Set the current width of the parent grid.
*
* @method set_dom_grid_width
* @return {Object} Returns the instance of the Gridster class.
*/
fn.set_dom_grid_width = function(cols) {
if (typeof cols === 'undefined') {
cols = this.get_highest_occupied_cell().col;
}
var max_cols = (this.options.autogrow_cols ? this.options.max_cols :
this.cols);
cols = Math.min(max_cols, Math.max(cols, this.options.min_cols));
this.container_width = cols * this.min_widget_width;
this.$el.css('width', this.container_width);
return this;
};
/**
    * Generates the necessary styles to position the widgets.
    *
    * @method generate_stylesheet
    * @param {Object} [opts] Options overriding the instance `cols`, `rows`,
    *  `namespace`, `widget_base_dimensions` and `widget_margins`.
* @return {Object} Returns the instance of the Gridster class.
*/
fn.generate_stylesheet = function(opts) {
var styles = '';
var max_size_x = this.options.max_size_x || this.cols;
var max_rows = 0;
var max_cols = 0;
var i;
var rules;
opts || (opts = {});
opts.cols || (opts.cols = this.cols);
opts.rows || (opts.rows = this.rows);
opts.namespace || (opts.namespace = this.options.namespace);
opts.widget_base_dimensions ||
(opts.widget_base_dimensions = this.options.widget_base_dimensions);
opts.widget_margins ||
(opts.widget_margins = this.options.widget_margins);
opts.min_widget_width = (opts.widget_margins[0] * 2) +
opts.widget_base_dimensions[0];
opts.min_widget_height = (opts.widget_margins[1] * 2) +
opts.widget_base_dimensions[1];
// don't duplicate stylesheets for the same configuration
var serialized_opts = $.param(opts);
if ($.inArray(serialized_opts, Gridster.generated_stylesheets) >= 0) {
return false;
}
this.generated_stylesheets.push(serialized_opts);
Gridster.generated_stylesheets.push(serialized_opts);
/* generate CSS styles for cols */
for (i = opts.cols; i >= 0; i--) {
styles += (opts.namespace + ' [data-col="'+ (i + 1) + '"] { left:' +
((i * opts.widget_base_dimensions[0]) +
(i * opts.widget_margins[0]) +
((i + 1) * opts.widget_margins[0])) + 'px; }\n');
}
/* generate CSS styles for rows */
for (i = opts.rows; i >= 0; i--) {
styles += (opts.namespace + ' [data-row="' + (i + 1) + '"] { top:' +
((i * opts.widget_base_dimensions[1]) +
(i * opts.widget_margins[1]) +
((i + 1) * opts.widget_margins[1]) ) + 'px; }\n');
}
for (var y = 1; y <= opts.rows; y++) {
styles += (opts.namespace + ' [data-sizey="' + y + '"] { height:' +
(y * opts.widget_base_dimensions[1] +
(y - 1) * (opts.widget_margins[1] * 2)) + 'px; }\n');
}
for (var x = 1; x <= max_size_x; x++) {
styles += (opts.namespace + ' [data-sizex="' + x + '"] { width:' +
(x * opts.widget_base_dimensions[0] +
(x - 1) * (opts.widget_margins[0] * 2)) + 'px; }\n');
}
this.remove_style_tags();
return this.add_style_tag(styles);
};
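    /* Example of the rules emitted above, assuming widget_base_dimensions of
     * [100, 55], widget_margins of [5, 5] and a '.gridster' namespace:
     *   .gridster [data-col="1"] { left: 5px; }
     *   .gridster [data-row="1"] { top: 5px; }
     *   .gridster [data-sizex="1"] { width: 100px; }
     *   .gridster [data-sizey="1"] { height: 55px; } */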
/**
    * Injects the given CSS string into the head of the document.
*
* @method add_style_tag
* @param {String} css The styles to apply.
* @return {Object} Returns the instance of the Gridster class.
*/
fn.add_style_tag = function(css) {
var d = document;
var tag = d.createElement('style');
d.getElementsByTagName('head')[0].appendChild(tag);
tag.setAttribute('type', 'text/css');
if (tag.styleSheet) {
tag.styleSheet.cssText = css;
} else {
tag.appendChild(document.createTextNode(css));
}
this.$style_tags = this.$style_tags.add(tag);
return this;
};
/**
    * Remove the style tags generated by this instance from the head of
    * the document.
    *
    * @method remove_style_tags
* @return {Object} Returns the instance of the Gridster class.
*/
fn.remove_style_tags = function() {
var all_styles = Gridster.generated_stylesheets;
var ins_styles = this.generated_stylesheets;
this.$style_tags.remove();
Gridster.generated_stylesheets = $.map(all_styles, function(s) {
if ($.inArray(s, ins_styles) === -1) { return s; }
});
};
/**
    * Generates a faux grid to collide with when a widget is dragged, in
    * order to detect the row and column the widget should go to.
    *
    * @method generate_faux_grid
    * @param {Number} rows Number of rows.
    * @param {Number} cols Number of columns.
* @return {Object} Returns the instance of the Gridster class.
*/
fn.generate_faux_grid = function(rows, cols) {
this.faux_grid = [];
this.gridmap = [];
var col;
var row;
for (col = cols; col > 0; col--) {
this.gridmap[col] = [];
for (row = rows; row > 0; row--) {
this.add_faux_cell(row, col);
}
}
return this;
};
/**
* Add cell to the faux grid.
*
* @method add_faux_cell
* @param {Number} row The row for the new faux cell.
* @param {Number} col The col for the new faux cell.
* @return {Object} Returns the instance of the Gridster class.
*/
fn.add_faux_cell = function(row, col) {
var coords = $({
left: this.baseX + ((col - 1) * this.min_widget_width),
top: this.baseY + (row -1) * this.min_widget_height,
width: this.min_widget_width,
height: this.min_widget_height,
col: col,
row: row,
original_col: col,
original_row: row
}).coords();
if (!$.isArray(this.gridmap[col])) {
this.gridmap[col] = [];
}
this.gridmap[col][row] = false;
this.faux_grid.push(coords);
return this;
};
/**
* Add rows to the faux grid.
*
* @method add_faux_rows
* @param {Number} rows The number of rows you want to add to the faux grid.
* @return {Object} Returns the instance of the Gridster class.
*/
fn.add_faux_rows = function(rows) {
var actual_rows = this.rows;
var max_rows = actual_rows + (rows || 1);
for (var r = max_rows; r > actual_rows; r--) {
for (var c = this.cols; c >= 1; c--) {
this.add_faux_cell(r, c);
}
}
this.rows = max_rows;
if (this.options.autogenerate_stylesheet) {
this.generate_stylesheet();
}
return this;
};
/**
* Add cols to the faux grid.
*
* @method add_faux_cols
* @param {Number} cols The number of cols you want to add to the faux grid.
* @return {Object} Returns the instance of the Gridster class.
*/
fn.add_faux_cols = function(cols) {
var actual_cols = this.cols;
var max_cols = actual_cols + (cols || 1);
max_cols = Math.min(max_cols, this.options.max_cols);
for (var c = actual_cols + 1; c <= max_cols; c++) {
for (var r = this.rows; r >= 1; r--) {
this.add_faux_cell(r, c);
}
}
this.cols = max_cols;
if (this.options.autogenerate_stylesheet) {
this.generate_stylesheet();
}
return this;
};
/**
* Recalculates the offsets for the faux grid. You need to use it when
* the browser is resized.
*
* @method recalculate_faux_grid
* @return {Object} Returns the instance of the Gridster class.
*/
fn.recalculate_faux_grid = function() {
var aw = this.$wrapper.width();
this.baseX = ($(window).width() - aw) / 2;
this.baseY = this.$wrapper.offset().top;
$.each(this.faux_grid, $.proxy(function(i, coords) {
this.faux_grid[i] = coords.update({
left: this.baseX + (coords.data.col -1) * this.min_widget_width,
top: this.baseY + (coords.data.row -1) * this.min_widget_height
});
}, this));
return this;
};
/**
* Get all widgets in the DOM and register them.
*
* @method get_widgets_from_DOM
* @return {Object} Returns the instance of the Gridster class.
*/
fn.get_widgets_from_DOM = function() {
var widgets_coords = this.$widgets.map($.proxy(function(i, widget) {
var $w = $(widget);
return this.dom_to_coords($w);
}, this));
widgets_coords = Gridster.sort_by_row_and_col_asc(widgets_coords);
var changes = $(widgets_coords).map($.proxy(function(i, wgd) {
return this.register_widget(wgd) || null;
}, this));
if (changes.length) {
this.$el.trigger('gridster:positionschanged');
}
return this;
};
/**
* Calculate columns and rows to be set based on the configuration
* parameters, grid dimensions, etc ...
*
* @method generate_grid_and_stylesheet
* @return {Object} Returns the instance of the Gridster class.
*/
fn.generate_grid_and_stylesheet = function() {
var aw = this.$wrapper.width();
var max_cols = this.options.max_cols;
var cols = Math.floor(aw / this.min_widget_width) +
this.options.extra_cols;
var actual_cols = this.$widgets.map(function() {
return $(this).attr('data-col');
}).get();
//needed to pass tests with phantomjs
actual_cols.length || (actual_cols = [0]);
var min_cols = Math.max.apply(Math, actual_cols);
this.cols = Math.max(min_cols, cols, this.options.min_cols);
if (max_cols !== Infinity && max_cols >= min_cols && max_cols < this.cols) {
this.cols = max_cols;
}
// get all rows that could be occupied by the current widgets
var max_rows = this.options.extra_rows;
this.$widgets.each(function(i, w) {
max_rows += (+$(w).attr('data-sizey'));
});
this.rows = Math.max(max_rows, this.options.min_rows);
this.baseX = ($(window).width() - aw) / 2;
this.baseY = this.$wrapper.offset().top;
if (this.options.autogenerate_stylesheet) {
this.generate_stylesheet();
}
return this.generate_faux_grid(this.rows, this.cols);
};
/**
    * Destroy this gridster by removing any sign of its presence, helping to
    * avoid memory leaks.
*
* @method destroy
* @param {Boolean} remove If true, remove gridster from DOM.
* @return {Object} Returns the instance of the Gridster class.
*/
fn.destroy = function(remove) {
this.$el.removeData('gridster');
// remove bound callback on window resize
$(window).unbind('.gridster');
if (this.drag_api) {
this.drag_api.destroy();
}
this.remove_style_tags();
remove && this.$el.remove();
return this;
};
//jQuery adapter
$.fn.gridster = function(options) {
return this.each(function() {
if (! $(this).data('gridster')) {
$(this).data('gridster', new Gridster( this, options ));
}
});
};
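    /* Usage sketch: given a '.gridster > ul' markup whose <li> widgets carry
     * data-row/data-col/data-sizex/data-sizey attributes, an instance can be
     * created and later retrieved through the element's data store:
     *   $('.gridster ul').gridster({
     *       widget_base_dimensions: [100, 55],
     *       widget_margins: [5, 5]
     *   });
     *   var gridster = $('.gridster ul').data('gridster');
     */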
return Gridster;
}));
;(function(root, factory) {
if (typeof define === 'function' && define.amd) {
define(['jquery', 'gridster'], factory);
} else {
root.Gridster = factory(root.$ || root.jQuery, root.Gridster);
}
}(this, function($, Gridster) {
var fn = Gridster.prototype;
fn.widgets_in_col = function(col) {
if (!this.gridmap[col]) {
return false;
}
for (var i = this.gridmap[col].length - 1; i >= 0; i--) {
if (this.is_widget(col, i) !== false) {
return true;
}
}
return false;
};
fn.widgets_in_row = function(row) {
for (var i = this.gridmap.length; i >= 1; i--) {
if (this.is_widget(i, row) !== false) {
return true;
}
}
return false;
};
fn.widgets_in_range = function(col1, row1, col2, row2) {
var valid_cols = [];
var valid_rows = [];
var $widgets = $([]);
var c, r, $w, wgd;
for (c = col2; c >= col1; c--) {
for (r = row2; r >= row1; r--) {
$w = this.is_widget(c, r);
if ($w !== false) {
wgd = $w.data('coords').grid;
if (wgd.col >= col1 && wgd.col <= col2 &&
wgd.row >= row1 && wgd.row <= row2
) {
$widgets = $widgets.add($w);
}
}
}
}
return $widgets;
};
fn.get_bottom_most_occupied_cell = function() {
var row = 0;
var col = 0;
this.for_each_cell(function($el, c, r) {
if ($el && r > row) {
row = r;
col = c;
}
});
return {col: col, row: row};
};
fn.get_right_most_occupied_cell = function() {
var row = 0;
var col = 0;
this.for_each_cell(function($el, c, r) {
if ($el) {
row = r;
col = c;
return false;
}
});
return {col: col, row: row};
};
fn.for_each_cell = function(callback, gridmap) {
gridmap || (gridmap = this.gridmap);
var cols = gridmap.length;
var rows = gridmap[1].length;
cols_iter:
for (var c = cols - 1; c >= 1; c--) {
for (var r = rows - 1; r >= 1; r--) {
var $el = gridmap[c] && gridmap[c][r];
if (callback) {
if (callback.call(this, $el, c, r) === false) {
break cols_iter;
} else { continue; }
}
}
}
};
fn.next_position_in_range = function(size_x, size_y, max_rows) {
size_x || (size_x = 1);
size_y || (size_y = 1);
var ga = this.gridmap;
var cols_l = ga.length;
var valid_pos = [];
var rows_l;
for (var c = 1; c < cols_l; c++) {
rows_l = max_rows || ga[c].length;
for (var r = 1; r <= rows_l; r++) {
var can_move_to = this.can_move_to({
size_x: size_x,
size_y: size_y
}, c, r, max_rows);
if (can_move_to) {
valid_pos.push({
col: c,
row: r,
size_y: size_y,
size_x: size_x
});
}
}
}
if (valid_pos.length >= 1) {
return Gridster.sort_by_col_asc(valid_pos)[0];
}
return false;
};
fn.closest_to_right = function(col, row) {
if (!this.gridmap[col]) { return false; }
var cols_l = this.gridmap.length - 1;
for (var c = col; c <= cols_l; c++) {
if (this.gridmap[c][row]) {
return { col: c, row: row };
}
}
return false;
};
fn.closest_to_left = function(col, row) {
var cols_l = this.gridmap.length - 1;
if (!this.gridmap[col]) { return false; }
for (var c = col; c >= 1; c--) {
if (this.gridmap[c][row]) {
return { col: c, row: row };
}
}
return false;
};
return Gridster;
}));
|
PypiClean
|
/gmcm-django-superadmin-2.0.10.tar.gz/gmcm-django-superadmin-2.0.10/superadmin/views/list.py
|
# Python
import operator
from functools import reduce
# Django
from django.core.paginator import InvalidPage
from django.db.models import Q
from django.http import Http404
from django.utils.translation import gettext as _
from django.views.generic import ListView as BaseListView
# Local
from .base import SiteView, get_base_view
from ..shortcuts import get_urls_of_site
from ..utils import import_mixin, import_all_mixins
# Utilities
from ..services import FieldService
class ListMixin:
"""Define class"""
allow_empty = True
action = "list"
def get_queryset(self):
queryset = super().get_queryset()
search_params = self.request.GET
if search_params:
params = search_params.dict()
search = params.pop("search", None)
params.pop("page", None)
params.pop("paginate_by", None)
model_site = self.site
if (
search
and hasattr(model_site, "search_params")
and isinstance(model_site.search_params, (list, tuple))
and model_site.search_params
):
search = search.replace("+", ",").replace(";", ",")
search_split = search.split(",")
for search_value in search_split:
filters = {
key: search_value.strip() for key in model_site.search_params
}
params.update(**filters)
args = [Q(**{key: value}) for key, value in filters.items()]
queryset = queryset.filter(reduce(operator.__or__, args))
return queryset
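    # Illustrative example (hypothetical field name): with
    # site.search_params = ["name__icontains"], a request like
    # ?search=foo,bar narrows the queryset once per value, roughly:
    #   queryset.filter(Q(name__icontains="foo"))
    #           .filter(Q(name__icontains="bar"))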
def paginate_queryset(self, queryset, page_size):
"""Paginate the queryset, if needed."""
paginator = self.get_paginator(
queryset,
page_size,
orphans=self.get_paginate_orphans(),
allow_empty_first_page=self.get_allow_empty(),
)
page_kwarg = self.page_kwarg
page = self.kwargs.get(page_kwarg) or self.request.GET.get(page_kwarg) or 1
try:
page_number = int(page)
except ValueError:
if page == "last":
page_number = paginator.num_pages
else:
raise Http404(
_("Page is not “last”, nor can it be converted to an int.")
)
try:
if page_number > paginator.num_pages:
page_number = paginator.num_pages
page = paginator.page(page_number)
return paginator, page, page.object_list, page.has_other_pages()
except InvalidPage as e:
raise Http404(
_("Invalid page (%(page_number)s): %(message)s")
% {"page_number": page_number, "message": str(e)}
)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update(self.site.list_extra_context)
opts = {
"fields": self.get_list_fields(),
"rows": self.get_rows(context["object_list"]),
"page_start_index": context["page_obj"].start_index()
if context["is_paginated"]
else 1,
"page_end_index": context["page_obj"].end_index()
if context["is_paginated"]
else context["object_list"].count(),
"total_records": context["paginator"].count
if context["is_paginated"]
else context["object_list"].count(),
}
if (
hasattr(self.site, "search_params")
and isinstance(self.site.search_params, (list, tuple))
and self.site.search_params
):
opts.update({"search_params": self.site.search_params})
if "site" in context:
context["site"].update(opts)
else:
context.update({"site": opts})
return context
def get_paginate_by(self, queryset):
paginate_by = self.request.GET.get("paginate_by")
if paginate_by:
return paginate_by
return super().get_paginate_by(queryset)
def get_list_fields(self):
fields = [
(name, FieldService.get_field_label(self.model, name))
for name in self.site.list_fields
]
return fields
def get_editable_fields(self):
fields = [
(name, FieldService.get_field_label(self.model, name))
for name in self.site.form_class._meta.fields
]
return fields
def get_rows(self, queryset):
rows = [
{
"instance": instance,
"values": self.get_values(instance),
"urls": get_urls_of_site(
self.site, object=instance, user=self.request.user
),
}
for instance in queryset
]
return rows
def get_values(self, instance):
values = [
FieldService.get_field_value(instance, name)
for name in self.site.list_fields
]
return values
class ListView(SiteView):
def view(self, request, *args, **kwargs):
"""Crear la List View del modelo"""
# Class
FilterMixin = import_mixin("FilterMixin")
mixins = import_all_mixins() + [FilterMixin, ListMixin]
View = get_base_view(BaseListView, mixins, self.get_site())
        # Set attributes
View.paginate_by = self.site.paginate_by
View.__bases__ = (*self.site.list_mixins, *View.__bases__)
view = View.as_view()
return view(request, *args, **kwargs)
|
PypiClean
|
/adafruit_circuitpython_ble-10.0.3-py3-none-any.whl/adafruit_ble/characteristics/__init__.py
|
from __future__ import annotations
import struct
import _bleio
from ..attributes import Attribute
try:
from typing import Optional, Type, Union, Tuple, Iterable, TYPE_CHECKING
if TYPE_CHECKING:
from circuitpython_typing import ReadableBuffer
from adafruit_ble.uuid import UUID
from adafruit_ble.services import Service
except ImportError:
pass
__version__ = "10.0.3"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_BLE.git"
class Characteristic:
"""
Top level Characteristic class that does basic binding.
:param UUID uuid: The uuid of the characteristic
:param int properties: The properties of the characteristic,
specified as a bitmask of these values bitwise-or'd together:
`BROADCAST`, `INDICATE`, `NOTIFY`, `READ`, `WRITE`, `WRITE_NO_RESPONSE`.
:param int read_perm: Specifies whether the characteristic can be read by a client,
and if so, which security mode is required.
Must be one of the integer values `Attribute.NO_ACCESS`, `Attribute.OPEN`,
`Attribute.ENCRYPT_NO_MITM`, `Attribute.ENCRYPT_WITH_MITM`,
`Attribute.LESC_ENCRYPT_WITH_MITM`,
`Attribute.SIGNED_NO_MITM`, or `Attribute.SIGNED_WITH_MITM`.
:param int write_perm: Specifies whether the characteristic can be written by a client,
and if so, which security mode is required. Values allowed are the same as ``read_perm``.
:param int max_length: Maximum length in bytes of the characteristic value. The maximum allowed
by the BLE specification is 512. On nRF, if ``fixed_length`` is ``True``, the maximum
is 510. The default value is 20, which is the maximum
number of data bytes that fit in a single BLE 4.x ATT packet.
:param bool fixed_length: True if the characteristic value is of fixed length.
:param buf initial_value: The initial value for this characteristic. If not given, will be
filled with zeros.
.. data:: BROADCAST
property: allowed in advertising packets
.. data:: INDICATE
property: server will indicate to the client when the value is set and wait for a response
.. data:: NOTIFY
property: server will notify the client when the value is set
.. data:: READ
property: clients may read this characteristic
.. data:: WRITE
property: clients may write this characteristic; a response will be sent back
.. data:: WRITE_NO_RESPONSE
property: clients may write this characteristic; no response will be sent back"""
BROADCAST = _bleio.Characteristic.BROADCAST
INDICATE = _bleio.Characteristic.INDICATE
NOTIFY = _bleio.Characteristic.NOTIFY
READ = _bleio.Characteristic.READ
WRITE = _bleio.Characteristic.WRITE
WRITE_NO_RESPONSE = _bleio.Characteristic.WRITE_NO_RESPONSE
def __init__(
self,
*,
uuid: Optional[UUID] = None,
properties: int = 0,
read_perm: int = Attribute.OPEN,
write_perm: int = Attribute.OPEN,
max_length: Optional[int] = None,
fixed_length: bool = False,
initial_value: Optional[ReadableBuffer] = None,
) -> None:
self.field_name = None # Set by Service during basic binding
if uuid:
self.uuid = uuid
self.properties = properties
self.read_perm = read_perm
self.write_perm = write_perm
self.max_length = max_length
self.fixed_length = fixed_length
self.initial_value = initial_value
def _ensure_bound(
self, service: Service, initial_value: Optional[bytes] = None
) -> None:
"""Binds the characteristic to the local Service or remote Characteristic object given."""
if self.field_name in service.bleio_characteristics:
return
if service.remote:
for characteristic in service.bleio_service.characteristics:
if characteristic.uuid == self.uuid.bleio_uuid:
bleio_characteristic = characteristic
break
else:
raise AttributeError("Characteristic not available on remote service")
else:
bleio_characteristic = self.__bind_locally(service, initial_value)
service.bleio_characteristics[self.field_name] = bleio_characteristic
def __bind_locally(
self, service: Service, initial_value: Optional[bytes]
) -> _bleio.Characteristic:
if initial_value is None:
initial_value = self.initial_value
if initial_value is None and self.max_length:
initial_value = bytes(self.max_length)
max_length = self.max_length
if max_length is None and initial_value is None:
max_length = 0
initial_value = b""
elif max_length is None:
max_length = len(initial_value)
return _bleio.Characteristic.add_to_service(
service.bleio_service,
self.uuid.bleio_uuid,
initial_value=initial_value,
max_length=max_length,
fixed_length=self.fixed_length,
properties=self.properties,
read_perm=self.read_perm,
write_perm=self.write_perm,
)
def __get__(
self, service: Optional[Service], cls: Optional[Type[Service]] = None
) -> ReadableBuffer:
# CircuitPython doesn't invoke descriptor protocol on obj's class,
# but CPython does. In the CPython case, pretend that it doesn't.
if service is None:
return self
self._ensure_bound(service)
bleio_characteristic = service.bleio_characteristics[self.field_name]
return bleio_characteristic.value
def __set__(self, service: Service, value: ReadableBuffer) -> None:
self._ensure_bound(service, value)
if value is None:
value = b""
bleio_characteristic = service.bleio_characteristics[self.field_name]
bleio_characteristic.value = value
class ComplexCharacteristic:
"""
Characteristic class that does complex binding where the subclass returns a full object for
interacting with the characteristic data. The Characteristic itself will be shadowed once it
has been bound to the corresponding instance attribute.
"""
def __init__(
self,
*,
uuid: Optional[UUID] = None,
properties: int = 0,
read_perm: int = Attribute.OPEN,
write_perm: int = Attribute.OPEN,
max_length: int = 20,
fixed_length: bool = False,
initial_value: Optional[ReadableBuffer] = None,
) -> None:
self.field_name = None # Set by Service during basic binding
if uuid:
self.uuid = uuid
self.properties = properties
self.read_perm = read_perm
self.write_perm = write_perm
self.max_length = max_length
self.fixed_length = fixed_length
self.initial_value = initial_value
def bind(self, service: Service) -> _bleio.Characteristic:
"""Binds the characteristic to the local Service or remote Characteristic object given."""
if service.remote:
for characteristic in service.bleio_service.characteristics:
if characteristic.uuid == self.uuid.bleio_uuid:
return characteristic
raise AttributeError("Characteristic not available on remote service")
return _bleio.Characteristic.add_to_service(
service.bleio_service,
self.uuid.bleio_uuid,
initial_value=self.initial_value,
max_length=self.max_length,
properties=self.properties,
read_perm=self.read_perm,
write_perm=self.write_perm,
)
def __get__(
self, service: Optional[Service], cls: Optional[Type[Service]] = None
) -> _bleio.Characteristic:
if service is None:
return self
bound_object = self.bind(service)
setattr(service, self.field_name, bound_object)
return bound_object
class StructCharacteristic(Characteristic):
"""
Data descriptor for a structure with a fixed format.
:param struct_format: a `struct` format string describing how to pack multiple values
into the characteristic bytestring
:param UUID uuid: The uuid of the characteristic
:param int properties: see `Characteristic`
:param int read_perm: see `Characteristic`
:param int write_perm: see `Characteristic`
:param buf initial_value: see `Characteristic`
"""
def __init__(
self,
struct_format,
*,
uuid: Optional[UUID] = None,
properties: int = 0,
read_perm: int = Attribute.OPEN,
write_perm: int = Attribute.OPEN,
initial_value: Optional[ReadableBuffer] = None,
) -> None:
self._struct_format = struct_format
self._expected_size = struct.calcsize(struct_format)
if initial_value is not None:
initial_value = struct.pack(self._struct_format, *initial_value)
super().__init__(
uuid=uuid,
initial_value=initial_value,
max_length=self._expected_size,
fixed_length=True,
properties=properties,
read_perm=read_perm,
write_perm=write_perm,
)
def __get__(
self, obj: Optional[Service], cls: Optional[Type[Service]] = None
) -> Optional[Union[Tuple, "StructCharacteristic"]]:
if obj is None:
return self
raw_data = super().__get__(obj, cls)
if len(raw_data) < self._expected_size:
return None
return struct.unpack(self._struct_format, raw_data)
def __set__(self, obj: Service, value: Iterable) -> None:
encoded = struct.pack(self._struct_format, *value)
super().__set__(obj, encoded)
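# Usage sketch (hypothetical service; the vendor UUIDs below are made up):
#
#   from adafruit_ble.uuid import VendorUUID
#   from adafruit_ble.services import Service
#
#   class SensorService(Service):
#       uuid = VendorUUID("51ad213f-e568-4e35-84e4-67af89c79ef0")
#       reading = StructCharacteristic(
#           "<hh",  # two little-endian int16 values
#           uuid=VendorUUID("e077bdec-f18b-4944-9e9e-8b3a815162b4"),
#           properties=Characteristic.READ | Characteristic.NOTIFY,
#       )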
|
PypiClean
|
/freesurfer_pp_moc-2.2.2.tar.gz/freesurfer_pp_moc-2.2.2/freesurfer_pp_moc/freesurfer_pp_moc.py
|
import os
from os import listdir, sep
from os.path import abspath, basename, isdir
import shutil
import pudb
import sys
import time
import glob
# import the Chris app superclass
from chrisapp.base import ChrisApp
Gstr_title = """
__ __
/ _| / _|
| |_ _ __ ___ ___ ___ _ _ _ __| |_ ___ _ __ _ __ _ __ _ __ ___ ___ ___
| _| '__/ _ \/ _ | __| | | | '__| _/ _ \ '__| | '_ \| '_ \ | '_ ` _ \ / _ \ / __|
| | | | | __/ __|__ \ |_| | | | || __/ | | |_) | |_) || | | | | | (_) | (__
|_| |_| \___|\___|___/\__,_|_| |_| \___|_| | .__/| .__/ |_| |_| |_|\___/ \___|
______| | | |______
|______|_| |_|______|
"""
Gstr_synopsis = """
NAME
freesurfer_pp_moc.py
SYNOPSIS
python freesurfer_pp_moc.py \\
[-v <level>] [--verbosity <level>] \\
[--version] \\
[--man] \\
[--meta] \\
[--copySpec <copySpec>] \\
[--ageSpec <ageSpec>] \\
<inputDir> \\
<outputDir>
BRIEF EXAMPLE
    * To copy the bundled pre-processed data for age 10 yr, 6 mo, 1 da to an output directory:
mkdir in out
python freesurfer_pp_moc.py \\
--saveinputmeta --saveoutputmeta \\
-a 10-06-01 \\
-c stats,sag,cor,tra,3D \\
in out
DESCRIPTION
        `freesurfer_pp_moc.py` copies one of the pre-processed FreeSurfer
        result trees bundled in the container, selected with the `--ageSpec`
        and `--copySpec` flags, to the output directory.
ARGS
[-v <level>] [--verbosity <level>]
Verbosity level for app. Not used currently.
[--version]
If specified, print version number.
[--man]
If specified, print (this) man page.
[--meta]
If specified, print plugin meta data.
[-T <targetTreeHead>] | [--treePrint <targetTreeHead>]
Print a simple directory tree rooted on <targetTreeHead>. Typically
used to print the internal database with a
-T ../preprocessed
[-a <ageSpec>] | [--ageSpec <ageSpec>]
A string in <YY>-<MM>-<DD> format that denotes an *exact* target to
retrieve. Consult '-T ../preprocessed' to see full range of specs.
[-c <copySpec>] | [--copySpec <copySpec>]
A comma separated string denoting the preprocessed subdirs to copy.
Note that a substring glob is performed, thus a spec of 'tra' will
target 'aparc.a2009s+aseg-LUT-tra'.
[-P <processDelay>] | [--processDelay <processDelay>]
A delay timer to simulate remote processing. The script will pause for
<processDelay> seconds.
"""
class Freesurfer_pp_moc(ChrisApp):
"""
A "dummy" app containing the output of some prior FreeSurfer runs,
organized in
<YR>-yr/<MO>-mo/<DA>-da
directory structure within the container. This app simply copies one
of these pre-processed output trees into the output folder of the plugin.
"""
AUTHORS = 'FNNDSC ([email protected])'
SELFPATH = os.path.dirname(os.path.abspath(__file__))
SELFEXEC = os.path.basename(__file__)
EXECSHELL = 'python3'
TITLE = 'FreeSurfer Pre-Populated'
CATEGORY = 'FreeSurfer'
TYPE = 'ds'
DESCRIPTION = 'A "dummy" app that contains some prior FreeSurfer output and simply copies this to the output directory.'
DOCUMENTATION = 'https://github.com/FNNDSC/pl-freesurfer_pp_moc'
VERSION = '2.2.2'
ICON = '' # url of an icon image
LICENSE = 'Opensource (MIT)'
MAX_NUMBER_OF_WORKERS = 1 # Override with integer value
MIN_NUMBER_OF_WORKERS = 1 # Override with integer value
MAX_CPU_LIMIT = '' # Override with millicore value as string, e.g. '2000m'
MIN_CPU_LIMIT = '' # Override with millicore value as string, e.g. '2000m'
MAX_MEMORY_LIMIT = '' # Override with string, e.g. '1Gi', '2000Mi'
MIN_MEMORY_LIMIT = '' # Override with string, e.g. '1Gi', '2000Mi'
MIN_GPU_LIMIT = 0 # Override with the minimum number of GPUs, as an integer, for your plugin
MAX_GPU_LIMIT = 0 # Override with the maximum number of GPUs, as an integer, for your plugin
# Fill out this with key-value output descriptive info (such as an output file path
# relative to the output dir) that you want to save to the output meta file when
# called with the --saveoutputmeta flag
OUTPUT_META_DICT = {}
str_tree = ''
def show_man_page(self):
"""
Print some quick help.
"""
print(Gstr_synopsis)
@staticmethod
def dirTree_probe(dir, padding, print_files=False):
"""
Simple method that returns a string of a dir tree layout.
        Relies on the class attribute ``str_tree``, which accumulates across calls.
"""
Freesurfer_pp_moc.str_tree += padding[:-1] + '+-' + basename(abspath(dir)) + '/' + '\n'
padding = padding + ' '
files = []
if print_files:
files = listdir(dir)
else:
files = [x for x in listdir(dir) if isdir(dir + sep + x)]
count = 0
for file in files:
count += 1
Freesurfer_pp_moc.str_tree += padding + '|' + '\n'
path = dir + sep + file
if isdir(path):
if count == len(files):
Freesurfer_pp_moc.dirTree_probe(path, padding + ' ', print_files)
else:
Freesurfer_pp_moc.dirTree_probe(path, padding + '|', print_files)
else:
Freesurfer_pp_moc.str_tree += padding + '+-' + file + '\n'
return Freesurfer_pp_moc.str_tree
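    # Illustrative output of dirTree_probe('../preprocessed', ''), assuming
    # the bundled tree contains only the default 10-yr/06-mo/01-da path:
    #
    #   +-preprocessed/
    #    |
    #    +-10-yr/
    #      |
    #      +-06-mo/
    #        |
    #        +-01-da/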
def define_parameters(self):
"""
Define the CLI arguments accepted by this plugin app.
"""
self.add_argument("-T", "--treePrint",
help = "Simple dirtree print. Specify head of target tree",
type = str,
dest = 'treePrint',
optional = True,
default = "")
self.add_argument("-a", "--ageSpec",
help = "A string in <YY>-<MM>-<DD> format that denotes an *exact* target to retrieve",
type = str,
dest = 'ageSpec',
optional = True,
default = "")
self.add_argument("-c", "--copySpec",
help = "A comma separated string denoting the subdirs to copy",
type = str,
dest = 'copySpec',
optional = True,
default = "stats")
self.add_argument("-P", "--processDelay",
help = "delay timer to simulate remote processing",
type = str,
dest = 'processDelay',
optional = True,
default = "0")
self.add_argument("--jsonReturn",
help = "output final return in json",
type = bool,
dest = 'jsonReturn',
action = 'store_true',
optional = True,
default = False)
def run(self, options):
"""
Define the code to be run by this plugin app.
"""
if len(options.treePrint):
str_tree = ''
str_tree = Freesurfer_pp_moc.dirTree_probe(options.treePrint, '')
print(str_tree)
sys.exit(0)
print(Gstr_title)
print('Version: %s' % Freesurfer_pp_moc.VERSION)
if len(options.processDelay):
print('Simulating a process delay of %s seconds...' % options.processDelay)
time.sleep(int(options.processDelay))
str_ageDirDefault = '10-yr/06-mo/01-da'
if len(options.ageSpec):
l_ageSpec = options.ageSpec.split('-')
str_ageDir = '%s-yr/%s-mo/%s-da' % (l_ageSpec[0], l_ageSpec[1], l_ageSpec[2])
else:
str_ageDir = str_ageDirDefault
str_treeAgeSpec = '../preprocessed/%s' % str_ageDir
if not os.path.isdir(str_treeAgeSpec):
            print('The ageSpec dir does not seem valid. Reverting to default.')
str_treeAgeSpec = '../preprocessed/%s' % str_ageDirDefault
# pudb.set_trace()
lstr_targetDir = options.copySpec.split(',')
for str_targetDir in lstr_targetDir:
lstr_targetDirFull = glob.glob("%s/*%s*" % (options.outputdir, str_targetDir))
if len(lstr_targetDirFull):
print('Deleting any pre-existing data in output dir: %s...' % lstr_targetDirFull[0])
shutil.rmtree('%s' % (lstr_targetDirFull[0]), ignore_errors = True)
lstr_sourceDir = glob.glob('%s/*%s*' % (str_treeAgeSpec, str_targetDir))
if len(lstr_sourceDir):
str_targetDirFull = '%s/%s' % \
(options.outputdir, os.path.basename(lstr_sourceDir[0]))
if os.path.isdir(lstr_sourceDir[0]):
print('Copying tree from %s to %s...' % \
(lstr_sourceDir[0], str_targetDirFull))
shutil.copytree(lstr_sourceDir[0], str_targetDirFull)
# ENTRYPOINT
if __name__ == "__main__":
app = Freesurfer_pp_moc()
app.launch()
|
PypiClean
|
/dash_vtk-0.0.9-py3-none-any.whl/dash_vtk/SliceRepresentation.py
|
from dash.development.base_component import Component, _explicitize_args
class SliceRepresentation(Component):
"""A SliceRepresentation component.
    SliceRepresentation is responsible for converting a vtkPolyData into a
    rendering. It takes the following set of properties:
- colorBy: ['POINTS', ''],
- pointSize: 1,
- color: [1,1,1],
Keyword arguments:
- children (list of a list of or a singular dash component, string or numbers | a list of or a singular dash component, string or number; optional)
- id (string; optional):
The ID used to identify this component.
- actor (dict; optional):
Properties to set to the slice/actor.
- colorDataRange (list of numbers | string; default 'auto'):
Data range use for the colorMap.
- colorMapPreset (string; default 'erdc_rainbow_bright'):
Preset name for the lookup table color map.
- iSlice (number; optional):
index of the slice along i.
- jSlice (number; optional):
index of the slice along j.
- kSlice (number; optional):
index of the slice along k.
- mapper (dict; optional):
Properties to set to the mapper.
- property (dict; optional):
Properties to set to the volume.property.
- xSlice (number; optional):
index of the slice along x.
- ySlice (number; optional):
index of the slice along y.
- zSlice (number; optional):
index of the slice along z."""
@_explicitize_args
def __init__(self, children=None, id=Component.UNDEFINED, mapper=Component.UNDEFINED, actor=Component.UNDEFINED, property=Component.UNDEFINED, colorMapPreset=Component.UNDEFINED, colorDataRange=Component.UNDEFINED, iSlice=Component.UNDEFINED, jSlice=Component.UNDEFINED, kSlice=Component.UNDEFINED, xSlice=Component.UNDEFINED, ySlice=Component.UNDEFINED, zSlice=Component.UNDEFINED, **kwargs):
self._prop_names = ['children', 'id', 'actor', 'colorDataRange', 'colorMapPreset', 'iSlice', 'jSlice', 'kSlice', 'mapper', 'property', 'xSlice', 'ySlice', 'zSlice']
self._type = 'SliceRepresentation'
self._namespace = 'dash_vtk'
self._valid_wildcard_attributes = []
self.available_properties = ['children', 'id', 'actor', 'colorDataRange', 'colorMapPreset', 'iSlice', 'jSlice', 'kSlice', 'mapper', 'property', 'xSlice', 'ySlice', 'zSlice']
self.available_wildcard_properties = []
_explicit_args = kwargs.pop('_explicit_args')
_locals = locals()
_locals.update(kwargs) # For wildcard attrs
args = {k: _locals[k] for k in _explicit_args if k != 'children'}
for k in []:
if k not in args:
raise TypeError(
'Required argument `' + k + '` was not specified.')
super(SliceRepresentation, self).__init__(children=children, **args)
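# A minimal usage sketch (an assumption, not part of this generated module):
# show one slice of a volume inside a dash_vtk View. `image_data` stands in
# for whatever data source (e.g. a dash_vtk.ImageData tree) feeds the
# representation.
#
#   import dash
#   import dash_vtk
#
#   app = dash.Dash(__name__)
#   app.layout = dash_vtk.View(children=[
#       dash_vtk.SliceRepresentation(kSlice=25, children=[image_data]),
#   ])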
|
PypiClean
|
/lofar-obs-xml-2.0.tar.gz/lofar-obs-xml-2.0/lofarobsxml/pipelines.py
|
r'''
The pipelines module contains classes that represent data processing
pipelines. At this moment, only NDPPP is implemented here, but later
on, this module will also support calibrator/target pipelines and
imaging settings.
'''
from lofarobsxml.observationspecificationbase import ObservationSpecificationBase
from lofarobsxml.utilities import AutoReprBaseClass, typecheck, lower_case, unique, indent
from lofarobsxml.momformats import mom_duration, mom_timestamp, check_mom_topology
import ephem
class NDPPP(AutoReprBaseClass):
r'''
The demixing- and averaging parameters for NDPPP.
**Parameters**
avg_freq_step : int
How many channels to average.
avg_time_step : int
How many time steps to average.
demix_freq_step : int
Width (in channels) of the window over which demixing is
done. This must be a multiple of ``avg_freq_step``.
demix_time_step : int
Length (in time slots) of the demixing window. This must be a
multiple of ``avg_time_step``.
demix_always : None or list of strings
Sources to always demix. Valid source names are: ['CasA',
'CygA', 'TauA', 'HydraA', 'VirA', 'HerA'].
ignore_target : None or bool
See the imaging cookbook documentation. None implies the observatory
default.
**Examples**
>>> dmx = NDPPP(16, 2, demix_freq_step=64, demix_time_step=10,
... demix_always=['CygA', 'CasA'])
>>> dmx
NDPPP(avg_freq_step = 16,
avg_time_step = 2,
demix_always = ['CygA', 'CasA'],
demix_freq_step = 64,
demix_if_needed = None,
demix_time_step = 10,
ignore_target = None)
>>> print(dmx.xml())
<BLANKLINE>
<demixingParameters>
<averagingFreqStep>16</averagingFreqStep>
<averagingTimeStep>2</averagingTimeStep>
<demixFreqStep>64</demixFreqStep>
<demixTimeStep>10</demixTimeStep>
<demixAlways>[CygA,CasA]</demixAlways>
<demixIfNeeded></demixIfNeeded>
<ignoreTarget></ignoreTarget>
</demixingParameters>
Of course, several consistency checks are made:
>>> NDPPP(16, 2, demix_freq_step=64, demix_time_step=5,
... demix_always=['CygA', 'CasA'])
Traceback (most recent call last):
...
ValueError: NDPPP.demix_time_step(5) is not a multiple of NDPPP.avg_time_step(2)
>>> NDPPP(16, 2.5, demix_freq_step=64, demix_time_step=5,
... demix_always=['CygA', 'CasA'])
Traceback (most recent call last):
...
TypeError: type(NDPPP.avg_time_step)(2.5) not in ['int']
'''
def __init__(self,
avg_freq_step = 64, avg_time_step = 1,
demix_freq_step = 64, demix_time_step = 10,
demix_always = None, demix_if_needed = None,
ignore_target = None):
self.avg_freq_step = avg_freq_step
self.avg_time_step = avg_time_step
self.demix_freq_step = demix_freq_step
self.demix_time_step = demix_time_step
self.demix_always = demix_always
if type(self.demix_always) is str:
self.demix_always = [self.demix_always]
self.demix_if_needed = demix_if_needed
if type(self.demix_if_needed) is str:
self.demix_if_needed = [self.demix_if_needed]
self.ignore_target = ignore_target
self.validate()
def validate(self):
r'''
Raise a ValueError or TypeError if problems with the
specification are detected.
'''
typecheck(self.avg_freq_step, int, 'NDPPP.avg_freq_step')
typecheck(self.avg_time_step, int, 'NDPPP.avg_time_step')
typecheck(self.demix_freq_step, int, 'NDPPP.demix_freq_step')
typecheck(self.demix_time_step, int, 'NDPPP.demix_time_step')
typecheck(self.demix_always, [list, type(None)],
'NDPPP.demix_always')
typecheck(self.demix_if_needed, [list, type(None)],
'NDPPP.demix_if_needed')
if self.demix_freq_step % self.avg_freq_step != 0:
raise ValueError('NDPPP.demix_freq_step(%r) is not a multiple of NDPPP.avg_freq_step(%r)' %
(self.demix_freq_step,
self.avg_freq_step))
if self.demix_time_step % self.avg_time_step != 0:
raise ValueError('NDPPP.demix_time_step(%r) is not a multiple of NDPPP.avg_time_step(%r)' %
(self.demix_time_step,
self.avg_time_step))
typecheck(self.ignore_target, [type(None), type(True)],
'NDPPP.ignore_target')
def xml(self):
r'''
Produce an xml representation of demixing settings.
'''
template = '''
<demixingParameters>
<averagingFreqStep>%(avg_freq_step)d</averagingFreqStep>
<averagingTimeStep>%(avg_time_step)d</averagingTimeStep>
<demixFreqStep>%(demix_freq_step)d</demixFreqStep>
<demixTimeStep>%(demix_time_step)d</demixTimeStep>
<demixAlways>%(demix_always)s</demixAlways>
<demixIfNeeded>%(demix_if_needed)s</demixIfNeeded>
<ignoreTarget>%(ignore_target)s</ignoreTarget>
</demixingParameters>'''
args = {'avg_freq_step' : self.avg_freq_step,
'avg_time_step' : self.avg_time_step,
'demix_freq_step' : self.demix_freq_step,
'demix_time_step' : self.demix_time_step,
'demix_always' : '',
'demix_if_needed' : '',
'ignore_target' : ''}
if self.demix_always is not None:
args['demix_always'] = '['+','.join(self.demix_always)+']'
if self.demix_if_needed is not None:
args['demix_if_needed'] = '['+','.join(self.demix_if_needed)+']'
if self.ignore_target is not None:
args['ignore_target'] = lower_case(self.ignore_target)
return template % args
class AveragingPipeline(ObservationSpecificationBase):
r'''
**Parameters**
processing_nr_tasks : int
Should be roughly the number of subbands divided by 3.
flagging_strategy : string or None
NDPPP flagging strategy; one of 'LBAdefault', 'HBAdefault', or None.
initial_status : string
Either 'opened' or 'approved'.
**Examples**
>>> from lofarobsxml import TargetSource, Angle
>>> from lofarobsxml.backend import BackendProcessing
>>> from lofarobsxml.observation import Observation
>>> from lofarobsxml.beam import Beam
>>> target = TargetSource(name = 'Cyg A',
... ra_angle = Angle(hms = (19, 59, 28.3566)),
... dec_angle = Angle(sdms = ('+', 40, 44, 2.097)))
>>> bm = Beam(0, target, '77..324')
>>> obs = Observation('HBA_DUAL_INNER', 'LBA_LOW', (2013, 10, 20, 18, 5, 0),
... duration_seconds = 600, name = 'Main observation',
... stations = ['CS001', 'RS106', 'DE601'],
... clock_mhz = 200, beam_list = [bm],
... backend = BackendProcessing())
>>> avg = AveragingPipeline(name = 'Avg Pipeline', ndppp = NDPPP())
>>> avg.add_input_data_product(obs.children[0])
>>> obs.append_child(avg)
>>> avg
AveragingPipeline(parent = Observation('Main observation'),
children = None,
default_template = 'Preprocessing Pipeline',
duration_s = None,
flagging_strategy = None,
initial_status = 'opened',
input_data = [Beam(parent = Observation('Main observation'),
children = None,
duration_s = None,
initial_status = 'opened',
measurement_type = 'Target',
name = 'Cyg A',
sap_id = 0,
storage_cluster = 'CEP4',
storage_partition = '/data/projects',
subband_spec = '77..324',
target_source = TargetSource(name = 'Cyg A',
ra_angle = Angle(shms = ('+', 19, 59, 28.3566)),
dec_angle = Angle(sdms = ('+', 40, 44, 2.097))),
tied_array_beams = None)],
name = 'Avg Pipeline',
ndppp = NDPPP(avg_freq_step = 64,
avg_time_step = 1,
demix_always = None,
demix_freq_step = 64,
demix_if_needed = None,
demix_time_step = 10,
ignore_target = None),
predecessor_label = None,
processing_cluster = 'CEP4',
processing_nr_cores = 2,
processing_nr_tasks = 163,
processing_partition = 'cpu',
start_date = None,
storage_cluster = 'CEP4',
storage_partition = '/data/projects')
>>> print(avg.xml('Project name'))
<lofar:pipeline xsi:type="lofar:AveragingPipelineType">
<topology>Main_observat.1.Avg_Pipeline.uv.dps</topology>
<predecessor_topology>Main_observat</predecessor_topology>
<name>Avg Pipeline</name>
<description>Avg Pipeline: "Preprocessing Pipeline"</description>
<processingCluster>
<name>CEP4</name>
<partition>cpu</partition>
<numberOfTasks>163</numberOfTasks>
<numberOfCoresPerTask>2</numberOfCoresPerTask>
</processingCluster>
<currentStatus>
<mom2:openedStatus/>
</currentStatus>
<averagingPipelineAttributes>
<defaultTemplate>Preprocessing Pipeline</defaultTemplate>
<duration></duration>
<startTime></startTime>
<endTime></endTime>
<demixingParameters>
<averagingFreqStep>64</averagingFreqStep>
<averagingTimeStep>1</averagingTimeStep>
<demixFreqStep>64</demixFreqStep>
<demixTimeStep>10</demixTimeStep>
<demixAlways></demixAlways>
<demixIfNeeded></demixIfNeeded>
<ignoreTarget></ignoreTarget>
</demixingParameters>
<flaggingStrategy>HBAdefault</flaggingStrategy>
</averagingPipelineAttributes>
<usedDataProducts>
<item>
<lofar:uvDataProduct topology="Main_observat.0.Cyg_A.SAP000.uv.dps">
<name>Main_observat.0.Cyg_A.SAP000.uv.dps</name>
</lofar:uvDataProduct>
</item>
</usedDataProducts>
<resultDataProducts>
<item>
<lofar:uvDataProduct>
<name>Main_observat.1.Avg_Pipeline.uv.dps</name>
<topology>Main_observat.1.Avg_Pipeline.uv.dps</topology>
<status>no_data</status>
<storageCluster>
<name>CEP4</name>
<partition>/data/projects</partition>
</storageCluster>
</lofar:uvDataProduct>
</item>
</resultDataProducts>
</lofar:pipeline>
'''
def __init__(self, name, ndppp, input_data = None,
duration_s = None, start_date = None,
flagging_strategy = None,
parent = None, children = None,
predecessor_label = None,
initial_status='opened',
processing_cluster='CEP4',
processing_partition=None,
processing_nr_tasks = 163, #This is a sensible default for CEP4 NDPPP pipelines, not for all pipelines
processing_nr_cores = 2): #This is a sensible default for CEP4 NDPPP pipelines, not for all pipelines
super(AveragingPipeline, self).__init__(name=name,
parent=parent,
children=children,
initial_status=initial_status)
self.ndppp = ndppp
self.input_data = None
self.duration_s = duration_s
self.start_date = start_date
self.flagging_strategy = flagging_strategy
self.default_template = 'Preprocessing Pipeline'
self.predecessor_label = predecessor_label
if input_data is not None:
for item in input_data:
self.add_input_data_product(item)
self.processing_cluster = processing_cluster.upper()
self.storage_cluster = self.processing_cluster
self.processing_partition = processing_partition
self.processing_nr_tasks = processing_nr_tasks
self.processing_nr_cores = processing_nr_cores
if self.processing_partition is None:
if self.processing_cluster == 'CEP2':
self.processing_partition = 'cpu' #We also set this one to 'cpu' but it doesn't mean anything
self.storage_partition = "/data" #This path is now coded in multiple locations, can probably be refactored into the base class?
raise ValueError('CEP2 has been decommissioned long ago. Please select another cluster.')
elif self.processing_cluster == 'CEP4':
self.processing_partition = 'cpu' #We also have 'gpu' but that's not used right now
self.storage_partition = "/data/projects" #This path is now coded in multiple locations, can probably be refactored into the base class?
if self.processing_partition is None:
raise ValueError('No processing partition specified for cluster %s' %
self.processing_cluster)
self.validate()
def validate(self):
r'''
Raise a ValueError or TypeError if problems with the
specification are detected.
'''
typecheck(self.ndppp, NDPPP,
'AveragingPipeline.NDPPP')
typecheck(self.predecessor_label, [type(None), str],
'AveragingPipeline.predecessor_label')
def add_input_data_product(self, input_sap):
r'''
Append ``input_sap`` (typically a Beam) to this pipeline's list of
input data products, creating the list on first use.
'''
if self.input_data is None:
self.input_data = []
self.input_data.append(input_sap)
def predecessor(self):
r'''
Return the topology label of the unique predecessor observation,
or ``predecessor_label`` if one was set explicitly.
'''
if self.predecessor_label is not None:
return self.predecessor_label
predecessor_observations = unique(
[data_set.parent.label()
for data_set in self.input_data
if data_set.parent is not None])
if len(predecessor_observations) != 1:
raise ValueError('AveragingPipeline: expected exactly one predecessor observation, got %r' %
predecessor_observations)
return predecessor_observations[0]
def xml_prefix(self, project_name = None):
template ='''<lofar:pipeline xsi:type="lofar:AveragingPipelineType">
<topology>%(label)s</topology>
<predecessor_topology>%(predecessor)s</predecessor_topology>
<name>%(name)s</name>
<description>%(name)s: "%(default_template)s"</description>
<processingCluster>
<name>%(processing_cluster)s</name>
<partition>%(processing_partition)s</partition>
<numberOfTasks>%(processing_nr_tasks)s</numberOfTasks>
<numberOfCoresPerTask>%(processing_nr_cores)s</numberOfCoresPerTask>
</processingCluster>
<currentStatus>
<mom2:%(initial_status)sStatus/>
</currentStatus>
<averagingPipelineAttributes>
<defaultTemplate>%(default_template)s</defaultTemplate>
<duration>%(duration)s</duration>
<startTime>%(start_time)s</startTime>
<endTime></endTime>%(ndppp)s
<flaggingStrategy>%(flagging_strategy)s</flaggingStrategy>
</averagingPipelineAttributes>
<usedDataProducts>%(used_data_products)s
</usedDataProducts>
<resultDataProducts>
<item>
<lofar:uvDataProduct>
<name>%(label)s</name>
<topology>%(label)s</topology>
<status>no_data</status>
<storageCluster>
<name>%(storage_cluster)s</name>
<partition>%(storage_partition)s</partition>
</storageCluster>
</lofar:uvDataProduct>
</item>
</resultDataProducts>
'''
used_data_product_template = '''\n<item>
<lofar:uvDataProduct topology="%(name)s">
<name>%(name)s</name>
</lofar:uvDataProduct>
</item>'''
args = {
'label' : check_mom_topology(self.label() + '.uv.dps'),
'predecessor' : self.predecessor(),
'name' : self.name,
'default_template' : self.default_template,
'duration' : '',
'start_time' : '',
'flagging_strategy': self.flagging_strategy,
'ndppp' : indent(self.ndppp.xml(), 4),
'used_data_products' : '',
'initial_status': self.initial_status,
'processing_cluster': self.processing_cluster,
'processing_partition': self.processing_partition,
'processing_nr_tasks': self.processing_nr_tasks,
'processing_nr_cores': self.processing_nr_cores,
'storage_cluster': self.storage_cluster,
'storage_partition': self.storage_partition
}
if self.duration_s is not None:
args['duration'] = mom_duration(seconds = self.duration_s)
if self.start_date is not None:
start_date = ephem.Date(self.start_date).tuple()
rounded_start_date = start_date[:-1]+(int(round(start_date[-1])),)
args['start_time'] = mom_timestamp(*rounded_start_date)
if self.input_data is None:
raise ValueError('AveragingPipeline.input_data is None!')
if self.flagging_strategy is None:
args['flagging_strategy'] = self.input_data[0].parent.antenna_set[0:3].upper()+'default'
elif self.flagging_strategy in ['HBAdefault', 'LBAdefault']:
args['flagging_strategy'] = self.flagging_strategy
else:
raise ValueError('lofarobsxml.AveragingPipeline: unknown flagging strategy %r' %
self.flagging_strategy)
args['used_data_products'] = indent(
'\n'.join([
used_data_product_template % {'name' : sap.data_products_label()} #TODO this needs a proper fix as the topology for the observation has changed
for sap in self.input_data]),
4)
return template % args
def xml_suffix(self, project_name= None):
return '</lofar:pipeline>'
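# A minimal end-to-end sketch (an assumption about typical use, mirroring
# the AveragingPipeline doctest above): attach a pipeline to an existing
# observation's beam and write the resulting MoM XML to disk.
#
#   avg = AveragingPipeline(name='Avg Pipeline', ndppp=NDPPP())
#   avg.add_input_data_product(obs.children[0])
#   obs.append_child(avg)
#   with open('pipeline.xml', 'w') as xml_file:
#       xml_file.write(avg.xml('Project name'))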
|
PypiClean
|