id (stringlengths 1-8) | text (stringlengths 6-1.05M) | dataset_id (stringclasses 1 value) |
---|---|---|
/django-pyc-1.1.0.tar.gz/django-pyc-1.1.0/test_project/settings.py
|
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'kgoqit(&apn#(s@jn0ari1pgtvi6_j#k94ut*=0y0v771qawfj'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'test_project.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_pyc',
'test_project',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
|
PypiClean
|
/OctoBot-Backtesting-1.9.1.tar.gz/OctoBot-Backtesting-1.9.1/octobot_backtesting/api/__init__.py
|
from octobot_backtesting.api import data_file_converters
from octobot_backtesting.api import data_file
from octobot_backtesting.api import importer
from octobot_backtesting.api import backtesting
from octobot_backtesting.api import exchange_data_collector
from octobot_backtesting.api.data_file_converters import (
convert_data_file,
)
from octobot_backtesting.api.data_file import (
get_all_available_data_files,
delete_data_file,
get_file_description,
)
from octobot_backtesting.api.importer import (
create_importer,
get_available_data_types,
get_data_file,
get_data_file_from_importers,
get_data_file_path,
get_available_time_frames,
get_available_symbols,
get_data_timestamp_interval,
get_all_ohlcvs,
stop_importer,
)
from octobot_backtesting.api.backtesting import (
set_time_updater_interval,
set_iteration_timeout,
get_importers,
get_backtesting_current_time,
get_backtesting_starting_time,
get_backtesting_ending_time,
register_backtesting_timestamp_whitelist,
get_backtesting_timestamp_whitelist,
is_backtesting_enabled,
get_backtesting_data_files,
get_backtesting_duration,
create_and_init_backtest_data,
get_preloaded_candles_manager,
initialize_backtesting,
initialize_independent_backtesting_config,
modify_backtesting_timestamps,
adapt_backtesting_channels,
start_backtesting,
stop_backtesting,
stop_independent_backtesting,
)
from octobot_backtesting.api.exchange_data_collector import (
exchange_historical_data_collector_factory,
exchange_bot_snapshot_data_collector_factory,
initialize_and_run_data_collector,
stop_data_collector,
is_data_collector_in_progress,
get_data_collector_progress,
is_data_collector_finished,
)
__all__ = [
"convert_data_file",
"get_all_available_data_files",
"delete_data_file",
"get_file_description",
"create_importer",
"get_available_data_types",
"get_data_file",
"get_data_file_from_importers",
"get_data_file_path",
"get_available_time_frames",
"get_available_symbols",
"get_data_timestamp_interval",
"get_all_ohlcvs",
"stop_importer",
"set_time_updater_interval",
"set_iteration_timeout",
"get_importers",
"get_backtesting_current_time",
"get_backtesting_starting_time",
"get_backtesting_ending_time",
"register_backtesting_timestamp_whitelist",
"get_backtesting_timestamp_whitelist",
"is_backtesting_enabled",
"get_backtesting_data_files",
"get_backtesting_duration",
"create_and_init_backtest_data",
"get_preloaded_candles_manager",
"initialize_backtesting",
"initialize_independent_backtesting_config",
"modify_backtesting_timestamps",
"adapt_backtesting_channels",
"start_backtesting",
"stop_backtesting",
"stop_independent_backtesting",
"exchange_historical_data_collector_factory",
"exchange_bot_snapshot_data_collector_factory",
"initialize_and_run_data_collector",
"stop_data_collector",
"is_data_collector_in_progress",
"get_data_collector_progress",
"is_data_collector_finished",
]
|
PypiClean
|
/lucumaproject-1.1.6.tar.gz/lucumaproject-1.1.6/README.rst
|
A sample Python project
=======================
A sample project that exists as an aid to the `Python Packaging User Guide
<https://packaging.python.org>`_'s `Tutorial on Packaging and Distributing
Projects <https://packaging.python.org/en/latest/distributing.html>`_.
This project does not aim to cover best practices for Python project
development as a whole. For example, it does not provide guidance or tool
recommendations for version control, documentation, or testing.
`The source for this project is available here
<https://github.com/pypa/sampleproject>`_.
Most of the configuration for a Python project is done in the ``setup.py``
file, an example of which is included in this project. You should edit this
file accordingly to adapt this sample project to your needs.
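As a rough illustration (a minimal sketch only; the name, version, and other
metadata below are placeholders rather than values taken from this project), a
``setup.py`` based on setuptools typically looks like this::

    from setuptools import setup, find_packages

    setup(
        name='sample',                      # distribution name used on PyPI
        version='1.0.0',                    # project version
        description='A sample Python project',
        packages=find_packages(),           # discover packages automatically
        install_requires=[],                # runtime dependencies go here
    )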
----
This is the README file for the project.
The file should use UTF-8 encoding and be written using `reStructuredText
<http://docutils.sourceforge.net/rst.html>`_. It
will be used to generate the project webpage on PyPI and will be displayed as
the project homepage on common code-hosting services, and should be written for
that purpose.
Typical contents for this file would include an overview of the project, basic
usage examples, etc. Generally, including the project changelog in here is not
a good idea, although a simple "What's New" section for the most recent version
may be appropriate.
|
PypiClean
|
/django-bulma-form-templates-0.8.4.tar.gz/django-bulma-form-templates-0.8.4/README.md
|
## Fork of [Django Bulma (timonweb)](https://github.com/timonweb/django-bulma)
This fork moves more functionality into the templates, instead of adding CSS in python code.
It is also more extensible, since templates can be included and blocks can be overridden.
For the added functionality, look at section [New Additions](#new-additions)
# A Bulma Theme for Django Projects

A Django base theme based on Bulma ([bulma.io](https://bulma.io/)). Bulma is a modern CSS framework based on Flexbox.
*** work in progress ***
## Installation
1. Install the python package django-bulma from pip
``pip install django-bulma``
Alternatively, you can download or clone this repo and install it with ``pip install -e .``.
2. Add to INSTALLED_APPS in your **settings.py**:
`'bulma',`
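For example (a minimal sketch; the other entries stand in for whatever apps your project already lists):

```python
INSTALLED_APPS = [
    # ... your existing apps ...
    'bulma',
]
```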
3. If you want to use the provided base template, extend from **bulma/base.html**:
```django
{% extends 'bulma/base.html' %}
{% block title %}Bulma Site{% endblock %}
{% block content %}
Content goes here...
{% endblock content %}
```
4. If you want to customize the bulma sass and add your own components:
4.1 Copy bulma static files into your project's **STATIC_ROOT**:
```
python manage.py copy_bulma_static_into_project
```
You should see a **bulma** dir appear in your **STATIC_ROOT**. It contains
two dirs:
* **sass** - this is the place where you can put your own sass code and customize
bulma variables
* **css** - this is where the compiled sass output goes; link this file
in your base.html
4.2 Install npm packages for sass compilation to work:
```
python manage.py bulma install
```
4.3 Start sass watch mode:
```
python manage.py bulma start
```
5. For forms, in your templates, load the `bulma_tags` library and use the `|bulma` filters:
##### Example template
```django
{% load bulma_tags %}
{# Display a form #}
<form action="/url/to/submit/" method="post">
{% csrf_token %}
{{ form|bulma }}
<div class="field">
<button type="submit" class="button is-primary">Login</button>
</div>
<input type="hidden" name="next" value="{{ next }}"/>
</form>
```
## Included templates
**django-bulma** comes with:
* a base template,
* django core registration templates.
## Bugs and suggestions
If you have found a bug or if you have a request for additional functionality, please use the issue tracker on GitHub.
[https://github.com/nkay08/django-bulma/issues](https://github.com/nkay08/django-bulma/issues)
# New Additions
The form and fields can be rendered in exactly the same way as before.
However, fields can now also be rendered by simply including a template.
## Templates
- `bulma/forms/field.html`: The basic field template that is included by django-bulma's `form.html`
- `bulma/forms/field_include.html`: Can be included directly with a `with field=form.<your_field>` statement. Does NOT add markup classes, but they can be provided manually.
- `bulma/forms/bulma_field_include.html`: Can be included directly with a `with field=form.<your_field>` statement, and adds markup classes like the `bulma` template filter
- `bulma/forms/bulma_inline_field_include.html`: Can be included directly with a `with field=form.<your_field>` statement, and adds markup classes like the `bulma_inline` template filter
- `bulma/forms/bulma_horizontal_field_include.html`: Can be included directly with a `with field=form.<your_field>` statement, and adds markup classes like the `bulma_horizontal` template filter
You can customize the fields, e.g. by extending `bulma/forms/field_include.html`, overriding its blocks, and then changing the respective setting.
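For example, a single field might be rendered by including one of these templates directly (a sketch; `form.username` is a hypothetical field on your form):

```django
{# Render one field with bulma markup classes applied #}
{% include 'bulma/forms/bulma_field_include.html' with field=form.username %}
```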
## Settings
You can specify which templates `django-bulma` uses for rendering forms and fields, which allows for extensibility and customization.
These affect `django-bulma`'s rendering template filters, as well as all field templates that are prefixed with `bulma_`.
Options for `settings.py`:
- `BULMA_FIELD_TEMPLATE`: Specifies which field template is used by bulma rendering. Default `"bulma/forms/field_include.html"`.
- `BULMA_FIELD_WRAPPER_TEMPLATE`: Specifies which field wrapper template is used by bulma rendering. This wrapper converts some context dicts to flat variables. Default `"bulma/forms/field.html"`.
- `BULMA_FORM_TEMPLATE`: Specifies which form template is used by bulma rendering. Default `"bulma/forms/form.html"`.
- `BULMA_FORMSET_TEMPLATE`: Specifies which formset template is used by bulma rendering. Default `"bulma/forms/formset.html"`.
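For instance, to point the bulma rendering at your own field template, you might add something like this to `settings.py` (a sketch; `myapp/forms/custom_field.html` is a hypothetical path, and the remaining values simply repeat the documented defaults):

```python
BULMA_FIELD_TEMPLATE = "myapp/forms/custom_field.html"    # hypothetical custom template
BULMA_FIELD_WRAPPER_TEMPLATE = "bulma/forms/field.html"   # documented default
BULMA_FORM_TEMPLATE = "bulma/forms/form.html"             # documented default
BULMA_FORMSET_TEMPLATE = "bulma/forms/formset.html"       # documented default
```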
## Bulma CSS
- Inline icons: You can now generate inputs that have inline icons
- Add `has-icons-left`, `has-icons-right`, or both as `classes_value` when including the template, or provide them as a parameter when using the `bulma` template tag
- You can specify the icon CSS class with the context variables `icon_left_class` and `icon_right_class` (currently only possible when including the template)
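A sketch of what that could look like when including a field template directly (the field name and icon classes are illustrative, not taken from this README):

```django
{# Input with a left-hand icon; classes_value and icon_left_class are described above #}
{% include 'bulma/forms/bulma_field_include.html' with field=form.email classes_value="has-icons-left" icon_left_class="fas fa-envelope" %}
```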
|
PypiClean
|
/django-celery-fulldbresult-0.5.3.tar.gz/django-celery-fulldbresult-0.5.3/django_celery_fulldbresult/migrations/0001_initial.py
|
from __future__ import unicode_literals
from django.db import models, migrations
import djcelery.picklefield
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='TaskResultMeta',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, verbose_name='ID', serialize=False)),
('task_id', models.CharField(unique=True, verbose_name='task id', max_length=255)),
('task', models.CharField(verbose_name='task name', max_length=200)),
('args', models.TextField(help_text='JSON encoded positional arguments', verbose_name='Arguments', default='[]', blank=True)),
('kwargs', models.TextField(help_text='JSON encoded keyword arguments', verbose_name='Keyword arguments', default='{}', blank=True)),
('hostname', models.CharField(verbose_name='hostname', max_length=255, blank=True, null=True)),
('exchange', models.CharField(verbose_name='exchange', max_length=200, default=None, blank=True, null=True)),
('routing_key', models.CharField(verbose_name='routing key', max_length=200, default=None, blank=True, null=True)),
('expires', models.DateTimeField(verbose_name='expires', blank=True, null=True)),
('status', models.CharField(verbose_name='state', max_length=50, default='PENDING', choices=[('REVOKED', 'REVOKED'), ('SUCCESS', 'SUCCESS'), ('RECEIVED', 'RECEIVED'), ('PENDING', 'PENDING'), ('RETRY', 'RETRY'), ('FAILURE', 'FAILURE'), ('STARTED', 'STARTED')])),
('result', djcelery.picklefield.PickledObjectField(default=None, editable=False, null=True)),
('date_submitted', models.DateTimeField(verbose_name='submitted at', blank=True, null=True)),
('date_done', models.DateTimeField(verbose_name='done at', auto_now=True)),
('traceback', models.TextField(verbose_name='traceback', blank=True, null=True)),
('hidden', models.BooleanField(db_index=True, default=False, editable=False)),
('meta', djcelery.picklefield.PickledObjectField(default=None, editable=False, null=True)),
],
options={
'verbose_name_plural': 'task states',
'verbose_name': 'task state',
'db_table': 'celery_taskresultmeta',
},
bases=(models.Model,),
),
]
|
PypiClean
|
/tm-vec-1.0.1.tar.gz/tm-vec-1.0.1/ipynb/.ipynb_checkpoints/repo_EMBED-checkpoint.ipynb
|
```
import numpy as np
import pandas as pd
import torch
from torch.utils.data import Dataset
from embed_structure_model import trans_basic_block, trans_basic_block_Config
from tm_vec_utils import featurize_prottrans, embed_tm_vec
from transformers import T5EncoderModel, T5Tokenizer
import re
import gc
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
import seaborn as sns
#Load the ProtTrans model and ProtTrans tokenizer
tokenizer = T5Tokenizer.from_pretrained("Rostlab/prot_t5_xl_uniref50", do_lower_case=False )
model = T5EncoderModel.from_pretrained("Rostlab/prot_t5_xl_uniref50")
gc.collect()
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
model = model.to(device)
model = model.eval()
#TM-Vec model paths
tm_vec_model_cpnt = "/mnt/home/thamamsy/ceph/deepblast/models/transformer_lr0.0001_dmodel1024_nlayer2_datasample_45_thresh_300_pairs_in_folds_included_23M_normal_tmax/checkpoints/last.ckpt"
tm_vec_model_config = "/mnt/home/thamamsy/ceph/deepblast/models/transformer_lr0.0001_dmodel1024_nlayer2_datasample_45_thresh_300_pairs_in_folds_included_23M_normal_tmax/params.json"
#tm_vec_model_cpnt = '/mnt/home/thamamsy/ceph/deepblast/models/transformer_lr0.000075_dmodel1024_nlayer2_all_pairs_tm_sample_95percent_141Mtest/checkpoints/epoch=3-step=1490999-val_loss=0.0272.ckpt'
#tm_vec_model_config = '/mnt/home/thamamsy/ceph/deepblast/models/transformer_lr0.000075_dmodel1024_nlayer2_all_pairs_tm_sample_95percent_141Mtest/params.json'
#Load the TM-Vec model
tm_vec_model_config = trans_basic_block_Config.from_json(tm_vec_model_config)
model_deep = trans_basic_block.load_from_checkpoint(tm_vec_model_cpnt, config=tm_vec_model_config)
model_deep = model_deep.to(device)
model_deep = model_deep.eval()
#Load some example sequences- in this case Bacteriocins
sequence_file = pd.read_csv("/mnt/home/thamamsy/ceph/deepblast/data/other_benchmarks/bagel_bacteriocins_class_1_unique.csv")
sequence_file['length'] = [len(bact) for bact in sequence_file['Sequence'].values]
#Filter for sequences that meet some criteria- in this case sequences that are longer than 30 residues
sequence_file_longer_than_30 = sequence_file[sequence_file['length'] >= 30]
#Make a list of your sequences
flat_seqs = list(sequence_file_longer_than_30['Sequence'].values)
#Loop through the sequences and embed them
i = 0
embed_all_sequences = []
while i < len(flat_seqs):
protrans_sequence = featurize_prottrans(flat_seqs[i:i+1], model, tokenizer, device)
embedded_sequence = embed_tm_vec(protrans_sequence, model_deep, device)
embed_all_sequences.append(embedded_sequence)
i = i + 1
#Perform TSNE on the resulting embedding vectors
np_bacteriocine_embeddings = np.concatenate(embed_all_sequences, axis=0)
all_bact_X_embedded = TSNE(n_components=2, learning_rate='auto', init='random').fit_transform(np_bacteriocine_embeddings)
all_bact_X_embedded_df = pd.DataFrame(all_bact_X_embedded)
all_bact_X_embedded_df.columns = ["Dim1", "Dim2"]
#Combine the TSNE with the Subclass identity of the class 1 bacteriocin
all_bact_X_embedded_df['Subclass'] = sequence_file_longer_than_30['Subclass'].values
#Visualize the TSNE, coloring by Subclass
sns.lmplot(x="Dim1", y="Dim2", data=all_bact_X_embedded_df, hue="Subclass", fit_reg=False)
```
|
PypiClean
|
/aws_audit-0.1.0-py3-none-any.whl/aws_audit/plugins/sns/snshandler.py
|
import json, os
# from botocore.exceptions import ClientError
# import boto3
from aws_audit.helper.common import CommonHelper
from aws_audit.utils.errors import classify_error
class snshandler:
def __init__(self, session, region, logger):
self.logger = logger
self.region = region
self.helper = CommonHelper(session, region, logger)
try:
# Create SNS client
self.sns_client = session.client('sns', region_name=region)
except Exception as e:
raise classify_error(self.logger, e, 'Failed to create SNS client', {'region': self.region})
self.topics = self.list_topics()
# Get list of all topics
def list_topics(self):
topics = []
response = {}
while True:
try:
if 'NextToken' in response:
response = self.sns_client.list_topics(NextToken=response['NextToken'])
else:
response = self.sns_client.list_topics()
topics.extend(response['Topics'])
except Exception as e:
raise classify_error(self.logger, e, 'Failed to call SNS list_topics', {'region': self.region})
if 'NextToken' not in response:
break
return topics
# Get attribute details for topic
def get_topic_attribute_details(self, topic_arn, attribute):
try:
response = self.sns_client.get_topic_attributes(TopicArn=topic_arn)
if attribute in response['Attributes']:
return response['Attributes'][attribute]
else:
return ''
except Exception as e:
raise classify_error(self.logger, e, 'Failed to call SNS get_topic_attributes', {'region': self.region})
# Get all subscriptions for topic
def list_subscriptions_by_topic(self, topic_arn):
subscriptions = []
response = {}
while True:
try:
if 'NextToken' in response:
response = self.sns_client.list_subscriptions_by_topic(TopicArn=topic_arn,NextToken=response['NextToken'])
else:
response = self.sns_client.list_subscriptions_by_topic(TopicArn=topic_arn)
subscriptions.extend(response['Subscriptions'])
except Exception as e:
raise classify_error(self.logger, e, 'Failed to call SNS list_subscriptions_by_topic', {'region': self.region})
if 'NextToken' not in response:
break
return subscriptions
def sns_topics_not_encrypted(self):
failures = []
for topic in self.topics:
try:
kms_key_id = self.get_topic_attribute_details(topic['TopicArn'],'KmsMasterKeyId')
if kms_key_id == '':
failures = self.helper.append_item_to_list(failures, 'sns', topic['TopicArn'], topic['TopicArn'] , self.region)
except Exception as e:
self.logger.error(f"Error while getting attribute for topic {topic['TopicArn']}: {e}")
return failures
def sns_topics_not_encrypted_with_kms_cmk(self):
failures = []
for topic in self.topics:
try:
kms_key_id = self.get_topic_attribute_details(topic['TopicArn'],'KmsMasterKeyId')
if kms_key_id != '' and kms_key_id == 'alias/aws/sns':
failures = self.helper.append_item_to_list(failures, 'sns', topic['TopicArn'], topic['TopicArn'] , self.region)
except Exception as e:
self.logger.error(f"Error while getting attribute for topic {topic['TopicArn']}: {e}")
return failures
def sns_topic_is_exposed(self):
failures = []
for topic in self.topics:
try:
policy = self.get_topic_attribute_details(topic['TopicArn'],'Policy')
policy_document = json.loads(policy)
is_exposed = self.helper.is_policy_exposed_to_everyone(policy_document)
if is_exposed:
failures = self.helper.append_item_to_list(failures, 'sns', topic['TopicArn'], topic['TopicArn'] , self.region)
except Exception as e:
self.logger.error(f"Error while getting attribute for topic {topic['TopicArn']}: {e}")
continue
return failures
def sns_topic_have_cross_account_access(self):
failures = []
for topic in self.topics:
try:
policy = self.get_topic_attribute_details(topic['TopicArn'],'Policy')
policy_document = json.loads(policy)
is_caa_enabled = self.helper.is_policy_has_cross_account_access(policy_document)
if is_caa_enabled:
failures = self.helper.append_item_to_list(failures, 'sns', topic['TopicArn'], topic['TopicArn'] , self.region)
except Exception as e:
self.logger.error(f"Error while getting attribute for topic {topic['TopicArn']}: {e}")
continue
return failures
def sns_topic_using_insecure_subscription(self):
failures = []
for topic in self.topics:
try:
subscriptions = self.list_subscriptions_by_topic(topic['TopicArn'])
insecure_subscription = 0
for subscription in subscriptions:
if subscription['Protocol'] == 'http' :
insecure_subscription = 1
if insecure_subscription:
failures = self.helper.append_item_to_list(failures, 'sns', topic['TopicArn'], topic['TopicArn'] , self.region)
except Exception as e:
self.logger.error(f"Error while checking subscription for topic {topic['TopicArn']}: {e}")
return failures
def sns_topic_allows_eveyone_to_publish(self):
failures = []
for topic in self.topics:
try:
policy = self.get_topic_attribute_details(topic['TopicArn'],'Policy')
policy_document = json.loads(policy)
for statement in policy_document['Statement']:
if 'AWS' in statement['Principal']:
if statement['Effect'] == 'Allow' and statement['Principal']['AWS'] == '*' and statement['Action'] == 'SNS:Publish' and statement['Resource'] == topic['TopicArn'] and 'Condition' not in statement:
failures = self.helper.append_item_to_list(failures, 'sns', topic['TopicArn'], topic['TopicArn'] , self.region)
except Exception as e:
self.logger.error(f"Error while checking attribute for topic {topic['TopicArn']}: {e}")
return failures
def sns_topic_allows_eveyone_to_subscribe(self):
failures = []
for topic in self.topics:
try:
policy = self.get_topic_attribute_details(topic['TopicArn'],'Policy')
policy_document = json.loads(policy)
action_rule = ["SNS:Receive","SNS:Subscribe"]
for statement in policy_document['Statement']:
statement_action = []
if isinstance(statement['Action'], list):
statement_action = statement['Action']
statement_action.sort()
else :
statement_action.append(statement['Action'])
if 'AWS' in statement['Principal']:
if statement['Effect'] == 'Allow' and statement['Principal']['AWS'] == '*' and action_rule == statement_action and statement['Resource'] == topic['TopicArn'] and 'Condition' not in statement:
failures = self.helper.append_item_to_list(failures, 'sns', topic['TopicArn'], topic['TopicArn'] , self.region)
except Exception as e:
self.logger.error(f"Error while checking attribute for topic {topic['TopicArn']}: {e}")
return failures
|
PypiClean
|
/custom-awscli-1.27.51.tar.gz/custom-awscli-1.27.51/awscli/examples/route53domains/update-domain-nameservers.rst
|
**To update the name servers for a domain**
The following ``update-domain-nameservers`` command updates the name servers for a domain.
This command runs only in the ``us-east-1`` Region. If your default region is set to ``us-east-1``, you can omit the ``--region`` parameter. ::
aws route53domains update-domain-nameservers \
--region us-east-1 \
--domain-name example.com \
--nameservers Name=ns-1.awsdns-01.org Name=ns-2.awsdns-02.co.uk Name=ns-3.awsdns-03.net Name=ns-4.awsdns-04.com
Output::
{
"OperationId": "f1691ec4-0e7a-489e-82e0-b19d3example"
}
To confirm that the operation succeeded, you can run `get-domain-detail <https://docs.aws.amazon.com/cli/latest/reference/route53domains/get-domain-detail.html>`__ .
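For example (``example.com`` is a placeholder domain name)::

    aws route53domains get-domain-detail \
        --region us-east-1 \
        --domain-name example.com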
For more information, see `Adding or Changing Name Servers and Glue Records for a Domain <https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-name-servers-glue-records.html>`__ in the *Amazon Route 53 Developer Guide*.
|
PypiClean
|
/packagetest114-0.114.tar.gz/packagetest114-0.114/devdoo/stack.py
|
import time
import uuid
from pack import Pack
from status import Status
class Stack:
"""A request that entered the system."""
def __init__(self, message, timeout, tries):
"""Create a new request with a maximum number of attempts.
After that, the request will be considered a failure."""
self.deadline = None
self.status = Status()
self.timeout = timeout
self.tries = tries
self.msg = message
# Split the path and the content of the request
self.extract_path(message)
def extract_path(self, request):
"""Separates routing information and data from a received request."""
self.__pack(request)
# Check whether the configuration pack is valid
if self.pack.ready():
self.id = self.pack.id
self.service_id = self.pack.service_id
self.path = [str(self.id), '']
print "STACK.ID=============================>>>>>", self.id
print "self.path=============================>>>>>", self.path
#print "self.content=============================>>>>>", self.content
def update_deadline(self):
"""Update the request deadline if a timeout has been specified."""
if self.timeout is not None:
self.deadline = time.time() + self.timeout
if self.tries is not None:
self.tries -= 1
def make_request(self):
"""Returns a request, by its unique identification, to be sent to a worker.
This also updates the request deadline."""
self.update_deadline()
# self.path[0] = self.id
self.message = self.path + self.content
def expired(self):
"""True if the maximum number of attempts has been made."""
return self.tries == 0
def timed_out(self):
"""True if the request's execution deadline has passed."""
return self.deadline is not None and time.time() > self.deadline
# --------------------------------
# __pack
# --------------------------------
def __pack(self, request):
if type(request) == list and len(request) == 3:
self.content = request[2:]
if type(self.content)==list and len(self.content)==1:
# Prepare configuration received from the rest service
self.pack = Pack(self.content[0], self.status)
else:
self.status.error("ERROR_STACK", None, ["__pack: data received from the broker has no valid content"])
else:
self.status.error("ERROR_STACK", None, ["__pack: data received from the broker is not valid"])
# --------------------------------
# ready
# --------------------------------
def ready(self):
return self.status.ready() and self.pack.ready()
# --------------------------------
# send_error
# --------------------------------
def send_error(self):
return self.path + self.pack.send_error()
|
PypiClean
|
/terrascript_aws-1.0.5.tar.gz/terrascript_aws-1.0.5/src/terrascript_aws/glue/catalog_database.py
|
import terrascript.core as core
@core.schema
class Principal(core.Schema):
data_lake_principal_identifier: str | core.StringOut | None = core.attr(str, default=None)
def __init__(
self,
*,
data_lake_principal_identifier: str | core.StringOut | None = None,
):
super().__init__(
args=Principal.Args(
data_lake_principal_identifier=data_lake_principal_identifier,
),
)
@core.schema_args
class Args(core.SchemaArgs):
data_lake_principal_identifier: str | core.StringOut | None = core.arg(default=None)
@core.schema
class CreateTableDefaultPermission(core.Schema):
permissions: list[str] | core.ArrayOut[core.StringOut] | None = core.attr(
str, default=None, kind=core.Kind.array
)
principal: Principal | None = core.attr(Principal, default=None)
def __init__(
self,
*,
permissions: list[str] | core.ArrayOut[core.StringOut] | None = None,
principal: Principal | None = None,
):
super().__init__(
args=CreateTableDefaultPermission.Args(
permissions=permissions,
principal=principal,
),
)
@core.schema_args
class Args(core.SchemaArgs):
permissions: list[str] | core.ArrayOut[core.StringOut] | None = core.arg(default=None)
principal: Principal | None = core.arg(default=None)
@core.schema
class TargetDatabase(core.Schema):
catalog_id: str | core.StringOut = core.attr(str)
database_name: str | core.StringOut = core.attr(str)
def __init__(
self,
*,
catalog_id: str | core.StringOut,
database_name: str | core.StringOut,
):
super().__init__(
args=TargetDatabase.Args(
catalog_id=catalog_id,
database_name=database_name,
),
)
@core.schema_args
class Args(core.SchemaArgs):
catalog_id: str | core.StringOut = core.arg()
database_name: str | core.StringOut = core.arg()
@core.resource(type="aws_glue_catalog_database", namespace="glue")
class CatalogDatabase(core.Resource):
"""
ARN of the Glue Catalog Database.
"""
arn: str | core.StringOut = core.attr(str, computed=True)
"""
(Optional) ID of the Glue Catalog to create the database in. If omitted, this defaults to the AWS Account ID.
"""
catalog_id: str | core.StringOut | None = core.attr(str, default=None, computed=True)
"""
(Optional) Creates a set of default permissions on the table for principals. See [`create_table_default_permission`](#create_table_default_permission) below.
"""
create_table_default_permission: list[CreateTableDefaultPermission] | core.ArrayOut[
CreateTableDefaultPermission
] | None = core.attr(
CreateTableDefaultPermission, default=None, computed=True, kind=core.Kind.array
)
"""
(Optional) Description of the database.
"""
description: str | core.StringOut | None = core.attr(str, default=None)
"""
Catalog ID and name of the database.
"""
id: str | core.StringOut = core.attr(str, computed=True)
"""
(Optional) Location of the database (for example, an HDFS path).
"""
location_uri: str | core.StringOut | None = core.attr(str, default=None, computed=True)
"""
(Required) Name of the database. The acceptable characters are lowercase letters, numbers, and the underscore character.
"""
name: str | core.StringOut = core.attr(str)
"""
(Optional) List of key-value pairs that define parameters and properties of the database.
"""
parameters: dict[str, str] | core.MapOut[core.StringOut] | None = core.attr(
str, default=None, kind=core.Kind.map
)
"""
(Optional) Configuration block for a target database for resource linking. See [`target_database`](#target_database) below.
"""
target_database: TargetDatabase | None = core.attr(TargetDatabase, default=None)
def __init__(
self,
resource_name: str,
*,
name: str | core.StringOut,
catalog_id: str | core.StringOut | None = None,
create_table_default_permission: list[CreateTableDefaultPermission]
| core.ArrayOut[CreateTableDefaultPermission]
| None = None,
description: str | core.StringOut | None = None,
location_uri: str | core.StringOut | None = None,
parameters: dict[str, str] | core.MapOut[core.StringOut] | None = None,
target_database: TargetDatabase | None = None,
depends_on: list[str] | core.ArrayOut[core.StringOut] | None = None,
provider: str | core.StringOut | None = None,
lifecycle: core.Lifecycle | None = None,
):
super().__init__(
name=resource_name,
args=CatalogDatabase.Args(
name=name,
catalog_id=catalog_id,
create_table_default_permission=create_table_default_permission,
description=description,
location_uri=location_uri,
parameters=parameters,
target_database=target_database,
depends_on=depends_on,
provider=provider,
lifecycle=lifecycle,
),
)
@core.schema_args
class Args(core.Resource.Args):
catalog_id: str | core.StringOut | None = core.arg(default=None)
create_table_default_permission: list[CreateTableDefaultPermission] | core.ArrayOut[
CreateTableDefaultPermission
] | None = core.arg(default=None)
description: str | core.StringOut | None = core.arg(default=None)
location_uri: str | core.StringOut | None = core.arg(default=None)
name: str | core.StringOut = core.arg()
parameters: dict[str, str] | core.MapOut[core.StringOut] | None = core.arg(default=None)
target_database: TargetDatabase | None = core.arg(default=None)
|
PypiClean
|
/B9gemyaeix-4.14.1.tar.gz/B9gemyaeix-4.14.1/weblate/trans/migrations/0133_glossary_missing_files.py
|
import os
from django.conf import settings
from django.db import migrations
from weblate.formats.ttkit import TBXFormat
from weblate.vcs.git import LocalRepository
def migrate_glossaries(apps, schema_editor):
"""
Remove automatically created glossaries for the source language.
These were wrongly created by 0127_fix_source_glossary since
0d8b564903518a313d4116ffe82d9c7bc31f7908, which created a blank repo.
"""
Component = apps.get_model("trans", "Component")
db_alias = schema_editor.connection.alias
for component in (
Component.objects.using(db_alias)
.filter(is_glossary=True, repo="local:")
.prefetch_related("project", "source_language")
):
repo_path = os.path.join(
settings.DATA_DIR, "vcs", component.project.slug, component.slug
)
changed = False
for translation in component.translation_set.select_related("language"):
if translation.language_id == component.source_language_id:
continue
filename = os.path.join(repo_path, translation.filename)
if os.path.exists(filename):
continue
print(f"Adding missing {filename}")
TBXFormat.create_new_file(filename, translation.language.code, "")
store = TBXFormat(
filename,
language_code=translation.language.code,
source_language=component.source_language.code,
)
store.save()
changed = True
# Mark all strings as pending so they are committed later
translation.unit_set.update(
pending=True,
details={"add_unit": True},
)
if changed:
repo = LocalRepository(repo_path)
with repo.lock:
repo.execute(["add", repo_path])
if repo.needs_commit():
repo.commit("Migrate glossary content")
class Migration(migrations.Migration):
dependencies = [
("trans", "0132_alter_unit_state"),
]
operations = [
migrations.RunPython(
migrate_glossaries, migrations.RunPython.noop, elidable=True
)
]
|
PypiClean
|
/easyCrystallography-0.3.0.tar.gz/easyCrystallography-0.3.0/easyCrystallography/Components/Site.py
|
from __future__ import annotations
__author__ = 'github.com/wardsimon'
__version__ = '0.1.0'
from typing import List, Union, ClassVar, TypeVar, Optional, Dict, TYPE_CHECKING
from easyCore import np
from easyCore.Objects.Variable import Descriptor, Parameter
from easyCore.Objects.ObjectClasses import BaseObj
from easyCore.Objects.Groups import BaseCollection
from easyCore.Utils.io.star import StarLoop
from .Lattice import PeriodicLattice
from .Specie import Specie
if TYPE_CHECKING:
from easyCore.Utils.typing import iF
_SITE_DETAILS = {
"label": {
"value": "H",
"description": "A unique identifier for a particular site in the crystal",
"url": "https://www.iucr.org/__data/iucr/cifdic_html/1/cif_core.dic/Iatom_site_label.html",
},
"position": {
"value": 0.0,
"description": "Atom-site coordinate as fractions of the unit cell length.",
"url": "https://www.iucr.org/__data/iucr/cifdic_html/1/cif_core.dic/Iatom_site_fract_.html",
"fixed": True,
},
"occupancy": {
"value": 1.0,
"description": "The fraction of the atom type present at this site.",
"url": "https://www.iucr.org/__data/iucr/cifdic_html/1/cif_core.dic/Iatom_site_occupancy.html",
"fixed": True,
},
}
S = TypeVar("S", bound="Site")
class Site(BaseObj):
label: ClassVar[Descriptor]
specie: ClassVar[Specie]
occupancy: ClassVar[Parameter]
fract_x: ClassVar[Parameter]
fract_y: ClassVar[Parameter]
fract_z: ClassVar[Parameter]
def __init__(
self,
label: Optional[Union[str, Descriptor]] = None,
specie: Optional[Union[str, Specie]] = None,
occupancy: Optional[Union[float, Parameter]] = None,
fract_x: Optional[Union[float, Parameter]] = None,
fract_y: Optional[Union[float, Parameter]] = None,
fract_z: Optional[Union[float, Parameter]] = None,
interface: Optional[iF] = None,
**kwargs,
):
super(Site, self).__init__(
"site",
label=Descriptor("label", **_SITE_DETAILS["label"]),
specie=Specie(_SITE_DETAILS["label"]["value"]),
occupancy=Parameter("occupancy", **_SITE_DETAILS["occupancy"]),
fract_x=Parameter("fract_x", **_SITE_DETAILS["position"]),
fract_y=Parameter("fract_y", **_SITE_DETAILS["position"]),
fract_z=Parameter("fract_z", **_SITE_DETAILS["position"]),
**kwargs,
)
if label is not None:
self.label = label
if specie is not None:
self.specie = specie
else:
if label is not None:
self.specie = label
if occupancy is not None:
self.occupancy = occupancy
if fract_x is not None:
self.fract_x = fract_x
if fract_y is not None:
self.fract_y = fract_y
if fract_z is not None:
self.fract_z = fract_z
self.interface = interface
def __repr__(self) -> str:
return (
f"Atom {self.name} ({self.specie.raw_value}) @"
f" ({self.fract_x.raw_value}, {self.fract_y.raw_value}, {self.fract_z.raw_value})"
)
@property
def name(self) -> str:
return self.label.raw_value
@property
def fract_coords(self) -> np.ndarray:
"""
Get the current site's fractional co-ordinates as an array
:return: Array containing fractional co-ordinates
:rtype: np.ndarray
"""
return np.array(
[self.fract_x.raw_value, self.fract_y.raw_value, self.fract_z.raw_value]
)
def fract_distance(self, other_site: S) -> float:
"""
Get the distance between two sites
:param other_site: Second site
:type other_site: Site
:return: Distance between 2 sites
:rtype: float
"""
return np.linalg.norm(other_site.fract_coords - self.fract_coords)
@property
def x(self) -> Parameter:
return self.fract_x
@property
def y(self) -> Parameter:
return self.fract_y
@property
def z(self) -> Parameter:
return self.fract_z
@property
def is_magnetic(self) -> bool:
return getattr(self.specie, 'spin', None) is not None or hasattr(self, 'msp')
class PeriodicSite(Site):
def __init__(
self,
lattice: Optional[PeriodicLattice] = None,
label: Optional[Union[str, Descriptor]] = None,
specie: Optional[Union[str, Specie]] = None,
occupancy: Optional[Union[float, Parameter]] = None,
fract_x: Optional[Union[float, Parameter]] = None,
fract_y: Optional[Union[float, Parameter]] = None,
fract_z: Optional[Union[float, Parameter]] = None,
interface: Optional[iF] = None,
**kwargs,
):
super(PeriodicSite, self).__init__(
label, specie, occupancy, fract_x, fract_y, fract_z, **kwargs
)
if lattice is None:
lattice = PeriodicLattice()
self.lattice = lattice
self.interface = interface
@staticmethod
def _from_site_kwargs(lattice: PeriodicLattice, site: S) -> Dict[str, float]:
return {
"lattice": lattice,
"label": site.label,
"specie": site.specie,
"occupancy": site.occupancy,
"fract_x": site.fract_x,
"fract_y": site.fract_y,
"fract_z": site.fract_z,
"interface": site.interface,
}
@classmethod
def from_site(cls, lattice: PeriodicLattice, site: S) -> S:
kwargs = cls._from_site_kwargs(lattice, site)
return cls(**kwargs)
def get_orbit(self) -> np.ndarray:
"""
Generate all orbits for a given fractional position.
"""
sym_op = self.lattice.spacegroup._sg_data.get_orbit
return sym_op(self.fract_coords)
@property
def cart_coords(self) -> np.ndarray:
"""
Get the atomic position in Cartesian form.
:return: Cartesian coordinates of the site
:rtype: np.ndarray
"""
return self.lattice.get_cartesian_coords(self.fract_coords)
class Atoms(BaseCollection):
_SITE_CLASS = Site
def __init__(self, name: str, *args, interface: Optional[iF] = None, **kwargs):
if not isinstance(name, str):
raise TypeError("A `name` for this collection must be given in string form")
super(Atoms, self).__init__(name, *args, **kwargs)
self.interface = interface
self._kwargs._stack_enabled = True
def __repr__(self) -> str:
return f"Collection of {len(self)} sites."
def __getitem__(
self, idx: Union[int, slice]
) -> Union[Parameter, Descriptor, BaseObj, "BaseCollection"]:
if isinstance(idx, str) and idx in self.atom_labels:
idx = self.atom_labels.index(idx)
return super(Atoms, self).__getitem__(idx)
def __delitem__(self, key: Union[int, str]):
if isinstance(key, str) and key in self.atom_labels:
key = self.atom_labels.index(key)
return super(Atoms, self).__delitem__(key)
def append(self, item: S):
if not issubclass(type(item), Site):
raise TypeError("Item must be a Site")
if item.label.raw_value in self.atom_labels:
raise AttributeError(
f"An atom of name {item.label.raw_value} already exists."
)
super(Atoms, self).append(item)
@property
def atom_labels(self) -> List[str]:
return [atom.label.raw_value for atom in self]
@property
def atom_species(self) -> List[str]:
return [atom.specie.raw_value for atom in self]
@property
def atom_occupancies(self) -> np.ndarray:
return np.array([atom.occupancy.raw_value for atom in self])
A = TypeVar("A", bound=Atoms)
class PeriodicAtoms(Atoms):
_SITE_CLASS = PeriodicSite
def __init__(self, name: str, *args,
lattice: Optional[PeriodicLattice] = None,
interface: Optional[iF] = None, **kwargs):
args = list(args)
if lattice is None:
for item in args:
if hasattr(item, "lattice"):
lattice = item.lattice
break
if lattice is None:
raise AttributeError
for idx, item in enumerate(args):
if issubclass(type(item), Site):
args[idx] = self._SITE_CLASS.from_site(lattice, item)
super(PeriodicAtoms, self).__init__(name, *args, **kwargs, interface=interface)
self.lattice = lattice
@classmethod
def from_atoms(cls, lattice: PeriodicLattice, atoms: Atoms) -> A:
return cls(atoms.name, *atoms, lattice=lattice, interface=atoms.interface)
def __repr__(self) -> str:
return f"Collection of {len(self)} periodic sites."
def append(self, item: S):
if not issubclass(item.__class__, Site):
raise TypeError("Item must be a Site or periodic site")
if item.label.raw_value in self.atom_labels:
raise AttributeError(
f"An atom of name {item.label.raw_value} already exists."
)
# if isinstance(item, Site):
item = self._SITE_CLASS.from_site(self.lattice, item)
super(PeriodicAtoms, self).append(item)
def get_orbits(self, magnetic_only: bool = False):
orbit_dict = {}
for item in self:
if magnetic_only and not item.is_magnetic:
continue
orbit_dict[item.label.raw_value] = item.get_orbit()
return orbit_dict
|
PypiClean
|
/clawpack-5.9.0.tar.gz/clawpack-5.9.0/pyclaw/examples/euler_gravity_3d/rising_hot_sphere.py
|
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from mappedGrid import euler3d_mappedgrid as mg
from six.moves import range
try:
from mpi4py import MPI
mpiAvailable = True
except ImportError:
# Fall back to serial execution when mpi4py is unavailable
mpiAvailable = False
if mpiAvailable:
mpiRank = MPI.COMM_WORLD.Get_rank()
mpiSize = MPI.COMM_WORLD.Get_size()
else:
mpiRank = 0
mpiSize = 1
# Constants
gamma = 1.4 # Ratio of specific heats
gamma1 = gamma - 1.
gR = 980.665 # Acceleration due to gravity [cm/s**2]
kBoltzmann = 1.3807e-16 # Boltzmann constant [erg/K]
nAvogadro = 6.0221e23 # Avogadro's number [1/mol]
# Hot Sphere Parameters
xSphere = 1000.e5; ySphere = 1000.e5; zSphere = 150.e5
rSphere = 40.e5 # Radius of Sphere [cm]
TSphere = 2.7e4 # Temperature of Sphere Perturbation
# Grid Parameters
mxyz = [80,80,80] # Number of Grid Cells
xyzMin = [500.e5 , 500.e5 , 80.0e5 ] # Domain limits (min) [cm]
xyzMax = [1500.0e5, 1500.0e5, 950.0e5] # Domain limits (max) [cm]
mapType = "ZeroToOne"
z0 = xyzMin[2]
zN = xyzMax[2]
# Gravity Terms
gravityTerm = True # Turn Gravity Term On or Off in Riemann Solver
gravityEflux = False # Turn Gravity Term in Energy Flux On/Off
gFlux = 0
if gravityEflux: gFlux = 1
#-----------------------------------------------------------------------
# Name: outputDensity(state)
#
# Description:
# Using derived quantities function, output 2D slice of density
# for use in regression testing.
#-----------------------------------------------------------------------
def outputDensity(state):
state.p[0,:,:,:] = state.q[0,:,:,:]
nx = np.size(state.q,1)
x = np.reshape(state.q[0,nx//2,:,:],np.size(state.q[0,nx//2,:,:]),order='F')
np.savetxt('verify_rising_hot_sphere_classic_1.txt',x,fmt='%18.8e',delimiter=' ')
#-----------------------------------------------------------------------
# Description:
# Equilibrium atmosphere
#
# Inputs:
# p0[mz+2*mbc] : pressure (1D array)
# rho0[mz+2*mbc] : density (1D array)
# Mavg[mz+2*mbc] : average molecular mass (1D array)
#
# Input/Outputs:
# p0,rho0,Mavg : 1D z-column initialization of p0 and rho0
#-----------------------------------------------------------------------
def setEquilibriumAtmosphere(p0,rho0,Mavg):
p0 = [1.28255457e+02,2.45768842e+01,4.14947876e+00,6.29750420e-01,1.01220380e-01,2.64133921e-02,1.22941741e-02,7.08667395e-03,4.52931611e-03,3.07286214e-03,2.16905463e-03,1.57652477e-03,1.17092484e-03,8.84611067e-04,6.77691403e-04,5.25138237e-04,4.10841768e-04,3.24102394e-04,2.57470120e-04,2.05925021e-04,1.65598592e-04,1.33701518e-04,1.08364754e-04,8.82441931e-05,7.21143717e-05,5.91376054e-05,4.86178229e-05,4.00787900e-05,3.30908693e-05,2.73888126e-05,2.27031016e-05,1.88518481e-05,1.56898948e-05,1.30700401e-05,1.08991559e-05,9.09869161e-06,7.60521743e-06,6.36376491e-06,5.32972657e-06,4.46856235e-06,3.74878325e-06,3.14890785e-06,2.64613146e-06,2.22646032e-06,1.87396531e-06,1.57844875e-06,1.33028392e-06,1.12211091e-06,9.47071388e-07,7.99762122e-07,6.75921511e-07,5.71493939e-07,4.83610358e-07,4.09325094e-07,3.46744110e-07,2.93793938e-07,2.49152408e-07,2.11367113e-07,1.79432411e-07,1.52415843e-07,1.29549499e-07,1.10136422e-07,9.37086690e-08,7.97324669e-08,6.79127210e-08,5.78532722e-08,4.93172661e-08,4.20604343e-08,3.58836884e-08,3.06389102e-08,2.61608771e-08,2.23557534e-08,1.91042726e-08,1.63479490e-08,1.39976779e-08,1.19853352e-08,1.02623231e-08,8.78713846e-09,7.53940212e-09,6.46885245e-09,5.55032464e-09,4.76222864e-09,4.09020086e-09,3.51658796e-09]
rho0 = [1.93347036e-07,4.03984315e-08,7.33795328e-09,1.16964004e-09,1.64049100e-10,2.53990286e-11,7.54287116e-12,3.40478277e-12,1.84556481e-12,1.10964372e-12,7.13581470e-13,4.81506393e-13,3.36472592e-13,2.41540079e-13,1.77156053e-13,1.32213794e-13,1.00089557e-13,7.67024111e-14,5.93930647e-14,4.64294817e-14,3.65782332e-14,2.90138753e-14,2.31378048e-14,1.85800114e-14,1.49929512e-14,1.21526733e-14,9.89015561e-15,8.07840567e-15,6.61976992e-15,5.43890503e-15,4.48202167e-15,3.70250573e-15,3.06590093e-15,2.54266886e-15,2.11283102e-15,1.75827860e-15,1.46560471e-15,1.22337830e-15,1.02239821e-15,8.55585508e-16,7.16578299e-16,6.01033981e-16,5.04419184e-16,4.23940996e-16,3.56468062e-16,2.99992883e-16,2.52633808e-16,2.12955966e-16,1.79630105e-16,1.51610996e-16,1.28075790e-16,1.08244792e-16,9.15665290e-17,7.74771188e-17,6.56137471e-17,5.55805979e-17,4.71251502e-17,3.99708405e-17,3.39261636e-17,2.88137888e-17,2.44878021e-17,2.08159094e-17,1.77092661e-17,1.50666724e-17,1.28321441e-17,1.09306468e-17,9.31730480e-18,7.94587120e-18,6.77866202e-18,5.78764327e-18,4.94156316e-18,4.22266806e-18,3.60840539e-18,3.08771188e-18,2.64374425e-18,2.26362608e-18,1.93817162e-18,1.65953699e-18,1.42386938e-18,1.22167290e-18,1.04819271e-18,8.99349679e-19,7.72429901e-19,6.64098458e-19]
Mavg = [28.85614554,28.85337155,28.83817654,28.56226512,27.60224909,26.26692289,25.23573593,24.45469565,23.79308533,23.18781005,22.61490394,22.07318988,21.55703223,21.06778441,20.60540309,20.17202267,19.76585711,19.38847601,19.0408475, 18.71970337,18.42758099,18.16274099,17.92359740,17.70606183,17.51035814,17.33530373,17.17893585,17.03979933,16.91620578,16.80712079,16.71028376,16.62471452,16.54940299,16.48292773,16.42454596,16.37307369,16.32776306,16.28801338,16.2531155, 16.22247335,16.19551611,16.17188138,16.15108306,16.13288090,16.11686426,16.10282002,16.09046507,16.07960946,16.07007411,16.06169374,16.05433222,16.04784993,16.04215209,16.03712679,16.0327204,16.02883120,16.02540929,16.02239140,16.01973516,16.01738918,16.01531699,16.01348647,16.01187781,16.01045286,16.00919766,16.00808580,16.00710454,16.00623687,16.00546792,16.00478755,16.00418349,16.00365220,16.00317996,16.00276269,16.00239247,16.00206303,16.00176987,16.00150902,16.00127962,16.00107519,16.00089299,16.00073063,16.00058692,16.00045964]
return p0,rho0,Mavg
#-----------------------------------------------------------------------
# Description:
# Modify pressure to create numeric atmosphere equilibrium
#
# Inputs:
# ze0[mz+2*mbc+1] : cell edge grid values
# p0[mz+2*mbc] : pressure
# rho0[mz+2*mbc] : density
#
# Input/Outputs:
# p0,rho0 : 1D z-column modification of p0 and rho0
#-----------------------------------------------------------------------
def modifyEquilibriumAtmosphere(zep0,p0,rho0):
# Compute the delta-z (dz)
nz = np.size(zep0)-1
dz = np.zeros([nz],dtype='float',order='F')
for iz in range(nz-1):
dz[iz] = zep0[iz+1]-zep0[iz]
# Compute modified pressure at cell centers
iz = nz-1
dz2 = (dz[iz]+dz[iz-1])*0.5
p0[iz] = p0[iz] + rho0[iz]*gR*dz2
for iz in range(nz-1,0,-1):
dz2 = (dz[iz]+dz[iz-1])*0.5
finterp = dz[iz-1]/(dz[iz]+dz[iz-1])
rho_b = rho0[iz]*finterp + rho0[iz-1]*(1.-finterp)
p0[iz-1] = p0[iz] + rho_b*gR*dz2
return p0
#-----------------------------------------------------------------------
# Description:
# Custom BCs for the z-direction
#-----------------------------------------------------------------------
def customBCLowerZ(state,dim,t,qbc,auxbc,mbc):
for k in range(mbc):
qbc[0,:,:,k] = rho0[k]
qbc[1,:,:,k] = 0.
qbc[2,:,:,k] = 0.
qbc[3,:,:,k] = 0.
qbc[4,:,:,k] = p0[k]/gamma1 + qbc[0,:,:,k]*gR*zcpZ[k]*gFlux
def customBCUpperZ(state,dim,t,qbc,auxbc,mbc):
for k in range(mbc):
qbc[0,:,:,-k-1] = rho0[-k-1]
qbc[1,:,:,-k-1] = qbc[1,:,:,-mbc-1]
qbc[2,:,:,-k-1] = qbc[2,:,:,-mbc-1]
qbc[3,:,:,-k-1] = qbc[3,:,:,-mbc-1]
rhov2 = (qbc[1,:,:,-k-1]**2 + qbc[2,:,:,-k-1]**2 + qbc[3,:,:,-k-1]**2)/qbc[0,:,:,-k-1]
qbc[4,:,:,-k-1] = p0[-k-1]/gamma1 + 0.5*rhov2 + qbc[0,:,:,-k-1]*gR*zcpZ[-k-1]*gFlux
def customAuxBCLowerZ(state,dim,t,qbc,auxbc,mbc):
auxbc[:,:,:,:mbc] = auxtmp[:,:,:,:mbc]
def customAuxBCUpperZ(state,dim,t,qbc,auxbc,mbc):
auxbc[:,:,:,-mbc:] = auxtmp[:,:,:,-mbc:]
#-----------------------------------------------------------------------
# Main script for solving 3D Euler equations using Clawpack/PyClaw.
#-----------------------------------------------------------------------
def euler3d(kernel_language='Fortran',solver_type='classic',\
use_petsc=False,outdir='./_output',\
output_format='hdf5',file_prefix='equil',disable_output=False,\
mx=mxyz[0],my=mxyz[1],mz=mxyz[2],\
tfinal=64.0,num_output_times=1):
if use_petsc:
import clawpack.petclaw as pyclaw
else:
from clawpack import pyclaw
if solver_type=='classic':
solver = pyclaw.ClawSolver3D()
solver.dimensional_split = True
solver.limiters = pyclaw.limiters.tvd.minmod
solver.num_ghost = 2
solver.order = 2
solver.fwave = True
elif solver_type=='sharpclaw':
solver = pyclaw.SharpClawSolver3D()
else:
raise Exception('Unrecognized solver_type.')
import logging
solver.logger.setLevel(logging.DEBUG)
import euler_3d_gmap
solver.rp = euler_3d_gmap
solver.num_eqn = 5
solver.num_waves = 3
solver.cfl_max = 0.6
solver.cfl_desired = 0.5
solver.dt_initial = 1.e-0
solver.max_steps = 10000
# Initialize Domain
x = pyclaw.Dimension(0.0,1.0,mx,name='x')
y = pyclaw.Dimension(0.0,1.0,my,name='y')
z = pyclaw.Dimension(0.0,1.0,mz,name='z')
domain = pyclaw.Domain([x,y,z])
num_aux = 15
state = pyclaw.State(domain,solver.num_eqn,num_aux)
state.problem_data['gamma']=gamma
state.problem_data['g_r'] = gR
state.problem_data['gravity'] = gravityTerm
state.problem_data['gravityflux'] = gravityEflux
# Grids
mbc = solver.num_ghost
grid = state.grid
# Computational Grid Sizes
dxc = domain.grid.delta[0]
dyc = domain.grid.delta[1]
dzc = domain.grid.delta[2]
pmx, pmy, pmz = grid.num_cells[0], grid.num_cells[1], grid.num_cells[2]
# Computational Grid Centers and nodes
centers = grid.c_centers # centers (Comp.)
centersBC = grid.c_centers_with_ghost(mbc) # centers w Ghost (Comp.)
nodesBC = grid.c_nodes_with_ghost(mbc) # nodes w Ghost (Comp.)
# Grid Centers Without Boundary Cells (1D Slice) - Comp. and Phys.
xcc = grid.x.centers # x centers (Comp.)
ycc = grid.y.centers # y centers (Comp.)
zcc = grid.z.centers # z centers (Comp.)
xcp,ycp,zcp = mg.mapc2pwrapper(xcc,ycc,zcc,pmz,xyzMin,xyzMax,mapType)
# Grid Centers Without Boundary Cells (3D Arrays)
Xcc,Ycc,Zcc = centers[0][:][:][:],centers[1][:][:][:],centers[2][:][:][:]
Xcp,Ycp,Zcp = mg.mapc2pwrapper(Xcc,Ycc,Zcc,pmz,xyzMin,xyzMax,mapType)
Xcp = np.reshape(Xcp,[pmx,pmy,pmz],order='F') # x centers (Phys.)
Ycp = np.reshape(Ycp,[pmx,pmy,pmz],order='F') # y centers (Phys.)
Zcp = np.reshape(Zcp,[pmx,pmy,pmz],order='F') # z centers (Phys.)
# Grid nodes With Boundary Cells (1D Slice along z)- Comp. and Phys.
xecZ = nodesBC[0][0][0][:] # x nodes along z (Comp.)
yecZ = nodesBC[1][0][0][:] # y nodes along z (Comp.)
zecZ = nodesBC[2][0][0][:] # z nodes along z (Comp.)
xepZ,yepZ,zepZ = mg.mapc2pwrapper(xecZ,yecZ,zecZ,pmz,xyzMin,xyzMax,mapType)
# Grid Centers With Boundary Cells (1D Slice along z) - Comp. and Phys.
global zcpZ
xccZ = centersBC[0][0][0][:] # x centers along z (Comp.)
yccZ = centersBC[1][0][0][:] # y centers along z (Comp.)
zccZ = centersBC[2][0][0][:] # z centers along z (Comp.)
xcpZ,ycpZ,zcpZ = mg.mapc2pwrapper(xccZ,yccZ,zccZ,pmz,xyzMin,xyzMax,mapType)
if np.sqrt(xepZ[0]**2+yepZ[0]**2+zepZ[0]**2) <= 0:
print("WARNING: z may go below Earth's surface"," zepZ: ",zepZ[0:10])
# Create vectors for 1D pressure and density column with boundary cells
mz0 = pmz+2*mbc
global p0, rho0, Mavg
p0 = np.zeros([mz0],dtype='float',order='F')
rho0 = np.zeros([mz0],dtype='float',order='F')
Mavg = np.zeros([mz0],dtype='float',order='F')
# Set the equilibrium pressure such that dp/dz = -rho*gR
p0,rho0,Mavg = setEquilibriumAtmosphere(p0,rho0,Mavg)
# Modify the equilibrium such that dp/dz = -rho*gR is held numerically
p0 = modifyEquilibriumAtmosphere(zepZ,p0,rho0)
# Set the auxiliary variables
xlower,ylower,zlower = nodesBC[0][0][0][0],nodesBC[1][0][0][0],nodesBC[2][0][0][0]
dxc,dyc,dzc = domain.grid.delta[0],domain.grid.delta[1],domain.grid.delta[2]
global auxtmp
auxtmp = np.zeros([num_aux,pmx+2*mbc,pmy+2*mbc,pmz+2*mbc],dtype='float',order='F')
auxtmp = mg.setauxiliaryvariables(num_aux,mbc,pmx,pmy,pmz,xlower,ylower,zlower,dxc,dyc,dzc,xyzMin,xyzMax,mapType)
state.aux[:,:,:,:] = auxtmp[:,mbc:-mbc,mbc:-mbc,mbc:-mbc]
# Set Index for Capacity Function in state.aux (Python 0-based)
state.index_capa = 12
# Set the state variables (Initial Conditions)
# Initialize p,T,velSqrd
p = np.zeros([pmx,pmy,pmz],dtype='float',order='F')
T = np.zeros([pmx,pmy,pmz],dtype='float',order='F')
velSqrd = np.zeros([pmx,pmy,pmz],dtype='float',order='F')
# Density
for i in range(pmx):
for j in range(pmy):
# NEEDS TO BE FIXED WHEN MPI SLICES NORMAL TO Z
state.q[0,i,j,:] = rho0[mbc:pmz+mbc]
# Momentum
state.q[1,:,:,:] = 0. # x-momentum (rho*u)
state.q[2,:,:,:] = 0. # y-momentum (rho*v)
state.q[3,:,:,:] = 0. # z-momentum (rho*w)
# Velocity Squared (u**2+v**2+w**2)
velSqrd[:,:,:] = (state.q[1,:,:,:]**2+state.q[2,:,:,:]**2 + state.q[3,:,:,:]**2)/state.q[0,:,:,:]**2
# Energy
for i in range(pmx):
for j in range(pmy):
# NEEDS TO BE FIXED WHEN MPI SLICES NORMAL TO Z
p[i,j,:] = p0[mbc:pmz+mbc]
state.q[4,:,:,:] = p/gamma1 + 0.5*state.q[0,:,:,:]*velSqrd + state.q[0,:,:,:]*(gR)*Zcp[:,:,:]*gFlux
# Add Temperature Perturbation
T = p/state.q[0,:,:,:]
L = np.sqrt((Xcp-xSphere)**2+(Ycp-ySphere)**2+(Zcp-zSphere)**2)
for i in range(pmx):
for j in range(pmy):
for k in range(pmz):
if L[i,j,k] <= rSphere:
mu = Mavg[k+mbc]/nAvogadro
T[i,j,k] += TSphere*(kBoltzmann/mu)*(1.0-L[i,j,k]/rSphere)
p[i,j,k] = T[i,j,k]*state.q[0,i,j,k]
state.q[4,:,:,:] = p/gamma1 + 0.5*state.q[0,:,:,:]*velSqrd + state.q[0,:,:,:]*(gR)*Zcp[:,:,:]*gFlux # energy (e)
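    # Under the ideal-gas assumption used here (gamma1 presumably being gamma-1),
    # the total energy density is assembled as internal energy p/gamma1, plus
    # kinetic energy 0.5*rho*(u**2+v**2+w**2), plus an optional gravitational
    # potential term rho*gR*Zcp that is switched on or off via gFlux. The same
    # expression appears twice: once for the unperturbed column and again after
    # the thermal sphere perturbation has updated p.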
# Setup Boundary Conditions
# X - Boundary Conditions
solver.bc_lower[0] = pyclaw.BC.extrap
solver.bc_upper[0] = pyclaw.BC.extrap
# Y - Boundary Conditions
solver.bc_lower[1] = pyclaw.BC.extrap
solver.bc_upper[1] = pyclaw.BC.extrap
# Z - Boundary Conditions
solver.bc_lower[2] = pyclaw.BC.custom
solver.bc_upper[2] = pyclaw.BC.custom
solver.user_bc_lower = customBCLowerZ
solver.user_bc_upper = customBCUpperZ
# Aux - Boundary Conditions
solver.aux_bc_lower[0] = pyclaw.BC.extrap
solver.aux_bc_upper[0] = pyclaw.BC.extrap
solver.aux_bc_lower[1] = pyclaw.BC.extrap
solver.aux_bc_upper[1] = pyclaw.BC.extrap
solver.aux_bc_lower[2] = pyclaw.BC.custom
solver.aux_bc_upper[2] = pyclaw.BC.custom
solver.user_aux_bc_lower = customAuxBCLowerZ
solver.user_aux_bc_upper = customAuxBCUpperZ
# Solver Parameters
claw = pyclaw.Controller()
claw.verbosity = 4
claw.solution = pyclaw.Solution(state,domain)
claw.solver = solver
claw.output_format = output_format
claw.output_file_prefix = file_prefix
claw.keep_copy = False
if disable_output:
claw.output_format = None
claw.tfinal = tfinal
claw.num_output_times = num_output_times
claw.outdir = outdir
#state.mp = 1
#claw.compute_p = outputDensity
return claw
# __main__()
if __name__=="__main__":
from clawpack.pyclaw.util import run_app_from_main
output = run_app_from_main(euler3d)
|
PypiClean
|
/mr.bob-1.0.0.tar.gz/mr.bob-1.0.0/mrbob/rendering.py
|
from os import path
from shutil import copy2
import codecs
import fnmatch
import os
import re
import six
import stat
from jinja2 import Environment, StrictUndefined
jinja2_env = Environment(
block_start_string="{{%",
block_end_string="%}}",
variable_start_string="{{{",
variable_end_string="}}}",
trim_blocks=True,
undefined=StrictUndefined,
)
DEFAULT_IGNORED_FILES = ['.mrbob.ini', '.DS_Store']
DEFAULT_IGNORED_DIRECTORIES = []
def jinja2_renderer(s, v):
return jinja2_env.from_string(s).render(parse_variables(v))
def python_formatting_renderer(s, v):
return s % v
def parse_variables(variables):
d = dict()
for key, value in variables.items():
keys = key.split('.')
new_d = None
for k in keys[:-1]:
if new_d is None:
if k not in d:
d[k] = dict()
new_d = d[k]
else:
if k not in new_d:
new_d[k] = dict()
new_d = new_d[k]
if new_d is None:
d[keys[-1]] = value
else:
new_d[keys[-1]] = value
return dict(d)
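# Illustrative sketch (hypothetical values) of how the renderer and parse_variables
# cooperate: dotted keys are expanded into nested dicts, and templates address them
# through the non-default delimiters configured in jinja2_env above, e.g.
#
#     parse_variables({'author.name': 'Bob'})   # -> {'author': {'name': 'Bob'}}
#     jinja2_renderer(u"Hello {{{author.name}}}!", {'author.name': 'Bob'})
#     # -> u"Hello Bob!"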
def matches_any(filename, patterns):
result = any(fnmatch.fnmatch(filename, pat) for pat in patterns)
return result
def render_structure(fs_source_root, fs_target_root, variables, verbose,
renderer, ignored_files, ignored_directories):
"""Recursively copies the given filesystem path `fs_source_root_ to a target directory `fs_target_root`.
Any files ending in `.bob` are rendered as templates using the given
renderer using the variables dictionary, thereby losing the `.bob` suffix.
strings wrapped in `+` signs in file- or directory names will be replaced
with values from the variables, i.e. a file named `+name+.py.bob` given a
dictionary {'name': 'bar'} would be rendered as `bar.py`.
"""
ignored_files.extend(DEFAULT_IGNORED_FILES)
ignored_directories.extend(DEFAULT_IGNORED_DIRECTORIES)
if not isinstance(fs_source_root, six.text_type): # pragma: no cover
fs_source_root = six.u(fs_source_root)
for fs_source_dir, local_directories, local_files in os.walk(fs_source_root, topdown=True):
fs_target_dir = path.abspath(path.join(fs_target_root, path.relpath(fs_source_dir, fs_source_root)))
local_directories[:] = [d for d in local_directories if not matches_any(d, ignored_directories)]
for local_file in local_files:
if matches_any(local_file, ignored_files):
continue
render_template(
path.join(fs_source_dir, local_file),
render_filename(fs_target_dir, variables),
variables,
verbose,
renderer,
)
for local_directory in local_directories:
abs_dir = render_filename(path.join(fs_target_dir, local_directory), variables)
if not path.exists(abs_dir):
if verbose:
print(six.u("mkdir %s") % abs_dir)
os.mkdir(abs_dir)
def render_template(fs_source, fs_target_dir, variables, verbose, renderer):
filename = path.split(fs_source)[1]
if filename.endswith('.bob'):
filename = filename.split('.bob')[0]
fs_target_path = path.join(fs_target_dir, render_filename(filename, variables))
if verbose:
print(six.u("Rendering %s to %s") % (fs_source, fs_target_path))
fs_source_mode = stat.S_IMODE(os.stat(fs_source).st_mode)
with codecs.open(fs_source, 'r', 'utf-8') as f:
source_output = f.read()
output = renderer(source_output, variables)
# append newline due to jinja2 bug, see https://github.com/iElectric/mr.bob/issues/30
if source_output.endswith('\n') and not output.endswith('\n'):
output += '\n'
with codecs.open(fs_target_path, 'w', 'utf-8') as fs_target:
fs_target.write(output)
os.chmod(fs_target_path, fs_source_mode)
else:
fs_target_path = path.join(fs_target_dir, render_filename(filename, variables))
if verbose:
print(six.u("Copying %s to %s") % (fs_source, fs_target_path))
copy2(fs_source, fs_target_path)
return path.join(fs_target_dir, filename)
def render_filename(filename, variables):
variables_regex = re.compile(r"\+[^+%s]+\+" % re.escape(os.sep))
replaceables = variables_regex.findall(filename)
for replaceable in replaceables:
actual_replaceable = replaceable.replace('+', '')
if actual_replaceable in variables:
filename = filename.replace(replaceable, variables[actual_replaceable])
else:
raise KeyError('%s key part of filename %s was not found in variables %s' % (actual_replaceable, filename, variables))
return filename
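# Illustrative sketch of render_filename with made-up values: placeholders wrapped
# in '+' are substituted from `variables` (the '.bob' suffix is only stripped later,
# in render_template), and a placeholder with no matching key raises KeyError, e.g.
#
#     render_filename('src/+package_name+/+name+.py.bob',
#                     {'package_name': 'mypkg', 'name': 'bar'})
#     # -> 'src/mypkg/bar.py.bob'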
|
PypiClean
|
/spyne-custom-2.13.22.tar.gz/spyne-custom-2.13.22/spyne/protocol/_inbase.py
|
from __future__ import print_function
import logging
logger = logging.getLogger(__name__)
import re
import pytz
import uuid
from math import modf
from time import strptime, mktime
from datetime import timedelta, time, datetime, date
from decimal import Decimal as D, InvalidOperation
from pytz import FixedOffset
try:
from lxml import etree
from lxml import html
except ImportError:
etree = None
html = None
from spyne.protocol._base import ProtocolMixin
from spyne.model import ModelBase, XmlAttribute, Array, Null, \
ByteArray, File, ComplexModelBase, AnyXml, AnyHtml, Unicode, String, \
Decimal, Double, Integer, Time, DateTime, Uuid, Date, Duration, Boolean, Any
from spyne.error import ValidationError
from spyne.model.binary import binary_decoding_handlers, BINARY_ENCODING_USE_DEFAULT
from spyne.util import six
from spyne.model.enum import EnumBase
from spyne.model.primitive.datetime import TIME_PATTERN, DATE_PATTERN
from spyne.util.cdict import cdict
_date_re = re.compile(DATE_PATTERN)
_time_re = re.compile(TIME_PATTERN)
_duration_re = re.compile(
r'(?P<sign>-?)'
r'P'
r'(?:(?P<years>\d+)Y)?'
r'(?:(?P<months>\d+)M)?'
r'(?:(?P<days>\d+)D)?'
r'(?:T(?:(?P<hours>\d+)H)?'
r'(?:(?P<minutes>\d+)M)?'
r'(?:(?P<seconds>\d+(.\d+)?)S)?)?'
)
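# The pattern above follows the ISO 8601 duration grammar; for example the string
# "-P1Y2M3DT4H5M6.7S" yields sign='-', years='1', months='2', days='3', hours='4',
# minutes='5', seconds='6.7', which duration_from_unicode below turns into a
# negative timedelta (months and years are approximated as 30 and 365 days).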
class InProtocolBase(ProtocolMixin):
"""This is the abstract base class for all input protocol implementations.
Child classes can implement only the required subset of the public methods.
An output protocol must implement :func:`serialize` and
:func:`create_out_string`.
An input protocol must implement :func:`create_in_document`,
:func:`decompose_incoming_envelope` and :func:`deserialize`.
The ProtocolBase class supports the following events:
* ``before_deserialize``:
Called before the deserialization operation is attempted.
* ``after_deserialize``:
Called after the deserialization operation is finished.
The arguments the constructor takes are as follows:
:param app: The application this protocol belongs to.
:param mime_type: The mime_type this protocol should set for transports
        that support this. This is a quick way to override the default mime_type
        instead of subclassing the relevant protocol implementation.
"""
def __init__(self, app=None, validator=None, mime_type=None,
ignore_wrappers=False, binary_encoding=None, string_encoding=None):
self.validator = None
super(InProtocolBase, self).__init__(app=app, mime_type=mime_type,
ignore_wrappers=ignore_wrappers,
binary_encoding=binary_encoding, string_encoding=string_encoding)
self.message = None
self.validator = None
self.set_validator(validator)
if mime_type is not None:
self.mime_type = mime_type
fsh = {
Any: self.any_from_bytes,
Null: self.null_from_bytes,
File: self.file_from_bytes,
Array: self.array_from_bytes,
Double: self.double_from_bytes,
String: self.string_from_bytes,
AnyXml: self.any_xml_from_bytes,
Boolean: self.boolean_from_bytes,
Integer: self.integer_from_bytes,
Unicode: self.unicode_from_bytes,
AnyHtml: self.any_html_from_bytes,
ByteArray: self.byte_array_from_bytes,
EnumBase: self.enum_base_from_bytes,
ModelBase: self.model_base_from_bytes,
XmlAttribute: self.xmlattribute_from_bytes,
ComplexModelBase: self.complex_model_base_from_bytes
}
self._from_bytes_handlers = cdict(fsh)
self._from_unicode_handlers = cdict(fsh)
self._from_bytes_handlers[Date] = self.date_from_bytes
self._from_bytes_handlers[Time] = self.time_from_bytes
self._from_bytes_handlers[Uuid] = self.uuid_from_bytes
self._from_bytes_handlers[Decimal] = self.decimal_from_bytes
self._from_bytes_handlers[DateTime] = self.datetime_from_bytes
self._from_bytes_handlers[Duration] = self.duration_from_bytes
self._from_unicode_handlers[Date] = self.date_from_unicode
self._from_unicode_handlers[Uuid] = self.uuid_from_unicode
self._from_unicode_handlers[Time] = self.time_from_unicode
self._from_unicode_handlers[Decimal] = self.decimal_from_unicode
self._from_unicode_handlers[DateTime] = self.datetime_from_unicode
self._from_unicode_handlers[Duration] = self.duration_from_unicode
self._datetime_dsmap = {
None: self._datetime_from_unicode,
'sec': self._datetime_from_sec,
'sec_float': self._datetime_from_sec_float,
'msec': self._datetime_from_msec,
'msec_float': self._datetime_from_msec_float,
'usec': self._datetime_from_usec,
}
def _datetime_from_sec(self, cls, value):
try:
return datetime.fromtimestamp(value)
except TypeError:
logger.error("Invalid value %r", value)
raise
def _datetime_from_sec_float(self, cls, value):
try:
return datetime.fromtimestamp(value)
except TypeError:
logger.error("Invalid value %r", value)
raise
def _datetime_from_msec(self, cls, value):
try:
return datetime.fromtimestamp(value // 1000)
except TypeError:
logger.error("Invalid value %r", value)
raise
def _datetime_from_msec_float(self, cls, value):
try:
return datetime.fromtimestamp(value / 1000)
except TypeError:
logger.error("Invalid value %r", value)
raise
def _datetime_from_usec(self, cls, value):
try:
return datetime.fromtimestamp(value / 1e6)
except TypeError:
logger.error("Invalid value %r", value)
raise
def create_in_document(self, ctx, in_string_encoding=None):
"""Uses ``ctx.in_string`` to set ``ctx.in_document``."""
def decompose_incoming_envelope(self, ctx, message):
"""Sets the ``ctx.method_request_string``, ``ctx.in_body_doc``,
``ctx.in_header_doc`` and ``ctx.service`` properties of the ctx object,
if applicable.
"""
def deserialize(self, ctx, message):
"""Takes a MethodContext instance and a string containing ONE document
instance in the ``ctx.in_string`` attribute.
Returns the corresponding native python object in the ctx.in_object
attribute.
"""
def validate_document(self, payload):
"""Method to be overriden to perform any sort of custom input
validation on the parsed input document.
"""
def set_validator(self, validator):
"""You must override this function if you want your protocol to support
validation."""
assert validator is None
self.validator = None
def from_bytes(self, class_, string, *args, **kwargs):
if string is None:
return None
if isinstance(string, six.string_types) and \
len(string) == 0 and class_.Attributes.empty_is_none:
return None
handler = self._from_bytes_handlers[class_]
return handler(class_, string, *args, **kwargs)
def from_unicode(self, class_, string, *args, **kwargs):
if string is None:
return None
#if not six.PY2:
# assert isinstance(string, str), \
# "Invalid type passed to `from_unicode`: {}".format(
# (class_, type(string), string))
cls_attrs = self.get_cls_attrs(class_)
if isinstance(string, six.string_types) and len(string) == 0 and \
cls_attrs.empty_is_none:
return None
handler = self._from_unicode_handlers[class_]
return handler(class_, string, *args, **kwargs)
def null_from_bytes(self, cls, value):
return None
def any_from_bytes(self, cls, value):
return value
def any_xml_from_bytes(self, cls, string):
try:
return etree.fromstring(string)
except etree.XMLSyntaxError as e:
raise ValidationError(string, "%%r: %r" % e)
def any_html_from_bytes(self, cls, string):
try:
return html.fromstring(string)
except etree.ParserError as e:
if e.args[0] == "Document is empty":
pass
else:
raise
def uuid_from_unicode(self, cls, string, suggested_encoding=None):
attr = self.get_cls_attrs(cls)
ser_as = attr.serialize_as
encoding = attr.encoding
if encoding is None:
encoding = suggested_encoding
retval = string
if ser_as in ('bytes', 'bytes_le'):
retval, = binary_decoding_handlers[encoding](string)
try:
retval = _uuid_deserialize[ser_as](retval)
except ValueError as e:
raise ValidationError(e)
return retval
def uuid_from_bytes(self, cls, string, suggested_encoding=None, **_):
attr = self.get_cls_attrs(cls)
ser_as = attr.serialize_as
encoding = attr.encoding
if encoding is None:
encoding = suggested_encoding
retval = string
if ser_as in ('bytes', 'bytes_le'):
retval, = binary_decoding_handlers[encoding](string)
elif isinstance(string, six.binary_type):
retval = string.decode('ascii')
try:
retval = _uuid_deserialize[ser_as](retval)
except ValueError as e:
raise ValidationError(e)
return retval
def unicode_from_bytes(self, cls, value):
retval = value
if isinstance(value, six.binary_type):
cls_attrs = self.get_cls_attrs(cls)
if cls_attrs.encoding is not None:
retval = six.text_type(value, cls_attrs.encoding,
errors=cls_attrs.unicode_errors)
elif self.string_encoding is not None:
retval = six.text_type(value, self.string_encoding,
errors=cls_attrs.unicode_errors)
else:
retval = six.text_type(value, errors=cls_attrs.unicode_errors)
return retval
def string_from_bytes(self, cls, value):
retval = value
cls_attrs = self.get_cls_attrs(cls)
if isinstance(value, six.text_type):
if cls_attrs.encoding is None:
raise Exception("You need to define a source encoding for "
"decoding incoming unicode values.")
else:
retval = value.encode(cls_attrs.encoding)
return retval
def decimal_from_unicode(self, cls, string):
cls_attrs = self.get_cls_attrs(cls)
if cls_attrs.max_str_len is not None and len(string) > \
cls_attrs.max_str_len:
raise ValidationError(string, "Decimal %%r longer than %d "
"characters" % cls_attrs.max_str_len)
try:
return D(string)
except InvalidOperation as e:
raise ValidationError(string, "%%r: %r" % e)
def decimal_from_bytes(self, cls, string):
return self.decimal_from_unicode(cls,
string.decode(self.default_string_encoding))
def double_from_bytes(self, cls, string):
try:
return float(string)
except (TypeError, ValueError) as e:
raise ValidationError(string, "%%r: %r" % e)
def integer_from_bytes(self, cls, string):
cls_attrs = self.get_cls_attrs(cls)
if isinstance(string, (six.text_type, six.binary_type)) and \
cls_attrs.max_str_len is not None and \
len(string) > cls_attrs.max_str_len:
raise ValidationError(string,
"Integer %%r longer than %d characters"
% cls_attrs.max_str_len)
try:
return int(string)
except ValueError:
raise ValidationError(string, "Could not cast %r to integer")
def time_from_unicode(self, cls, string):
"""Expects ISO formatted times."""
match = _time_re.match(string)
if match is None:
raise ValidationError(string, "%%r does not match regex %r " %
_time_re.pattern)
fields = match.groupdict(0)
microsec = fields.get('sec_frac')
if microsec is None or microsec == 0:
microsec = 0
else:
microsec = min(999999, int(round(float(microsec) * 1e6)))
return time(int(fields['hr']), int(fields['min']),
int(fields['sec']), microsec)
def time_from_bytes(self, cls, string):
if isinstance(string, six.binary_type):
string = string.decode(self.default_string_encoding)
return self.time_from_unicode(cls, string)
def date_from_unicode_iso(self, cls, string):
"""This is used by protocols like SOAP who need ISO8601-formatted dates
no matter what.
"""
try:
return date(*(strptime(string, u'%Y-%m-%d')[0:3]))
except ValueError:
match = cls._offset_re.match(string)
if match:
year = int(match.group('year'))
month = int(match.group('month'))
day = int(match.group('day'))
return date(year, month, day)
raise ValidationError(string)
def enum_base_from_bytes(self, cls, value):
if self.validator is self.SOFT_VALIDATION and not (
cls.validate_string(cls, value)):
raise ValidationError(value)
return getattr(cls, value)
def model_base_from_bytes(self, cls, value):
return cls.from_bytes(value)
def datetime_from_unicode_iso(self, cls, string):
astz = self.get_cls_attrs(cls).as_timezone
match = cls._utc_re.match(string)
if match:
tz = pytz.utc
retval = _parse_datetime_iso_match(match, tz=tz)
if astz is not None:
retval = retval.astimezone(astz)
return retval
if match is None:
match = cls._offset_re.match(string)
if match:
tz_hr, tz_min = [int(match.group(x))
for x in ("tz_hr", "tz_min")]
tz = FixedOffset(tz_hr * 60 + tz_min, {})
retval = _parse_datetime_iso_match(match, tz=tz)
if astz is not None:
retval = retval.astimezone(astz)
return retval
if match is None:
match = cls._local_re.match(string)
if match:
retval = _parse_datetime_iso_match(match)
if astz:
retval = retval.replace(tzinfo=astz)
return retval
raise ValidationError(string)
def datetime_from_unicode(self, cls, string):
serialize_as = self.get_cls_attrs(cls).serialize_as
return self._datetime_dsmap[serialize_as](cls, string)
def datetime_from_bytes(self, cls, string):
if isinstance(string, six.binary_type):
string = string.decode(self.default_string_encoding)
serialize_as = self.get_cls_attrs(cls).serialize_as
return self._datetime_dsmap[serialize_as](cls, string)
def date_from_bytes(self, cls, string):
if isinstance(string, six.binary_type):
string = string.decode(self.default_string_encoding)
date_format = self._get_date_format(self.get_cls_attrs(cls))
try:
if date_format is not None:
dt = datetime.strptime(string, date_format)
return date(dt.year, dt.month, dt.day)
return self.date_from_unicode_iso(cls, string)
except ValueError as e:
match = cls._offset_re.match(string)
if match:
return date(int(match.group('year')),
int(match.group('month')), int(match.group('day')))
else:
raise ValidationError(string,
"%%r: %s" % repr(e).replace("%", "%%"))
def date_from_unicode(self, cls, string):
date_format = self._get_date_format(self.get_cls_attrs(cls))
try:
if date_format is not None:
dt = datetime.strptime(string, date_format)
return date(dt.year, dt.month, dt.day)
return self.date_from_unicode_iso(cls, string)
except ValueError as e:
match = cls._offset_re.match(string)
if match:
return date(int(match.group('year')),
int(match.group('month')), int(match.group('day')))
else:
# the message from ValueError is quite nice already
                raise ValidationError(str(e), "%s")
    def duration_from_unicode(self, cls, string):
        match = _duration_re.match(string)
        if match is None:
            raise ValidationError(string,
                "Time data '%%s' does not match regex '%s'" %
                (_duration_re.pattern,))
        duration = match.groupdict(0)
days = int(duration['days'])
days += int(duration['months']) * 30
days += int(duration['years']) * 365
hours = int(duration['hours'])
minutes = int(duration['minutes'])
seconds = float(duration['seconds'])
f, i = modf(seconds)
seconds = i
microseconds = int(1e6 * f)
delta = timedelta(days=days, hours=hours, minutes=minutes,
seconds=seconds, microseconds=microseconds)
if duration['sign'] == "-":
delta *= -1
return delta
def duration_from_bytes(self, cls, string):
if isinstance(string, six.binary_type):
string = string.decode(self.default_string_encoding)
return self.duration_from_unicode(cls, string)
def boolean_from_bytes(self, cls, string):
return string.lower() in ('true', '1')
def byte_array_from_bytes(self, cls, value, suggested_encoding=None):
encoding = self.get_cls_attrs(cls).encoding
if encoding is BINARY_ENCODING_USE_DEFAULT:
encoding = suggested_encoding
return binary_decoding_handlers[encoding](value)
def file_from_bytes(self, cls, value, suggested_encoding=None):
encoding = self.get_cls_attrs(cls).encoding
if encoding is BINARY_ENCODING_USE_DEFAULT:
encoding = suggested_encoding
return File.Value(data=binary_decoding_handlers[encoding](value))
def complex_model_base_from_bytes(self, cls, string, **_):
raise TypeError("Only primitives can be deserialized from string.")
def array_from_bytes(self, cls, string, **_):
if self.get_cls_attrs(cls).serialize_as != 'sd-list':
raise TypeError("Only primitives can be deserialized from string.")
# sd-list being space-delimited list.
retval = []
inner_type, = cls._type_info.values()
for s in string.split():
retval.append(self.from_bytes(inner_type, s))
return retval
def xmlattribute_from_bytes(self, cls, value):
return self.from_bytes(cls.type, value)
def _datetime_from_unicode(self, cls, string):
cls_attrs = self.get_cls_attrs(cls)
# get parser
parser = cls_attrs.parser
# get date_format
dt_format = cls_attrs.dt_format
if dt_format is None:
dt_format = cls_attrs.date_format
if dt_format is None:
dt_format = cls_attrs.out_format
if dt_format is None:
dt_format = cls_attrs.format
# parse the string
if parser is not None:
retval = parser(self, cls, string)
elif dt_format is not None:
if six.PY2:
# FIXME: perhaps it should encode to string's encoding instead
# of utf8 all the time
if isinstance(dt_format, six.text_type):
dt_format = dt_format.encode('utf8')
if isinstance(string, six.text_type):
string = string.encode('utf8')
retval = datetime.strptime(string, dt_format)
astz = cls_attrs.as_timezone
if astz:
                retval = retval.astimezone(astz)
else:
retval = self.datetime_from_unicode_iso(cls, string)
return retval
_uuid_deserialize = {
None: lambda s: uuid.UUID(s),
'hex': lambda s: uuid.UUID(hex=s),
'urn': lambda s: uuid.UUID(hex=s),
'bytes': lambda s: uuid.UUID(bytes=s),
'bytes_le': lambda s: uuid.UUID(bytes_le=s),
'fields': lambda s: uuid.UUID(fields=s),
'int': lambda s: uuid.UUID(int=s),
('int', int): lambda s: uuid.UUID(int=s),
('int', str): lambda s: uuid.UUID(int=int(s)),
}
if six.PY2:
_uuid_deserialize[('int', long)] = _uuid_deserialize[('int', int)]
def _parse_datetime_iso_match(date_match, tz=None):
fields = date_match.groupdict()
year = int(fields.get('year'))
month = int(fields.get('month'))
day = int(fields.get('day'))
hour = int(fields.get('hr'))
minute = int(fields.get('min'))
second = int(fields.get('sec'))
usecond = fields.get("sec_frac")
if usecond is None:
usecond = 0
else:
# we only get the most significant 6 digits because that's what
# datetime can handle.
usecond = min(999999, int(round(float(usecond) * 1e6)))
return datetime(year, month, day, hour, minute, second, usecond, tz)
_dt_sec = lambda cls, val: \
int(mktime(val.timetuple()))
_dt_sec_float = lambda cls, val: \
mktime(val.timetuple()) + (val.microsecond / 1e6)
_dt_msec = lambda cls, val: \
int(mktime(val.timetuple())) * 1000 + (val.microsecond // 1000)
_dt_msec_float = lambda cls, val: \
mktime(val.timetuple()) * 1000 + (val.microsecond / 1000.0)
_dt_usec = lambda cls, val: \
int(mktime(val.timetuple())) * 1000000 + val.microsecond
_datetime_smap = {
'sec': _dt_sec,
'secs': _dt_sec,
'second': _dt_sec,
'seconds': _dt_sec,
'sec_float': _dt_sec_float,
'secs_float': _dt_sec_float,
'second_float': _dt_sec_float,
'seconds_float': _dt_sec_float,
'msec': _dt_msec,
'msecs': _dt_msec,
'msecond': _dt_msec,
'mseconds': _dt_msec,
'millisecond': _dt_msec,
'milliseconds': _dt_msec,
'msec_float': _dt_msec_float,
'msecs_float': _dt_msec_float,
'msecond_float': _dt_msec_float,
'mseconds_float': _dt_msec_float,
'millisecond_float': _dt_msec_float,
'milliseconds_float': _dt_msec_float,
'usec': _dt_usec,
'usecs': _dt_usec,
'usecond': _dt_usec,
'useconds': _dt_usec,
'microsecond': _dt_usec,
'microseconds': _dt_usec,
}
def _file_to_iter(f):
try:
data = f.read(65536)
while len(data) > 0:
yield data
data = f.read(65536)
finally:
f.close()
|
PypiClean
|
/wad2023_program-1.3.0-py3-none-any.whl/wad2023_program/view.py
|
from rich import box
from rich.console import Console, Group
from rich.panel import Panel
from rich.table import Table
from .model import Session, Speaker
def speaker_text(speaker: Speaker) -> str:
"""Create a text for a speaker.
Returns a string in a default format for a speaker.
Args:
speaker: the speaker object.
Returns:
        The formatted text for the speaker.
"""
output_text = f'\n[yellow][b]{speaker.name}[/b][/yellow]'
if len(speaker.tagline) > 0:
output_text += f'\n[orange][i]{speaker.tagline}[/i][/orange]'
if len(speaker.bio) > 0:
output_text += '\n\n' + speaker.bio
if len(speaker.links) > 0:
output_text += '\n'
for link in speaker.links:
output_text += (f'\n[yellow][b]{link.name}:[/b][/yellow] ' +
f'[green]{link.url}[/green]')
return output_text
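# Illustrative sketch with made-up data: for a speaker named 'Ada' whose tagline is
# 'Engineer', whose bio is 'Writes code.' and who has one link named 'Web' pointing
# at https://example.com, the returned rich markup is roughly:
#
#     '\n[yellow][b]Ada[/b][/yellow]'
#     '\n[orange][i]Engineer[/i][/orange]'
#     '\n\nWrites code.'
#     '\n\n[yellow][b]Web:[/b][/yellow] [green]https://example.com[/green]'
#
# concatenated into a single string.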
def view_sessions_as_table(sessions: list[Session]) -> None:
"""View Sessions in a table.
Prints the sessions in a `rich` table.
Args:
sessions: a list with Session objects to view.
"""
console = Console()
table = Table(box=box.HORIZONTALS)
table.add_column('*')
table.add_column('Type')
table.add_column('Date')
table.add_column('Start')
table.add_column('End')
table.add_column('Stage')
table.add_column('Title')
table.add_column('Speakers')
for sess in sessions:
table.add_row(
'*' if sess.favourite else '',
sess.session_type.capitalize(),
f'{sess.start_time_berlin:%Y-%m-%d}',
f'{sess.start_time_berlin:%H:%M}',
f'{sess.end_time_berlin:%H:%M}',
sess.stage.name,
sess.title,
', '.join([speaker.name for speaker in sess.speakers])
)
console.print(table)
def view_sessions_as_csv(sessions: list[Session]) -> None:
"""View Sessions in a csv.
Prints the sessions in a CSV output.
Args:
sessions: a list with Session objects to view.
"""
columns = ('Favourite', 'Type', 'Date', 'Start',
'End', 'Stage', 'Title', 'Speakers')
print(';'.join([f'"{column}"' for column in columns]))
for sess in sessions:
row = ['*' if sess.favourite else '',
sess.session_type.capitalize(),
f'{sess.start_time_berlin:%Y-%m-%d}',
f'{sess.start_time_berlin:%H:%M}',
f'{sess.end_time_berlin:%H:%M}',
sess.stage.name,
sess.title,
', '.join([speaker.name for speaker in sess.speakers])]
print(';'.join([f'"{row_data}"' for row_data in row]))
def view_sessions_as_details(sessions: list[Session]) -> None:
"""View Sessions with details.
    Prints the sessions with full details, using rich panels.
Args:
sessions: a list with Session objects to view.
"""
console = Console()
for sess in sessions:
table = Table(box=box.MINIMAL, show_header=False)
table.add_column('Field')
table.add_column('Information')
table.add_row('Title', sess.title)
table.add_row('Session ID', str(sess.id))
table.add_row(
'Date', (f'{sess.start_time_berlin:%Y-%m-%d} ' +
f'({sess.start_time_berlin:%H:%M} - ' +
f'{sess.end_time_berlin:%H:%M})'))
table.add_row('Stage', sess.stage.name)
output_parts: list[str | Table] = [table]
if len(sess.description) > 0:
output_parts.append(sess.description + '\n')
output_parts.append('[green][b]## Speakers[/b][/green]')
for speaker in sess.speakers:
output_parts.append(speaker_text(speaker))
console.print(
Panel(
Group(*output_parts)
))
|
PypiClean
|
/cleanrl-1.2.0.tar.gz/cleanrl-1.2.0/cleanrl_utils/docker_queue.py
|
import argparse
import shlex
import time
import docker
parser = argparse.ArgumentParser(description="CleanRL Docker Submission")
# Common arguments
parser.add_argument("--exp-script", type=str, default="test1.sh", help="the file name of this experiment")
# parser.add_argument('--cuda', type=lambda x:bool(strtobool(x)), default=True, nargs='?', const=True,
# help='if toggled, cuda will not be enabled by default')
parser.add_argument("--num-vcpus", type=int, default=16, help="total number of vcpus used in the host machine")
parser.add_argument("--frequency", type=int, default=1, help="the number of seconds to check container update status")
args = parser.parse_args()
client = docker.from_env()
# c = client.containers.run("ubuntu:latest", "echo hello world", detach=True)
with open(args.exp_script) as f:
lines = f.readlines()
tasks = []
for line in lines:
    line = line.replace("\n", "")
line_split = shlex.split(line)
for idx, item in enumerate(line_split):
if item == "-e":
break
env_vars = line_split[idx + 1 : idx + 2]
image = line_split[idx + 2]
commands = line_split[idx + 3 :]
tasks += [[image, env_vars, commands]]
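# The loop above assumes every line of --exp-script looks roughly like a
# `docker run` invocation with a single `-e VAR=value` pair followed by the image
# and its command, e.g. (hypothetical):
#
#     docker run -d -e WANDB_API_KEY=xxx vwxyzjn/cleanrl:latest python cleanrl/ppo.py --track
#
# Everything before "-e" is ignored; line_split[idx+1:idx+2] keeps the single
# environment variable, line_split[idx+2] the image, and the remainder the command.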
running_containers = []
vcpus = list(range(args.num_vcpus))
while len(tasks) != 0:
time.sleep(args.frequency)
# update running_containers
new_running_containers = []
for item in running_containers:
c = item[0]
c.reload()
if c.status != "exited":
new_running_containers += [item]
else:
print(f"✅ task on vcpu {item[1]} has finished")
vcpus += [item[1]]
running_containers = new_running_containers
if len(vcpus) != 0:
task = tasks.pop()
vcpu = vcpus.pop()
# if args.cuda:
# c = client.containers.run(
# image=task[0],
# environment=task[1],
# command=task[2],
# runtime="nvidia",
# cpuset_cpus=str(vcpu),
# detach=True)
# running_containers += [[c, vcpu]]
# else:
c = client.containers.run(image=task[0], environment=task[1], command=task[2], cpuset_cpus=str(vcpu), detach=True)
running_containers += [[c, vcpu]]
print("========================")
print(f"remaining tasks={len(tasks)}, running containers={len(running_containers)}")
print(f"running on vcpu {vcpu}", task)
|
PypiClean
|
/fern_uberduck-0.0.6.tar.gz/fern_uberduck-0.0.6/src/uberduck/client.py
|
import typing
import urllib.parse
from json.decoder import JSONDecodeError
import httpx
import pydantic
import typing_extensions
from .core.api_error import ApiError
from .core.jsonable_encoder import jsonable_encoder
from .core.remove_none_from_headers import remove_none_from_headers
from .environment import UberduckEnvironment
from .errors.unprocessable_entity_error import UnprocessableEntityError
from .resources.backing_tracks.client import AsyncBackingTracksClient, BackingTracksClient
from .resources.reference_audios.client import AsyncReferenceAudiosClient, ReferenceAudiosClient
from .types.convert_english_to_ipa_phonemes_request_phones import ConvertEnglishToIpaPhonemesRequestPhones
from .types.dataset_speaker import DatasetSpeaker
from .types.freestyle_request_backing_track import FreestyleRequestBackingTrack
from .types.g_2_p_response import G2PResponse
from .types.get_voice_data_request_mode import GetVoiceDataRequestMode
from .types.get_voices_request_mode import GetVoicesRequestMode
from .types.http_validation_error import HttpValidationError
from .types.mix_item import MixItem
from .types.sample import Sample
from .types.speak_status_response import SpeakStatusResponse
from .types.tts_request import TtsRequest
from .types.uuid import Uuid
from .types.voice_return import VoiceReturn
from .types.voice_stats import VoiceStats
# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)
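# OMIT is a sentinel distinct from None: the request builders below only include a
# field when its value `is not OMIT`, so callers can still send an explicit JSON
# null by passing None. A hypothetical call illustrating the difference:
#
#     client.generate_lyrics(subject="ducks")               # body: {"subject": "ducks"}
#     client.generate_lyrics(subject="ducks", lines=None)   # body also carries "lines": null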
class Uberduck:
def __init__(
self, *, environment: UberduckEnvironment = UberduckEnvironment.PRODUCTION, token: typing.Optional[str] = None
):
self._environment = environment
self._token = token
self.reference_audios = ReferenceAudiosClient(environment=self._environment, token=self._token)
self.backing_tracks = BackingTracksClient(environment=self._environment, token=self._token)
def get_voices(
self,
*,
mode: GetVoicesRequestMode,
language: typing.Optional[str] = None,
is_commercial: typing.Optional[bool] = None,
is_private: typing.Optional[bool] = None,
slim: typing.Optional[bool] = None,
) -> typing.List[VoiceReturn]:
_response = httpx.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "voices"),
params={
"mode": mode,
"language": language,
"is_commercial": is_commercial,
"is_private": is_private,
"slim": slim,
},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.List[VoiceReturn], _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def get_voice_detail(self, voicemodel_uuid: str) -> VoiceReturn:
_response = httpx.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", f"voices/{voicemodel_uuid}/detail"),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(VoiceReturn, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def fetch_voice_samples(self, voicemodel_uuid: str) -> typing.List[Sample]:
_response = httpx.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", f"voices/{voicemodel_uuid}/samples"),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.List[Sample], _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def generate_lyrics(
self,
*,
subject: typing.Optional[str] = OMIT,
lines: typing.Optional[int] = OMIT,
backing_track: typing.Optional[str] = OMIT,
generate_title: typing.Optional[bool] = OMIT,
) -> typing.Any:
_request: typing.Dict[str, typing.Any] = {}
if subject is not OMIT:
_request["subject"] = subject
if lines is not OMIT:
_request["lines"] = lines
if backing_track is not OMIT:
_request["backing_track"] = backing_track
if generate_title is not OMIT:
_request["generate_title"] = generate_title
_response = httpx.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "tts/lyrics"),
json=jsonable_encoder(_request),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def generate_freestyle(
self,
*,
subject: typing.Optional[str] = OMIT,
lines: typing.Optional[int] = OMIT,
lyrics: typing.Optional[typing.List[typing.List[str]]] = OMIT,
bpm: typing.Optional[float] = OMIT,
backing_track: typing.Optional[FreestyleRequestBackingTrack] = OMIT,
voice: typing.Optional[str] = OMIT,
voicemodel_uuid: typing.Optional[str] = OMIT,
title: typing.Optional[str] = OMIT,
format: typing.Optional[typing_extensions.Literal["json"]] = OMIT,
generate_title: typing.Optional[bool] = OMIT,
) -> typing.Any:
_request: typing.Dict[str, typing.Any] = {}
if subject is not OMIT:
_request["subject"] = subject
if lines is not OMIT:
_request["lines"] = lines
if lyrics is not OMIT:
_request["lyrics"] = lyrics
if bpm is not OMIT:
_request["bpm"] = bpm
if backing_track is not OMIT:
_request["backing_track"] = backing_track
if voice is not OMIT:
_request["voice"] = voice
if voicemodel_uuid is not OMIT:
_request["voicemodel_uuid"] = voicemodel_uuid
if title is not OMIT:
_request["title"] = title
if format is not OMIT:
_request["format"] = format
if generate_title is not OMIT:
_request["generate_title"] = generate_title
_response = httpx.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "tts/freestyle"),
json=jsonable_encoder(_request),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def do_voice_to_voice_conversion(
self,
*,
voicemodel_uuid: str,
input: typing.Optional[str] = OMIT,
reference_audio_uuid: typing.Optional[str] = OMIT,
pitch_shift: typing.Optional[int] = OMIT,
mix: typing.Optional[typing.List[MixItem]] = OMIT,
) -> typing.Any:
_request: typing.Dict[str, typing.Any] = {"voicemodel_uuid": voicemodel_uuid}
if input is not OMIT:
_request["input"] = input
if reference_audio_uuid is not OMIT:
_request["reference_audio_uuid"] = reference_audio_uuid
if pitch_shift is not OMIT:
_request["pitch_shift"] = pitch_shift
if mix is not OMIT:
_request["mix"] = mix
_response = httpx.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "tts/convert"),
json=jsonable_encoder(_request),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def train(
self,
*,
name: typing.Optional[str] = OMIT,
model_type: typing.Optional[str] = OMIT,
description: typing.Optional[str] = OMIT,
base_model_uuid: typing.Optional[str] = OMIT,
data: typing.Optional[typing.List[DatasetSpeaker]] = OMIT,
device_count: typing.Optional[int] = OMIT,
rank: typing.Optional[int] = OMIT,
epochs: typing.Optional[int] = OMIT,
batch_size: typing.Optional[int] = OMIT,
learning_rate: typing.Optional[float] = OMIT,
ignored_layers: typing.Optional[typing.List[str]] = OMIT,
save_on_finish: typing.Optional[bool] = OMIT,
is_cleaned: typing.Optional[bool] = OMIT,
epochs_per_checkpoint: typing.Optional[int] = OMIT,
is_torchmoji: typing.Optional[bool] = OMIT,
is_multispeaker: typing.Optional[bool] = OMIT,
is_speaker_encoder: typing.Optional[bool] = OMIT,
) -> typing.Any:
_request: typing.Dict[str, typing.Any] = {}
if name is not OMIT:
_request["name"] = name
if model_type is not OMIT:
_request["model_type"] = model_type
if description is not OMIT:
_request["description"] = description
if base_model_uuid is not OMIT:
_request["base_model_uuid"] = base_model_uuid
if data is not OMIT:
_request["data"] = data
if device_count is not OMIT:
_request["device_count"] = device_count
if rank is not OMIT:
_request["rank"] = rank
if epochs is not OMIT:
_request["epochs"] = epochs
if batch_size is not OMIT:
_request["batch_size"] = batch_size
if learning_rate is not OMIT:
_request["learning_rate"] = learning_rate
if ignored_layers is not OMIT:
_request["ignored_layers"] = ignored_layers
if save_on_finish is not OMIT:
_request["save_on_finish"] = save_on_finish
if is_cleaned is not OMIT:
_request["is_cleaned"] = is_cleaned
if epochs_per_checkpoint is not OMIT:
_request["epochs_per_checkpoint"] = epochs_per_checkpoint
if is_torchmoji is not OMIT:
_request["is_torchmoji"] = is_torchmoji
if is_multispeaker is not OMIT:
_request["is_multispeaker"] = is_multispeaker
if is_speaker_encoder is not OMIT:
_request["is_speaker_encoder"] = is_speaker_encoder
_response = httpx.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "train"),
json=jsonable_encoder(_request),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def get_speak_status(self, *, uuid: str) -> SpeakStatusResponse:
_response = httpx.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "speak-status"),
params={"uuid": uuid},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(SpeakStatusResponse, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def generate_speech(self, *, request: TtsRequest, uberduck_id: typing.Optional[str] = None) -> Uuid:
_response = httpx.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "speak"),
json=jsonable_encoder(request),
headers=remove_none_from_headers(
{
"uberduck-id": uberduck_id,
"Authorization": f"Bearer {self._token}" if self._token is not None else None,
}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(Uuid, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def generate_speech_synchronously(self, *, request: TtsRequest, uberduck_id: typing.Optional[str] = None) -> None:
_response = httpx.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "speak-synchronous"),
json=jsonable_encoder(request),
headers=remove_none_from_headers(
{
"uberduck-id": uberduck_id,
"Authorization": f"Bearer {self._token}" if self._token is not None else None,
}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def get_voice_stats(
self,
*,
username: typing.Optional[str] = None,
voicemodel_uuid: typing.Union[typing.Optional[str], typing.List[str]],
) -> typing.List[VoiceStats]:
_response = httpx.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "voice-stats"),
params={"username": username, "voicemodel_uuid": voicemodel_uuid},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.List[VoiceStats], _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def get_voice_data(
self,
*,
name: str,
architecture: typing.Optional[str] = None,
mode: typing.Optional[GetVoiceDataRequestMode] = None,
) -> VoiceReturn:
_response = httpx.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "voice-data"),
params={"name": name, "architecture": architecture, "mode": mode},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(VoiceReturn, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
def convert_english_to_ipa_phonemes(
self, *, text: str, phones: typing.Optional[ConvertEnglishToIpaPhonemesRequestPhones] = None
) -> G2PResponse:
_response = httpx.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "g2p"),
params={"text": text, "phones": phones},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(G2PResponse, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
class AsyncUberduck:
def __init__(
self, *, environment: UberduckEnvironment = UberduckEnvironment.PRODUCTION, token: typing.Optional[str] = None
):
self._environment = environment
self._token = token
self.reference_audios = AsyncReferenceAudiosClient(environment=self._environment, token=self._token)
self.backing_tracks = AsyncBackingTracksClient(environment=self._environment, token=self._token)
async def get_voices(
self,
*,
mode: GetVoicesRequestMode,
language: typing.Optional[str] = None,
is_commercial: typing.Optional[bool] = None,
is_private: typing.Optional[bool] = None,
slim: typing.Optional[bool] = None,
) -> typing.List[VoiceReturn]:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "voices"),
params={
"mode": mode,
"language": language,
"is_commercial": is_commercial,
"is_private": is_private,
"slim": slim,
},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.List[VoiceReturn], _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def get_voice_detail(self, voicemodel_uuid: str) -> VoiceReturn:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", f"voices/{voicemodel_uuid}/detail"),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(VoiceReturn, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def fetch_voice_samples(self, voicemodel_uuid: str) -> typing.List[Sample]:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", f"voices/{voicemodel_uuid}/samples"),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.List[Sample], _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def generate_lyrics(
self,
*,
subject: typing.Optional[str] = OMIT,
lines: typing.Optional[int] = OMIT,
backing_track: typing.Optional[str] = OMIT,
generate_title: typing.Optional[bool] = OMIT,
) -> typing.Any:
_request: typing.Dict[str, typing.Any] = {}
if subject is not OMIT:
_request["subject"] = subject
if lines is not OMIT:
_request["lines"] = lines
if backing_track is not OMIT:
_request["backing_track"] = backing_track
if generate_title is not OMIT:
_request["generate_title"] = generate_title
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "tts/lyrics"),
json=jsonable_encoder(_request),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def generate_freestyle(
self,
*,
subject: typing.Optional[str] = OMIT,
lines: typing.Optional[int] = OMIT,
lyrics: typing.Optional[typing.List[typing.List[str]]] = OMIT,
bpm: typing.Optional[float] = OMIT,
backing_track: typing.Optional[FreestyleRequestBackingTrack] = OMIT,
voice: typing.Optional[str] = OMIT,
voicemodel_uuid: typing.Optional[str] = OMIT,
title: typing.Optional[str] = OMIT,
format: typing.Optional[typing_extensions.Literal["json"]] = OMIT,
generate_title: typing.Optional[bool] = OMIT,
) -> typing.Any:
_request: typing.Dict[str, typing.Any] = {}
if subject is not OMIT:
_request["subject"] = subject
if lines is not OMIT:
_request["lines"] = lines
if lyrics is not OMIT:
_request["lyrics"] = lyrics
if bpm is not OMIT:
_request["bpm"] = bpm
if backing_track is not OMIT:
_request["backing_track"] = backing_track
if voice is not OMIT:
_request["voice"] = voice
if voicemodel_uuid is not OMIT:
_request["voicemodel_uuid"] = voicemodel_uuid
if title is not OMIT:
_request["title"] = title
if format is not OMIT:
_request["format"] = format
if generate_title is not OMIT:
_request["generate_title"] = generate_title
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "tts/freestyle"),
json=jsonable_encoder(_request),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def do_voice_to_voice_conversion(
self,
*,
voicemodel_uuid: str,
input: typing.Optional[str] = OMIT,
reference_audio_uuid: typing.Optional[str] = OMIT,
pitch_shift: typing.Optional[int] = OMIT,
mix: typing.Optional[typing.List[MixItem]] = OMIT,
) -> typing.Any:
_request: typing.Dict[str, typing.Any] = {"voicemodel_uuid": voicemodel_uuid}
if input is not OMIT:
_request["input"] = input
if reference_audio_uuid is not OMIT:
_request["reference_audio_uuid"] = reference_audio_uuid
if pitch_shift is not OMIT:
_request["pitch_shift"] = pitch_shift
if mix is not OMIT:
_request["mix"] = mix
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "tts/convert"),
json=jsonable_encoder(_request),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def train(
self,
*,
name: typing.Optional[str] = OMIT,
model_type: typing.Optional[str] = OMIT,
description: typing.Optional[str] = OMIT,
base_model_uuid: typing.Optional[str] = OMIT,
data: typing.Optional[typing.List[DatasetSpeaker]] = OMIT,
device_count: typing.Optional[int] = OMIT,
rank: typing.Optional[int] = OMIT,
epochs: typing.Optional[int] = OMIT,
batch_size: typing.Optional[int] = OMIT,
learning_rate: typing.Optional[float] = OMIT,
ignored_layers: typing.Optional[typing.List[str]] = OMIT,
save_on_finish: typing.Optional[bool] = OMIT,
is_cleaned: typing.Optional[bool] = OMIT,
epochs_per_checkpoint: typing.Optional[int] = OMIT,
is_torchmoji: typing.Optional[bool] = OMIT,
is_multispeaker: typing.Optional[bool] = OMIT,
is_speaker_encoder: typing.Optional[bool] = OMIT,
) -> typing.Any:
_request: typing.Dict[str, typing.Any] = {}
if name is not OMIT:
_request["name"] = name
if model_type is not OMIT:
_request["model_type"] = model_type
if description is not OMIT:
_request["description"] = description
if base_model_uuid is not OMIT:
_request["base_model_uuid"] = base_model_uuid
if data is not OMIT:
_request["data"] = data
if device_count is not OMIT:
_request["device_count"] = device_count
if rank is not OMIT:
_request["rank"] = rank
if epochs is not OMIT:
_request["epochs"] = epochs
if batch_size is not OMIT:
_request["batch_size"] = batch_size
if learning_rate is not OMIT:
_request["learning_rate"] = learning_rate
if ignored_layers is not OMIT:
_request["ignored_layers"] = ignored_layers
if save_on_finish is not OMIT:
_request["save_on_finish"] = save_on_finish
if is_cleaned is not OMIT:
_request["is_cleaned"] = is_cleaned
if epochs_per_checkpoint is not OMIT:
_request["epochs_per_checkpoint"] = epochs_per_checkpoint
if is_torchmoji is not OMIT:
_request["is_torchmoji"] = is_torchmoji
if is_multispeaker is not OMIT:
_request["is_multispeaker"] = is_multispeaker
if is_speaker_encoder is not OMIT:
_request["is_speaker_encoder"] = is_speaker_encoder
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "train"),
json=jsonable_encoder(_request),
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def get_speak_status(self, *, uuid: str) -> SpeakStatusResponse:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "speak-status"),
params={"uuid": uuid},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(SpeakStatusResponse, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def generate_speech(self, *, request: TtsRequest, uberduck_id: typing.Optional[str] = None) -> Uuid:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "speak"),
json=jsonable_encoder(request),
headers=remove_none_from_headers(
{
"uberduck-id": uberduck_id,
"Authorization": f"Bearer {self._token}" if self._token is not None else None,
}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(Uuid, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def generate_speech_synchronously(
self, *, request: TtsRequest, uberduck_id: typing.Optional[str] = None
) -> None:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"POST",
urllib.parse.urljoin(f"{self._environment.value}/", "speak-synchronous"),
json=jsonable_encoder(request),
headers=remove_none_from_headers(
{
"uberduck-id": uberduck_id,
"Authorization": f"Bearer {self._token}" if self._token is not None else None,
}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def get_voice_stats(
self,
*,
username: typing.Optional[str] = None,
voicemodel_uuid: typing.Union[typing.Optional[str], typing.List[str]],
) -> typing.List[VoiceStats]:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "voice-stats"),
params={"username": username, "voicemodel_uuid": voicemodel_uuid},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(typing.List[VoiceStats], _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def get_voice_data(
self,
*,
name: str,
architecture: typing.Optional[str] = None,
mode: typing.Optional[GetVoiceDataRequestMode] = None,
) -> VoiceReturn:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "voice-data"),
params={"name": name, "architecture": architecture, "mode": mode},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(VoiceReturn, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
async def convert_english_to_ipa_phonemes(
self, *, text: str, phones: typing.Optional[ConvertEnglishToIpaPhonemesRequestPhones] = None
) -> G2PResponse:
async with httpx.AsyncClient() as _client:
_response = await _client.request(
"GET",
urllib.parse.urljoin(f"{self._environment.value}/", "g2p"),
params={"text": text, "phones": phones},
headers=remove_none_from_headers(
{"Authorization": f"Bearer {self._token}" if self._token is not None else None}
),
timeout=60,
)
if 200 <= _response.status_code < 300:
return pydantic.parse_obj_as(G2PResponse, _response.json()) # type: ignore
if _response.status_code == 422:
raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore
try:
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, body=_response.text)
raise ApiError(status_code=_response.status_code, body=_response_json)
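# Illustrative usage sketch (not part of the generated client). It assumes an
# already-constructed instance of the async client class above, passed in as
# `client`; the UUID value supplied by the caller is a placeholder of their own.
async def _example_poll_speak_status(client, speak_uuid: str):
    # Poll the status of a previously submitted text-to-speech request.
    return await client.get_speak_status(uuid=speak_uuid)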
|
PypiClean
|
/neuro-flow-23.7.0.tar.gz/neuro-flow-23.7.0/src/neuro_flow/cli/live.py
|
import click
import neuro_sdk
from contextlib import AsyncExitStack
from typing import List, Optional, Sequence, Tuple
from neuro_flow.cli.click_types import (
LIVE_JOB,
LIVE_JOB_OR_ALL,
PROJECT,
LiveJobSuffixType,
)
from neuro_flow.cli.utils import argument, option, wrap_async
from neuro_flow.live_runner import LiveRunner
from ..storage.api import ApiStorage
from ..storage.base import Storage
from .root import Root
@click.command()
@wrap_async()
async def ps(
root: Root,
) -> None:
"""List all jobs"""
async with AsyncExitStack() as stack:
client = await stack.enter_async_context(neuro_sdk.get())
storage: Storage = await stack.enter_async_context(ApiStorage(client))
runner = await stack.enter_async_context(
LiveRunner(root.config_dir, root.console, client, storage, root)
)
await runner.ps()
@click.command()
@option("-s", "--suffix", help="Optional suffix for multi-jobs")
@option(
"-p",
"--param",
type=(str, str),
multiple=True,
help="Set params of the batch config",
)
@option(
"--dry-run",
is_flag=True,
default=False,
help="Print run command instead of starting job.",
)
@argument("job-id", type=LIVE_JOB)
@argument("args", nargs=-1)
@wrap_async()
async def run(
root: Root,
job_id: str,
suffix: Optional[str],
dry_run: bool,
args: Optional[Tuple[str]],
param: List[Tuple[str, str]],
) -> None:
"""Run a job.
    Run JOB-ID, or attach to it if the job is already running.
    For multi-jobs, an explicit job suffix can be used with explicit job arguments.
"""
if args:
root.console.print(
"[yellow]args are deprecated, use --param instead",
)
async with AsyncExitStack() as stack:
client = await stack.enter_async_context(neuro_sdk.get())
storage: Storage = await stack.enter_async_context(ApiStorage(client))
runner = await stack.enter_async_context(
LiveRunner(root.config_dir, root.console, client, storage, root, dry_run)
)
await runner.run(
job_id,
suffix=suffix,
args=args,
params={key: value for key, value in param},
)
@click.command()
@argument("job-id", type=LIVE_JOB)
@argument("suffix", required=False, type=LiveJobSuffixType(job_id_param_name="job_id"))
@wrap_async()
async def logs(
root: Root,
job_id: str,
suffix: Optional[str],
) -> None:
"""Print logs.
Display logs for JOB-ID
"""
async with AsyncExitStack() as stack:
client = await stack.enter_async_context(neuro_sdk.get())
storage: Storage = await stack.enter_async_context(ApiStorage(client))
runner = await stack.enter_async_context(
LiveRunner(root.config_dir, root.console, client, storage, root)
)
await runner.logs(job_id, suffix)
@click.command()
@argument("job-id", type=LIVE_JOB)
@argument("suffix", required=False, type=LiveJobSuffixType(job_id_param_name="job_id"))
@wrap_async()
async def status(
root: Root,
job_id: str,
suffix: Optional[str],
) -> None:
"""Show job status.
Print status for JOB-ID
"""
async with AsyncExitStack() as stack:
client = await stack.enter_async_context(neuro_sdk.get())
storage: Storage = await stack.enter_async_context(ApiStorage(client))
runner = await stack.enter_async_context(
LiveRunner(root.config_dir, root.console, client, storage, root)
)
await runner.status(job_id, suffix)
@click.command()
@argument("job-id", type=LIVE_JOB_OR_ALL)
@argument("suffix", required=False, type=LiveJobSuffixType(job_id_param_name="job_id"))
@wrap_async()
async def kill(
root: Root,
job_id: str,
suffix: Optional[str],
) -> None:
"""Kill a job.
Kill JOB-ID, use `kill ALL` for killing all jobs."""
async with AsyncExitStack() as stack:
client = await stack.enter_async_context(neuro_sdk.get())
storage: Storage = await stack.enter_async_context(ApiStorage(client))
runner = await stack.enter_async_context(
LiveRunner(root.config_dir, root.console, client, storage, root)
)
if job_id != "ALL":
await runner.kill(job_id, suffix)
else:
if suffix is not None:
raise click.BadArgumentUsage(
"Suffix is not supported when killing ALL jobs"
)
await runner.kill_all()
@click.command()
@argument("flow_ids", type=PROJECT, nargs=-1, required=True)
@wrap_async()
async def delete_flow(
root: Root,
flow_ids: Sequence[str],
) -> None:
"""Completely remove flow with all related entities"""
async with AsyncExitStack() as stack:
client = await stack.enter_async_context(neuro_sdk.get())
storage: Storage = await stack.enter_async_context(ApiStorage(client))
for flow_id in flow_ids:
await storage.project(yaml_id=flow_id).delete()
if root.verbosity >= 0:
root.console.print(f"Flow '{flow_id}' was successfully removed.")
|
PypiClean
|
/ibm_wos_utils-4.7.2.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl/ibm_wos_utils/data_health/batch/utils/constants.py
|
from enum import Enum
"""
Contains constants used for the Data Health monitor.
"""
class Metrics(Enum):
# Column based metrics
ABSENCE_COUNT = "absence_count"
EMPTY_STRINGS = "empty_strings"
UNIQUE_COLUMNS = "unique_columns"
# Collective metrics
CLASS_CONFUSION = "class_confusion"
DUPLICATE_ROWS = "duplicate_rows"
# Metrics on individual columns of the data
COLUMN_BASED_METRICS = [
Metrics.ABSENCE_COUNT.value,
Metrics.EMPTY_STRINGS.value,
Metrics.UNIQUE_COLUMNS.value
]
# Default boundaries for metrics
DEFAULT_THRESHOLDS = {
"{}_BINARY".format(Metrics.CLASS_CONFUSION.value): 0.05, # the distance from confidence boundary
"{}_BINARY_CONFIDENCE_BOUNDARY".format(Metrics.CLASS_CONFUSION.value): 0.5, # the confidence boundary
"{}_MULTICLASS".format(Metrics.CLASS_CONFUSION.value): 0.05 # the difference between top two classes
}
THRESHOLD_LIMIT_DIRECTIONS = {
Metrics.ABSENCE_COUNT.value: "upper_limit",
Metrics.EMPTY_STRINGS.value: "upper_limit",
Metrics.DUPLICATE_ROWS.value: "upper_limit",
Metrics.UNIQUE_COLUMNS.value: "upper_limit",
Metrics.CLASS_CONFUSION.value: "upper_limit"
}
# Metrics supported for classification type models only
CLASSIFICATION_MODEL_METRICS = [
Metrics.CLASS_CONFUSION.value
]
# Metrics supported for batch subscriptions
BATCH_SUBSCRIPTION_PL_METRICS = [
Metrics.ABSENCE_COUNT.value,
Metrics.EMPTY_STRINGS.value,
Metrics.CLASS_CONFUSION.value,
Metrics.DUPLICATE_ROWS.value,
Metrics.UNIQUE_COLUMNS.value
]
BATCH_SUBSCRIPTION_FB_METRICS = [
Metrics.ABSENCE_COUNT.value,
Metrics.EMPTY_STRINGS.value,
Metrics.DUPLICATE_ROWS.value,
Metrics.UNIQUE_COLUMNS.value
]
# Modelling roles
RECORD_ID_MODELING_ROLE = "record-id"
RECORD_TIMESTAMP_MODELING_ROLE = "record-timestamp"
FEATURE_MODELING_ROLE = "feature"
META_FIELD_MODELING_ROLE = "meta-field"
PREDICTION_MODELING_ROLE = "prediction"
PROBABILITY_MODELING_ROLE = "probability"
# Datasets
PAYLOAD = "payload"
FEEDBACK = "feedback"
DATA_HEALTH = "data_health"
|
PypiClean
|
/hescore-hpxml-7.0.2.tar.gz/hescore-hpxml-7.0.2/hescorehpxml/hpxml3.py
|
from .base import HPXMLtoHEScoreTranslatorBase
from collections import OrderedDict
from .exceptions import TranslationError
def convert_to_type(type_, value):
if value is None:
return value
else:
return type_(value)
class HPXML3toHEScoreTranslator(HPXMLtoHEScoreTranslatorBase):
SCHEMA_DIR = 'hpxml-3.0.0'
def check_hpwes(self, v2_p, b):
# multiple verification nodes?
return self.xpath(b, 'h:BuildingDetails/h:GreenBuildingVerifications/h:GreenBuildingVerification/h:Type="Home '
'Performance with ENERGY STAR"')
def sort_foundations(self, fnd, b):
# Sort the foundations from largest area to smallest
def get_fnd_area(fnd):
attached_ids = OrderedDict()
attached_ids['Slab'] = self.xpath(fnd, 'h:AttachedToSlab/@idref')
attached_ids['FrameFloor'] = self.xpath(fnd, 'h:AttachedToFrameFloor/@idref')
return max(
[self.xpath(b, 'sum(//h:{}[contains("{}", h:SystemIdentifier/@id)]/h:Area)'.format(key, value)) for
key, value in attached_ids.items()])
fnd.sort(key=get_fnd_area, reverse=True)
return fnd, get_fnd_area
def get_foundation_walls(self, fnd, b):
attached_ids = self.xpath(fnd, 'h:AttachedToFoundationWall/@idref')
foundationwalls = self.xpath(b, '//h:FoundationWall[contains("{}", h:SystemIdentifier/@id)]'.
format(attached_ids), aslist=True)
return foundationwalls
def get_foundation_slabs(self, fnd, b):
attached_ids = self.xpath(fnd, 'h:AttachedToSlab/@idref')
slabs = self.xpath(b, '//h:Slab[contains("{}", h:SystemIdentifier/@id)]'.format(attached_ids), raise_err=True,
aslist=True)
return slabs
def get_foundation_frame_floors(self, fnd, b):
attached_ids = self.xpath(fnd, 'h:AttachedToFrameFloor/@idref')
frame_floors = self.xpath(b, '//h:FrameFloor[contains("{}",h:SystemIdentifier/@id)]'.format(attached_ids),
aslist=True)
return frame_floors
def attic_has_rigid_sheathing(self, v2_attic, roof):
return self.xpath(roof,
'boolean(h:Insulation/h:Layer[h:NominalRValue > 0][h:InstallationType="continuous"]['
'boolean(h:InsulationMaterial/h:Rigid)])'
# noqa: E501
)
def every_wall_layer_has_nominal_rvalue(self, wall):
# This variable will be true if every wall layer has a NominalRValue *or*
# if there are no insulation layers
wall_layers = self.xpath(wall, 'h:Insulation/h:Layer', aslist=True)
every_layer_has_nominal_rvalue = True # Considered to have nominal R-value unless assembly R-value is used
if wall_layers:
for layer in wall_layers:
if self.xpath(layer, 'h:NominalRValue') is None:
every_layer_has_nominal_rvalue = False
break
elif self.xpath(wall, 'h:Insulation/h:AssemblyEffectiveRValue/text()') is not None:
every_layer_has_nominal_rvalue = False
return every_layer_has_nominal_rvalue
def get_attic_roof_rvalue(self, v2_attic, roof):
# if there is no nominal R-value, it will return 0
return self.xpath(roof, 'sum(h:Insulation/h:Layer/h:NominalRValue)')
def get_attic_roof_assembly_rvalue(self, v2_attic, roof):
# if there is no assembly effective R-value, it will return None
return convert_to_type(float, self.xpath(roof, 'h:Insulation/h:AssemblyEffectiveRValue/text()'))
def every_attic_roof_layer_has_nominal_rvalue(self, v2_attic, roof):
roof_layers = self.xpath(roof, 'h:Insulation/h:Layer', aslist=True)
every_layer_has_nominal_rvalue = True # Considered to have nominal R-value unless assembly R-value is used
if roof_layers:
for layer in roof_layers:
if self.xpath(layer, 'h:NominalRValue') is None:
every_layer_has_nominal_rvalue = False
break
elif self.xpath(roof, 'h:Insulation/h:AssemblyEffectiveRValue/text()') is not None:
every_layer_has_nominal_rvalue = False
return every_layer_has_nominal_rvalue
def get_attic_knee_walls(self, attic):
knee_walls = []
b = self.xpath(attic, 'ancestor::h:Building')
for kneewall_idref in self.xpath(attic, 'h:AttachedToWall/@idref', aslist=True):
wall = self.xpath(
b,
'//h:Wall[h:SystemIdentifier/@id=$kneewallid][h:AtticWallType="knee wall"]',
raise_err=False,
kneewallid=kneewall_idref
)
if wall is not None:
knee_walls.append(wall)
return knee_walls
def get_attic_type(self, attic, atticid):
if self.xpath(attic,
'h:AtticType/h:Attic/h:CapeCod or boolean(h:AtticType/h:FlatRoof) or '
'boolean(h:AtticType/h:CathedralCeiling) or boolean(h:AtticType/h:Attic/h:Conditioned)'):
return 'cath_ceiling'
elif self.xpath(attic, 'boolean(h:AtticType/h:Attic)'):
return 'vented_attic'
else:
raise TranslationError(
'Attic {}: Cannot translate HPXML AtticType to HEScore rooftype.'.format(atticid))
def get_attic_floor_rvalue(self, attic, b):
frame_floors = self.get_attic_floors(attic)
if len(frame_floors) == 0:
return 0
if len(frame_floors) == 1:
return convert_to_type(float, self.xpath(frame_floors[0], 'sum(h:Insulation/h:Layer/h:NominalRValue)'))
frame_floor_dict_ls = []
for frame_floor in frame_floors:
            # get_attic_floors has already confirmed that every floor has area information
floor_area = convert_to_type(float, self.xpath(frame_floor, 'h:Area/text()'))
rvalue = self.xpath(frame_floor, 'sum(h:Insulation/h:Layer/h:NominalRValue)')
frame_floor_dict_ls.append({'area': floor_area, 'rvalue': rvalue})
# Average
try:
floor_r = sum(x['area'] for x in frame_floor_dict_ls) / \
sum(x['area'] / x['rvalue'] for x in frame_floor_dict_ls)
except ZeroDivisionError:
floor_r = 0
return floor_r
def get_attic_floor_assembly_rvalue(self, attic, b):
frame_floors = self.get_attic_floors(attic)
if len(frame_floors) == 0:
return None
frame_floor_dict_ls = []
for frame_floor in frame_floors:
floor_area = convert_to_type(float, self.xpath(frame_floor, 'h:Area/text()'))
assembly_rvalue = convert_to_type(
float, self.xpath(frame_floor, 'h:Insulation/h:AssemblyEffectiveRValue/text()'))
if assembly_rvalue is None:
return
frame_floor_dict_ls.append({'area': floor_area, 'rvalue': assembly_rvalue})
# Average
try:
floor_r = sum(x['area'] for x in frame_floor_dict_ls) / \
sum(x['area'] / x['rvalue'] for x in frame_floor_dict_ls)
except ZeroDivisionError:
floor_r = None
return convert_to_type(float, floor_r)
def every_attic_floor_layer_has_nominal_rvalue(self, attic, b):
frame_floors = self.get_attic_floors(attic)
every_layer_has_nominal_rvalue = True # Considered to have nominal R-value unless assembly R-value is used
for frame_floor in frame_floors:
for layer in self.xpath(frame_floor, 'h:Insulation/h:Layer', aslist=True):
if self.xpath(layer, 'h:NominalRValue') is None:
every_layer_has_nominal_rvalue = False
break
if self.xpath(frame_floor, 'h:Insulation/h:AssemblyEffectiveRValue/text()') is not None:
every_layer_has_nominal_rvalue = False
break
return every_layer_has_nominal_rvalue
def get_attic_floors(self, attic):
floor_idref = self.xpath(attic, 'h:AttachedToFrameFloor/@idref', aslist=True)
# No frame floor attached
if not floor_idref:
return []
b = self.xpath(attic, 'ancestor::h:Building')
frame_floors = self.xpath(b, '//h:FrameFloor[contains("{}",h:SystemIdentifier/@id)]'.format(floor_idref),
aslist=True, raise_err=True)
return frame_floors
def get_ceiling_area(self, attic):
frame_floors = self.get_attic_floors(attic)
if len(frame_floors) >= 1:
return sum(float(self.xpath(x, 'h:Area/text()', raise_err=True)) for x in frame_floors)
else:
raise TranslationError('For vented attics, a FrameFloor needs to be referenced to determine ceiling_area.')
def get_attic_roof_area(self, roof):
return float(self.xpath(roof, 'h:Area/text()', raise_err=True))
def get_framefloor_assembly_rvalue(self, v2_framefloor, framefloor):
return convert_to_type(float, self.xpath(framefloor, 'h:Insulation/h:AssemblyEffectiveRValue/text()'))
def get_foundation_wall_assembly_rvalue(self, v2_fwall, fwall):
return convert_to_type(float, self.xpath(fwall, 'h:Insulation/h:AssemblyEffectiveRValue/text()'))
def get_slab_assembly_rvalue(self, v2_slab, slab):
return convert_to_type(float, self.xpath(slab, 'h:PerimeterInsulation/h:AssemblyEffectiveRValue/text()'))
def every_framefloor_layer_has_nominal_rvalue(self, v2_framefloor, framefloor):
framefloor_layers = self.xpath(framefloor, 'h:Insulation/h:Layer', aslist=True)
every_layer_has_nominal_rvalue = True # Considered to have nominal R-value unless assembly R-value is used
if framefloor_layers:
for layer in framefloor_layers:
if self.xpath(layer, 'h:NominalRValue') is None:
every_layer_has_nominal_rvalue = False
break
elif self.xpath(framefloor, 'h:Insulation/h:AssemblyEffectiveRValue/text()') is not None:
every_layer_has_nominal_rvalue = False
return every_layer_has_nominal_rvalue
def get_solarscreen(self, wndw_skylight):
return bool(self.xpath(wndw_skylight, 'h:ExteriorShading/h:Type/text()') == 'solar screens')
def get_hescore_walls(self, b):
return self.xpath(
b, 'h:BuildingDetails/h:Enclosure/h:Walls/h:Wall\
[((h:ExteriorAdjacentTo="outside" and not(contains(h:ExteriorAdjacentTo, "garage"))) or\
not(h:ExteriorAdjacentTo)) and not(contains(h:InteriorAdjacentTo, "attic"))]', # noqa: E501
aslist=True)
def check_is_doublepane(self, window, glass_layers):
return (self.xpath(window, 'h:StormWindow') is not None and glass_layers == 'single-pane') or \
glass_layers == 'double-pane'
def check_is_storm_lowe(self, window, glass_layers):
storm_type = self.xpath(window, 'h:StormWindow/h:GlassType/text()')
if storm_type is not None:
return storm_type == 'low-e' and glass_layers == 'single-pane'
return False
def get_duct_location(self, hpxml_duct_location, bldg):
try:
loc_hierarchy = self.duct_location_map[hpxml_duct_location]
if loc_hierarchy is None:
return
except TypeError:
raise TranslationError('Invalid duct location specified')
if loc_hierarchy is None:
return
for loc in loc_hierarchy:
if loc == 'uncond_attic':
check_loc = 'vented_attic'
else:
check_loc = loc
if check_loc not in [zone_floor['foundation_type'] for zone_floor in bldg['zone']['zone_floor']] and \
check_loc not in [zone_roof['roof_type'] for zone_roof in bldg['zone']['zone_roof']]:
if check_loc != 'cond_space':
continue
return loc
        # Even though reaching this point means the duct location matches neither a
        # roof type nor a floor type, this duct is still likely to be discarded (e.g.
        # for not being among the major 3 ducts, or not being connected to HVAC), and
        # its corresponding roof/floor type may already have been discarded, so keep
        # going until the 'validate_hescore_inputs' error checking.
return loc_hierarchy[0]
duct_location_map = {'living space': ['cond_space'],
'unconditioned space': ['uncond_basement', 'vented_crawl', 'unvented_crawl', 'uncond_attic'],
'under slab': ['under_slab'],
'basement': ['uncond_basement', 'cond_space'],
'basement - unconditioned': ['uncond_basement'],
'basement - conditioned': ['cond_space'],
'crawlspace - unvented': ['unvented_crawl'],
'crawlspace - vented': ['vented_crawl'],
'crawlspace - unconditioned': ['vented_crawl', 'unvented_crawl'],
'crawlspace - conditioned': ['cond_space'],
'crawlspace': ['vented_crawl', 'unvented_crawl', 'cond_space'],
'exterior wall': ['exterior_wall'],
'interstitial space': None,
'garage - conditioned': ['cond_space'],
'garage - unconditioned': ['unvented_crawl'],
'garage': ['unvented_crawl'],
'roof deck': ['outside'],
'outside': ['outside'],
'attic': ['uncond_attic', 'cond_space'],
'attic - unconditioned': ['uncond_attic'],
'attic - conditioned': ['cond_space'],
'attic - unvented': ['uncond_attic'],
'attic - vented': ['uncond_attic']}
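# Illustrative sketch (not part of the translator): the area-weighted R-value
# combination used in get_attic_floor_rvalue above, on hypothetical numbers.
# Two ceiling sections of 600 sqft at R-30 and 400 sqft at R-19 combine to
# roughly R-24.4, i.e. total area divided by the sum of area/R.
def _example_area_weighted_rvalue():
    sections = [{'area': 600.0, 'rvalue': 30.0}, {'area': 400.0, 'rvalue': 19.0}]
    return sum(x['area'] for x in sections) / \
        sum(x['area'] / x['rvalue'] for x in sections)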
|
PypiClean
|
/collective.renderfiletypes-2.0b4.tar.gz/collective.renderfiletypes-2.0b4/LICENSE.rst
|
collective.renderfiletypes Copyright 2020, Mikel Larreategi
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License version 2
as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston,
MA 02111-1307 USA.
|
PypiClean
|
/msgraph_beta_sdk-1.0.0a9-py3-none-any.whl/msgraph/generated/identity_governance/lifecycle_workflows/workflows/item/execution_scope/execution_scope_request_builder.py
|
from __future__ import annotations
from dataclasses import dataclass
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.response_handler import ResponseHandler
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from ......models import user_collection_response
from ......models.o_data_errors import o_data_error
from .count import count_request_builder
from .item import user_item_request_builder
class ExecutionScopeRequestBuilder():
"""
Provides operations to manage the executionScope property of the microsoft.graph.identityGovernance.workflow entity.
"""
def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
"""
Instantiates a new ExecutionScopeRequestBuilder and sets the default values.
Args:
pathParameters: The raw url or the Url template parameters for the request.
requestAdapter: The request adapter to use to execute the requests.
"""
if path_parameters is None:
raise Exception("path_parameters cannot be undefined")
if request_adapter is None:
raise Exception("request_adapter cannot be undefined")
# Url template to use to build the URL for the current request builder
self.url_template: str = "{+baseurl}/identityGovernance/lifecycleWorkflows/workflows/{workflow%2Did}/executionScope{?%24top,%24skip,%24search,%24filter,%24count,%24orderby,%24select,%24expand}"
url_tpl_params = get_path_parameters(path_parameters)
self.path_parameters = url_tpl_params
self.request_adapter = request_adapter
def by_user_id(self,user_id: str) -> user_item_request_builder.UserItemRequestBuilder:
"""
Provides operations to manage the executionScope property of the microsoft.graph.identityGovernance.workflow entity.
Args:
user_id: Unique identifier of the item
Returns: user_item_request_builder.UserItemRequestBuilder
"""
if user_id is None:
raise Exception("user_id cannot be undefined")
from .item import user_item_request_builder
url_tpl_params = get_path_parameters(self.path_parameters)
url_tpl_params["user%2Did"] = user_id
return user_item_request_builder.UserItemRequestBuilder(self.request_adapter, url_tpl_params)
async def get(self,request_configuration: Optional[ExecutionScopeRequestBuilderGetRequestConfiguration] = None) -> Optional[user_collection_response.UserCollectionResponse]:
"""
The unique identifier of the Azure AD identity that last modified the workflow object.
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: Optional[user_collection_response.UserCollectionResponse]
"""
request_info = self.to_get_request_information(
request_configuration
)
from ......models.o_data_errors import o_data_error
error_mapping: Dict[str, ParsableFactory] = {
"4XX": o_data_error.ODataError,
"5XX": o_data_error.ODataError,
}
if not self.request_adapter:
raise Exception("Http core is null")
from ......models import user_collection_response
return await self.request_adapter.send_async(request_info, user_collection_response.UserCollectionResponse, error_mapping)
def to_get_request_information(self,request_configuration: Optional[ExecutionScopeRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
"""
The unique identifier of the Azure AD identity that last modified the workflow object.
Args:
requestConfiguration: Configuration for the request such as headers, query parameters, and middleware options.
Returns: RequestInformation
"""
request_info = RequestInformation()
request_info.url_template = self.url_template
request_info.path_parameters = self.path_parameters
request_info.http_method = Method.GET
request_info.headers["Accept"] = ["application/json"]
if request_configuration:
request_info.add_request_headers(request_configuration.headers)
request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
request_info.add_request_options(request_configuration.options)
return request_info
@property
def count(self) -> count_request_builder.CountRequestBuilder:
"""
Provides operations to count the resources in the collection.
"""
from .count import count_request_builder
return count_request_builder.CountRequestBuilder(self.request_adapter, self.path_parameters)
@dataclass
class ExecutionScopeRequestBuilderGetQueryParameters():
"""
The unique identifier of the Azure AD identity that last modified the workflow object.
"""
def get_query_parameter(self,original_name: Optional[str] = None) -> str:
"""
Maps the query parameters names to their encoded names for the URI template parsing.
Args:
originalName: The original query parameter name in the class.
Returns: str
"""
if original_name is None:
raise Exception("original_name cannot be undefined")
if original_name == "count":
return "%24count"
if original_name == "expand":
return "%24expand"
if original_name == "filter":
return "%24filter"
if original_name == "orderby":
return "%24orderby"
if original_name == "search":
return "%24search"
if original_name == "select":
return "%24select"
if original_name == "skip":
return "%24skip"
if original_name == "top":
return "%24top"
return original_name
# Include count of items
count: Optional[bool] = None
# Expand related entities
expand: Optional[List[str]] = None
# Filter items by property values
filter: Optional[str] = None
# Order items by property values
orderby: Optional[List[str]] = None
# Search items by search phrases
search: Optional[str] = None
# Select properties to be returned
select: Optional[List[str]] = None
# Skip the first n items
skip: Optional[int] = None
# Show only the first n items
top: Optional[int] = None
@dataclass
class ExecutionScopeRequestBuilderGetRequestConfiguration():
"""
Configuration for the request such as headers, query parameters, and middleware options.
"""
# Request headers
headers: Optional[Dict[str, Union[str, List[str]]]] = None
# Request options
options: Optional[List[RequestOption]] = None
# Request query parameters
query_parameters: Optional[ExecutionScopeRequestBuilder.ExecutionScopeRequestBuilderGetQueryParameters] = None
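# Illustrative sketch (not part of the generated SDK): constructing this request
# builder directly. In normal use it is reached through the generated Graph
# client; the request adapter is supplied by the caller and the identifiers
# below are hypothetical placeholders.
def _example_execution_scope_builder(request_adapter):
    builder = ExecutionScopeRequestBuilder(
        request_adapter,
        {"workflow%2Did": "00000000-0000-0000-0000-000000000000"},
    )
    # Navigate to a single user within the workflow's execution scope.
    return builder.by_user_id("11111111-1111-1111-1111-111111111111")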
|
PypiClean
|
/tf_object_detection_api-0.1.tar.gz/tf_object_detection_api-0.1/object_detection/anchor_generators/multiscale_grid_anchor_generator.py
|
import tensorflow.compat.v1 as tf
from object_detection.anchor_generators import grid_anchor_generator
from object_detection.core import anchor_generator
from object_detection.core import box_list_ops
class MultiscaleGridAnchorGenerator(anchor_generator.AnchorGenerator):
"""Generate a grid of anchors for multiple CNN layers of different scale."""
def __init__(self, min_level, max_level, anchor_scale, aspect_ratios,
scales_per_octave, normalize_coordinates=True):
"""Constructs a MultiscaleGridAnchorGenerator.
    To construct anchors at multiple scale resolutions, one must provide the
    minimum and maximum levels of a scale pyramid. The anchor scale defines the
    size of the base anchor relative to the stride of the corresponding feature
    map. The generator allows one pixel location on a feature map to map to
    multiple anchors that have different aspect ratios and intermediate scales.
Args:
min_level: minimum level in feature pyramid.
max_level: maximum level in feature pyramid.
anchor_scale: anchor scale and feature stride define the size of the base
anchor on an image. For example, given a feature pyramid with strides
[2^3, ..., 2^7] and anchor scale 4. The base anchor size is
4 * [2^3, ..., 2^7].
aspect_ratios: list or tuple of (float) aspect ratios to place on each
grid point.
scales_per_octave: integer number of intermediate scales per scale octave.
normalize_coordinates: whether to produce anchors in normalized
coordinates. (defaults to True).
"""
self._anchor_grid_info = []
self._aspect_ratios = aspect_ratios
self._scales_per_octave = scales_per_octave
self._normalize_coordinates = normalize_coordinates
scales = [2**(float(scale) / scales_per_octave)
for scale in range(scales_per_octave)]
aspects = list(aspect_ratios)
for level in range(min_level, max_level + 1):
anchor_stride = [2**level, 2**level]
base_anchor_size = [2**level * anchor_scale, 2**level * anchor_scale]
self._anchor_grid_info.append({
'level': level,
'info': [scales, aspects, base_anchor_size, anchor_stride]
})
def name_scope(self):
return 'MultiscaleGridAnchorGenerator'
def num_anchors_per_location(self):
"""Returns the number of anchors per spatial location.
Returns:
a list of integers, one for each expected feature map to be passed to
the Generate function.
"""
return len(self._anchor_grid_info) * [
len(self._aspect_ratios) * self._scales_per_octave]
def _generate(self, feature_map_shape_list, im_height=1, im_width=1):
"""Generates a collection of bounding boxes to be used as anchors.
For training, we require the input image shape to be statically defined.
That is, im_height and im_width should be integers rather than tensors.
For inference, im_height and im_width can be either integers (for fixed
image size), or tensors (for arbitrary image size).
Args:
feature_map_shape_list: list of pairs of convnet layer resolutions in the
format [(height_0, width_0), (height_1, width_1), ...]. For example,
setting feature_map_shape_list=[(8, 8), (7, 7)] asks for anchors that
correspond to an 8x8 layer followed by a 7x7 layer.
im_height: the height of the image to generate the grid for. If both
im_height and im_width are 1, anchors can only be generated in
absolute coordinates.
im_width: the width of the image to generate the grid for. If both
im_height and im_width are 1, anchors can only be generated in
absolute coordinates.
Returns:
boxes_list: a list of BoxLists each holding anchor boxes corresponding to
the input feature map shapes.
Raises:
ValueError: if im_height and im_width are not integers.
ValueError: if im_height and im_width are 1, but normalized coordinates
were requested.
"""
anchor_grid_list = []
for feat_shape, grid_info in zip(feature_map_shape_list,
self._anchor_grid_info):
# TODO(rathodv) check the feature_map_shape_list is consistent with
# self._anchor_grid_info
level = grid_info['level']
stride = 2**level
scales, aspect_ratios, base_anchor_size, anchor_stride = grid_info['info']
feat_h = feat_shape[0]
feat_w = feat_shape[1]
anchor_offset = [0, 0]
if isinstance(im_height, int) and isinstance(im_width, int):
if im_height % 2.0**level == 0 or im_height == 1:
anchor_offset[0] = stride / 2.0
if im_width % 2.0**level == 0 or im_width == 1:
anchor_offset[1] = stride / 2.0
if tf.is_tensor(im_height) and tf.is_tensor(im_width):
anchor_offset[0] = stride / 2.0
anchor_offset[1] = stride / 2.0
ag = grid_anchor_generator.GridAnchorGenerator(
scales,
aspect_ratios,
base_anchor_size=base_anchor_size,
anchor_stride=anchor_stride,
anchor_offset=anchor_offset)
(anchor_grid,) = ag.generate(feature_map_shape_list=[(feat_h, feat_w)])
if self._normalize_coordinates:
if im_height == 1 or im_width == 1:
raise ValueError(
'Normalized coordinates were requested upon construction of the '
'MultiscaleGridAnchorGenerator, but a subsequent call to '
'generate did not supply dimension information.')
anchor_grid = box_list_ops.to_normalized_coordinates(
anchor_grid, im_height, im_width, check_range=False)
anchor_grid_list.append(anchor_grid)
return anchor_grid_list
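# Illustrative sketch (not part of the original module): constructing the
# generator with the example values from the class docstring above (levels 3-7,
# anchor scale 4). The aspect ratios are illustrative choices, not library
# defaults.
def _example_multiscale_generator():
  gen = MultiscaleGridAnchorGenerator(
      min_level=3, max_level=7, anchor_scale=4.0,
      aspect_ratios=[1.0, 2.0, 0.5], scales_per_octave=2,
      normalize_coordinates=False)
  # Five pyramid levels, each with 3 aspect ratios * 2 scales per octave
  # = 6 anchors per spatial location.
  return gen.num_anchors_per_location()  # [6, 6, 6, 6, 6]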
|
PypiClean
|
/kolla-ansible-16.1.0.tar.gz/kolla-ansible-16.1.0/ansible/roles/keystone/files/fernet_rotate_cron_generator.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module creates a list of cron intervals for a node in a group of nodes
# to ensure each node runs a cron in round robin style.
import argparse
import json
import sys
MINUTE_SPAN = 1
HOUR_SPAN = 60
DAY_SPAN = 24 * HOUR_SPAN
WEEK_SPAN = 7 * DAY_SPAN
class RotationIntervalTooLong(Exception):
pass
def json_exit(msg=None, failed=False, changed=False):
if type(msg) is not dict:
msg = {'msg': str(msg)}
msg.update({'failed': failed, 'changed': changed})
print(json.dumps(msg))
sys.exit()
def generate(host_index, total_hosts, total_rotation_mins):
min = '*' # 0-59
hour = '*' # 0-23
day = '*' # 0-6 (day of week)
crons = []
if host_index >= total_hosts:
return crons
# We need to rotate the key every total_rotation_mins minutes.
# When there are N hosts, each host should rotate once every N *
# total_rotation_mins minutes, in a round-robin manner.
# We can generate a cycle for index 0, then add an offset specific to each
# host.
# NOTE: Minor under-rotation is better than over-rotation since tokens
# may become invalid if keys are over-rotated.
host_rotation_mins = total_rotation_mins * total_hosts
host_rotation_offset = total_rotation_mins * host_index
# Can't currently rotate less than once per week.
if total_rotation_mins > WEEK_SPAN:
msg = ("Unable to schedule fernet key rotation with an interval "
"greater than 1 week divided by the number of hosts")
raise RotationIntervalTooLong(msg)
# Build crons multiple of a day
elif host_rotation_mins > DAY_SPAN:
time = host_rotation_offset
while time + total_rotation_mins <= WEEK_SPAN:
day = time // DAY_SPAN
            hour = (time % DAY_SPAN) // HOUR_SPAN
min = time % HOUR_SPAN
crons.append({'min': min, 'hour': hour, 'day': day})
time += host_rotation_mins
# Build crons for multiple of an hour
elif host_rotation_mins > HOUR_SPAN:
time = host_rotation_offset
while time + total_rotation_mins <= DAY_SPAN:
hour = time // HOUR_SPAN
min = time % HOUR_SPAN
crons.append({'min': min, 'hour': hour, 'day': day})
time += host_rotation_mins
# Build crons for multiple of a minute
else:
time = host_rotation_offset
while time + total_rotation_mins <= HOUR_SPAN:
min = time // MINUTE_SPAN
crons.append({'min': min, 'hour': hour, 'day': day})
time += host_rotation_mins
return crons
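# Illustrative sketch (not part of the original module): with two hosts and a
# 30-minute rotation interval, each host gets one slot per hour, offset by
# 30 minutes from the other host.
def _example_schedule():
    return [generate(host_index, 2, 30) for host_index in range(2)]
    # -> [[{'min': 0, 'hour': '*', 'day': '*'}],
    #     [{'min': 30, 'hour': '*', 'day': '*'}]]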
def main():
parser = argparse.ArgumentParser(description='''Creates a list of cron
intervals for a node in a group of nodes to ensure each node runs
a cron in round robin style.''')
parser.add_argument('-t', '--time',
help='Time in minutes for a key rotation cycle',
required=True,
type=int)
parser.add_argument('-i', '--index',
help='Index of host starting from 0',
required=True,
type=int)
parser.add_argument('-n', '--number',
help='Number of hosts',
required=True,
type=int)
args = parser.parse_args()
try:
jobs = generate(args.index, args.number, args.time)
except Exception as e:
json_exit(str(e), failed=True)
json_exit({'cron_jobs': jobs})
if __name__ == "__main__":
main()
|
PypiClean
|
/toil-vg-1.6.0.tar.gz/toil-vg-1.6.0/src/toil_vg/iostore.py
|
import sys, os, os.path, json, collections, logging, logging.handlers
import struct, socket, threading, tarfile, shutil
import tempfile
import functools
import random
import time
import dateutil.tz
import traceback
import stat
from toil.realtimeLogger import RealtimeLogger
import datetime
# Need stuff for Amazon s3
try:
import boto3
import botocore
have_s3 = True
except ImportError:
have_s3 = False
pass
# We need some stuff in order to have Azure
try:
import azure
# Make sure to get the 0.11 BlobService, in case the new azure storage
# module is also installed.
from azure.storage.blob import BlobService
import toil.jobStores.azureJobStore
have_azure = True
except ImportError:
have_azure = False
pass
def robust_makedirs(directory):
"""
Make a directory when other nodes may be trying to do the same on a shared
filesystem.
"""
if not os.path.exists(directory):
try:
# Make it if it doesn't exist
os.makedirs(directory)
except OSError:
# If you can't make it, maybe someone else did?
pass
# Make sure it exists and is a directory
assert(os.path.exists(directory) and os.path.isdir(directory))
def write_global_directory(file_store, path, cleanup=False, tee=None, compress=True):
"""
Write the given directory into the file store, and return an ID that can be
used to retrieve it. Writes the files in the directory and subdirectories
into a tar file in the file store.
Does not preserve the name or permissions of the given directory (only of
its contents).
If cleanup is true, directory will be deleted from the file store when this
job and its follow-ons finish.
If tee is passed, a tar.gz of the directory contents will be written to that
filename. The file thus created must not be modified after this function is
called.
"""
write_stream_mode = "w"
if compress:
write_stream_mode = "w|gz"
if tee is not None:
        with open(tee, "wb") as file_handle:
# We have a stream, so start taring into it
with tarfile.open(fileobj=file_handle, mode=write_stream_mode) as tar:
# Open it for streaming-only write (no seeking)
# We can't just add the root directory, since then we wouldn't be
# able to extract it later with an arbitrary name.
for file_name in os.listdir(path):
# Add each file in the directory to the tar, with a relative
# path
tar.add(os.path.join(path, file_name), arcname=file_name)
# Save the file on disk to the file store.
return file_store.writeGlobalFile(tee)
else:
with file_store.writeGlobalFileStream(cleanup=cleanup) as (file_handle,
file_id):
# We have a stream, so start taring into it
# TODO: don't duplicate this code.
with tarfile.open(fileobj=file_handle, mode=write_stream_mode) as tar:
# Open it for streaming-only write (no seeking)
# We can't just add the root directory, since then we wouldn't be
# able to extract it later with an arbitrary name.
for file_name in os.listdir(path):
# Add each file in the directory to the tar, with a relative
# path
tar.add(os.path.join(path, file_name), arcname=file_name)
# Spit back the ID to use to retrieve it
return file_id
def read_global_directory(file_store, directory_id, path):
"""
Reads a directory with the given tar file id from the global file store and
recreates it at the given path.
The given path, if it exists, must be a directory.
Do not use to extract untrusted directories, since they could sneakily plant
files anywhere on the filesystem.
"""
# Make the path
robust_makedirs(path)
with file_store.readGlobalFileStream(directory_id) as file_handle:
# We need to pull files out of this tar stream
with tarfile.open(fileobj=file_handle, mode="r|*") as tar:
# Open it for streaming-only read (no seeking)
# We need to extract the whole thing into that new directory
tar.extractall(path)
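# Illustrative sketch (not part of toil-vg): round-tripping a directory through
# the Toil file store with the two helpers above. `file_store` is whatever file
# store object the enclosing Toil job provides; the paths are hypothetical.
def _example_directory_roundtrip(file_store, src_dir, dest_dir):
    directory_id = write_global_directory(file_store, src_dir, cleanup=True)
    read_global_directory(file_store, directory_id, dest_dir)
    return directory_id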
class IOStore(object):
"""
A class that lets you get your input files and save your output files
to/from a local filesystem, Amazon S3, or Microsoft Azure storage
transparently.
This is the abstract base class; other classes inherit from this and fill in
the methods.
"""
def __init__(self):
"""
Make a new IOStore
"""
raise NotImplementedError()
def read_input_file(self, input_path, local_path):
"""
Read an input file from wherever the input comes from and send it to the
given path.
If the file at local_path already exists, it is overwritten.
If the file at local_path already exists and is a directory, behavior is
undefined.
"""
raise NotImplementedError()
def list_input_directory(self, input_path, recursive=False,
with_times=False):
"""
Yields each of the subdirectories and files in the given input path.
If recursive is false, yields files and directories in the given
directory. If recursive is true, yields all files contained within the
current directory, recursively, but does not yield folders.
If with_times is True, yields (name, modification time) pairs instead of
just names, with modification times represented as datetime objects in
the GMT timezone. Modification times may be None on objects that do not
support them.
Gives relative file/directory names.
"""
raise NotImplementedError()
def write_output_file(self, local_path, output_path):
"""
Save the given local file to the given output path. No output directory
needs to exist already.
If the output path already exists, it is overwritten.
If the output path already exists and is a directory, behavior is
undefined.
"""
raise NotImplementedError()
def exists(self, path):
"""
Returns true if the given input or output file exists in the store
already.
"""
raise NotImplementedError()
def get_mtime(self, path):
"""
        Returns the modification time of the given file if it exists, or None
otherwise.
"""
raise NotImplementedError()
def get_size(self, path):
"""
Returns the size in bytes of the given file if it exists, or None
otherwise.
"""
raise NotImplementedError()
@staticmethod
def absolute(store_string):
"""
Convert a relative path IOStore string to an absolute path one. Leaves
strings that aren't FileIOStore specifications alone.
Since new Toil versions change the working directory of SingleMachine
batch system jobs, we need to have absolute paths passed into jobs.
Recommended to be used as an argparse type, so that strings can be
directly be passed to IOStore.get on the nodes.
"""
if store_string == "":
return ""
if store_string[0] == ".":
# It's a relative ./ path
return os.path.abspath(store_string)
if store_string.startswith("file:"):
# It's a file:-prefixed thing that may be a relative path
# Normalize the part after "file:" (which is 5 characters)
return "file:" + os.path.abspath(store_string[5:])
return store_string
@staticmethod
def get(store_string):
"""
Get a concrete IOStore created from the given connection string.
Valid formats are just like for a Toil JobStore, except with container
names being specified on Azure.
Formats:
/absolute/filesystem/path
./relative/filesystem/path
file:filesystem/path
aws:region:bucket (TODO)
aws:region:bucket/path/prefix (TODO)
azure:account:container (instead of a container prefix) (gets keys like
Toil)
azure:account:container/path/prefix (trailing slash added automatically)
"""
# Code adapted from toil's common.py loadJobStore()
if store_string[0] in "/.":
            # Prepend file: to the path
store_string = "file:" + store_string
try:
# Break off the first colon-separated piece.
store_type, store_arguments = store_string.split(":", 1)
except ValueError:
# They probably forgot the . or /
raise RuntimeError("Incorrect IO store specification {}. "
"Local paths must start with . or /".format(store_string))
if store_type == "file":
return FileIOStore(store_arguments)
elif store_type == "aws":
# Break out the AWS arguments
region, bucket_name = store_arguments.split(":", 1)
if "/" in bucket_name:
# Split the bucket from the path
bucket_name, path_prefix = bucket_name.split("/", 1)
else:
# No path prefix
path_prefix = ""
return S3IOStore(region, bucket_name, path_prefix)
elif store_type == "azure":
# Break out the Azure arguments.
account, container = store_arguments.split(":", 1)
if "/" in container:
# Split the container from the path
container, path_prefix = container.split("/", 1)
else:
# No path prefix
path_prefix = ""
return AzureIOStore(account, container, path_prefix)
else:
raise RuntimeError("Unknown IOStore implementation {}".format(
store_type))
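# Illustrative sketch (not part of toil-vg): resolving a connection string into
# a concrete store and copying a file through it. The store string, local file,
# and "results/result.txt" key are hypothetical placeholders.
def _example_file_iostore(store_string, local_file):
    # e.g. store_string = "file:/tmp/example_store" resolves to a FileIOStore.
    store = IOStore.get(store_string)
    # Copy a local file into the store, then check its recorded size.
    store.write_output_file(local_file, "results/result.txt")
    return store.get_size("results/result.txt")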
class FileIOStore(IOStore):
"""
A class that lets you get input from and send output to filesystem files.
"""
def __init__(self, path_prefix=""):
"""
Make a new FileIOStore that just treats everything as local paths,
relative to the given prefix.
"""
self.path_prefix = path_prefix
def read_input_file(self, input_path, local_path):
"""
Get input from the filesystem.
"""
RealtimeLogger.debug("Loading {} from FileIOStore in {} to {}".format(
input_path, self.path_prefix, local_path))
if os.path.exists(local_path):
# Try deleting the existing item if it already exists
try:
os.unlink(local_path)
except:
# Don't fail here, fail complaining about the assertion, which
# will be more informative.
pass
# Make sure the path is clear for copying
assert(not os.path.exists(local_path))
# Where is the file actually?
real_path = os.path.abspath(os.path.join(self.path_prefix, input_path))
if not os.path.exists(real_path):
RealtimeLogger.error(
"Can't find {} from FileIOStore in {}!".format(input_path,
self.path_prefix))
raise RuntimeError("File {} missing!".format(real_path))
# Make a temporary file
temp_handle, temp_path = tempfile.mkstemp(dir=os.path.dirname(local_path))
os.close(temp_handle)
# Copy to the temp file
shutil.copy2(real_path, temp_path)
# Rename the temp file to the right place, atomically
RealtimeLogger.info("rename {} -> {}".format(temp_path, local_path))
os.rename(temp_path, local_path)
# Look at the file stats
file_stats = os.stat(real_path)
if (file_stats.st_uid == os.getuid() and
file_stats.st_mode & stat.S_IWUSR):
# We own this file and can write to it. We don't want the user
# script messing it up through the symlink.
try:
# Clear the user write bit, so the user can't accidentally
# clobber the file in the actual store through the symlink.
os.chmod(real_path, file_stats.st_mode ^ stat.S_IWUSR)
except OSError:
# If something goes wrong here (like us not having permission to
# change permissions), ignore it.
pass
def list_input_directory(self, input_path, recursive=False,
with_times=False):
"""
Loop over directories on the filesystem.
"""
RealtimeLogger.info("Enumerating {} from "
"FileIOStore in {}".format(input_path, self.path_prefix))
if not os.path.exists(os.path.join(self.path_prefix, input_path)):
# Nothing to list over
return
if not os.path.isdir(os.path.join(self.path_prefix, input_path)):
# Can't list a file, only a directory.
return
for item in os.listdir(os.path.join(self.path_prefix, input_path)):
if(recursive and os.path.isdir(os.path.join(self.path_prefix,
input_path, item))):
# We're recursing and this is a directory.
# Recurse on this.
for subitem in self.list_input_directory(
os.path.join(input_path, item), recursive):
# Make relative paths include this directory name and yield
# them
name_to_yield = os.path.join(item, subitem)
                    if with_times:
                        # Report the modification time as a UTC datetime,
                        # consistent with get_mtime() below.
                        yield name_to_yield, self.get_mtime(
                            os.path.join(input_path, item, subitem))
                    else:
                        yield name_to_yield
else:
# This isn't a directory or we aren't being recursive
# Just report this individual item.
                if with_times:
                    # Report the modification time as a UTC datetime,
                    # consistent with get_mtime() below.
                    yield item, self.get_mtime(os.path.join(input_path, item))
                else:
                    yield item
def write_output_file(self, local_path, output_path):
"""
Write output to the filesystem
"""
RealtimeLogger.debug("Saving {} to FileIOStore in {}".format(
output_path, self.path_prefix))
# What's the real output path to write to?
real_output_path = os.path.join(self.path_prefix, output_path)
# What directory should this go in?
parent_dir = os.path.split(real_output_path)[0]
if parent_dir != "":
# Make sure the directory it goes in exists.
robust_makedirs(parent_dir)
# Make a temporary file
temp_handle, temp_path = tempfile.mkstemp(dir=self.path_prefix)
os.close(temp_handle)
# Copy to the temp file
shutil.copy2(local_path, temp_path)
if os.path.exists(real_output_path):
# At least try to get existing files out of the way first.
try:
os.unlink(real_output_path)
except:
pass
# Rename the temp file to the right place, atomically
os.rename(temp_path, real_output_path)
def exists(self, path):
"""
Returns true if the given input or output file exists in the file system
already.
"""
return os.path.exists(os.path.join(self.path_prefix, path))
def get_mtime(self, path):
"""
Returns the modification time of the given file if it exists, or None
otherwise.
"""
if not self.exists(path):
return None
# What is the mtime in seconds since epoch?
mtime_epoch_seconds = os.path.getmtime(os.path.join(self.path_prefix,
path))
# Convert it to datetime
mtime_datetime = datetime.datetime.utcfromtimestamp(
mtime_epoch_seconds).replace(tzinfo=dateutil.tz.tzutc())
# Return the modification time, timezoned, in UTC
return mtime_datetime
def get_size(self, path):
"""
Returns the size in bytes of the given file if it exists, or None
otherwise.
"""
if not self.exists(path):
return None
# Return the size in bytes of the backing file
return os.stat(os.path.join(self.path_prefix, path)).st_size
class BackoffError(RuntimeError):
"""
Represents an error from running out of retries during exponential back-off.
"""
def backoff_times(retries, base_delay):
"""
A generator that yields times for random exponential back-off. You have to
do the exception handling and sleeping yourself. Stops when the retries run
out.
"""
# Don't wait at all before the first try
yield 0
# What retry are we on?
try_number = 1
# Make a delay that increases
delay = float(base_delay) * 2
while try_number <= retries:
        # Wait a random amount between base_delay and 2^try_number * base_delay
yield random.uniform(base_delay, delay)
delay *= 2
try_number += 1
# If we get here, we're stopping iteration without succeeding. The caller
# will probably raise an error.
def backoff(original_function, retries=6, base_delay=10):
"""
We define a decorator that does randomized exponential back-off up to a
certain number of retries. Raises BackoffError if the operation doesn't
succeed after backing off for the specified number of retries (which may be
float("inf")).
Unfortunately doesn't really work on generators.
"""
# Make a new version of the function
@functools.wraps(original_function)
def new_function(*args, **kwargs):
# Call backoff times, overriding parameters with stuff from kwargs
for delay in backoff_times(retries=kwargs.get("retries", retries),
base_delay=kwargs.get("base_delay", base_delay)):
# Keep looping until it works or our iterator raises a
# BackoffError
if delay > 0:
# We have to wait before trying again
RealtimeLogger.error("Retry after {} seconds".format(
delay))
time.sleep(delay)
try:
return original_function(*args, **kwargs)
except:
# Report the formatted underlying exception with traceback
RealtimeLogger.error("{} failed due to: {}".format(
original_function.__name__,
"".join(traceback.format_exception(*sys.exc_info()))))
# If we get here, the function we're calling never ran through before we
# ran out of backoff times. Give an error.
raise BackoffError("Ran out of retries calling {}".format(
original_function.__name__))
return new_function
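# Illustrative sketch (not part of toil-vg): applying the backoff decorator
# above to a flaky operation. `_fetch_with_retries` is a hypothetical function,
# not something the rest of this module uses.
@backoff
def _fetch_with_retries(url):
    # A network call that may raise transiently; backoff retries it with
    # randomized exponential delays before giving up with BackoffError.
    import urllib.request
    return urllib.request.urlopen(url).read()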
class S3IOStore(IOStore):
"""
A class that lets you get input from and send output to AWS S3 Storage.
"""
def __init__(self, region, bucket_name, name_prefix=""):
"""
Make a new S3IOStore that reads from and writes to the given
container in the given account, adding the given prefix to keys. All
paths will be interpreted as keys or key prefixes.
"""
        # Make sure the S3 libraries actually loaded
assert(have_s3)
self.region = region
self.bucket_name = bucket_name
self.name_prefix = name_prefix
self.s3 = None
def __connect(self):
"""
Make sure we have an S3 Bucket connection, and set one up if we don't.
Creates the S3 bucket if it doesn't exist.
"""
if self.s3 is None:
RealtimeLogger.debug("Connecting to bucket {} in region".format(
self.bucket_name, self.region))
# Configure boto3 for caching assumed role credentials with the same cache Toil uses
botocore_session = botocore.session.get_session()
botocore_session.get_component('credential_provider').get_provider('assume-role').cache = botocore.credentials.JSONFileCache()
boto3_session = boto3.Session(botocore_session=botocore_session)
# Connect to the s3 bucket service where we keep everything
self.s3 = boto3_session.client('s3')
try:
self.s3.head_bucket(Bucket=self.bucket_name)
except:
self.s3.create_bucket(Bucket=self.bucket_name,
CreateBucketConfiguration={'LocationConstraint':self.region})
def read_input_file(self, input_path, local_path):
"""
Get input from S3.
"""
self.__connect()
RealtimeLogger.debug("Loading {} from S3IOStore".format(
input_path))
# Download the file contents.
self.s3.download_file(self.bucket_name, os.path.join(self.name_prefix, input_path), local_path)
def list_input_directory(self, input_path, recursive=False,
with_times=False):
"""
Yields each of the subdirectories and files in the given input path.
If recursive is false, yields files and directories in the given
directory. If recursive is true, yields all files contained within the
current directory, recursively, but does not yield folders.
If with_times is True, yields (name, modification time) pairs instead of
just names, with modification times represented as datetime objects in
the GMT timezone. Modification times may be None on objects that do not
support them.
Gives relative file/directory names.
"""
raise NotImplementedError()
def write_output_file(self, local_path, output_path):
"""
Write output to S3.
"""
self.__connect()
RealtimeLogger.debug("Saving {} to S3IOStore".format(
output_path))
# Upload the file contents.
self.s3.upload_file(local_path, self.bucket_name, os.path.join(self.name_prefix, output_path))
def exists(self, path):
"""
Returns true if the given input or output file exists in the store
already.
"""
raise NotImplementedError()
def get_mtime(self, path):
"""
Returns the modification time of the given file if it exists, or None
otherwise.
"""
raise NotImplementedError()
def get_size(self, path):
"""
Returns the size in bytes of the given file if it exists, or None
otherwise.
"""
raise NotImplementedError()
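# Hedged usage sketch (not part of the original module): a minimal round trip
# through S3IOStore. The region, bucket name, and paths are hypothetical, and
# boto3 credentials are assumed to be configured in the environment.
def _example_s3_io_store_usage():
    store = S3IOStore("us-west-2", "my-example-bucket", name_prefix="runs/1")
    store.write_output_file("/tmp/result.txt", "outputs/result.txt")
    store.read_input_file("outputs/result.txt", "/tmp/result_copy.txt")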
class AzureIOStore(IOStore):
"""
A class that lets you get input from and send output to Azure Storage.
"""
def __init__(self, account_name, container_name, name_prefix=""):
"""
Make a new AzureIOStore that reads from and writes to the given
container in the given account, adding the given prefix to keys. All
paths will be interpreted as keys or key prefixes.
If the name prefix does not end with a trailing slash, and is not empty,
one will be added automatically.
Account keys are retrieved from the AZURE_ACCOUNT_KEY environment
variable or from the ~/.toilAzureCredentials file, as in Toil itself.
"""
# Make sure azure libraries actually loaded
assert(have_azure)
self.account_name = account_name
self.container_name = container_name
self.name_prefix = name_prefix
if self.name_prefix != "" and not self.name_prefix.endswith("/"):
# Make sure it has the trailing slash required.
self.name_prefix += "/"
# Sneak into Toil and use the same keys it uses
self.account_key = toil.jobStores.azureJobStore._fetchAzureAccountKey(
self.account_name)
# This will hold our Azure blob store connection
self.connection = None
def __getstate__(self):
"""
Return the state to use for pickling. We don't want to try and pickle
an open Azure connection.
"""
return (self.account_name, self.account_key, self.container_name,
self.name_prefix)
def __setstate__(self, state):
"""
Set up after unpickling.
"""
self.account_name = state[0]
self.account_key = state[1]
self.container_name = state[2]
self.name_prefix = state[3]
self.connection = None
def __connect(self):
"""
Make sure we have an Azure connection, and set one up if we don't.
"""
if self.connection is None:
RealtimeLogger.debug("Connecting to account {}, using "
"container {} and prefix {}".format(self.account_name,
self.container_name, self.name_prefix))
# Connect to the blob service where we keep everything
self.connection = BlobService(
account_name=self.account_name, account_key=self.account_key)
@backoff
def read_input_file(self, input_path, local_path):
"""
Get input from Azure.
"""
self.__connect()
RealtimeLogger.debug("Loading {} from AzureIOStore".format(
input_path))
# Download the blob. This is known to be synchronous, although it can
# call a callback during the process.
self.connection.get_blob_to_path(self.container_name,
self.name_prefix + input_path, local_path)
def list_input_directory(self, input_path, recursive=False,
with_times=False):
"""
Loop over fake /-delimited directories on Azure. The prefix may or may
not have a trailing slash; if not, one will be added automatically.
Returns the names of files and fake directories in the given input fake
directory, non-recursively.
If with_times is specified, will yield (name, time) pairs including
modification times as datetime objects. Times on directories are None.
"""
self.__connect()
RealtimeLogger.info("Enumerating {} from AzureIOStore".format(
input_path))
# Work out what the directory name to list is
fake_directory = self.name_prefix + input_path
if fake_directory != "" and not fake_directory.endswith("/"):
# We have a nonempty prefix, and we need to end it with a slash
fake_directory += "/"
# This will hold the marker that we need to send back to get the next
# page, if there is one. See <http://stackoverflow.com/a/24303682>
marker = None
# This holds the subdirectories we found; we yield each exactly once if
# we aren't recursing.
subdirectories = set()
while True:
# Get the results from Azure. We don't use delimiter since Azure
# doesn't seem to provide the placeholder entries it's supposed to.
result = self.connection.list_blobs(self.container_name,
prefix=fake_directory, marker=marker)
RealtimeLogger.info("Found {} files".format(len(result)))
for blob in result:
# Yield each result's blob name, but directory names only once
# Drop the common prefix
relative_path = blob.name[len(fake_directory):]
if (not recursive) and "/" in relative_path:
# We found a file in a subdirectory, and we aren't supposed
# to be recursing.
subdirectory, _ = relative_path.split("/", 1)
if subdirectory not in subdirectories:
# It's a new subdirectory. Yield and remember it
subdirectories.add(subdirectory)
if with_times:
yield subdirectory, None
else:
yield subdirectory
else:
# We found an actual file
if with_times:
mtime = blob.properties.last_modified
if isinstance(mtime, datetime.datetime):
# Make sure we're getting proper localized datetimes
# from the new Azure Storage API.
assert(mtime.tzinfo is not None and
mtime.tzinfo.utcoffset(mtime) is not None)
else:
# Convert mtime from a string as in the old API.
mtime = dateutil.parser.parse(mtime).replace(
tzinfo=dateutil.tz.tzutc())
yield relative_path, mtime
else:
yield relative_path
# Save the marker
marker = result.next_marker
if not marker:
break
@backoff
def write_output_file(self, local_path, output_path):
"""
Write output to Azure. Will create the container if necessary.
"""
self.__connect()
RealtimeLogger.debug("Saving {} to AzureIOStore".format(
output_path))
try:
# Make the container
self.connection.create_container(self.container_name)
except azure.WindowsAzureConflictError:
# The container probably already exists
pass
# Upload the blob (synchronously)
# TODO: catch no container error here, make the container, and retry
self.connection.put_block_blob_from_path(self.container_name,
self.name_prefix + output_path, local_path)
@backoff
def exists(self, path):
"""
Returns true if the given input or output file exists in Azure already.
"""
self.__connect()
marker = None
while True:
try:
# Make the container
self.connection.create_container(self.container_name)
except azure.WindowsAzureConflictError:
# The container probably already exists
pass
# Get the results from Azure.
result = self.connection.list_blobs(self.container_name,
prefix=self.name_prefix + path, marker=marker)
for blob in result:
# Look at each blob
if blob.name == self.name_prefix + path:
# Found it
return True
# Save the marker
marker = result.next_marker
if not marker:
break
return False
@backoff
def get_mtime(self, path):
"""
Returns the modification time of the given blob if it exists, or None
otherwise.
"""
self.__connect()
marker = None
while True:
# Get the results from Azure.
result = self.connection.list_blobs(self.container_name,
prefix=self.name_prefix + path, marker=marker)
for blob in result:
# Look at each blob
if blob.name == self.name_prefix + path:
# Found it
mtime = blob.properties.last_modified
if isinstance(mtime, datetime.datetime):
# Make sure we're getting proper localized datetimes
# from the new Azure Storage API.
assert(mtime.tzinfo is not None and
mtime.tzinfo.utcoffset(mtime) is not None)
else:
# Convert mtime from a string as in the old API.
mtime = dateutil.parser.parse(mtime).replace(
tzinfo=dateutil.tz.tzutc())
return mtime
# Save the marker
marker = result.next_marker
if not marker:
break
return None
@backoff
def get_size(self, path):
"""
Returns the size in bytes of the given blob if it exists, or None
otherwise.
"""
self.__connect()
marker = None
while True:
# Get the results from Azure.
result = self.connection.list_blobs(self.container_name,
prefix=self.name_prefix + path, marker=marker)
for blob in result:
# Look at each blob
if blob.name == self.name_prefix + path:
# Found it
size = blob.properties.content_length
return size
# Save the marker
marker = result.next_marker
if not marker:
break
return None
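# Hedged usage sketch (not part of the original module): AzureIOStore exposes
# the same IOStore interface as the other backends. The account, container,
# and paths are hypothetical; the account key is resolved the same way Toil
# resolves it (AZURE_ACCOUNT_KEY or ~/.toilAzureCredentials).
def _example_azure_io_store_usage():
    store = AzureIOStore("myaccount", "mycontainer", name_prefix="runs/1")
    store.write_output_file("/tmp/result.txt", "outputs/result.txt")
    if store.exists("outputs/result.txt"):
        return store.get_size("outputs/result.txt"), store.get_mtime("outputs/result.txt")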
|
PypiClean
|
/py-pure-client-1.38.0.tar.gz/py-pure-client-1.38.0/pypureclient/flasharray/FA_2_24/models/policy_nfs_response.py
|
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_24 import models
class PolicyNfsResponse(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'items': 'list[PolicyNfs]'
}
attribute_map = {
'items': 'items'
}
required_args = {
}
def __init__(
self,
items=None, # type: List[models.PolicyNfs]
):
"""
Keyword args:
items (list[PolicyNfs]): Returns a list of all items after filtering. If applicable, the values are displayed for each name.
"""
if items is not None:
self.items = items
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `PolicyNfsResponse`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def __getitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `PolicyNfsResponse`".format(key))
return object.__getattribute__(self, key)
def __setitem__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `PolicyNfsResponse`".format(key))
object.__setattr__(self, key, value)
def __delitem__(self, key):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `PolicyNfsResponse`".format(key))
object.__delattr__(self, key)
def keys(self):
return self.attribute_map.keys()
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(PolicyNfsResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PolicyNfsResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
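# Hedged usage sketch (not part of the generated client code): constructing
# the response model directly and serializing it. An empty items list stands
# in for the PolicyNfs models a real response would carry.
def _example_policy_nfs_response_usage():
    response = PolicyNfsResponse(items=[])
    assert 'items' in response.keys()
    return response.to_dict()  # {'items': []}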
|
PypiClean
|
/ping_stat-1.0-py3-none-any.whl/ping_stat/utils/__init__.py
|
from inspy_logger import InspyLogger
import time
from ping3 import ping, verbose_ping
from ping_stat.logging import add_child as add_child_logger, PROG_NAME
from rich.logging import RichHandler
import inspect
from ping_stat.utils.decorators import validate_properties
from statistics import mean, median
try:
_ = LOGGER
except (ModuleNotFoundError, NameError) as e:
LOGGER = add_child_logger(f'{PROG_NAME}.utils')
LOGGER.debug(f'Loading {LOGGER.name}')
def get_ping_mean(ping_object, *args, **kwargs):
    # gather_times returns a flat list of round-trip times here because the
    # fail list is suppressed; average the non-empty entries.
    times = gather_times(ping_object, *args, return_fail_list=False, **kwargs)
    return mean(p_time for p_time in times if p_time)
def gather_times(
ping_object,
count_timeout_time_for_fails=True,
timeout=5,
return_success_list=True,
return_fail_list=True,
format_fail_times=True
):
ping = ping_object
ping_times = []
failed_pings = []
history = ping.history
print(f'ping.history has {len(history)} entries.')
for item in ping.history:
if item[2] is None:
failed_pings.append(item)
if count_timeout_time_for_fails:
ping_times.append(timeout)
else:
ping_times.append(item[2])
if failed_pings:
if len(failed_pings) >= 2:
noun = 'pings'
elif len(failed_pings) == 1:
noun = 'ping'
print(f'WARNING: Found {len(failed_pings)} failed {noun}')
ret = ()
if return_success_list:
ret = (*ret, ping_times)
if return_fail_list:
ret = (*ret, failed_pings)
if len(ret) == 1:
ret = ret[0]
return ret
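# Hedged usage sketch (not part of the original module): gather_times expects
# an object with a .history attribute whose entries carry the round-trip time
# (or None for a timeout) at index 2. The stand-in class below is hypothetical
# and only illustrates the call shape.
def _example_gather_times_usage():
    class _FakePing:
        history = [('inspyre.tech', 0, 0.021), ('inspyre.tech', 1, None)]

    times, failed = gather_times(_FakePing(), timeout=5)
    # times == [0.021, 5] because the timeout is counted for the failed ping
    # failed == [('inspyre.tech', 1, None)]
    return times, failed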
class TTLTest:
__auto_run: bool = False
__end_at = None
__history = []
__iterations = 5
__min_ttl = None
__starting_at = 0
__address = 'inspyre.tech'
__timeout = 3
def __init__(
self,
address=__address,
auto_run=__auto_run,
iterations=__iterations,
timeout=__timeout,
starting_at=__starting_at,
end_at=__end_at
):
self.address = str(address)
self.auto_run: bool = auto_run
self.end_at = end_at
self.iterations = iterations
self.starting_at = starting_at
self.timeout = timeout
if self.auto_run:
self.run()
@property
def auto_run(self) -> bool:
return self.__auto_run
@auto_run.setter
def auto_run(self, new: bool):
self.__auto_run = new
@property
def iterations(self):
return self.__iterations
@iterations.setter
def iterations(self, new):
if not isinstance(new, int):
raise TypeError('iterations must be an integer')
self.__iterations = new
@iterations.deleter
def iterations(self):
self.__iterations = 5
@property
def history(self):
return self.__history
@property
def timeout(self):
return self.__timeout
@timeout.setter
def timeout(self, new):
if not isinstance(new, (int, float)):
raise TypeError('Timeout must be a number.')
self.__timeout = new
@timeout.deleter
def timeout(self):
self.__timeout = 3
@property
def address(self) -> str:
return str(self.__test_addr)
@address.setter
def address(self, new):
print(dir(new))
print(type(new))
if not isinstance(new, str):
raise TypeError('Address must be a string!')
self.__test_addr = new
@address.deleter
def address(self):
self.__test_addr = 'inspyre.tech'
@property
def minimum_ttl(self):
return self.__min_ttl
@minimum_ttl.setter
def minimum_ttl(self, new):
if not isinstance(new, (int, type(None))):
    raise TypeError('minimum_ttl must be an integer or None')
self.__min_ttl = new
@minimum_ttl.deleter
def minimum_ttl(self):
self.minimum_ttl = None
@property
def starting_at(self) -> int:
return self.__starting_at
@starting_at.setter
def starting_at(self, new):
if not isinstance(new, int):
raise TypeError('"starting_at" must be an integer')
self.__starting_at = new
def get_property_list(self):
return [name for name, value in inspect.getmembers(TTLTest) if isinstance(value, property)]
def run(self, address=None, timeout=None, starting_at=None):
addr = address or self.address
if addr != self.address:
self.address = addr
timeout = timeout or self.timeout
if timeout != self.timeout:
self.timeout = timeout
cur_ttl = (starting_at or self.starting_at) - 1
markA = time.time()
while self.minimum_ttl in [None, 0]:
cur_ttl += 1
mark1 = time.time()
print('Trying')
res = ping(self.address, timeout=self.timeout, ttl=cur_ttl)
print(f'Result: {res or "TIMEOUT"}')
mark2 = time.time()
if res:
self.minimum_ttl = cur_ttl
self.__history.append(f'FOUND:{self.minimum_ttl}')
else:
self.__history.append(f'TIMEOUT - {mark2 - mark1}')
markB = time.time()
tries = len(self.history)
for item in self.history:
if item.startswith('FOUND'):
tries -= 1
print(f'{markB - markA} seconds elapsed total. In {len(self.__history)} tries.')
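# Hedged usage sketch (not part of the original module): probing the minimum
# TTL that reaches a host. The address is the class default; run() performs
# real pings, so this needs network access and may need elevated privileges.
def _example_ttl_test_usage():
    test = TTLTest(address='inspyre.tech', timeout=2, starting_at=1)
    test.run()
    return test.minimum_ttl, test.history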
"""
The MIT License (MIT)
Copyright © 2023 Inspyre Softworks - https://inspyre.tech
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."""
|
PypiClean
|
/avalon-rl-1.0.2.tar.gz/avalon-rl-1.0.2/avalon/agent/common/envs.py
|
import warnings
from typing import Any
from typing import List
from typing import Tuple
from typing import cast
import gym
import numpy as np
from gym.wrappers import TimeLimit
from loguru import logger
from avalon.agent.common import wrappers
from avalon.agent.common.params import DmcEnvironmentParams
from avalon.agent.common.params import EnvironmentParams
from avalon.agent.common.params import ProcgenEnvironmentParams
from avalon.agent.common.test_envs import TestEnvironmentParams
from avalon.agent.common.wrappers import ElapsedTimeWrapper
from avalon.common.type_utils import assert_not_none
def build_env(env_params: EnvironmentParams, torchify: bool = True) -> gym.Env:
# TODO: I should add a "checker" wrapper that checks that observations and actions match what is specced in the Space.
seed = env_params.env_index
env: gym.Env
if env_params.suite == "dmc":
# For rendering, you'll need libglew-(dev/2.0/2.1), and MUJOCO_GL=egl
assert isinstance(env_params, DmcEnvironmentParams)
assert env_params.task is not None
assert "-" not in env_params.task, "use underscores in task names"
# This camera config comes from the dreamerv2 repo
camera = dict(
quadruped_walk=2,
quadruped_run=2,
quadruped_escape=2,
quadruped_fetch=2,
locom_rodent_maze_forage=1,
locom_rodent_two_touch=1,
).get(env_params.task, 0)
env = DeepMindControl(
env_params.task,
size=(64, 64),
include_state=env_params.include_proprio,
include_rgb=env_params.include_rgb,
camera=camera,
seed=seed,
)
env = wrappers.RecordEpisodeStatistics(env) # must happen before reward scaling
env = wrappers.ActionRepeat(env, env_params.action_repeat)
# rescales actions from standard ranges to the env's desired range.
if env_params.time_limit:
env = wrappers.TimeLimit(env, max_episode_steps=env_params.time_limit // env_params.action_repeat)
env = wrappers.DictObsActionWrapper(env)
if env_params.include_rgb:
env = wrappers.ImageTransformWrapper(env, key="rgb", greyscale=False, resolution=None)
elif env_params.suite == "godot":
from avalon.agent.godot.godot_gym import AvalonEnv
from avalon.agent.godot.godot_gym import GodotEnvironmentParams
from avalon.agent.godot.godot_gym import GodotObsTransformWrapper
from avalon.agent.godot.godot_gym import ScaleAndSquashAction
assert isinstance(env_params, GodotEnvironmentParams)
assert env_params.time_limit is None, "godot has its own time limit"
assert env_params.action_repeat == 1
# Note: This will seed itself properly using env_index
env = AvalonEnv(env_params)
# We don't use the TimeLimit wrapper because the time limit is dynamic,
# so we trust that the godot env gives the proper TimeLimit.truncated signal
# (which it should) for the time-limit bootstrapping to work properly if enabled.
env = GodotObsTransformWrapper(env, greyscale=env_params.greyscale)
if env_params.mode == "train":
from avalon.agent.godot.godot_gym import CurriculumWrapper
env = CurriculumWrapper(
env, # type: ignore[arg-type]
task_difficulty_update=env_params.task_difficulty_update,
meta_difficulty_update=env_params.meta_difficulty_update,
)
env = ScaleAndSquashAction(env, scale=1)
env = wrappers.OneHotActionWrapper(env)
# env = RewardSoftClipWrapper(env, scale=5)
elif env_params.suite == "test":
# Note: haven't implemented proper seeding in these test envs.
assert env_params.action_repeat == 1
from avalon.agent.common.test_envs import get_env
assert type(env_params) == TestEnvironmentParams
env = get_env(env_params.task, env_params)
env = wrappers.DictObsActionWrapper(env)
env = wrappers.OneHotActionWrapper(env)
env = wrappers.RecordEpisodeStatistics(env) # must happen before reward scaling
if env_params.time_limit:
env = wrappers.TimeLimit(env, max_episode_steps=env_params.time_limit // env_params.action_repeat)
elif env_params.suite == "gym":
assert env_params.action_repeat == 1
# Annoyingly, gym envs apply their own time limit already.
logger.info("time limit arg ignored in gym envs")
env = gym.make(assert_not_none(env_params.task))
env.seed(seed)
# Hacky. Relies on the time-limit wrapper being the outermost wrapper. Not sure of a better way.
assert isinstance(env, (ElapsedTimeWrapper, TimeLimit))
max_steps = env._max_episode_steps
logger.info(f"env has a time limit of {max_steps} steps")
if env_params.pixel_obs_wrapper:
env = wrappers.PixelObsWrapper(env)
env = wrappers.DictObsActionWrapper(env, obs_key="rgb")
else:
env = wrappers.DictObsActionWrapper(env, obs_key="state") # type: ignore
if env_params.pixel_obs_wrapper:
env = wrappers.ImageTransformWrapper(env, key="rgb", greyscale=True, resolution=64)
env = wrappers.OneHotActionWrapper(env)
if env_params.elapsed_time_obs:
env = wrappers.ElapsedTimeWrapper(env, max_steps)
env = wrappers.RecordEpisodeStatistics(env) # must happen before reward scaling
elif env_params.suite == "atari":
assert env_params.task is not None
assert env_params.action_repeat == 4
assert env_params.time_limit == 27000
assert env_params.elapsed_time_obs is False
# These are the settings from dreamerv2
env = Atari(env_params.task, action_repeat=env_params.action_repeat, size=(64, 64), grayscale=True)
env.seed(seed)
if env_params.time_limit:
# danijar applies the time limit in agent-steps, not action-repeated env steps
env = wrappers.TimeLimit(env, max_episode_steps=env_params.time_limit)
env = wrappers.DictObsActionWrapper(env, obs_key="rgb")
env = wrappers.OneHotActionWrapper(env)
env = wrappers.RecordEpisodeStatistics(env) # must happen before reward scaling
# Note the tanh here!
env = wrappers.ScaleRewards(env, func=np.tanh)
# Just converts from hwc to chw
env = wrappers.ImageTransformWrapper(env, key="rgb")
elif env_params.suite == "procgen":
warnings.filterwarnings("ignore", message=".*Future gym versions will require.*")
# Need this import to register the procgen envs?
import procgen # isort: skip
assert isinstance(env_params, ProcgenEnvironmentParams)
assert env_params.task is not None
assert env_params.time_limit is None, "procgen has its own time limits (although they don't set the info flags)"
env = gym.make(
f"procgen-{env_params.task}-v0",
start_level=env_params.start_level,
num_levels=env_params.num_levels,
distribution_mode=env_params.distribution_mode,
rand_seed=env_params.env_index,
)
env = wrappers.DictObsActionWrapper(env, obs_key="rgb")
env = wrappers.OneHotActionWrapper(env)
env = wrappers.RecordEpisodeStatistics(env) # must happen before reward scaling
env = gym.wrappers.NormalizeReward(env)
env = gym.wrappers.TransformReward(env, lambda reward: np.clip(reward, -10, 10)) # type: ignore[no-any-return]
# Just converts from hwc to chw
env = wrappers.ImageTransformWrapper(env, key="rgb")
else:
assert False
env = wrappers.NormalizeActions(env)
env = wrappers.ScaleRewards(env, env_params.reward_scale)
env = wrappers.ClipActionWrapper(env)
if env_params.frame_stack != 1:
env = wrappers.DictFrameStack(env, num_stack=env_params.frame_stack)
if torchify:
# My worker requires this, but it won't work with eg the builtin gym vecenv.
env = wrappers.Torchify(env)
return env
DMC_TASKS = [
"acrobot_swingup",
"cartpole_balance",
"cartpole_balance_sparse",
"cartpole_swingup",
"cartpole_swingup_sparse",
"cheetah_run",
"cup_catch",
"finger_spin",
"finger_turn_easy",
"finger_turn_hard",
"hopper_hop",
"hopper_stand",
"pendulum_swingup",
"quadruped_run",
"quadruped_walk",
"reacher_easy",
"reacher_hard",
"walker_walk",
"walker_stand",
"walker_run",
]
PROCGEN_ENVS = [
"coinrun",
"starpilot",
"caveflyer",
"dodgeball",
"fruitbot",
"chaser",
"miner",
"jumper",
"leaper",
"maze",
"bigfish",
"heist",
"climber",
"plunder",
"ninja",
"bossfight",
]
class DeepMindControl(gym.Env):
def __init__(
self,
name: str,
size: Tuple[int, int] = (64, 64),
camera: Any = None,
include_state: bool = False,
include_rgb: bool = True,
seed: int = 0,
) -> None:
from dm_control import suite
domain, task = name.split("_", 1)
if domain == "cup": # Only domain with multiple words.
domain = "ball_in_cup"
self._env = suite.load(domain, task, task_kwargs={"random": seed})
self._size = size
if camera is None:
camera = dict(quadruped=2).get(domain, 0)
self._camera = camera
self.include_state = include_state
self.include_rgb = include_rgb
# TODO: fix!
# We just ignore all scalar spaces, because this is how danijar did it (presumably accidentally).
self.scalar_spaces: List[str] = []
@property
def observation_space(self) -> gym.spaces.Dict: # type: ignore[override]
spaces: dict[str, gym.Space] = {}
if self.include_state:
for key, value in self._env.observation_spec().items():
logger.warning("gym spaces do not give observation ranges. no rescaling will be applied.")
if value.shape == ():
self.scalar_spaces.append(key)
continue
spaces[key] = gym.spaces.Box(-np.inf, np.inf, value.shape, dtype=np.float32)
if self.include_rgb:
spaces["rgb"] = gym.spaces.Box(0, 255, self._size + (3,), dtype=np.uint8)
return gym.spaces.Dict(spaces)
@property
def action_space(self) -> gym.spaces.Box: # type: ignore[override]
spec = self._env.action_spec()
return gym.spaces.Box(spec.minimum, spec.maximum, dtype=np.float32)
def step(self, action): # type: ignore
time_step = self._env.step(action)
obs = {}
if self.include_state:
state = {k: v for k, v in dict(time_step.observation).items() if k not in self.scalar_spaces}
state = {k: v.astype(np.float32) if v.dtype == np.float64 else v for k, v in state.items()}
obs |= state
if self.include_rgb:
obs["rgb"] = self.render()
reward = time_step.reward or 0
done = time_step.last()
info = {"discount": np.array(time_step.discount, np.float32)}
return obs, reward, done, info
def reset(self): # type: ignore
time_step = self._env.reset()
obs = {}
if self.include_state:
state = {k: v for k, v in dict(time_step.observation).items() if k not in self.scalar_spaces}
state = {k: v.astype(np.float32) if v.dtype == np.float64 else v for k, v in state.items()}
obs |= state
if self.include_rgb:
obs["rgb"] = self.render()
return obs
def render(self, *args, **kwargs): # type: ignore
if kwargs.get("mode", "rgb_array") != "rgb_array":
raise ValueError("Only render mode 'rgb_array' is supported.")
return self._env.physics.render(*self._size, camera_id=self._camera)
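# Hedged usage sketch (not part of the original module): stepping the
# DeepMindControl wrapper directly. Requires dm_control and MuJoCo; the task
# name is one of the entries in DMC_TASKS below.
def _example_deepmind_control_usage():
    env = DeepMindControl("cartpole_balance", size=(64, 64), include_rgb=True)
    obs = env.reset()
    obs, reward, done, info = env.step(env.action_space.sample())
    return obs["rgb"].shape  # (64, 64, 3)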
ATARI_TASKS = [
# "adventure",
# "air_raid",
"alien",
"amidar",
"assault",
"asterix",
"asteroids",
"atlantis",
"bank_heist",
"battle_zone",
"beam_rider",
"berzerk",
"bowling",
"boxing",
"breakout",
# "carnival",
"centipede",
"chopper_command",
"crazy_climber",
# "defender",
"demon_attack",
"double_dunk",
# "elevator_action",
"enduro",
"fishing_derby",
"freeway",
"frostbite",
"gopher",
"gravitar",
"hero",
"ice_hockey",
"jamesbond",
# "journey_escape",
# "kaboom",
"kangaroo",
"krull",
"kung_fu_master",
"montezuma_revenge",
"ms_pacman",
"name_this_game",
"phoenix",
"pitfall",
"pong",
# "pooyan",
"private_eye",
"qbert",
"riverraid",
"road_runner",
"robotank",
"seaquest",
"skiing",
"solaris",
"space_invaders",
"star_gunner",
"tennis",
"time_pilot",
"tutankham",
"up_n_down",
"venture",
"video_pinball",
"wizard_of_wor",
"yars_revenge",
"zaxxon",
]
class Atari(gym.Env):
def __init__(
self,
name: str,
action_repeat: int = 4,
size: Tuple[int, int] = (84, 84),
grayscale: bool = True,
noops: int = 30,
life_done: bool = False,
sticky: bool = True,
all_actions: bool = False,
) -> None:
assert size[0] == size[1]
import gym.envs.atari
import gym.wrappers
if name == "james_bond":
name = "jamesbond"
# this source is in https://github.com/mgbellemare/Arcade-Learning-Environment/blob/master/src/gym/envs/atari/environment.py
env = gym.envs.atari.AtariEnv(
game=name,
obs_type="image",
frameskip=1,
repeat_action_probability=0.25 if sticky else 0.0,
full_action_space=all_actions,
)
# Avoid unnecessary rendering in inner env.
env._get_obs = lambda: None # type: ignore
# Tell wrapper that the inner env has no action repeat.
env.spec = gym.envs.registration.EnvSpec("NoFrameskip-v0")
self._env = gym.wrappers.AtariPreprocessing(env, noops, action_repeat, size[0], life_done, grayscale)
self._size = size
self._grayscale = grayscale
@property
def observation_space(self) -> gym.spaces.Box: # type: ignore[override]
shape = self._size + (1 if self._grayscale else 3,)
return gym.spaces.Box(0, 255, shape, np.uint8)
@property
def action_space(self) -> gym.spaces.Box: # type: ignore[override]
return cast(gym.spaces.Box, self._env.action_space)
def step(self, action: int): # type: ignore
image, reward, done, info = self._env.step(action)
if self._grayscale:
image = image[..., None]
# info["is_terminal"] = done
# info["is_first"] = False
# info["is_last"] = done
# image = rearrange(image, "h w c -> c h w")
return image, reward, done, info
def reset(self): # type: ignore
image = self._env.reset()
if self._grayscale:
image = image[..., None]
# image = rearrange(image, "h w c -> c h w")
return image
def close(self): # type: ignore
return self._env.close()
def render(self, mode: str = "human"):
raise NotImplementedError
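# Hedged usage sketch (not part of the original module): the Atari wrapper
# mirrors dreamerv2's preprocessing. Requires gym's atari extras and the ROMs;
# the game name is one of the entries in ATARI_TASKS above.
def _example_atari_usage():
    env = Atari("pong", action_repeat=4, size=(64, 64), grayscale=True)
    image = env.reset()
    image, reward, done, info = env.step(env.action_space.sample())
    env.close()
    return image.shape  # (64, 64, 1); grayscale frames keep a channel axis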
|
PypiClean
|
/reportlab2-3.6.11.tar.gz/reportlab2-3.6.11/tools/pythonpoint/styles/modern.py
|
__version__='3.3.0'
# style_modern.py
__doc__="""This is an example style sheet. You can create your own, and
have them loaded by the presentation. A style sheet is just a
dictionary, where the keys are style names and the values are
ParagraphStyle objects.
You must provide a function called "getParagraphStyles()" to
return it. In future, we can put things like LineStyles,
TableCellStyles etc. in the same modules.
You might wish to have two parallel style sheets, one for colour
and one for black and white, so you can switch your presentations
easily.
A style sheet MUST define a style called 'Normal'.
"""
from reportlab.lib import styles
from reportlab.lib.enums import TA_CENTER
def getParagraphStyles():
"""Returns a dictionary of styles based on Helvetica"""
stylesheet = {}
ParagraphStyle = styles.ParagraphStyle
para = ParagraphStyle('Normal', None) #the ancestor of all
para.fontName = 'Helvetica'
para.fontSize = 24
para.leading = 28
stylesheet['Normal'] = para
para = ParagraphStyle('BodyText', stylesheet['Normal'])
para.spaceBefore = 12
stylesheet['BodyText'] = para
para = ParagraphStyle('Indent', stylesheet['Normal'])
para.leftIndent = 36
para.firstLineIndent = 0
stylesheet['Indent'] = para
para = ParagraphStyle('Centered', stylesheet['Normal'])
para.alignment = TA_CENTER
stylesheet['Centered'] = para
para = ParagraphStyle('BigCentered', stylesheet['Normal'])
para.spaceBefore = 12
para.alignment = TA_CENTER
stylesheet['BigCentered'] = para
para = ParagraphStyle('Italic', stylesheet['BodyText'])
para.fontName = 'Helvetica-Oblique'
stylesheet['Italic'] = para
para = ParagraphStyle('Title', stylesheet['Normal'])
para.fontName = 'Helvetica'
para.fontSize = 48
para.leading = 58
para.spaceAfter = 36
para.alignment = TA_CENTER
stylesheet['Title'] = para
para = ParagraphStyle('Heading1', stylesheet['Normal'])
para.fontName = 'Helvetica-Bold'
para.fontSize = 36
para.leading = 44
para.spaceAfter = 36
para.alignment = TA_CENTER
stylesheet['Heading1'] = para
para = ParagraphStyle('Heading2', stylesheet['Normal'])
para.fontName = 'Helvetica-Bold'
para.fontSize = 28
para.leading = 34
para.spaceBefore = 24
para.spaceAfter = 12
stylesheet['Heading2'] = para
para = ParagraphStyle('Heading3', stylesheet['Normal'])
para.fontName = 'Helvetica-BoldOblique'
para.spaceBefore = 24
para.spaceAfter = 12
stylesheet['Heading3'] = para
para = ParagraphStyle('Bullet', stylesheet['Normal'])
para.firstLineIndent = -18
para.leftIndent = 72
para.spaceBefore = 6
para.bulletFontName = 'Symbol'
para.bulletFontSize = 24
para.bulletIndent = 36
stylesheet['Bullet'] = para
para = ParagraphStyle('Bullet2', stylesheet['Bullet'])
para.firstLineIndent = 0
para.bulletIndent = 72
para.leftIndent = 108
stylesheet['Bullet2'] = para
para = ParagraphStyle('Definition', stylesheet['Normal'])
#use this for definition lists
para.firstLineIndent = 0
para.leftIndent = 72
para.bulletIndent = 0
para.spaceBefore = 12
para.bulletFontName = 'Helvetica-BoldOblique'
stylesheet['Definition'] = para
para = ParagraphStyle('Code', stylesheet['Normal'])
para.fontName = 'Courier'
para.fontSize = 16
para.leading = 18
para.leftIndent = 36
stylesheet['Code'] = para
return stylesheet
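# Hedged usage sketch (not part of the original style sheet): pulling styles
# out of the sheet, roughly as a PythonPoint presentation loader would.
def _example_stylesheet_usage():
    sheet = getParagraphStyles()
    normal = sheet['Normal']
    heading = sheet['Heading1']
    return normal.fontName, normal.fontSize, heading.alignment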
|
PypiClean
|
/alipay-sdk-python-pycryptodome-3.3.202.tar.gz/alipay-sdk-python-pycryptodome-3.3.202/alipay/aop/api/request/AlipayCommerceTransportEtcMediaGetRequest.py
|
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.AlipayCommerceTransportEtcMediaGetModel import AlipayCommerceTransportEtcMediaGetModel
class AlipayCommerceTransportEtcMediaGetRequest(object):
def __init__(self, biz_model=None):
self._biz_model = biz_model
self._biz_content = None
self._version = "1.0"
self._terminal_type = None
self._terminal_info = None
self._prod_code = None
self._notify_url = None
self._return_url = None
self._udf_params = None
self._need_encrypt = False
@property
def biz_model(self):
return self._biz_model
@biz_model.setter
def biz_model(self, value):
self._biz_model = value
@property
def biz_content(self):
return self._biz_content
@biz_content.setter
def biz_content(self, value):
if isinstance(value, AlipayCommerceTransportEtcMediaGetModel):
self._biz_content = value
else:
self._biz_content = AlipayCommerceTransportEtcMediaGetModel.from_alipay_dict(value)
@property
def version(self):
return self._version
@version.setter
def version(self, value):
self._version = value
@property
def terminal_type(self):
return self._terminal_type
@terminal_type.setter
def terminal_type(self, value):
self._terminal_type = value
@property
def terminal_info(self):
return self._terminal_info
@terminal_info.setter
def terminal_info(self, value):
self._terminal_info = value
@property
def prod_code(self):
return self._prod_code
@prod_code.setter
def prod_code(self, value):
self._prod_code = value
@property
def notify_url(self):
return self._notify_url
@notify_url.setter
def notify_url(self, value):
self._notify_url = value
@property
def return_url(self):
return self._return_url
@return_url.setter
def return_url(self, value):
self._return_url = value
@property
def udf_params(self):
return self._udf_params
@udf_params.setter
def udf_params(self, value):
if not isinstance(value, dict):
return
self._udf_params = value
@property
def need_encrypt(self):
return self._need_encrypt
@need_encrypt.setter
def need_encrypt(self, value):
self._need_encrypt = value
def add_other_text_param(self, key, value):
if not self.udf_params:
self.udf_params = dict()
self.udf_params[key] = value
def get_params(self):
params = dict()
params[P_METHOD] = 'alipay.commerce.transport.etc.media.get'
params[P_VERSION] = self.version
if self.biz_model:
params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
if self.biz_content:
if hasattr(self.biz_content, 'to_alipay_dict'):
params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
else:
params['biz_content'] = self.biz_content
if self.terminal_type:
params['terminal_type'] = self.terminal_type
if self.terminal_info:
params['terminal_info'] = self.terminal_info
if self.prod_code:
params['prod_code'] = self.prod_code
if self.notify_url:
params['notify_url'] = self.notify_url
if self.return_url:
params['return_url'] = self.return_url
if self.udf_params:
params.update(self.udf_params)
return params
def get_multipart_params(self):
multipart_params = dict()
return multipart_params
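# Hedged usage sketch (not part of the generated SDK code): building a request
# object and inspecting the flat parameter dict it produces. The notify URL
# and the extra text parameter are placeholders.
def _example_request_usage():
    request = AlipayCommerceTransportEtcMediaGetRequest()
    request.notify_url = 'https://example.com/alipay/notify'
    request.add_other_text_param('trace_id', 'demo-123')
    params = request.get_params()
    # params carries the fixed method name plus the optional fields set above
    return params['notify_url'], params['trace_id']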
|
PypiClean
|
/nanome-jax-2.0.11.tar.gz/nanome-jax-2.0.11/src/nanome/xgboost/cs_eval_site.py
|
import argparse
import os.path
import warnings
import joblib
import math
import pandas as pd
from scipy import stats
from scipy.stats import PearsonRConstantInputWarning
from sklearn.metrics import mean_squared_error
from tqdm import tqdm
from nanome.common.global_config import logger, set_log_debug_level, set_log_info_level
from nanome.common.global_settings import CHUNKSIZE
from nanome.xgboost.ml_common import READS_COLUMN_LIST, prob_to_llr_2, top3_tools, region_order
def report_pcc(tool, y_test, y_pred, region_name="Genome-wide", dsname="NA12878"):
"""
Report site-level performance (PCC, p-value and MSE) of a tool at a region
for the given dataset.
Args:
tool: tool name
y_test: true site-level methylation frequencies
y_pred: predicted site-level methylation frequencies
region_name: genomic region label, default "Genome-wide"
dsname: dataset name, default "NA12878"
Returns:
dict with keys Dataset, Tool, Region, PCC, Pvalue, MSE and #Sites
"""
## evaluate model
try: # too few samples will fail
# with warnings.catch_warnings():  # does not work here
warnings.filterwarnings('ignore', category=PearsonRConstantInputWarning)
coe, pval = stats.pearsonr(y_test, y_pred)
except:
coe, pval = None, None
mse = mean_squared_error(y_test, y_pred)
ret = {
'Dataset': dsname,
'Tool': tool,
'Region': region_name,
'PCC': coe,
'Pvalue': pval,
'MSE': mse,
'#Sites': len(y_test),
}
return ret
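# Hedged usage sketch (not part of the original script): report_pcc on a tiny
# pair of methylation-frequency vectors. The numbers are placeholders that
# only illustrate the returned record layout.
def _example_report_pcc_usage():
    y_true = [0.0, 0.5, 1.0, 0.8]
    y_pred = [0.1, 0.4, 0.9, 0.7]
    record = report_pcc('megalodon', y_true, y_pred,
                        region_name='Genome-wide', dsname='NA12878')
    # record holds Dataset, Tool, Region, PCC, Pvalue, MSE and #Sites
    return record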
def parse_arguments():
parser = argparse.ArgumentParser(prog='cs_eval (NANOME)', description='Consensus model site-level evaluation on data')
parser.add_argument('-i', nargs='+', required=True,
help='input data file')
parser.add_argument('--dsname', type=str, default="NA12878",
help='dataset name, default is NA12878')
parser.add_argument('--model-name', nargs='+', type=str, default="xgboost",
help='model name: rf, xgboost, etc.')
parser.add_argument('--model-file', nargs='+', type=str, default="xgboost.pkl",
help='model file')
parser.add_argument('-o', type=str, required=True,
help='output file dir')
parser.add_argument('--processors', type=int, default=1,
help='number of processors, default is 1')
parser.add_argument('--bs-cov', type=int, default=5,
help='bs-seq coverage cutoff, default is 5')
parser.add_argument('--tool-cov', type=int, default=1,
help='ONT tool coverage cutoff, default is 1')
parser.add_argument('--eval-type', type=str, default='site-level',
help='evaluation type, i.e., site-level')
parser.add_argument('--model-base-dir', type=str, default='.',
help="model file's base dir")
parser.add_argument('--test-lines', type=int, default=None,
help='test top N rows, such as 10000, default is None')
parser.add_argument('--chunksize', type=int, default=CHUNKSIZE,
help=f'chunk size for load large data, default is {CHUNKSIZE}')
parser.add_argument('--save-data', type=str, default=None,
help='if save prediction outputs')
parser.add_argument('--force-llr2', help="if convert megalodon llr to llr2", action='store_true')
parser.add_argument('--verbose', help="if output verbose info", action='store_true')
return parser.parse_args()
if __name__ == '__main__':
## input arguments
args = parse_arguments()
if args.verbose:
set_log_debug_level()
else:
set_log_info_level()
logger.debug(f"args={args}")
## load models
model_list = {}
for k in range(len(args.model_name)):
model_name = args.model_name[k]
model_file = args.model_file[k]
infn = os.path.join(args.model_base_dir, model_file)
# logger.debug(f"load modelname={model_name}, model file: {infn}")
model_cls = joblib.load(infn)
model_list[model_name] = model_cls
logger.debug(f"num of tools = {len(model_list)}")
## predict on each input
site_df_list = []
for infn in tqdm(args.i[:]):
# logger.debug(f"Processing input: {infn}")
df_iter = pd.read_csv(infn, sep='\t', header=0, index_col=False, iterator=True,
chunksize=args.chunksize, nrows=args.test_lines)
df_list = []
for chunck_df in df_iter:
df1 = chunck_df[
READS_COLUMN_LIST + top3_tools + ['k_mer'] +
['Freq', 'Coverage', 'Region']].copy()
df1.dropna(subset=top3_tools, inplace=True, how='any')
df1.dropna(subset=['Freq', 'Coverage', 'Region'], inplace=True, how='any')
df1['k_mer'].fillna('N' * 17, inplace=True)
# df1.info()
# logger.debug(df1['k_mer'])
df_list.append(df1)
df = pd.concat(df_list)
df.drop_duplicates(subset=READS_COLUMN_LIST, inplace=True)
df = df[df['Coverage'] >= args.bs_cov]
if args.force_llr2: # convert ln to log2
df['megalodon'] = df['megalodon'] / math.log(2)
df.reset_index(drop=True, inplace=True)
# logger.debug(f"df={df.shape}")
llr2_df1 = df[READS_COLUMN_LIST + top3_tools + ['Freq', 'Coverage', 'Region']].copy()
top3Features = df[top3_tools]
# logger.debug(top3Features.shape)
seqFeatures = df['k_mer'].apply(lambda x: pd.Series(list(x))).copy()
seqFeatures.columns = [f"DNASeq_{k}" for k in range(len(seqFeatures.columns))]
# logger.debug(seqFeatures.shape)
X12 = pd.concat([top3Features, seqFeatures], axis=1)
# logger.debug(X12.shape)
for model_name in model_list:
mm = model_list[model_name]
if not model_name.endswith('_seq'):
y_score = pd.DataFrame(mm.predict_proba(top3Features), index=top3Features.index)[1]
else:
y_score = pd.DataFrame(mm.predict_proba(X12), index=X12.index)[1]
# logger.debug(f"y_score.shape={y_score.shape}, model_name={model_name}")
llr2_df1[model_name] = y_score.apply(prob_to_llr_2)
# logger.debug(llr2_df1)
## convert llr2 to pred
for tool in top3_tools + args.model_name:
llr2_df1[tool] = llr2_df1[tool].apply(lambda x: 1 if x >= 0 else 0)
# logger.debug(llr2_df1)
# llr2_df1.info()
## df_gp = df[["Chr", "Pos", "Strand", "ID"] + tool_list + ["Freq"]].groupby(by=["Chr", "Pos", "Strand"]).agg(agg_func)
agg_func = {tool: 'mean' for tool in top3_tools + args.model_name}
agg_func.update({'Freq': 'first', 'Coverage': 'first', 'Region': 'first', 'ID': 'count'})
site_df1 = llr2_df1.groupby(by=['Chr', 'Pos', 'Strand']).agg(agg_func)
site_df1 = site_df1[site_df1['ID'] >= args.tool_cov]
site_df1.reset_index(drop=False, inplace=True)
# logger.debug(site_df1)
site_df_list.append(site_df1)
site_df = pd.concat(site_df_list)
site_df.reset_index(drop=True, inplace=True)
if args.save_data is not None:
outfn = os.path.join(args.o, args.save_data)
site_df.to_csv(outfn, index=False)
logger.info(f"save to {outfn}")
# logger.debug(site_df)
# site_df.info()
## evaluate site-level performance
# logger.debug(list(site_df['Region'].unique()))
dataset = []
for tool in top3_tools + args.model_name:
for region in [None] + list(site_df['Region'].unique()):
# logger.debug(region)
y_true = site_df['Freq']
y_pred = site_df[tool]
if region is not None:
region_index = (site_df['Region'] == region)
y_true = y_true[region_index]
y_pred = y_pred[region_index]
ret = report_pcc(tool, y_true, y_pred,
region_name=region if region is not None else 'Genome-wide',
dsname=args.dsname)
# logger.debug(ret)
dataset.append(ret)
ret_df = pd.DataFrame.from_dict(dataset)
ret_df['Region'] = pd.Categorical(ret_df['Region'],
categories=region_order,
ordered=True)
ret_df.sort_values(by=['Dataset', 'Region', 'PCC'], ascending=[True, True, False], inplace=True)
logger.debug(ret_df)
outfn = os.path.join(args.o, f"Consensus_site_level_eval_{args.dsname}.csv")
ret_df.to_csv(outfn, index=False)
logger.info(f"save to {outfn}")
logger.info(f"## consensus site-level eval DONE")
|
PypiClean
|
/plainbox-0.38.0.tar.gz/plainbox-0.38.0/README.rst
|
PlainBox
========
PlainBox is a toolkit consisting of a python3 library, development tools,
documentation and examples. It is targeted at developers working on testing or
certification applications and at authors creating tests for such applications.
PlainBox can be used both to create simple and comprehensive test tools and to
develop and execute test jobs and test scenarios. It was created as a refined
and rewritten core of the CheckBox project. It has a well-tested and documented
core, a small but active development community and a collection of associated
projects that use it as a lower-level engine/back-end library.
PlainBox has a novel approach to discovering (and probing) hardware and
software that is extensible and not hardwired into the system. It allows test
developers to express association between a particular test and the hardware,
software and configuration constraints that must be met for the test to execute
meaningfully. This feature, along with pluggable test definitions, makes
PlainBox flexible and applicable to many diverse testing situations, ranging
from mobile phones and traditional desktop computers up to servers and "cloud"
installations.
External Documentation Links
============================
* `Using PlainBox <http://plainbox.readthedocs.org/en/latest/usage.html>`_
* `Hacking on PlainBox <http://plainbox.readthedocs.org/en/latest/dev/index.html>`_
* `Testing PlainBox <http://plainbox.readthedocs.org/en/latest/dev/intro.html#running-plainbox-tests>`_
Known Issues
============
https://bugs.launchpad.net/plainbox
|
PypiClean
|
/fhir_types-0.2.4-py3-none-any.whl/fhir_types/FHIR_Dosage_DoseAndRate.py
|
from typing import Any, List, Literal, TypedDict
from .FHIR_CodeableConcept import FHIR_CodeableConcept
from .FHIR_Quantity import FHIR_Quantity
from .FHIR_Range import FHIR_Range
from .FHIR_Ratio import FHIR_Ratio
from .FHIR_string import FHIR_string
# Indicates how the medication is/was taken or should be taken by the patient.
FHIR_Dosage_DoseAndRate = TypedDict(
"FHIR_Dosage_DoseAndRate",
{
# Unique id for the element within a resource (for internal references). This may be any string value that does not contain spaces.
"id": FHIR_string,
# May be used to represent additional information that is not part of the basic definition of the element. To make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer can define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension.
"extension": List[Any],
# May be used to represent additional information that is not part of the basic definition of the element and that modifies the understanding of the element in which it is contained and/or the understanding of the containing element's descendants. Usually modifier elements provide negation or qualification. To make the use of extensions safe and manageable, there is a strict set of governance applied to the definition and use of extensions. Though any implementer can define an extension, there is a set of requirements that SHALL be met as part of the definition of the extension. Applications processing a resource are required to check for modifier extensions.Modifier extensions SHALL NOT change the meaning of any elements on Resource or DomainResource (including cannot change the meaning of modifierExtension itself).
"modifierExtension": List[Any],
# The kind of dose or rate specified, for example, ordered or calculated.
"type": FHIR_CodeableConcept,
# Amount of medication per dose.
"doseRange": FHIR_Range,
# Amount of medication per dose.
"doseQuantity": FHIR_Quantity,
# Amount of medication per unit of time.
"rateRatio": FHIR_Ratio,
# Amount of medication per unit of time.
"rateRange": FHIR_Range,
# Amount of medication per unit of time.
"rateQuantity": FHIR_Quantity,
},
total=False,
)
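# Hedged usage sketch (not part of the generated typings): TypedDict types are
# plain dicts at runtime, so a dose-and-rate element is just a dict whose keys
# match the definition above. The quantity payload is a placeholder value.
_example_dose_and_rate: FHIR_Dosage_DoseAndRate = {
    "id": "dose-1",
    "doseQuantity": {"value": 500, "unit": "mg"},
}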
|
PypiClean
|
/accel-brain-base-1.1.0.tar.gz/accel-brain-base-1.1.0/accelbrainbase/observabledata/_mxnet/adversarialmodel/discriminativemodel/eb_discriminative_model.py
|
from accelbrainbase._mxnet._exception.init_deferred_error import InitDeferredError
from accelbrainbase.observabledata._mxnet.adversarialmodel.discriminative_model import DiscriminativeModel
from mxnet.gluon.block import HybridBlock
from mxnet import gluon
from mxnet import autograd
import numpy as np
import mxnet as mx
from mxnet import MXNetError
from logging import getLogger
class EBDiscriminativeModel(DiscriminativeModel):
'''
Discriminative model, which discriminates true from fake,
in the Energy-based Generative Adversarial Network (EBGAN).
The Energy-based Generative Adversarial Network (EBGAN) model (Zhao, J., et al., 2016)
views the discriminator as an energy function that attributes low energies to the regions
near the data manifold and higher energies to other regions. Auto-Encoders have traditionally
been used to represent energy-based models. When trained with some regularization terms,
the Auto-Encoders have the ability to learn an energy manifold without supervision or negative examples.
This means that even when an energy-based Auto-Encoding model is trained to reconstruct a real sample,
the model contributes to discovering the data manifold by itself.
References:
- Zhao, J., Mathieu, M., & LeCun, Y. (2016). Energy-based generative adversarial network. arXiv preprint arXiv:1609.03126.
'''
def __init__(
self,
model,
initializer=None,
learning_rate=1e-05,
optimizer_name="SGD",
hybridize_flag=True,
scale=1.0,
ctx=mx.gpu(),
**kwargs
):
'''
Init.
Args:
model: is-a `mxnet.gluon.hybrid.hybridblock.HybridBlock`.
initializer: is-a `mxnet.initializer` for parameters of model. If `None`, it is drawing from the Xavier distribution.
learning_rate: `float` of learning rate.
optimizer_name: `str` of name of optimizer.
hybridize_flag: Call `mxnet.gluon.HybridBlock.hybridize()` or not.
scale: `float` of scaling factor for initial parameters.
ctx: `mx.cpu()` or `mx.gpu()`.
'''
logger = getLogger("accelbrainbase")
self.__logger = logger
init_deferred_flag = self.init_deferred_flag
self.init_deferred_flag = True
if initializer is None:
self.initializer = mx.initializer.Xavier(
rnd_type="gaussian",
factor_type="in",
magnitude=1
)
else:
if isinstance(initializer, mx.initializer.Initializer) is False:
raise TypeError("The type of `initializer` must be `mxnet.initializer.Initializer`.")
self.initializer = initializer
super().__init__(
model=model,
initializer=initializer,
learning_rate=learning_rate,
optimizer_name=optimizer_name,
hybridize_flag=hybridize_flag,
scale=scale,
ctx=ctx,
**kwargs
)
self.model = model
self.init_deferred_flag = init_deferred_flag
if self.init_deferred_flag is False:
try:
self.collect_params().initialize(self.initializer, force_reinit=True, ctx=ctx)
self.trainer = gluon.Trainer(
self.collect_params(),
optimizer_name,
{
"learning_rate": learning_rate
}
)
if hybridize_flag is True:
try:
self.model.encoder.hybridize()
self.model.decoder.hybridize()
except AttributeError:
pass
except InitDeferredError:
self.__logger.debug("The initialization should be deferred.")
def inference(self, observed_arr):
'''
Draw samples from the fake distribution.
Args:
observed_arr: `mxnet.ndarray` or `mxnet.symbol` of observed data points.
Returns:
`Tuple` of `mxnet.ndarray`s.
'''
return self(observed_arr)
def hybrid_forward(self, F, x):
'''
Hybrid forward with Gluon API.
Args:
F: `mxnet.ndarray` or `mxnet.symbol`.
x: `mxnet.ndarray` of observed data points.
Returns:
`mxnet.ndarray` or `mxnet.symbol` of inferenced feature points.
'''
# rank-3
return self.forward_propagation(F, x)
def forward_propagation(self, F, x):
'''
Hybrid forward with Gluon API.
Args:
F: `mxnet.ndarray` or `mxnet.symbol`.
x: `mxnet.ndarray` of observed data points.
Returns:
`mxnet.ndarray` or `mxnet.symbol` of inferenced feature points.
'''
inferenced_arr = self.model.forward_propagation(F, x)
inferenced_arr = F.reshape_like(inferenced_arr, x)
mse_arr = F.square(x - inferenced_arr)
return F.expand_dims(
F.mean(
mse_arr,
axis=0,
exclude=True
),
axis=-1
)
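# Hedged illustration (not part of the original class): the energy assigned to
# a batch by forward_propagation above is the per-sample mean squared
# reconstruction error. The standalone computation below reproduces that
# formula with plain mxnet NDArrays and a fake "reconstruction".
def _example_reconstruction_energy():
    x = mx.nd.random.normal(shape=(4, 3, 8, 8))
    reconstructed = x + mx.nd.random.normal(scale=0.1, shape=(4, 3, 8, 8))
    mse_arr = mx.nd.square(x - reconstructed)
    # Mean over every axis except the batch axis: one energy value per sample.
    energy = mx.nd.expand_dims(mx.nd.mean(mse_arr, axis=0, exclude=True), axis=-1)
    return energy.shape  # (4, 1)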
|
PypiClean
|
/scikit-cache-0.1.2.tar.gz/scikit-cache-0.1.2/scikit_cache/components/estimators.py
|
import logging
from contextlib import contextmanager
from typing import Any
from ..resources import (
CacheKey,
ObjCacheMeta,
)
from ..utils import (
format_bytes_to_str,
hash_for_iterable,
)
estimator_logger = logging.getLogger('scikit_cache.estimator')
class EstimatorsMixin:
"""Mixin for cache controller to work with SKLearn estimators."""
@contextmanager
def make_cached_estimator(self, estimator: Any) -> Any:
"""Make estimator instance with cachable methods.
This is a context manager and works like this:
    with cache.make_cached_estimator(estimator) as cached_estimator:
        cached_estimator.fit()
This function modifies the existing estimator instance. The returned instance has the
same class, but its ``.fit()`` method is replaced with a caching version.
This "cached estimator" can be used anywhere just like a usual SKLearn estimator, but
every time ``.fit()`` is called it checks the cache to see whether the estimator was
already fitted and cached.
To enable caching for cached estimators you need to enable the cache using the
``cache.enable()`` function. By default, all cached estimators work as normal estimators.
"""
estimator_class = estimator.__class__
if not hasattr(estimator_class, '__original_fit__'):
estimator_class.__original_fit__ = estimator_class.fit
estimator_class.fit = self._estimator_fit_with_cache
estimator_class.__cache_ctrl__ = self
try:
yield estimator
finally:
if hasattr(estimator_class, '__original_fit__'):
estimator_class.fit = estimator_class.__original_fit__
delattr(estimator_class, '__original_fit__')
delattr(estimator_class, '__cache_ctrl__')
@staticmethod
def _estimator_fit_with_cache(instance: Any, *args: Any, **kwargs: Any) -> Any:
"""Function that implements ``BaseEstimator.fit()`` with cache mechanisms."""
from sklearn.utils.validation import check_is_fitted
cache = instance.__cache_ctrl__
# If caching is disabled then use original ``.fit()`` function
if not cache.is_enabled_for_estimators:
return instance.__original_fit__(*args, **kwargs)
# Get hash of all fit params including class and original parameters
estimator_hash = hash_for_iterable((
instance.__class__,
instance.get_params(),
args,
kwargs,
))
# Make cache key
raw_key = f'estimators__{estimator_hash}'
cache_key = CacheKey(raw_key)
# Check if cached result exists (if read mode enabled)
if 'r' in cache.__mode__:
found, cached_result = cache._get(cache_key)
if found:
instance.__dict__ = cached_result.__dict__
check_is_fitted(instance)
cache._log(
'estimator cache hit',
level='info',
logger=estimator_logger,
)
return instance
else:
cache._log(
'estimator cache miss',
level='warning',
logger=estimator_logger,
)
# Call original ``.fit()`` function
fit_result = instance.__original_fit__(*args, **kwargs)
check_is_fitted(fit_result)
# Save fit result to cache
if 'w' in cache.__mode__:
cache_meta = ObjCacheMeta(
raw_key=raw_key,
ttl=cache.default_ttl,
**cache._base_meta.dict(),
)
cache._set(cache_key, fit_result, cache_meta)
size = format_bytes_to_str(cache_meta.object_size)
cache._log(
f'estimator cache write - {size}',
level='info',
logger=estimator_logger,
)
return fit_result
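# Hedged usage sketch (not part of the original module): filling in the
# context-manager pattern from the make_cached_estimator docstring with a
# concrete scikit-learn estimator. `cache` is assumed to be a controller
# instance that mixes in EstimatorsMixin and has been enabled via
# cache.enable(), as that docstring describes.
def _example_cached_estimator_usage(cache, X_train, y_train):
    from sklearn.linear_model import LogisticRegression

    estimator = LogisticRegression()
    with cache.make_cached_estimator(estimator) as cached_estimator:
        # The first call computes and stores the fit; identical repeat calls
        # with caching enabled are served from the cache.
        cached_estimator.fit(X_train, y_train)
    return estimator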
|
PypiClean
|
/pdb2pqr_htmd_propka30-0.0.4.tar.gz/pdb2pqr_htmd_propka30-0.0.4/pdb2pqr/src/quatfit.py
|
__date__ = "28 February 2006"
__author__ = "David Heisterberg, Jan Labanowski, Jens Erik Nielsen, Todd Dolinsky"
import math
from .utilities import *
def findCoordinates(numpoints, refcoords, defcoords, defatomcoords):
"""
Driver for the quaternion file. Provide the coordinates as inputs
and obtain the coordinates for the new atom as output.
Parameters
numpoints: The number of points in each list (int)
refcoords: The reference coordinates, a list of lists of form
[x,y,z] (list)
defcoords: The definition coordinates, a list of lists of form
[x,y,z] (list)
defatomcoords: The definition coordinates for the atom to be
placed in the reference frame (list)
Returns
newcoords: The coordinates of the new atom in the
reference frame (list)
"""
refcenter, fitcenter, rotation = qfit(numpoints, refcoords, defcoords)
newcoords = qtransform(1, defatomcoords, refcenter, fitcenter, rotation)
# Only return the first coordinates
return newcoords[0]
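# Hedged usage sketch (not part of the original module): placing an atom that
# is defined relative to three definition-frame points into the frame of three
# reference points. The toy coordinates below make the reference frame a copy
# of the definition frame shifted by +1 along x, so the placed atom comes back
# shifted the same way (approximately [1.5, 0.5, 0.5]).
def _example_find_coordinates_usage():
    refcoords = [[1.0, 0.0, 0.0], [2.0, 0.0, 0.0], [1.0, 1.0, 0.0]]
    defcoords = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]
    defatomcoords = [0.5, 0.5, 0.5]
    return findCoordinates(3, refcoords, defcoords, defatomcoords)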
def qtransform(numpoints, defcoords, refcenter, fitcenter, rotation):
"""
Transform the set of defcoords using the reference center, the fit
center, and a rotation matrix.
Parameters
numpoints: The number of points in each list (int)
defcoords: Definition coordinates (list)
refcenter: The reference center (list)
fitcenter: The definition (fit) center (list)
rotation: The rotation matrix (list)
Returns
newcoords: The coordinates of the new point (list)
"""
if numpoints == 1:
defcoords = [defcoords]
fitcoords = translate(numpoints, defcoords, fitcenter, 1)
rotated = rotmol(numpoints, fitcoords, rotation)
newcoords = translate(numpoints, rotated, refcenter, 2)
return newcoords
def qfit(numpoints, refcoords, defcoords):
"""
Method for getting new atom coordinates from sets of reference
and definition coordinates.
Parameters
numpoints: The number of points in each list (int)
refcoords: List of reference coordinates, with each set
a list of form [x,y,z] (list)
defcoords: List of definition coordinates, with each set
a list of form [x,y,z] (list)
Returns
    refcenter: Center of the reference coordinates (list)
    defcenter: Center of the definition coordinates (list)
    u: The left rotation matrix that fits the definition
       coordinates onto the reference coordinates (list)
"""
nrot = 30
refcenter, refcoords = center(numpoints, refcoords)
defcenter, defcoords = center(numpoints, defcoords)
q, u = qtrfit(numpoints, defcoords, refcoords, nrot)
rotated = rotmol(numpoints, defcoords, u)
newcoords = translate(numpoints, rotated, refcenter, 2)
return refcenter, defcenter, u
def qchichange(initcoords, refcoords, angle):
"""
Change the chiangle of the reference coordinate using the
initcoords and the given angle
Parameters
    initcoords: Coordinates based on the point and basis atoms
                (one dimensional list)
    refcoords : The atoms to analyze (list of many coordinates)
    angle     : The angle to use, in degrees (float)
Returns
newcoords : The new coordinates of the atoms (list of many coords)
"""
# Initialize
L,R = [],[]
for i in range(3):
L.append(0.0)
R.append([0.0,0.0,0.0])
# Convert to radians and normalize
radangle = math.pi * angle/180.0
normalized = normalize(initcoords)
L[0] = normalized[0]
L[1] = normalized[1]
L[2] = normalized[2]
# Construct the axis-angle rotation matrix about the normalized axis L
R[0][0] = math.cos(radangle) + L[0]*L[0] * (1.0 - math.cos(radangle))
R[1][1] = math.cos(radangle) + L[1]*L[1] * (1.0 - math.cos(radangle))
R[2][2] = math.cos(radangle) + L[2]*L[2] * (1.0 - math.cos(radangle))
R[1][0] = L[0]*L[1]*(1.0 - math.cos(radangle)) - L[2] * math.sin(radangle)
R[2][0] = L[0]*L[2]*(1.0 - math.cos(radangle)) + L[1] * math.sin(radangle)
R[0][1] = L[1]*L[0]*(1.0 - math.cos(radangle)) + L[2] * math.sin(radangle)
R[2][1] = L[1]*L[2]*(1.0 - math.cos(radangle)) - L[0] * math.sin(radangle)
R[0][2] = L[2]*L[0]*(1.0 - math.cos(radangle)) - L[1] * math.sin(radangle)
R[1][2] = L[2]*L[1]*(1.0 - math.cos(radangle)) + L[0] * math.sin(radangle)
numpoints = len(refcoords)
newcoords = rotmol(numpoints, refcoords, R)
return newcoords
def rotmol(numpoints, x, u):
"""
Rotate a molecule
Parameters
numpoints: The number of points in the list (int)
x: The input coordinates (list)
u: The left rotation matrix (list)
Returns
out: The rotated coordinates out=u * x (list)
"""
out = []
for i in range(numpoints):
out.append([])
out[i].append(u[0][0] *x[i][0] + u[1][0] * x[i][1] + u[2][0] * x[i][2])
out[i].append(u[0][1] *x[i][0] + u[1][1] * x[i][1] + u[2][1] * x[i][2])
out[i].append(u[0][2] *x[i][0] + u[1][2] * x[i][1] + u[2][2] * x[i][2])
return out
def qtrfit(numpoints, defcoords, refcoords, nrot):
"""
Find the quaternion, q, [and left rotation matrix, u] that minimizes
| qTXq - Y | ^ 2 [|uX - Y| ^ 2]
This is equivalent to maximizing Re (qTXTqY)
The left rotation matrix, u, is obtained from q by
u = qT1q
Parameters
numpoints: The number of points in each list (int)
defcoords: List of definition coordinates, with each set
a list of form [x,y,z] (list)
refcoords: List of fitted coordinates, with each set
a list of form [x,y,z] (list)
nrot : The maximum number of jacobi sweeps
Returns
q : The best-fit quaternion
u : The best-fit left rotation matrix
"""
xxyx = 0.0
xxyy = 0.0
xxyz = 0.0
xyyx = 0.0
xyyy = 0.0
xyyz = 0.0
xzyx = 0.0
xzyy = 0.0
xzyz = 0.0
q = []
c = []
for i in range(numpoints):
xxyx = xxyx + defcoords[i][0] * refcoords[i][0]
xxyy = xxyy + defcoords[i][0] * refcoords[i][1]
xxyz = xxyz + defcoords[i][0] * refcoords[i][2]
xyyx = xyyx + defcoords[i][1] * refcoords[i][0]
xyyy = xyyy + defcoords[i][1] * refcoords[i][1]
xyyz = xyyz + defcoords[i][1] * refcoords[i][2]
xzyx = xzyx + defcoords[i][2] * refcoords[i][0]
xzyy = xzyy + defcoords[i][2] * refcoords[i][1]
xzyz = xzyz + defcoords[i][2] * refcoords[i][2]
for i in range(4):
c.append([])
for j in range(4):
c[i].append(0.0)
c[0][0] = xxyx + xyyy + xzyz
c[0][1] = xzyy - xyyz
c[1][1] = xxyx - xyyy - xzyz
c[0][2] = xxyz - xzyx
c[1][2] = xxyy + xyyx
c[2][2] = xyyy - xzyz - xxyx
c[0][3] = xyyx - xxyy
c[1][3] = xzyx + xxyz
c[2][3] = xyyz + xzyy
c[3][3] = xzyz - xxyx - xyyy
d,v = jacobi(c, nrot) # diagonalize c
for i in range(4):
q.append(v[i][3])
u = q2mat(q)
return q,u
def jacobi(a, nrot):
"""
Jacobi diagonalizer with sorted output, only good for 4x4 matrices
Parameters
a: Matrix to diagonalize (4x4 list)
nrot: Maximum number of sweeps
Returns
d: Eigenvalues
v: Eigenvectors
"""
v = []
d = []
for j in range(4):
d.append(0)
v.append([])
for i in range(4):
v[j].append(0.0)
v[j][j] = 1.0
d[j] = a[j][j]
for l in range(nrot):
dnorm = 0.0
onorm = 0.0
for j in range(4):
dnorm = dnorm + abs(d[j])
for i in range(j):
onorm = onorm + abs(a[i][j])
if dnorm != 0:
if onorm/dnorm <= 1e-12: break
for j in range(1,4):
for i in range(j):
b = a[i][j]
if abs(b) > 0.0:
dma = d[j] - d[i]
if abs(dma) + abs(b) <= abs(dma):
t = b / dma
else:
q = 0.5 * dma/b
t = 1.0/(abs(q) + math.sqrt(1 + q*q))
if q < 0:
t = t * -1
c = 1.0/math.sqrt(t*t + 1)
s = t*c
a[i][j] = 0.0
for k in range(i):
atemp = c * a[k][i] - s * a[k][j]
a[k][j] = s * a[k][i] + c * a[k][j]
a[k][i] = atemp
for k in range(i+1 ,j):
atemp = c * a[i][k] - s * a[k][j]
a[k][j] = s * a[i][k] + c * a[k][j]
a[i][k] = atemp
for k in range(j+1, 4):
atemp = c * a[i][k] - s * a[j][k]
a[j][k] = s * a[i][k] + c * a[j][k]
a[i][k] = atemp
for k in range(4):
vtemp = c * v[k][i] - s * v[k][j]
v[k][j] = s * v[k][i] + c * v[k][j]
v[k][i] = vtemp
dtemp = c*c*d[i] + s*s*d[j] - 2.0*c*s*b
d[j] = s*s*d[i] + c*c*d[j] + 2.0*c*s*b
d[i] = dtemp
nrot = l
for j in range(3):
k = j
dtemp = d[k]
for i in range(j+1,4):
if d[i] < dtemp:
k = i
dtemp = d[k]
if k > j:
d[k] = d[j]
d[j] = dtemp
for i in range(4):
dtemp = v[i][k]
v[i][k] = v[i][j]
v[i][j] = dtemp
return d,v
def q2mat(q):
"""
Generate a left rotation matrix from a normalized quaternion
Parameters
q: The normalized quaternion (list)
Returns
u: The rotation matrix (2-dimensional list)
"""
u = []
for i in range(3):
u.append([])
for j in range(3):
u[i].append(0.0)
u[0][0] = q[0]*q[0] + q[1]*q[1] - q[2]*q[2] - q[3]*q[3]
u[0][1] = 2.0 * (q[1] * q[2] - q[0] * q[3])
u[0][2] = 2.0 * (q[1] * q[3] + q[0] * q[2])
u[1][0] = 2.0 * (q[2] * q[1] + q[0] * q[3])
u[1][1] = q[0]*q[0] - q[1]*q[1] + q[2]*q[2] - q[3]*q[3]
u[1][2] = 2.0 * (q[2] * q[3] - q[0] * q[1])
u[2][0] = 2.0 *(q[3] * q[1] - q[0] * q[2])
u[2][1] = 2.0 * (q[3] * q[2] + q[0] * q[1])
u[2][2] = q[0]*q[0] - q[1]*q[1] - q[2]*q[2] + q[3]*q[3]
return u
def center(numpoints, refcoords):
"""
Center a molecule using equally weighted points
Parameters
numpoints: Number of points
refcoords: List of reference coordinates, with each set
a list of form [x,y,z] (list)
Returns
refcenter: Center of the set of points (list)
relcoords: Moved refcoords relative to refcenter (list)
"""
refcenter = []
relcoords = []
for i in range(3):
refcenter.append(0.0)
for i in range(numpoints):
refcenter[0] += refcoords[i][0]
refcenter[1] += refcoords[i][1]
refcenter[2] += refcoords[i][2]
for i in range(3):
refcenter[i] = refcenter[i] / numpoints
for i in range(numpoints):
relcoords.append([])
relcoords[i].append(refcoords[i][0] - refcenter[0])
relcoords[i].append(refcoords[i][1] - refcenter[1])
relcoords[i].append(refcoords[i][2] - refcenter[2])
return refcenter, relcoords
def translate(numpoints, refcoords, center, mode):
"""
Translate a molecule using equally weighted points
Parameters
numpoints: Number of points
refcoords: List of reference coordinates, with each set
a list of form [x,y,z] (list)
center: Center of the system(list)
mode: If 1, center will be subtracted from refcoords
If 2, center will be added to refcoords
Returns
relcoords: Moved refcoords relative to refcenter (list)
"""
relcoords = []
    if mode == 1:
        modif = -1
    elif mode == 2:
        modif = 1
    else:
        raise ValueError("mode must be 1 (subtract center) or 2 (add center)")
for i in range(numpoints):
relcoords.append([])
relcoords[i].append(refcoords[i][0] + modif * center[0])
relcoords[i].append(refcoords[i][1] + modif * center[1])
relcoords[i].append(refcoords[i][2] + modif * center[2])
return relcoords
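# ----------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module). It exercises the
# findCoordinates() driver defined above on three arbitrary, made-up points;
# the values below are illustrative only.
def _example_find_coordinates():
    """Fit three definition points onto three reference points and place one
    extra atom, returning its coordinates in the reference frame."""
    refcoords = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]
    # The definition frame is the reference frame shifted by +1 along z
    defcoords = [[0.0, 0.0, 1.0], [1.0, 0.0, 1.0], [0.0, 1.0, 1.0]]
    defatomcoords = [0.5, 0.5, 1.0]
    # Expected result is close to [0.5, 0.5, 0.0]
    return findCoordinates(3, refcoords, defcoords, defatomcoords)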
|
PypiClean
|
/pulumi_azure_native-2.5.1a1693590910.tar.gz/pulumi_azure_native-2.5.1a1693590910/pulumi_azure_native/servicefabric/v20230701preview/getmanaged_az_resiliency_status.py
|
import copy
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetmanagedAzResiliencyStatusResult',
'AwaitableGetmanagedAzResiliencyStatusResult',
'getmanaged_az_resiliency_status',
'getmanaged_az_resiliency_status_output',
]
@pulumi.output_type
class GetmanagedAzResiliencyStatusResult:
"""
Describes the Az resiliency status of the base resources constituting a Service Fabric Managed Cluster.
"""
def __init__(__self__, base_resource_status=None, is_cluster_zone_resilient=None):
if base_resource_status and not isinstance(base_resource_status, list):
raise TypeError("Expected argument 'base_resource_status' to be a list")
pulumi.set(__self__, "base_resource_status", base_resource_status)
if is_cluster_zone_resilient and not isinstance(is_cluster_zone_resilient, bool):
raise TypeError("Expected argument 'is_cluster_zone_resilient' to be a bool")
pulumi.set(__self__, "is_cluster_zone_resilient", is_cluster_zone_resilient)
@property
@pulumi.getter(name="baseResourceStatus")
def base_resource_status(self) -> Optional[Sequence['outputs.ResourceAzStatusResponse']]:
"""
List of the Az resiliency status of each base resource constituting the managed cluster.
"""
return pulumi.get(self, "base_resource_status")
@property
@pulumi.getter(name="isClusterZoneResilient")
def is_cluster_zone_resilient(self) -> bool:
"""
Indicates whether the cluster is zone resilient.
"""
return pulumi.get(self, "is_cluster_zone_resilient")
class AwaitableGetmanagedAzResiliencyStatusResult(GetmanagedAzResiliencyStatusResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetmanagedAzResiliencyStatusResult(
base_resource_status=self.base_resource_status,
is_cluster_zone_resilient=self.is_cluster_zone_resilient)
def getmanaged_az_resiliency_status(cluster_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetmanagedAzResiliencyStatusResult:
"""
Action to get Az Resiliency Status of all the Base resources constituting Service Fabric Managed Clusters.
:param str cluster_name: The name of the cluster resource.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['clusterName'] = cluster_name
__args__['resourceGroupName'] = resource_group_name
opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
__ret__ = pulumi.runtime.invoke('azure-native:servicefabric/v20230701preview:getmanagedAzResiliencyStatus', __args__, opts=opts, typ=GetmanagedAzResiliencyStatusResult).value
return AwaitableGetmanagedAzResiliencyStatusResult(
base_resource_status=pulumi.get(__ret__, 'base_resource_status'),
is_cluster_zone_resilient=pulumi.get(__ret__, 'is_cluster_zone_resilient'))
@_utilities.lift_output_func(getmanaged_az_resiliency_status)
def getmanaged_az_resiliency_status_output(cluster_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetmanagedAzResiliencyStatusResult]:
"""
Action to get Az Resiliency Status of all the Base resources constituting Service Fabric Managed Clusters.
:param str cluster_name: The name of the cluster resource.
:param str resource_group_name: The name of the resource group.
"""
...
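# Hedged usage sketch (not part of the generated SDK): calling the invoke helper
# above from inside a Pulumi program. The cluster and resource group names are
# placeholders.
def _example_query() -> bool:
    result = getmanaged_az_resiliency_status(
        cluster_name="myManagedCluster",        # placeholder name
        resource_group_name="myResourceGroup")  # placeholder name
    # True when the base resources of the managed cluster are zone resilient
    return result.is_cluster_zone_resilient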
|
PypiClean
|
/pyorbit-package-10.0.7.tar.gz/pyorbit-package-10.0.7/pyorbit/models/gp_quasiperiodic_activity_common.py
|
from pyorbit.subroutines.common import *
from pyorbit.models.abstract_model import *
try:
import george
except ImportError:
pass
class GaussianProcess_QuasiPeriodicActivity_Common(AbstractModel):
''' Three parameters out of four are the same for all the datasets, since they are related to
the properties of the physical process rather than the observed effects on a dataset
From Grunblatt+2015, Affer+2016
- theta: is usually related to the rotation period of the star (or one of its harmonics);
- lambda: is the correlation decay timescale, and it can be related to the lifetime of the active regions.
- omega: is the length scale of the periodic component, and can be linked to the size evolution of the active regions;
- h: represents the amplitude of the correlations '''
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
try:
import george
except ImportError:
print("ERROR: george not installed, this will not work")
quit()
self.model_class = 'gp_quasiperiodic_common'
self.internal_likelihood = True
self.delayed_lnlk_computation = True
self.list_pams_common = {
'Prot', # Rotational period of the star
'Pdec', # Decay timescale of activity
'Oamp', # Granulation of activity
'Hamp' # Amplitude of the signal in the covariance matrix
}
self.n_pams = 4
""" Indexing is determined by the way the kernel is constructed, so it
is specific to the Model and not to the Common class"""
self.gp_pams_index = {
'Hamp': 0, # amp2
'Pdec': 1, # metric
'Oamp': 2, # gamma
'Prot': 3 # ln_P
}
self.gp = {}
self._added_datasets = 0
self.dataset_ordering = {}
self._dataset_x0 = []
self._dataset_e2 = []
self._dataset_names = {}
self._dataset_nindex = []
self._dataset_ej2 = []
self._dataset_res = []
self._added_datasets = 0
self._n_cov_matrix = 0
self.internal_parameter_values = None
self.internal_gp_pams = None
self.use_HODLR = False
def convert_val2gp(self, input_pams):
"""
:param input_pams: dictionary with the 'physically meaningful' parameters of the GP kernel
:return: dictionary with the parameters to be fed to 'george'
WARNING: this subroutine is HIGHLY specific to your choice of the kernel! I recommend
creating a new class with different transformations if you are planning on using a different
kernel combination
"""
output_pams = np.zeros(self.n_pams, dtype=np.double)
""" You must check _george_ documentation (and possibily do a lot of testing) to know how to convert physical
values to the parameter vector accepted by george.set_parameter_vector() function. Note: these values may be
different from ones accepted by the kernel
"""
output_pams[self.gp_pams_index['Hamp']] = np.log(input_pams['Hamp'])*2
output_pams[self.gp_pams_index['Pdec']] = np.log(input_pams['Pdec'])*2
output_pams[self.gp_pams_index['Oamp']] = 1. / \
(2.*input_pams['Oamp'] ** 2)
output_pams[self.gp_pams_index['Prot']] = np.log(input_pams['Prot'])
return output_pams
def convert_gp2val(self, input_pams):
"""
:param input_pams: dictionary with the parameters to be fed to 'george'
:return: dictionary with the 'physically meaningful' parameters of the GP kernel
WARNING: this subroutine is HIGHLY specific to your choice of the kernel! I recommend
creating a new class with different transformations if you are planning on using a different
kernel combination
"""
return {
'Hamp': np.exp(input_pams[self.gp_pams_index['Hamp']]/2.0),
'Pdec': np.exp(input_pams[self.gp_pams_index['Pdec']] / 2.0),
'Oamp': np.sqrt(1. / (2.*input_pams[self.gp_pams_index['Oamp']])),
'Prot': np.exp(input_pams[self.gp_pams_index['Prot']])
}
def initialize_model(self, mc, **kwargs):
if 'use_HODLR' in kwargs:
self.use_HODLR = kwargs['use_HODLR']
if kwargs.get('hyperparameters_condition', False):
self.hyper_condition = self._hypercond_01
else:
self.hyper_condition = self._hypercond_00
if kwargs.get('rotation_decay_condition', False):
self.rotdec_condition = self._hypercond_02
else:
self.rotdec_condition = self._hypercond_00
self.use_stellar_rotation_period = kwargs.get('use_stellar_rotation_period', self.use_stellar_rotation_period)
if self.use_stellar_rotation_period:
self.list_pams_common.update(['rotation_period'])
self.list_pams_common.discard('Prot')
def initialize_model_dataset(self, mc, dataset, **kwargs):
self._dataset_nindex.append([self._n_cov_matrix,
self._n_cov_matrix+dataset.n])
self._dataset_x0.append(dataset.x0)
self._dataset_e2 = np.append(self._dataset_e2, dataset.e**2)
self._dataset_names[dataset.name_ref] = self._added_datasets
self._n_cov_matrix += dataset.n
self._added_datasets += 1
self._dataset_ej2 = self._dataset_e2 * 1.
self._dataset_res = self._dataset_e2 * 0.
self.define_kernel()
return
def define_kernel(self):
gp_pams = np.ones(self.n_pams)
""" Kernel initialized with fake values... don't worry, they'll be overwritten soon"""
kernel = np.exp(gp_pams[0]) * \
george.kernels.ExpSquaredKernel(metric=np.exp(gp_pams[1])) * \
george.kernels.ExpSine2Kernel(
gamma=gp_pams[2], log_period=gp_pams[3])
"""
gp_pams[0] = h^2 -> h^2 * ExpSquaredKernel * ExpSine2Kernel
-> set_parameter_vector() accepts the natural logarithm of this value
gp_pams[1] = metric = r^2 = lambda**2 -> ExpSquaredKernel(metric=r^2)
-> set_parameter_vector() accepts the natural logarithm of this value
gp_pams[2] = Gamma = 1/ (2 omega**2) -> ExpSine2Kernel(gamma, ln_period)
gp_pams[3] = ln_theta = ln_Period -> ExpSine2Kernel(gamma, ln_period)
"""
if self.use_HODLR:
self.gp = george.GP(kernel, solver=george.HODLRSolver, mean=0.00)
print(' *** USING HODLR *** ')
print()
else:
self.gp = george.GP(kernel)
# self.gp = george.GP(kernel, solver=george.HODLRSolver, mean=0.00)
self.gp.compute(self._dataset_x0, self._dataset_ej2)
return
def add_internal_dataset(self, parameter_values, dataset):
d_ind = self._dataset_names[dataset.name_ref]
d_nstart, d_nend = self._dataset_nindex[d_ind]
self._dataset_ej2[d_nstart:d_nend] = self._dataset_e2[d_nstart:d_nend] + dataset.jitter**2.0
self._dataset_res[d_nstart:d_nend] = dataset.residuals
if self.use_stellar_rotation_period:
parameter_values['Prot'] = parameter_values['rotation_period']
self.internal_parameter_values = parameter_values
self.internal_gp_pams = self.convert_val2gp(parameter_values)
def lnlk_compute(self):
""" 2 steps:
1) theta parameters must be converted to physical units (e.g. from logarithmic to linear spaces)
2) physical values must be converted to ``george`` input parameters
"""
if not self.hyper_condition(self.internal_parameter_values):
return -np.inf
if not self.rotdec_condition(self.internal_parameter_values):
return -np.inf
self.gp.set_parameter_vector(self.internal_gp_pams)
self.gp.compute(self._dataset_x0, np.sqrt(self._dataset_ej2))
return self.gp.log_likelihood(self._dataset_res, quiet=True)
def sample_predict(self, dataset, x0_input=None, return_covariance=False, return_variance=False):
self.gp.set_parameter_vector(self.internal_gp_pams)
self.gp.compute(self._dataset_x0, np.sqrt(self._dataset_ej2))
if x0_input is None:
return self.gp.predict(self._dataset_res, dataset.x0, return_cov=return_covariance, return_var=return_variance)
else:
return self.gp.predict(self._dataset_res, x0_input, return_cov=return_covariance, return_var=return_variance)
def sample_conditional(self, dataset, x0_input=None):
self.gp.set_parameter_vector(self.internal_gp_pams)
self.gp.compute(self._dataset_x0, np.sqrt(self._dataset_ej2))
if x0_input is None:
return self.gp.sample_conditional(self._dataset_res, dataset.x0)
else:
return self.gp.sample_conditional(self._dataset_res, x0_input)
@staticmethod
def _hypercond_00(parameter_values):
# No additional condition applied (default)
return True
@staticmethod
def _hypercond_01(parameter_values):
# Condition from Rajpaul 2017, Rajpaul+2021
# Taking into account that Pdec^2 = 2*lambda_2^2
return parameter_values['Pdec']**2 > (3. / 4. / np.pi) * parameter_values['Oamp']**2 * parameter_values['Prot']**2
@staticmethod
def _hypercond_02(parameter_values):
# Condition on rotation period and decay timescale
return parameter_values['Pdec'] > 2. * parameter_values['Prot']
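# Hedged illustration (not part of the original model): the physical-to-george
# hyperparameter mapping implemented by convert_val2gp(), written out for a set
# of arbitrary example values so the transformation is easy to check by hand.
def _example_val2gp_mapping():
    physical = {'Hamp': 10.0, 'Pdec': 20.0, 'Oamp': 0.35, 'Prot': 25.0}
    return {
        'amp2': np.log(physical['Hamp']) * 2,        # index 0, ln(h^2)
        'metric': np.log(physical['Pdec']) * 2,      # index 1, ln(lambda^2)
        'gamma': 1. / (2. * physical['Oamp'] ** 2),  # index 2, 1 / (2 omega^2)
        'ln_P': np.log(physical['Prot']),            # index 3, ln(Prot)
    }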
|
PypiClean
|
/opal-azure-cli-lab-0.1.10.tar.gz/opal-azure-cli-lab-0.1.10/HISTORY.rst
|
.. :changelog:
Release History
===============
0.1.7
+++++
* Fixed error: 'ObjectsOperations' object has no attribute 'get_current_user'
0.1.6
+++++
* Minor fixes
0.1.5
+++++
* Minor fixes
0.1.4
+++++
* Minor fixes
0.1.3
+++++
* Minor changes
0.1.2
+++++
* Minor changes
0.1.1
+++++
* Update azure-mgmt-devtestlabs dependency to 2.2.0
0.1.0
+++++
* BREAKING CHANGE: 'show' commands log error message and fail with exit code of 3 upon a missing resource.
0.0.23
++++++
* Minor fixes.
0.0.22
++++++
* Fix regression from knack conversion that replaced table_transformers with transforms.
0.0.21
++++++
* `sdist` is now compatible with wheel 0.31.0
0.0.20
++++++
* Fixed create environment.
0.0.19
++++++
* Minor fixes.
0.0.18
++++++
* Minor fixes.
0.0.17
++++++
* Minor fixes.
0.0.16
++++++
* Performance fixes.
0.0.15
++++++
* Update helpfile
0.0.14
++++++
* Update for CLI core changes.
0.0.13
++++++
* Minor fixes.
0.0.12
++++++
* minor fixes
0.0.11 (2017-09-22)
+++++++++++++++++++
* minor fixes
0.0.10 (2017-08-28)
+++++++++++++++++++
* minor fixes
0.0.9 (2017-07-27)
++++++++++++++++++
* minor fixes
0.0.8 (2017-07-07)
++++++++++++++++++
* minor fixes
0.0.7 (2017-06-21)
++++++++++++++++++
* No changes.
0.0.6 (2017-06-13)
++++++++++++++++++
* Minor fixes.
0.0.5 (2017-05-30)
+++++++++++++++++++++
* Adding support for claiming any vm in the lab through `az lab vm claim`
* Adding support for claiming existing vm in the lab through `az lab vm claim`
* Adding table output formatter for `az lab vm list` & `az lab vm show`
0.0.4 (2017-05-05)
+++++++++++++++++++++
* Adding table output formatter for az lab arm-template & az lab artifact-source
0.0.3 (2017-04-28)
+++++++++++++++++++++
* Adding create, show, delete & list commands for environment in the lab.
* Adding show & list commands to view ARM templates in the lab.
* Adding --environment flag in `az lab vm list` to filter VMs by environment in the lab.
0.0.2 (2017-04-17)
+++++++++++++++++++++
* Add convenience command `az lab formula export-artifacts` to export artifact scaffold within a Lab's formula.
* Add commands to manage secrets within a Lab.
0.0.1 (2017-04-03)
+++++++++++++++++++++
* Preview release.
|
PypiClean
|
/neo3-boa-1.0.1.tar.gz/neo3-boa-1.0.1/boa3/internal/model/imports/importsymbol.py
|
import ast
from typing import Dict
from boa3.internal.analyser.importanalyser import ImportAnalyser
from boa3.internal.model.builtin.builtincallable import IBuiltinCallable
from boa3.internal.model.method import Method
from boa3.internal.model.symbol import ISymbol
from boa3.internal.model.type.itype import IType
from boa3.internal.model.variable import Variable
class Import(ISymbol):
"""
A class used to represent an imported package
:ivar variables: a dictionary that maps each variable name to its variable. Empty by default.
:ivar methods: a dictionary that maps each method name to its method. Empty by default.
:ivar types: a dictionary that maps each type name to its type. Empty by default.
"""
def __init__(self, origin: str, syntax_tree: ast.AST, import_analyser: ImportAnalyser,
imported_symbols: Dict[str, ISymbol] = None):
if imported_symbols is None:
symbols = import_analyser.symbols
else:
symbols = import_analyser.export_symbols(list(imported_symbols.keys()))
self.variables = {var_id: var for var_id, var in symbols.items() if isinstance(var, Variable)}
self.methods = {fun_id: fun for fun_id, fun in symbols.items() if isinstance(fun, Method)}
self.types = {type_id: tpe for type_id, tpe in symbols.items() if isinstance(tpe, IType)}
self.imports = {alias: imprt for alias, imprt in symbols.items() if isinstance(imprt, Import)}
self._other_symbols = {alias: symbol for alias, symbol in symbols.items()
if not isinstance(symbol, (Variable, Method, IType, Import))}
self._symbols_not_imported = {alias: symbol for alias, symbol in import_analyser.symbols.items()
if alias not in symbols}
for method in self.methods.values():
if not isinstance(method, IBuiltinCallable) and hasattr(method, 'defined_by_entry'):
# methods imported are treated as methods defined in the entry file
method.defined_by_entry = True
self.analyser = import_analyser.analyser
self.origin: str = origin
self.ast: ast.AST = syntax_tree
@property
def shadowing_name(self) -> str:
return 'module'
@property
def symbols(self) -> Dict[str, ISymbol]:
symbol = {}
symbol.update(self.variables)
symbol.update(self.methods)
symbol.update(self.types)
symbol.update(self._other_symbols)
symbol.update(self.imports)
return symbol
@property
def all_symbols(self) -> Dict[str, ISymbol]:
symbol = self.symbols.copy()
symbol.update(self._symbols_not_imported)
return symbol
class BuiltinImport(Import):
"""
A class used to differentiate built-in imports
"""
pass
|
PypiClean
|
/neoteroi_auth-0.0.3.tar.gz/neoteroi_auth-0.0.3/neoteroi/auth/jwts/__init__.py
|
from typing import Any, Dict, Optional, Sequence
import jwt
from jwt.exceptions import InvalidIssuerError, InvalidTokenError
from ..jwks import JWK, JWKS, KeysProvider
from ..jwks.caching import CachingKeysProvider
from ..jwks.openid import AuthorityKeysProvider
from ..jwks.urls import URLKeysProvider
from ..utils import get_logger
class OAuthException(Exception):
"""Base class for exception risen when there is an issue related to OAuth."""
class InvalidAccessToken(Exception):
def __init__(self, details=""):
if details:
message = "Invalid access token: " + details
else:
message = "Invalid access token."
super().__init__(message)
def get_kid(token: str) -> Optional[str]:
"""
Extracts a kid (key id) from a JWT.
"""
headers = jwt.get_unverified_header(token)
if not headers: # pragma: no cover
raise InvalidAccessToken("missing headers")
return headers.get("kid")
class JWTValidator:
def __init__(
self,
*,
valid_issuers: Sequence[str],
valid_audiences: Sequence[str],
authority: Optional[str] = None,
algorithms: Sequence[str] = ["RS256"],
require_kid: bool = True,
keys_provider: Optional[KeysProvider] = None,
keys_url: Optional[str] = None,
cache_time: float = 10800
) -> None:
"""
Creates a new instance of JWTValidator. This class only supports validating
access tokens signed using asymmetric keys and handling JWKs of RSA type.
Parameters
----------
valid_issuers : Sequence[str]
Sequence of acceptable issuers (iss).
valid_audiences : Sequence[str]
Sequence of acceptable audiences (aud).
authority : Optional[str], optional
If provided, keys are obtained from a standard well-known endpoint.
This parameter is ignored if `keys_provider` is given.
algorithms : Sequence[str], optional
Sequence of acceptable algorithms, by default ["RS256"].
require_kid : bool, optional
According to the specification, a key id is optional in JWK. However,
this parameter lets you control whether access tokens missing `kid` in their
headers should be handled or rejected. By default True, thus only JWTs
having `kid` header are accepted.
keys_provider : Optional[KeysProvider], optional
If provided, the exact `KeysProvider` to be used when fetching keys.
By default None
keys_url : Optional[str], optional
If provided, keys are obtained from the given URL through HTTP GET.
This parameter is ignored if `keys_provider` is given.
cache_time : float, optional
If >= 0, JWKS are cached in memory and stored for the given amount in
seconds. By default 10800 (3 hours).
"""
if keys_provider:
pass
elif authority:
keys_provider = AuthorityKeysProvider(authority)
elif keys_url:
keys_provider = URLKeysProvider(keys_url)
if keys_provider is None:
raise TypeError(
"Missing `keys_provider`, either provide a `url` source, "
"`authority`, or `keys_provider`."
)
if cache_time:
keys_provider = CachingKeysProvider(keys_provider, cache_time)
self._valid_issuers = list(valid_issuers)
self._valid_audiences = list(valid_audiences)
self._algorithms = list(algorithms)
self._keys_provider: KeysProvider = keys_provider
self.require_kid = require_kid
self.logger = get_logger()
async def get_jwks(self) -> JWKS:
return await self._keys_provider.get_keys()
async def get_jwk(self, kid: str) -> JWK:
jwks = await self.get_jwks()
for jwk in jwks.keys:
if jwk.kid is not None and jwk.kid == kid:
return jwk
raise InvalidAccessToken("kid not recognized")
def _validate_jwt_by_key(
self, access_token: str, jwk: JWK
) -> Optional[Dict[str, Any]]:
for issuer in self._valid_issuers:
try:
return jwt.decode(
access_token,
jwk.pem, # type: ignore
verify=True,
algorithms=self._algorithms,
audience=self._valid_audiences,
issuer=issuer,
)
except InvalidIssuerError:
# pass, because the application might support more than one issuer;
# note that token verification might fail for several other reasons
# that are not caught (e.g. expired signature)
pass
except InvalidTokenError as exc:
self.logger.debug("Invalid access token: ", exc_info=exc)
return None
return None
async def validate_jwt(self, access_token: str) -> Dict[str, Any]:
"""
Validates the given JWT and returns its payload. This method throws exception
if the JWT is not valid (i.e. its signature cannot be verified, for example
because the JWT expired).
"""
kid = get_kid(access_token)
if kid is None and self.require_kid:
# A key id is optional according to the specification,
# but here we expect a kid by default.
# Disabling require_kid makes this method less efficient.
raise InvalidAccessToken("Missing key id (kid).")
if kid is None:
# Suboptimal scenario: the identity provider does not handle key ids,
# thus if more than one JWK is configured in the JWKS, we need to cycle
# and attempt each of them
jwks = await self.get_jwks()
for jwk in jwks.keys:
data = self._validate_jwt_by_key(access_token, jwk)
if data is not None:
return data
else:
# Preferred scenario: the identity provider handles key ids,
# thus we can validate an access token using an exact key
jwk = await self.get_jwk(kid)
data = self._validate_jwt_by_key(access_token, jwk)
if data is not None:
return data
raise InvalidAccessToken()
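# Hedged usage sketch (not part of the original module). The issuer, audience,
# and authority values are placeholders for a hypothetical OpenID Connect
# tenant; in practice they come from your identity provider's configuration.
async def _example_validate(access_token: str) -> Dict[str, Any]:
    validator = JWTValidator(
        valid_issuers=["https://example-issuer/"],
        valid_audiences=["api://example-api"],
        authority="https://example-issuer/",
    )
    # Raises InvalidAccessToken (or a pyjwt error) if the token cannot be verified
    return await validator.validate_jwt(access_token)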
|
PypiClean
|
/backtrader_lucidinvestor-1.9.7702-py3-none-any.whl/backtrader/filters/daysteps.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
class BarReplayer_Open(object):
'''
This filter splits a bar into two parts:
- ``Open``: the opening price of the bar will be used to deliver an
initial price bar in which the four components (OHLC) are equal
The volume/openinterest fields are 0 for this initial bar
- ``OHLC``: the original bar is delivered complete with the original
``volume``/``openinterest``
The split simulates a replay without the need to use the *replay* filter.
'''
def __init__(self, data):
self.pendingbar = None
data.resampling = 1
data.replaying = True
def __call__(self, data):
ret = True
# Make a copy of the new bar and remove it from stream
newbar = [data.lines[i][0] for i in range(data.size())]
data.backwards() # remove the copied bar from stream
openbar = newbar[:] # Make an open only bar
o = newbar[data.Open]
for field_idx in [data.High, data.Low, data.Close]:
openbar[field_idx] = o
# Nullify Volume/OpenInterest at the open
openbar[data.Volume] = 0.0
openbar[data.OpenInterest] = 0.0
# Overwrite the new data bar with our pending data - except start point
if self.pendingbar is not None:
data._updatebar(self.pendingbar)
ret = False
self.pendingbar = newbar # update the pending bar to the new bar
data._add2stack(openbar) # Add the openbar to the stack for processing
return ret # the length of the stream was not changed
def last(self, data):
'''Called when the data is no longer producing bars
Can be called multiple times. It has the chance to (for example)
produce extra bars'''
if self.pendingbar is not None:
data.backwards() # remove delivered open bar
data._add2stack(self.pendingbar) # add remaining
self.pendingbar = None # No further action
return True # something delivered
return False # nothing delivered here
# Alias
DayStepsFilter = BarReplayer_Open
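# Hedged usage sketch (not part of the original module): the filter is meant to
# be attached to a data feed before the feed is added to Cerebro. The feed
# object is assumed to be a standard backtrader data feed built elsewhere.
def _example_attach(data):
    '''Attach the open/OHLC replay filter to an already-built data feed.'''
    data.addfilter(BarReplayer_Open)
    return data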
|
PypiClean
|
/betterlifepsi-0.6.8.tar.gz/betterlifepsi-0.6.8/psi/cli.py
|
import functools
import itertools
import logging
import math
import os
import random
import sys
import click
import flask_migrate
import psycopg2
from psi.app import create_app, init_all
from psi.app.utils import retry
from psi import MIGRATION_DIR
# Using flask's default `click` command line environment
application = create_app()
init_all(application, migrate=False)
log = logging.getLogger(__name__)
@application.cli.command()
@click.argument('retries', default=5)
@click.argument('migrate', default=True)
def wait_on_postgres(retries=5, migrate=True):
"""Block until Postgres is ready (optionally, run any migrations)
Shamelessly appropriated from https://github.com/agconti/wait-for-postgres
"""
dsn = os.environ.get('DATABASE_URL')
@retry(retries, exceptions=(psycopg2.OperationalError,))
def wait():
con = psycopg2.connect(**psycopg2.extensions.parse_dsn(dsn))
con.close()
log.info('Postgres is ready!')
wait()
if migrate:
log.info('Running database migrations, if any')
with application.app_context():
flask_migrate.upgrade(directory=MIGRATION_DIR)
@application.cli.command()
def test():
"""Run the unit tests.
>>> export FLASK_APP=psi.cli:application
>>> flask test
"""
import subprocess
return_code = subprocess.call("pytest -p no:warnings --cov=./", shell=True)
sys.exit(return_code)
@application.cli.command()
def generate_fake_order():
"""
Load a set of fake data into the system
* 10 Suppliers and customers
* 5 purchase orders and sales_orders
"""
from tests.object_faker import object_faker
from psi.app.models import User
from random import randint
from psi.app.service import Info
database = Info.get_db()
user = database.session.query(User).get(1)
for i in range(5):
purchase_order = object_faker.purchase_order(creator=user, number_of_line=randint(1,9))
sales_order = object_faker.sales_order(creator=user, number_of_line=randint(1, 9))
database.session.add(purchase_order)
database.session.add(sales_order)
database.session.commit()
@application.cli.command()
def clean_transaction_data():
"""
Clean all the transaction data, and keep all master data
"""
# TODO.xqliu Disable clean of database for production
from psi.app.service import Info
database = Info.get_db()
database.engine.execute("""
DELETE FROM related_values;
DELETE FROM inventory_in_out_link;
DELETE FROM incoming;
DELETE FROM shipping_line;
DELETE FROM shipping;
DELETE FROM expense;
DELETE FROM receiving_line;
DELETE FROM receiving;
DELETE FROM purchase_order_line;
DELETE FROM purchase_order;
DELETE FROM sales_order_line;
DELETE FROM sales_order;
DELETE FROM inventory_transaction_line;
DELETE FROM inventory_transaction;
commit;
""")
@application.cli.command()
def clean_database():
"""
Clean the database and drop all the tables
This only tested for postgresql at this moment
"""
# TODO.xqliu Disable clean of database for production
from psi.app.service import Info
database = Info.get_db()
database.engine.execute("""
DROP VIEW sales_order_detail RESTRICT;
ALTER TABLE related_values DROP CONSTRAINT related_values_relation_type_id_fkey;
ALTER TABLE incoming DROP CONSTRAINT incoming_category_id_fkey;
ALTER TABLE incoming DROP CONSTRAINT incoming_status_id_fkey;
ALTER TABLE incoming DROP CONSTRAINT incoming_sales_order_id_fkey;
ALTER TABLE incoming DROP CONSTRAINT incoming_organization_id_fkey;
ALTER TABLE shipping_line DROP CONSTRAINT shipping_line_product_id_fkey;
ALTER TABLE shipping_line DROP CONSTRAINT shipping_line_shipping_id_fkey;
ALTER TABLE shipping_line DROP CONSTRAINT shipping_line_sales_order_line_id_fkey;
ALTER TABLE shipping_line DROP CONSTRAINT shipping_line_inventory_transaction_line_id_fkey;
ALTER TABLE payment_method DROP CONSTRAINT payment_method_supplier_id_fkey;
ALTER TABLE expense DROP CONSTRAINT expense_status_id_fkey;
ALTER TABLE expense DROP CONSTRAINT expense_category_id_fkey;
ALTER TABLE expense DROP CONSTRAINT expense_purchase_order_id_fkey;
ALTER TABLE expense DROP CONSTRAINT expense_sales_order_id_fkey;
ALTER TABLE expense DROP CONSTRAINT expense_organization_id_fkey;
ALTER TABLE roles_users DROP CONSTRAINT roles_users_user_id_fkey;
ALTER TABLE roles_users DROP CONSTRAINT roles_users_role_id_fkey;
ALTER TABLE product_image DROP CONSTRAINT product_image_product_id_fkey;
ALTER TABLE product_image DROP CONSTRAINT product_image_image_id_fkey;
ALTER TABLE receiving_line DROP CONSTRAINT receiving_line_product_id_fkey;
ALTER TABLE receiving_line DROP CONSTRAINT receiving_line_receiving_id_fkey;
ALTER TABLE receiving_line DROP CONSTRAINT receiving_line_purchase_order_line_id_fkey;
ALTER TABLE receiving_line DROP CONSTRAINT receiving_line_inventory_transaction_line_id_fkey;
ALTER TABLE shipping DROP CONSTRAINT shipping_status_id_fkey;
ALTER TABLE shipping DROP CONSTRAINT shipping_sales_order_id_fkey;
ALTER TABLE shipping DROP CONSTRAINT shipping_inventory_transaction_id_fkey;
ALTER TABLE shipping DROP CONSTRAINT shipping_organization_id_fkey;
ALTER TABLE shipping DROP CONSTRAINT shipping_type_id_fkey;
ALTER TABLE purchase_order DROP CONSTRAINT purchase_order_supplier_id_fkey;
ALTER TABLE purchase_order DROP CONSTRAINT purchase_order_status_id_fkey;
ALTER TABLE purchase_order DROP CONSTRAINT purchase_order_organization_id_fkey;
ALTER TABLE purchase_order DROP CONSTRAINT purchase_order_type_id_fkey;
ALTER TABLE purchase_order DROP CONSTRAINT to_organization_id_fkey;
ALTER TABLE sales_order DROP CONSTRAINT sales_order_customer_id_fkey;
ALTER TABLE sales_order DROP CONSTRAINT sales_order_organization_id_fkey;
ALTER TABLE sales_order DROP CONSTRAINT sales_order_type_id_fkey;
ALTER TABLE sales_order DROP CONSTRAINT sales_order_status_id_fkey;
ALTER TABLE receiving DROP CONSTRAINT receiving_status_id_fkey;
ALTER TABLE receiving DROP CONSTRAINT receiving_purchase_order_id_fkey;
ALTER TABLE receiving DROP CONSTRAINT receiving_inventory_transaction_id_fkey;
ALTER TABLE receiving DROP CONSTRAINT receiving_organization_id_fkey;
ALTER TABLE "user" DROP CONSTRAINT user_locale_id_fkey;
ALTER TABLE "user" DROP CONSTRAINT user_timezone_id_fkey;
ALTER TABLE "user" DROP CONSTRAINT user_organization_id_fkey;
ALTER TABLE product DROP CONSTRAINT product_category_id_fkey;
ALTER TABLE product DROP CONSTRAINT product_supplier_id_fkey;
ALTER TABLE product DROP CONSTRAINT product_organization_id_fkey;
ALTER TABLE customer DROP CONSTRAINT customer_join_channel_id_fkey;
ALTER TABLE customer DROP CONSTRAINT customer_level_id_fkey;
ALTER TABLE customer DROP CONSTRAINT customer_organization_id_fkey;
ALTER TABLE purchase_order_line DROP CONSTRAINT purchase_order_line_purchase_order_id_fkey;
ALTER TABLE purchase_order_line DROP CONSTRAINT purchase_order_line_product_id_fkey;
ALTER TABLE product_category DROP CONSTRAINT product_category_parent_id_fkey;
ALTER TABLE product_category DROP CONSTRAINT product_category_organization_id_fkey;
ALTER TABLE inventory_transaction DROP CONSTRAINT inventory_transaction_type_id_fkey;
ALTER TABLE inventory_transaction DROP CONSTRAINT inventory_transaction_organization_id_fkey;
ALTER TABLE inventory_transaction_line DROP CONSTRAINT inventory_transaction_line_product_id_fkey;
ALTER TABLE inventory_transaction_line DROP CONSTRAINT inventory_transaction_line_inventory_transaction_id_fkey;
ALTER TABLE sales_order_line DROP CONSTRAINT sales_order_line_sales_order_id_fkey;
ALTER TABLE sales_order_line DROP CONSTRAINT sales_order_line_product_id_fkey;
ALTER TABLE supplier DROP CONSTRAINT supplier_organization_id_fkey;
ALTER TABLE enum_values DROP CONSTRAINT enum_values_type_id_fkey;
ALTER TABLE organization DROP CONSTRAINT organization_type_id_fkey;
ALTER TABLE role DROP CONSTRAINT role_parent_id_fkey;
DROP TABLE related_values;
DROP TABLE inventory_in_out_link;
DROP TABLE incoming;
DROP TABLE shipping_line;
DROP TABLE payment_method;
DROP TABLE expense;
DROP TABLE roles_users;
DROP TABLE product_image;
DROP TABLE receiving_line;
DROP TABLE shipping;
DROP TABLE purchase_order;
DROP TABLE sales_order;
DROP TABLE receiving;
DROP TABLE "user";
DROP TABLE product;
DROP TABLE customer;
DROP TABLE purchase_order_line;
DROP TABLE product_category;
DROP TABLE inventory_transaction;
DROP TABLE inventory_transaction_line;
DROP TABLE sales_order_line;
DROP TABLE supplier;
DROP TABLE enum_values;
DROP TABLE organization;
DROP TABLE role;
DROP TABLE image;
DROP TABLE alembic_version;
commit;
""")
|
PypiClean
|
/pyftdc-0.3.5.tar.gz/pyftdc-0.3.5/pybind11/docs/advanced/cast/functional.rst
|
Functional
##########
The following features must be enabled by including :file:`pybind11/functional.h`.
Callbacks and passing anonymous functions
=========================================
The C++11 standard brought lambda functions and the generic polymorphic
function wrapper ``std::function<>`` to the C++ programming language, which
enable powerful new ways of working with functions. Lambda functions come in
two flavors: stateless lambda functions resemble classic function pointers that
link to an anonymous piece of code, while stateful lambda functions
additionally depend on captured variables that are stored in an anonymous
*lambda closure object*.
Here is a simple example of a C++ function that takes an arbitrary function
(stateful or stateless) with signature ``int -> int`` as an argument and runs
it with the value 10.
.. code-block:: cpp
int func_arg(const std::function<int(int)> &f) {
return f(10);
}
The example below is more involved: it takes a function of signature ``int -> int``
and returns another function of the same kind. The return value is a stateful
lambda function, which stores the value ``f`` in the capture object and adds 1 to
its return value upon execution.
.. code-block:: cpp
std::function<int(int)> func_ret(const std::function<int(int)> &f) {
return [f](int i) {
return f(i) + 1;
};
}
This example demonstrates using Python named parameters in C++ callbacks, which
requires using ``py::cpp_function`` as a wrapper. Usage is similar to defining
methods of classes:
.. code-block:: cpp
py::cpp_function func_cpp() {
return py::cpp_function([](int i) { return i+1; },
py::arg("number"));
}
After including the extra header file :file:`pybind11/functional.h`, it is almost
trivial to generate binding code for all of these functions.
.. code-block:: cpp
#include <pybind11/functional.h>
PYBIND11_MODULE(example, m) {
m.def("func_arg", &func_arg);
m.def("func_ret", &func_ret);
m.def("func_cpp", &func_cpp);
}
The following interactive session shows how to call them from Python.
.. code-block:: pycon
$ python
>>> import example
>>> def square(i):
... return i * i
...
>>> example.func_arg(square)
100L
>>> square_plus_1 = example.func_ret(square)
>>> square_plus_1(4)
17L
>>> plus_1 = func_cpp()
>>> plus_1(number=43)
44L
.. warning::
Keep in mind that passing a function from C++ to Python (or vice versa)
will instantiate a piece of wrapper code that translates function
invocations between the two languages. Naturally, this translation
increases the computational cost of each function call somewhat. A
problematic situation can arise when a function is copied back and forth
between Python and C++ many times in a row, in which case the underlying
wrappers will accumulate correspondingly. The resulting long sequence of
C++ -> Python -> C++ -> ... roundtrips can significantly decrease
performance.
There is one exception: pybind11 detects cases where a stateless function
(i.e. a function pointer or a lambda function without captured variables)
is passed as an argument to another C++ function exposed in Python. In this
case, there is no overhead. Pybind11 will extract the underlying C++
function pointer from the wrapped function to sidestep a potential C++ ->
Python -> C++ roundtrip. This is demonstrated in :file:`tests/test_callbacks.cpp`.
.. note::
This functionality is very useful when generating bindings for callbacks in
C++ libraries (e.g. GUI libraries, asynchronous networking libraries, etc.).
The file :file:`tests/test_callbacks.cpp` contains a complete example
that demonstrates how to work with callbacks and anonymous functions in
more detail.
|
PypiClean
|
/proplot-0.9.7.tar.gz/proplot-0.9.7/docs/fonts.py
|
# %% [raw] raw_mimetype="text/restructuredtext"
# .. _ug_fonts:
#
#
# Font selection
# ==============
#
# Proplot registers several new fonts and includes tools
# for adding your own fonts. These features are described below.
#
#
# %% [raw] raw_mimetype="text/restructuredtext"
# .. _ug_fonts_included:
#
# Included fonts
# --------------
#
# Matplotlib provides a `~matplotlib.font_manager` module for working with
# system fonts and classifies fonts into `five font families
# <https://matplotlib.org/stable/gallery/text_labels_and_annotations/fonts_demo.html>`__:
# :rcraw:`font.serif`, :rcraw:`font.sans-serif`, :rcraw:`font.monospace`,
# :rcraw:`font.cursive`, and :rcraw:`font.fantasy`. The default font family
# is sans-serif, because sans-serif fonts are generally more suitable for
# figures than serif fonts, and the default font name belonging to this family
# is `DejaVu Sans <https://dejavu-fonts.github.io>`__, which comes packaged with
# matplotlib.
#
# Matplotlib uses DejaVu Sans in part because it includes glyphs for a very wide
# range of symbols, especially mathematical symbols. However in our opinion,
# DejaVu Sans is not very aesthetically pleasing. To improve the font selection while
# keeping things consistent across different workstations, proplot comes packaged
# with several open source sans-serif fonts. It also includes the `TeX Gyre font
# series <https://ctan.org/pkg/tex-gyre?lang=en>`__ and adds them as the default
# entries for all of matplotlib's font families:
#
# * The `Century <https://en.wikipedia.org/wiki/Century_type_family>`__ lookalike
# :rcraw:`font.serif` = ``'TeX Gyre Schola'``.
# * The `Helvetica <https://en.wikipedia.org/wiki/Helvetica>`__ lookalike
# :rcraw:`font.sans-serif` = ``'TeX Gyre Heros'``.
# * The `Courier <https://en.wikipedia.org/wiki/Courier_(typeface)>`__ lookalike
# :rcraw:`font.monospace` = ``'TeX Gyre Cursor'``.
# * The `Chancery <https://en.wikipedia.org/wiki/ITC_Zapf_Chancery>`__ lookalike
# :rcraw:`font.cursive` = ``'TeX Gyre Chorus'``.
# * The `Avant Garde <https://en.wikipedia.org/wiki/ITC_Avant_Garde>`__ lookalike
# :rcraw:`font.fantasy` = ``'TeX Gyre Adventor'``.
#
# After importing proplot, the default matplotlib font will be
# `TeX Gyre Heros <https://ctan.org/pkg/tex-gyre-heros>`__,
# which emulates the more conventional and aesthetically pleasing font
# `Helvetica <https://en.wikipedia.org/wiki/Helvetica>`__. The
# full font priority lists for each family are displayed in the
# :ref:`default proplotrc file <ug_proplotrc>`.
#
# To compare different fonts, use the `~proplot.demos.show_fonts` command. By
# default, this displays the sans-serif fonts available on your system and
# packaged with proplot. The default table on the RTD server is shown
# below. The "¤" symbol appears where characters for a particular font are
# unavailable (when making plots, "¤" is replaced with the character from
# a fallback font). Since most TeX Gyre fonts have limited
# character sets, if your plots contain lots of mathematical symbols,
# you may want to set :rcraw:`font.family` to the matplotlib-supplied
# font ``'DejaVu Sans'`` or the proplot-supplied font ``'Fira Math'``
# (see `this page <https://github.com/firamath/firamath>`__ for more on Fira Math).
#
# .. note::
#
# Try to avoid ``.ttf`` files with ``Thin`` in the file name. Some versions of
# matplotlib interpret fonts with the "thin" style as having *normal* weight (see
# `this issue page <https://github.com/matplotlib/matplotlib/issues/8788>`__),
# causing them to override the correct normal weight versions. While proplot
# tries to filter out these files, this cannot be done systematically. In the
# below example, the "Roboto" font may be overridden by its "thin" version
# because the RTD server includes this style.
# %%
import proplot as pplt
fig, axs = pplt.show_fonts()
# %% [raw] raw_mimetype="text/restructuredtext"
# .. _ug_fonts_user:
#
# Using your own fonts
# --------------------
#
# You can register your own fonts by adding files to the ``fonts`` subfolder
# inside `~proplot.config.Configurator.user_folder` and calling
# `~proplot.config.register_fonts`. This command is called on import. You can
# also manually pass file paths to `~proplot.config.register_fonts`.
# To change the default font, use the `~proplot.config.rc`
# object or modify your ``proplotrc``. See the
# :ref:`configuration section <ug_config>` for details.
#
# Sometimes the font you would like to use *is* installed, but the font file
# is not stored under the matplotlib-compatible ``.ttf``, ``.otf``, or ``.afm``
# formats. For example, several macOS fonts are unavailable because they are
# stored as ``.dfont`` collections. Also, while matplotlib nominally supports
# ``.ttc`` collections, proplot ignores them because figures with ``.ttc`` fonts
# `cannot be saved as PDFs <https://github.com/matplotlib/matplotlib/issues/3135>`__.
# You can get matplotlib to use ``.dfont`` and ``.ttc`` collections by
# expanding them into individual ``.ttf`` files with the
# `DFontSplitter application <https://peter.upfold.org.uk/projects/dfontsplitter>`__,
# then saving the files in-place or in the ``~/.proplot/fonts`` folder.
#
# To find font collections, check the paths listed in ``OSXFontDirectories``,
# ``X11FontDirectories``, ``MSUserFontDirectories``, and ``MSFontDirectories``
# under the `matplotlib.font_manager` module.
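# %%
# A minimal sketch of the workflow described above (not from the original docs):
# after dropping a ``.ttf`` file into ``~/.proplot/fonts``, re-register the fonts
# and select a family. The custom font name is hypothetical, so here we simply
# select the packaged TeX Gyre Heros default explicitly.
import proplot as pplt
pplt.config.register_fonts()  # re-scan registered fonts (also runs on import)
pplt.rc['font.family'] = 'TeX Gyre Heros'  # swap in your custom font name here
fig, axs = pplt.subplots()
axs.format(title='Font selection demo')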
|
PypiClean
|
/aifs_nni-1.9.5-py3-none-manylinux1_x86_64.whl/aifs_nni-1.9.5.data/data/nni/node_modules/lodash/fp/_mapping.js
|
exports.aliasToReal = {
// Lodash aliases.
'each': 'forEach',
'eachRight': 'forEachRight',
'entries': 'toPairs',
'entriesIn': 'toPairsIn',
'extend': 'assignIn',
'extendAll': 'assignInAll',
'extendAllWith': 'assignInAllWith',
'extendWith': 'assignInWith',
'first': 'head',
// Methods that are curried variants of others.
'conforms': 'conformsTo',
'matches': 'isMatch',
'property': 'get',
// Ramda aliases.
'__': 'placeholder',
'F': 'stubFalse',
'T': 'stubTrue',
'all': 'every',
'allPass': 'overEvery',
'always': 'constant',
'any': 'some',
'anyPass': 'overSome',
'apply': 'spread',
'assoc': 'set',
'assocPath': 'set',
'complement': 'negate',
'compose': 'flowRight',
'contains': 'includes',
'dissoc': 'unset',
'dissocPath': 'unset',
'dropLast': 'dropRight',
'dropLastWhile': 'dropRightWhile',
'equals': 'isEqual',
'identical': 'eq',
'indexBy': 'keyBy',
'init': 'initial',
'invertObj': 'invert',
'juxt': 'over',
'omitAll': 'omit',
'nAry': 'ary',
'path': 'get',
'pathEq': 'matchesProperty',
'pathOr': 'getOr',
'paths': 'at',
'pickAll': 'pick',
'pipe': 'flow',
'pluck': 'map',
'prop': 'get',
'propEq': 'matchesProperty',
'propOr': 'getOr',
'props': 'at',
'symmetricDifference': 'xor',
'symmetricDifferenceBy': 'xorBy',
'symmetricDifferenceWith': 'xorWith',
'takeLast': 'takeRight',
'takeLastWhile': 'takeRightWhile',
'unapply': 'rest',
'unnest': 'flatten',
'useWith': 'overArgs',
'where': 'conformsTo',
'whereEq': 'isMatch',
'zipObj': 'zipObject'
};
/** Used to map ary to method names. */
exports.aryMethod = {
'1': [
'assignAll', 'assignInAll', 'attempt', 'castArray', 'ceil', 'create',
'curry', 'curryRight', 'defaultsAll', 'defaultsDeepAll', 'floor', 'flow',
'flowRight', 'fromPairs', 'invert', 'iteratee', 'memoize', 'method', 'mergeAll',
'methodOf', 'mixin', 'nthArg', 'over', 'overEvery', 'overSome','rest', 'reverse',
'round', 'runInContext', 'spread', 'template', 'trim', 'trimEnd', 'trimStart',
'uniqueId', 'words', 'zipAll'
],
'2': [
'add', 'after', 'ary', 'assign', 'assignAllWith', 'assignIn', 'assignInAllWith',
'at', 'before', 'bind', 'bindAll', 'bindKey', 'chunk', 'cloneDeepWith',
'cloneWith', 'concat', 'conformsTo', 'countBy', 'curryN', 'curryRightN',
'debounce', 'defaults', 'defaultsDeep', 'defaultTo', 'delay', 'difference',
'divide', 'drop', 'dropRight', 'dropRightWhile', 'dropWhile', 'endsWith', 'eq',
'every', 'filter', 'find', 'findIndex', 'findKey', 'findLast', 'findLastIndex',
'findLastKey', 'flatMap', 'flatMapDeep', 'flattenDepth', 'forEach',
'forEachRight', 'forIn', 'forInRight', 'forOwn', 'forOwnRight', 'get',
'groupBy', 'gt', 'gte', 'has', 'hasIn', 'includes', 'indexOf', 'intersection',
'invertBy', 'invoke', 'invokeMap', 'isEqual', 'isMatch', 'join', 'keyBy',
'lastIndexOf', 'lt', 'lte', 'map', 'mapKeys', 'mapValues', 'matchesProperty',
'maxBy', 'meanBy', 'merge', 'mergeAllWith', 'minBy', 'multiply', 'nth', 'omit',
'omitBy', 'overArgs', 'pad', 'padEnd', 'padStart', 'parseInt', 'partial',
'partialRight', 'partition', 'pick', 'pickBy', 'propertyOf', 'pull', 'pullAll',
'pullAt', 'random', 'range', 'rangeRight', 'rearg', 'reject', 'remove',
'repeat', 'restFrom', 'result', 'sampleSize', 'some', 'sortBy', 'sortedIndex',
'sortedIndexOf', 'sortedLastIndex', 'sortedLastIndexOf', 'sortedUniqBy',
'split', 'spreadFrom', 'startsWith', 'subtract', 'sumBy', 'take', 'takeRight',
'takeRightWhile', 'takeWhile', 'tap', 'throttle', 'thru', 'times', 'trimChars',
'trimCharsEnd', 'trimCharsStart', 'truncate', 'union', 'uniqBy', 'uniqWith',
'unset', 'unzipWith', 'without', 'wrap', 'xor', 'zip', 'zipObject',
'zipObjectDeep'
],
'3': [
'assignInWith', 'assignWith', 'clamp', 'differenceBy', 'differenceWith',
'findFrom', 'findIndexFrom', 'findLastFrom', 'findLastIndexFrom', 'getOr',
'includesFrom', 'indexOfFrom', 'inRange', 'intersectionBy', 'intersectionWith',
'invokeArgs', 'invokeArgsMap', 'isEqualWith', 'isMatchWith', 'flatMapDepth',
'lastIndexOfFrom', 'mergeWith', 'orderBy', 'padChars', 'padCharsEnd',
'padCharsStart', 'pullAllBy', 'pullAllWith', 'rangeStep', 'rangeStepRight',
'reduce', 'reduceRight', 'replace', 'set', 'slice', 'sortedIndexBy',
'sortedLastIndexBy', 'transform', 'unionBy', 'unionWith', 'update', 'xorBy',
'xorWith', 'zipWith'
],
'4': [
'fill', 'setWith', 'updateWith'
]
};
/** Used to map ary to rearg configs. */
exports.aryRearg = {
'2': [1, 0],
'3': [2, 0, 1],
'4': [3, 2, 0, 1]
};
/** Used to map method names to their iteratee ary. */
exports.iterateeAry = {
'dropRightWhile': 1,
'dropWhile': 1,
'every': 1,
'filter': 1,
'find': 1,
'findFrom': 1,
'findIndex': 1,
'findIndexFrom': 1,
'findKey': 1,
'findLast': 1,
'findLastFrom': 1,
'findLastIndex': 1,
'findLastIndexFrom': 1,
'findLastKey': 1,
'flatMap': 1,
'flatMapDeep': 1,
'flatMapDepth': 1,
'forEach': 1,
'forEachRight': 1,
'forIn': 1,
'forInRight': 1,
'forOwn': 1,
'forOwnRight': 1,
'map': 1,
'mapKeys': 1,
'mapValues': 1,
'partition': 1,
'reduce': 2,
'reduceRight': 2,
'reject': 1,
'remove': 1,
'some': 1,
'takeRightWhile': 1,
'takeWhile': 1,
'times': 1,
'transform': 2
};
/** Used to map method names to iteratee rearg configs. */
exports.iterateeRearg = {
'mapKeys': [1],
'reduceRight': [1, 0]
};
/** Used to map method names to rearg configs. */
exports.methodRearg = {
'assignInAllWith': [1, 0],
'assignInWith': [1, 2, 0],
'assignAllWith': [1, 0],
'assignWith': [1, 2, 0],
'differenceBy': [1, 2, 0],
'differenceWith': [1, 2, 0],
'getOr': [2, 1, 0],
'intersectionBy': [1, 2, 0],
'intersectionWith': [1, 2, 0],
'isEqualWith': [1, 2, 0],
'isMatchWith': [2, 1, 0],
'mergeAllWith': [1, 0],
'mergeWith': [1, 2, 0],
'padChars': [2, 1, 0],
'padCharsEnd': [2, 1, 0],
'padCharsStart': [2, 1, 0],
'pullAllBy': [2, 1, 0],
'pullAllWith': [2, 1, 0],
'rangeStep': [1, 2, 0],
'rangeStepRight': [1, 2, 0],
'setWith': [3, 1, 2, 0],
'sortedIndexBy': [2, 1, 0],
'sortedLastIndexBy': [2, 1, 0],
'unionBy': [1, 2, 0],
'unionWith': [1, 2, 0],
'updateWith': [3, 1, 2, 0],
'xorBy': [1, 2, 0],
'xorWith': [1, 2, 0],
'zipWith': [1, 2, 0]
};
/** Used to map method names to spread configs. */
exports.methodSpread = {
'assignAll': { 'start': 0 },
'assignAllWith': { 'start': 0 },
'assignInAll': { 'start': 0 },
'assignInAllWith': { 'start': 0 },
'defaultsAll': { 'start': 0 },
'defaultsDeepAll': { 'start': 0 },
'invokeArgs': { 'start': 2 },
'invokeArgsMap': { 'start': 2 },
'mergeAll': { 'start': 0 },
'mergeAllWith': { 'start': 0 },
'partial': { 'start': 1 },
'partialRight': { 'start': 1 },
'without': { 'start': 1 },
'zipAll': { 'start': 0 }
};
/** Used to identify methods which mutate arrays or objects. */
exports.mutate = {
'array': {
'fill': true,
'pull': true,
'pullAll': true,
'pullAllBy': true,
'pullAllWith': true,
'pullAt': true,
'remove': true,
'reverse': true
},
'object': {
'assign': true,
'assignAll': true,
'assignAllWith': true,
'assignIn': true,
'assignInAll': true,
'assignInAllWith': true,
'assignInWith': true,
'assignWith': true,
'defaults': true,
'defaultsAll': true,
'defaultsDeep': true,
'defaultsDeepAll': true,
'merge': true,
'mergeAll': true,
'mergeAllWith': true,
'mergeWith': true,
},
'set': {
'set': true,
'setWith': true,
'unset': true,
'update': true,
'updateWith': true
}
};
/** Used to map real names to their aliases. */
exports.realToAlias = (function() {
var hasOwnProperty = Object.prototype.hasOwnProperty,
object = exports.aliasToReal,
result = {};
for (var key in object) {
var value = object[key];
if (hasOwnProperty.call(result, value)) {
result[value].push(key);
} else {
result[value] = [key];
}
}
return result;
}());
/** Used to map method names to other names. */
exports.remap = {
'assignAll': 'assign',
'assignAllWith': 'assignWith',
'assignInAll': 'assignIn',
'assignInAllWith': 'assignInWith',
'curryN': 'curry',
'curryRightN': 'curryRight',
'defaultsAll': 'defaults',
'defaultsDeepAll': 'defaultsDeep',
'findFrom': 'find',
'findIndexFrom': 'findIndex',
'findLastFrom': 'findLast',
'findLastIndexFrom': 'findLastIndex',
'getOr': 'get',
'includesFrom': 'includes',
'indexOfFrom': 'indexOf',
'invokeArgs': 'invoke',
'invokeArgsMap': 'invokeMap',
'lastIndexOfFrom': 'lastIndexOf',
'mergeAll': 'merge',
'mergeAllWith': 'mergeWith',
'padChars': 'pad',
'padCharsEnd': 'padEnd',
'padCharsStart': 'padStart',
'propertyOf': 'get',
'rangeStep': 'range',
'rangeStepRight': 'rangeRight',
'restFrom': 'rest',
'spreadFrom': 'spread',
'trimChars': 'trim',
'trimCharsEnd': 'trimEnd',
'trimCharsStart': 'trimStart',
'zipAll': 'zip'
};
/** Used to track methods that skip fixing their arity. */
exports.skipFixed = {
'castArray': true,
'flow': true,
'flowRight': true,
'iteratee': true,
'mixin': true,
'rearg': true,
'runInContext': true
};
/** Used to track methods that skip rearranging arguments. */
exports.skipRearg = {
'add': true,
'assign': true,
'assignIn': true,
'bind': true,
'bindKey': true,
'concat': true,
'difference': true,
'divide': true,
'eq': true,
'gt': true,
'gte': true,
'isEqual': true,
'lt': true,
'lte': true,
'matchesProperty': true,
'merge': true,
'multiply': true,
'overArgs': true,
'partial': true,
'partialRight': true,
'propertyOf': true,
'random': true,
'range': true,
'rangeRight': true,
'subtract': true,
'zip': true,
'zipObject': true,
'zipObjectDeep': true
};
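// Illustrative sketch (not part of the upstream mapping file): these tables drive how
// `lodash/fp` wraps the base methods. `methodRearg.getOr` is [2, 1, 0], so the base
// `_.get(object, path, defaultValue)` becomes `fp.getOr(defaultValue, path, object)`,
// and `iterateeAry.map` is 1, so map's iteratee is capped to a single argument.
// Assuming lodash is installed, the calls below behave as commented:
//
//   var fp = require('lodash/fp');
//   fp.getOr('n/a', 'a.b', { a: {} });                 // => 'n/a'
//   fp.map(function(n) { return n * 2; }, [1, 2, 3]);  // => [2, 4, 6]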
|
PypiClean
|
/accelbyte_py_sdk-0.48.0.tar.gz/accelbyte_py_sdk-0.48.0/accelbyte_py_sdk/api/session/models/apimodels_create_configuration_template_request.py
|
# template file: ags_py_codegen
# AccelByte Gaming Services Session Service (3.1.0)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
from ..models.models_native_session_setting import ModelsNativeSessionSetting
class ApimodelsCreateConfigurationTemplateRequest(Model):
"""Apimodels create configuration template request (apimodels.CreateConfigurationTemplateRequest)
Properties:
client_version: (clientVersion) REQUIRED str
deployment: (deployment) REQUIRED str
inactive_timeout: (inactiveTimeout) REQUIRED int
invite_timeout: (inviteTimeout) REQUIRED int
joinability: (joinability) REQUIRED str
max_players: (maxPlayers) REQUIRED int
min_players: (minPlayers) REQUIRED int
name: (name) REQUIRED str
persistent: (persistent) REQUIRED bool
requested_regions: (requestedRegions) REQUIRED List[str]
text_chat: (textChat) REQUIRED bool
type_: (type) REQUIRED str
auto_join: (autoJoin) OPTIONAL bool
ds_source: (dsSource) OPTIONAL str
fallback_claim_keys: (fallbackClaimKeys) OPTIONAL List[str]
immutable_storage: (immutableStorage) OPTIONAL bool
max_active_sessions: (maxActiveSessions) OPTIONAL int
native_session_setting: (NativeSessionSetting) OPTIONAL ModelsNativeSessionSetting
preferred_claim_keys: (preferredClaimKeys) OPTIONAL List[str]
psn_base_url: (PSNBaseUrl) OPTIONAL str
tie_teams_session_lifetime: (tieTeamsSessionLifetime) OPTIONAL bool
"""
# region fields
client_version: str # REQUIRED
deployment: str # REQUIRED
inactive_timeout: int # REQUIRED
invite_timeout: int # REQUIRED
joinability: str # REQUIRED
max_players: int # REQUIRED
min_players: int # REQUIRED
name: str # REQUIRED
persistent: bool # REQUIRED
requested_regions: List[str] # REQUIRED
text_chat: bool # REQUIRED
type_: str # REQUIRED
auto_join: bool # OPTIONAL
ds_source: str # OPTIONAL
fallback_claim_keys: List[str] # OPTIONAL
immutable_storage: bool # OPTIONAL
max_active_sessions: int # OPTIONAL
native_session_setting: ModelsNativeSessionSetting # OPTIONAL
preferred_claim_keys: List[str] # OPTIONAL
psn_base_url: str # OPTIONAL
tie_teams_session_lifetime: bool # OPTIONAL
# endregion fields
# region with_x methods
def with_client_version(
self, value: str
) -> ApimodelsCreateConfigurationTemplateRequest:
self.client_version = value
return self
def with_deployment(
self, value: str
) -> ApimodelsCreateConfigurationTemplateRequest:
self.deployment = value
return self
def with_inactive_timeout(
self, value: int
) -> ApimodelsCreateConfigurationTemplateRequest:
self.inactive_timeout = value
return self
def with_invite_timeout(
self, value: int
) -> ApimodelsCreateConfigurationTemplateRequest:
self.invite_timeout = value
return self
def with_joinability(
self, value: str
) -> ApimodelsCreateConfigurationTemplateRequest:
self.joinability = value
return self
def with_max_players(
self, value: int
) -> ApimodelsCreateConfigurationTemplateRequest:
self.max_players = value
return self
def with_min_players(
self, value: int
) -> ApimodelsCreateConfigurationTemplateRequest:
self.min_players = value
return self
def with_name(self, value: str) -> ApimodelsCreateConfigurationTemplateRequest:
self.name = value
return self
def with_persistent(
self, value: bool
) -> ApimodelsCreateConfigurationTemplateRequest:
self.persistent = value
return self
def with_requested_regions(
self, value: List[str]
) -> ApimodelsCreateConfigurationTemplateRequest:
self.requested_regions = value
return self
def with_text_chat(
self, value: bool
) -> ApimodelsCreateConfigurationTemplateRequest:
self.text_chat = value
return self
def with_type(self, value: str) -> ApimodelsCreateConfigurationTemplateRequest:
self.type_ = value
return self
def with_auto_join(
self, value: bool
) -> ApimodelsCreateConfigurationTemplateRequest:
self.auto_join = value
return self
def with_ds_source(self, value: str) -> ApimodelsCreateConfigurationTemplateRequest:
self.ds_source = value
return self
def with_fallback_claim_keys(
self, value: List[str]
) -> ApimodelsCreateConfigurationTemplateRequest:
self.fallback_claim_keys = value
return self
def with_immutable_storage(
self, value: bool
) -> ApimodelsCreateConfigurationTemplateRequest:
self.immutable_storage = value
return self
def with_max_active_sessions(
self, value: int
) -> ApimodelsCreateConfigurationTemplateRequest:
self.max_active_sessions = value
return self
def with_native_session_setting(
self, value: ModelsNativeSessionSetting
) -> ApimodelsCreateConfigurationTemplateRequest:
self.native_session_setting = value
return self
def with_preferred_claim_keys(
self, value: List[str]
) -> ApimodelsCreateConfigurationTemplateRequest:
self.preferred_claim_keys = value
return self
def with_psn_base_url(
self, value: str
) -> ApimodelsCreateConfigurationTemplateRequest:
self.psn_base_url = value
return self
def with_tie_teams_session_lifetime(
self, value: bool
) -> ApimodelsCreateConfigurationTemplateRequest:
self.tie_teams_session_lifetime = value
return self
# endregion with_x methods
# region to methods
def to_dict(self, include_empty: bool = False) -> dict:
result: dict = {}
if hasattr(self, "client_version"):
result["clientVersion"] = str(self.client_version)
elif include_empty:
result["clientVersion"] = ""
if hasattr(self, "deployment"):
result["deployment"] = str(self.deployment)
elif include_empty:
result["deployment"] = ""
if hasattr(self, "inactive_timeout"):
result["inactiveTimeout"] = int(self.inactive_timeout)
elif include_empty:
result["inactiveTimeout"] = 0
if hasattr(self, "invite_timeout"):
result["inviteTimeout"] = int(self.invite_timeout)
elif include_empty:
result["inviteTimeout"] = 0
if hasattr(self, "joinability"):
result["joinability"] = str(self.joinability)
elif include_empty:
result["joinability"] = ""
if hasattr(self, "max_players"):
result["maxPlayers"] = int(self.max_players)
elif include_empty:
result["maxPlayers"] = 0
if hasattr(self, "min_players"):
result["minPlayers"] = int(self.min_players)
elif include_empty:
result["minPlayers"] = 0
if hasattr(self, "name"):
result["name"] = str(self.name)
elif include_empty:
result["name"] = ""
if hasattr(self, "persistent"):
result["persistent"] = bool(self.persistent)
elif include_empty:
result["persistent"] = False
if hasattr(self, "requested_regions"):
result["requestedRegions"] = [str(i0) for i0 in self.requested_regions]
elif include_empty:
result["requestedRegions"] = []
if hasattr(self, "text_chat"):
result["textChat"] = bool(self.text_chat)
elif include_empty:
result["textChat"] = False
if hasattr(self, "type_"):
result["type"] = str(self.type_)
elif include_empty:
result["type"] = ""
if hasattr(self, "auto_join"):
result["autoJoin"] = bool(self.auto_join)
elif include_empty:
result["autoJoin"] = False
if hasattr(self, "ds_source"):
result["dsSource"] = str(self.ds_source)
elif include_empty:
result["dsSource"] = ""
if hasattr(self, "fallback_claim_keys"):
result["fallbackClaimKeys"] = [str(i0) for i0 in self.fallback_claim_keys]
elif include_empty:
result["fallbackClaimKeys"] = []
if hasattr(self, "immutable_storage"):
result["immutableStorage"] = bool(self.immutable_storage)
elif include_empty:
result["immutableStorage"] = False
if hasattr(self, "max_active_sessions"):
result["maxActiveSessions"] = int(self.max_active_sessions)
elif include_empty:
result["maxActiveSessions"] = 0
if hasattr(self, "native_session_setting"):
result["NativeSessionSetting"] = self.native_session_setting.to_dict(
include_empty=include_empty
)
elif include_empty:
result["NativeSessionSetting"] = ModelsNativeSessionSetting()
if hasattr(self, "preferred_claim_keys"):
result["preferredClaimKeys"] = [str(i0) for i0 in self.preferred_claim_keys]
elif include_empty:
result["preferredClaimKeys"] = []
if hasattr(self, "psn_base_url"):
result["PSNBaseUrl"] = str(self.psn_base_url)
elif include_empty:
result["PSNBaseUrl"] = ""
if hasattr(self, "tie_teams_session_lifetime"):
result["tieTeamsSessionLifetime"] = bool(self.tie_teams_session_lifetime)
elif include_empty:
result["tieTeamsSessionLifetime"] = False
return result
# endregion to methods
# region static methods
@classmethod
def create(
cls,
client_version: str,
deployment: str,
inactive_timeout: int,
invite_timeout: int,
joinability: str,
max_players: int,
min_players: int,
name: str,
persistent: bool,
requested_regions: List[str],
text_chat: bool,
type_: str,
auto_join: Optional[bool] = None,
ds_source: Optional[str] = None,
fallback_claim_keys: Optional[List[str]] = None,
immutable_storage: Optional[bool] = None,
max_active_sessions: Optional[int] = None,
native_session_setting: Optional[ModelsNativeSessionSetting] = None,
preferred_claim_keys: Optional[List[str]] = None,
psn_base_url: Optional[str] = None,
tie_teams_session_lifetime: Optional[bool] = None,
**kwargs,
) -> ApimodelsCreateConfigurationTemplateRequest:
instance = cls()
instance.client_version = client_version
instance.deployment = deployment
instance.inactive_timeout = inactive_timeout
instance.invite_timeout = invite_timeout
instance.joinability = joinability
instance.max_players = max_players
instance.min_players = min_players
instance.name = name
instance.persistent = persistent
instance.requested_regions = requested_regions
instance.text_chat = text_chat
instance.type_ = type_
if auto_join is not None:
instance.auto_join = auto_join
if ds_source is not None:
instance.ds_source = ds_source
if fallback_claim_keys is not None:
instance.fallback_claim_keys = fallback_claim_keys
if immutable_storage is not None:
instance.immutable_storage = immutable_storage
if max_active_sessions is not None:
instance.max_active_sessions = max_active_sessions
if native_session_setting is not None:
instance.native_session_setting = native_session_setting
if preferred_claim_keys is not None:
instance.preferred_claim_keys = preferred_claim_keys
if psn_base_url is not None:
instance.psn_base_url = psn_base_url
if tie_teams_session_lifetime is not None:
instance.tie_teams_session_lifetime = tie_teams_session_lifetime
return instance
@classmethod
def create_from_dict(
cls, dict_: dict, include_empty: bool = False
) -> ApimodelsCreateConfigurationTemplateRequest:
instance = cls()
if not dict_:
return instance
if "clientVersion" in dict_ and dict_["clientVersion"] is not None:
instance.client_version = str(dict_["clientVersion"])
elif include_empty:
instance.client_version = ""
if "deployment" in dict_ and dict_["deployment"] is not None:
instance.deployment = str(dict_["deployment"])
elif include_empty:
instance.deployment = ""
if "inactiveTimeout" in dict_ and dict_["inactiveTimeout"] is not None:
instance.inactive_timeout = int(dict_["inactiveTimeout"])
elif include_empty:
instance.inactive_timeout = 0
if "inviteTimeout" in dict_ and dict_["inviteTimeout"] is not None:
instance.invite_timeout = int(dict_["inviteTimeout"])
elif include_empty:
instance.invite_timeout = 0
if "joinability" in dict_ and dict_["joinability"] is not None:
instance.joinability = str(dict_["joinability"])
elif include_empty:
instance.joinability = ""
if "maxPlayers" in dict_ and dict_["maxPlayers"] is not None:
instance.max_players = int(dict_["maxPlayers"])
elif include_empty:
instance.max_players = 0
if "minPlayers" in dict_ and dict_["minPlayers"] is not None:
instance.min_players = int(dict_["minPlayers"])
elif include_empty:
instance.min_players = 0
if "name" in dict_ and dict_["name"] is not None:
instance.name = str(dict_["name"])
elif include_empty:
instance.name = ""
if "persistent" in dict_ and dict_["persistent"] is not None:
instance.persistent = bool(dict_["persistent"])
elif include_empty:
instance.persistent = False
if "requestedRegions" in dict_ and dict_["requestedRegions"] is not None:
instance.requested_regions = [str(i0) for i0 in dict_["requestedRegions"]]
elif include_empty:
instance.requested_regions = []
if "textChat" in dict_ and dict_["textChat"] is not None:
instance.text_chat = bool(dict_["textChat"])
elif include_empty:
instance.text_chat = False
if "type" in dict_ and dict_["type"] is not None:
instance.type_ = str(dict_["type"])
elif include_empty:
instance.type_ = ""
if "autoJoin" in dict_ and dict_["autoJoin"] is not None:
instance.auto_join = bool(dict_["autoJoin"])
elif include_empty:
instance.auto_join = False
if "dsSource" in dict_ and dict_["dsSource"] is not None:
instance.ds_source = str(dict_["dsSource"])
elif include_empty:
instance.ds_source = ""
if "fallbackClaimKeys" in dict_ and dict_["fallbackClaimKeys"] is not None:
instance.fallback_claim_keys = [
str(i0) for i0 in dict_["fallbackClaimKeys"]
]
elif include_empty:
instance.fallback_claim_keys = []
if "immutableStorage" in dict_ and dict_["immutableStorage"] is not None:
instance.immutable_storage = bool(dict_["immutableStorage"])
elif include_empty:
instance.immutable_storage = False
if "maxActiveSessions" in dict_ and dict_["maxActiveSessions"] is not None:
instance.max_active_sessions = int(dict_["maxActiveSessions"])
elif include_empty:
instance.max_active_sessions = 0
if (
"NativeSessionSetting" in dict_
and dict_["NativeSessionSetting"] is not None
):
instance.native_session_setting = (
ModelsNativeSessionSetting.create_from_dict(
dict_["NativeSessionSetting"], include_empty=include_empty
)
)
elif include_empty:
instance.native_session_setting = ModelsNativeSessionSetting()
if "preferredClaimKeys" in dict_ and dict_["preferredClaimKeys"] is not None:
instance.preferred_claim_keys = [
str(i0) for i0 in dict_["preferredClaimKeys"]
]
elif include_empty:
instance.preferred_claim_keys = []
if "PSNBaseUrl" in dict_ and dict_["PSNBaseUrl"] is not None:
instance.psn_base_url = str(dict_["PSNBaseUrl"])
elif include_empty:
instance.psn_base_url = ""
if (
"tieTeamsSessionLifetime" in dict_
and dict_["tieTeamsSessionLifetime"] is not None
):
instance.tie_teams_session_lifetime = bool(dict_["tieTeamsSessionLifetime"])
elif include_empty:
instance.tie_teams_session_lifetime = False
return instance
@classmethod
def create_many_from_dict(
cls, dict_: dict, include_empty: bool = False
) -> Dict[str, ApimodelsCreateConfigurationTemplateRequest]:
return (
            {k: cls.create_from_dict(v, include_empty=include_empty) for k, v in dict_.items()}
if dict_
else {}
)
@classmethod
def create_many_from_list(
cls, list_: list, include_empty: bool = False
) -> List[ApimodelsCreateConfigurationTemplateRequest]:
return (
[cls.create_from_dict(i, include_empty=include_empty) for i in list_]
if list_
else []
)
@classmethod
def create_from_any(
        cls, any_: Any, include_empty: bool = False, many: bool = False
) -> Union[
ApimodelsCreateConfigurationTemplateRequest,
List[ApimodelsCreateConfigurationTemplateRequest],
Dict[Any, ApimodelsCreateConfigurationTemplateRequest],
]:
if many:
if isinstance(any_, dict):
return cls.create_many_from_dict(any_, include_empty=include_empty)
elif isinstance(any_, list):
return cls.create_many_from_list(any_, include_empty=include_empty)
else:
raise ValueError()
else:
return cls.create_from_dict(any_, include_empty=include_empty)
@staticmethod
def get_field_info() -> Dict[str, str]:
return {
"clientVersion": "client_version",
"deployment": "deployment",
"inactiveTimeout": "inactive_timeout",
"inviteTimeout": "invite_timeout",
"joinability": "joinability",
"maxPlayers": "max_players",
"minPlayers": "min_players",
"name": "name",
"persistent": "persistent",
"requestedRegions": "requested_regions",
"textChat": "text_chat",
"type": "type_",
"autoJoin": "auto_join",
"dsSource": "ds_source",
"fallbackClaimKeys": "fallback_claim_keys",
"immutableStorage": "immutable_storage",
"maxActiveSessions": "max_active_sessions",
"NativeSessionSetting": "native_session_setting",
"preferredClaimKeys": "preferred_claim_keys",
"PSNBaseUrl": "psn_base_url",
"tieTeamsSessionLifetime": "tie_teams_session_lifetime",
}
@staticmethod
def get_required_map() -> Dict[str, bool]:
return {
"clientVersion": True,
"deployment": True,
"inactiveTimeout": True,
"inviteTimeout": True,
"joinability": True,
"maxPlayers": True,
"minPlayers": True,
"name": True,
"persistent": True,
"requestedRegions": True,
"textChat": True,
"type": True,
"autoJoin": False,
"dsSource": False,
"fallbackClaimKeys": False,
"immutableStorage": False,
"maxActiveSessions": False,
"NativeSessionSetting": False,
"preferredClaimKeys": False,
"PSNBaseUrl": False,
"tieTeamsSessionLifetime": False,
}
# endregion static methods
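# Minimal usage sketch (illustrative only, not part of the generated SDK module): build the
# request with its required fields, serialize it, and round-trip it. The field values below
# (e.g. joinability "OPEN", type "P2P") are placeholders, not documented AccelByte values.
if __name__ == "__main__":
    _request = ApimodelsCreateConfigurationTemplateRequest.create(
        client_version="1.0.0",
        deployment="default",
        inactive_timeout=60,
        invite_timeout=60,
        joinability="OPEN",
        max_players=4,
        min_players=1,
        name="example-template",
        persistent=False,
        requested_regions=["us-west-2"],
        text_chat=True,
        type_="P2P",
    )
    _payload = _request.to_dict()  # camelCase keys, e.g. "clientVersion", "maxPlayers"
    _copy = ApimodelsCreateConfigurationTemplateRequest.create_from_dict(_payload)
    assert _copy.to_dict() == _payload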
|
PypiClean
|
/uniohomeassistant-0.1.3.tar.gz/uniohomeassistant-0.1.3/homeassistant/components/calendar/__init__.py
|
from datetime import timedelta
import logging
import re
from typing import Dict, List, cast
from aiohttp import web
from homeassistant.components import http
from homeassistant.const import HTTP_BAD_REQUEST, STATE_OFF, STATE_ON
from homeassistant.helpers.config_validation import ( # noqa: F401
PLATFORM_SCHEMA,
PLATFORM_SCHEMA_BASE,
time_period_str,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.template import DATE_STR_FORMAT
from homeassistant.util import dt
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "calendar"
ENTITY_ID_FORMAT = DOMAIN + ".{}"
SCAN_INTERVAL = timedelta(seconds=60)
async def async_setup(hass, config):
"""Track states and offer events for calendars."""
component = hass.data[DOMAIN] = EntityComponent(
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
)
hass.http.register_view(CalendarListView(component))
hass.http.register_view(CalendarEventView(component))
hass.components.frontend.async_register_built_in_panel(
"calendar", "calendar", "hass:calendar"
)
await component.async_setup(config)
return True
async def async_setup_entry(hass, entry):
"""Set up a config entry."""
return await hass.data[DOMAIN].async_setup_entry(entry)
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
return await hass.data[DOMAIN].async_unload_entry(entry)
def get_date(date):
"""Get the dateTime from date or dateTime as a local."""
if "date" in date:
return dt.start_of_local_day(
dt.dt.datetime.combine(dt.parse_date(date["date"]), dt.dt.time.min)
)
return dt.as_local(dt.parse_datetime(date["dateTime"]))
def normalize_event(event):
"""Normalize a calendar event."""
normalized_event = {}
start = event.get("start")
end = event.get("end")
start = get_date(start) if start is not None else None
end = get_date(end) if end is not None else None
normalized_event["dt_start"] = start
normalized_event["dt_end"] = end
start = start.strftime(DATE_STR_FORMAT) if start is not None else None
end = end.strftime(DATE_STR_FORMAT) if end is not None else None
normalized_event["start"] = start
normalized_event["end"] = end
# cleanup the string so we don't have a bunch of double+ spaces
summary = event.get("summary", "")
normalized_event["message"] = re.sub(" +", "", summary).strip()
normalized_event["location"] = event.get("location", "")
normalized_event["description"] = event.get("description", "")
normalized_event["all_day"] = "date" in event["start"]
return normalized_event
def calculate_offset(event, offset):
"""Calculate event offset.
Return the updated event with the offset_time included.
"""
summary = event.get("summary", "")
# check if we have an offset tag in the message
# time is HH:MM or MM
reg = f"{offset}([+-]?[0-9]{{0,2}}(:[0-9]{{0,2}})?)"
search = re.search(reg, summary)
if search and search.group(1):
time = search.group(1)
if ":" not in time:
if time[0] == "+" or time[0] == "-":
time = f"{time[0]}0:{time[1:]}"
else:
time = f"0:{time}"
offset_time = time_period_str(time)
summary = (summary[: search.start()] + summary[search.end() :]).strip()
event["summary"] = summary
else:
offset_time = dt.dt.timedelta() # default it
event["offset_time"] = offset_time
return event
def is_offset_reached(event):
"""Have we reached the offset time specified in the event title."""
start = get_date(event["start"])
if start is None or event["offset_time"] == dt.dt.timedelta():
return False
return start + event["offset_time"] <= dt.now(start.tzinfo)
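# Illustrative example (not part of the upstream component): an event summary such as
# "Standup !!-15" carries an offset tag; calculate_offset() strips the tag and stores a
# -15 minute offset_time, which is_offset_reached() later compares against now(). The
# summary text and "!!" tag below are hypothetical.
#
#   event = calculate_offset({"summary": "Standup !!-15"}, "!!")
#   event["summary"]      # => "Standup"
#   event["offset_time"]  # => timedelta(minutes=-15)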
class CalendarEventDevice(Entity):
"""A calendar event device."""
@property
def event(self):
"""Return the next upcoming event."""
raise NotImplementedError()
@property
def state_attributes(self):
"""Return the entity state attributes."""
event = self.event
if event is None:
return None
event = normalize_event(event)
return {
"message": event["message"],
"all_day": event["all_day"],
"start_time": event["start"],
"end_time": event["end"],
"location": event["location"],
"description": event["description"],
}
@property
def state(self):
"""Return the state of the calendar event."""
event = self.event
if event is None:
return STATE_OFF
event = normalize_event(event)
start = event["dt_start"]
end = event["dt_end"]
if start is None or end is None:
return STATE_OFF
now = dt.now()
if start <= now < end:
return STATE_ON
return STATE_OFF
async def async_get_events(self, hass, start_date, end_date):
"""Return calendar events within a datetime range."""
raise NotImplementedError()
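# Illustrative sketch (not part of the upstream component): a minimal platform entity built
# on CalendarEventDevice. The hard-coded event below exists purely for demonstration.
class _ExampleCalendar(CalendarEventDevice):
    """Demo calendar exposing a single static event."""
    _demo_event = {
        "summary": "Example event",
        "start": {"dateTime": "2021-01-01T09:00:00+00:00"},
        "end": {"dateTime": "2021-01-01T10:00:00+00:00"},
    }
    @property
    def event(self):
        """Return the next upcoming event."""
        return self._demo_event
    async def async_get_events(self, hass, start_date, end_date):
        """Return calendar events within a datetime range."""
        return [self._demo_event]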
class CalendarEventView(http.HomeAssistantView):
"""View to retrieve calendar content."""
url = "/api/calendars/{entity_id}"
name = "api:calendars:calendar"
def __init__(self, component: EntityComponent) -> None:
"""Initialize calendar view."""
self.component = component
async def get(self, request, entity_id):
"""Return calendar events."""
entity = self.component.get_entity(entity_id)
start = request.query.get("start")
end = request.query.get("end")
if None in (start, end, entity):
return web.Response(status=HTTP_BAD_REQUEST)
try:
start_date = dt.parse_datetime(start)
end_date = dt.parse_datetime(end)
except (ValueError, AttributeError):
return web.Response(status=HTTP_BAD_REQUEST)
event_list = await entity.async_get_events(
request.app["hass"], start_date, end_date
)
return self.json(event_list)
class CalendarListView(http.HomeAssistantView):
"""View to retrieve calendar list."""
url = "/api/calendars"
name = "api:calendars"
def __init__(self, component: EntityComponent) -> None:
"""Initialize calendar view."""
self.component = component
async def get(self, request: web.Request) -> web.Response:
"""Retrieve calendar list."""
hass = request.app["hass"]
calendar_list: List[Dict[str, str]] = []
for entity in self.component.entities:
state = hass.states.get(entity.entity_id)
calendar_list.append({"name": state.name, "entity_id": entity.entity_id})
return self.json(sorted(calendar_list, key=lambda x: cast(str, x["name"])))
|
PypiClean
|
/sendbird_platform_sdk-0.0.16-py3-none-any.whl/sendbird_platform_sdk/model/gc_update_ban_by_id_response.py
|
import re # noqa: F401
import sys # noqa: F401
from sendbird_platform_sdk.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from sendbird_platform_sdk.exceptions import ApiAttributeError
def lazy_import():
from sendbird_platform_sdk.model.send_bird_user import SendBirdUser
globals()['SendBirdUser'] = SendBirdUser
class GcUpdateBanByIdResponse(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
          and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'user': (SendBirdUser,), # noqa: E501
'start_at': (float,), # noqa: E501
'end_at': (float,), # noqa: E501
'description': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'user': 'user', # noqa: E501
'start_at': 'start_at', # noqa: E501
'end_at': 'end_at', # noqa: E501
'description': 'description', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""GcUpdateBanByIdResponse - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
                                traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
user (SendBirdUser): [optional] # noqa: E501
start_at (float): [optional] # noqa: E501
end_at (float): [optional] # noqa: E501
description (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', True)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""GcUpdateBanByIdResponse - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
                                traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
user (SendBirdUser): [optional] # noqa: E501
start_at (float): [optional] # noqa: E501
end_at (float): [optional] # noqa: E501
description (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/pulumi_azure_nextgen-0.6.2a1613157620.tar.gz/pulumi_azure_nextgen-0.6.2a1613157620/pulumi_azure_nextgen/apimanagement/backend.py
|
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['Backend']
class Backend(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
backend_id: Optional[pulumi.Input[str]] = None,
credentials: Optional[pulumi.Input[pulumi.InputType['BackendCredentialsContractArgs']]] = None,
description: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['BackendPropertiesArgs']]] = None,
protocol: Optional[pulumi.Input[Union[str, 'BackendProtocol']]] = None,
proxy: Optional[pulumi.Input[pulumi.InputType['BackendProxyContractArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_id: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
title: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input[pulumi.InputType['BackendTlsPropertiesArgs']]] = None,
url: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Backend details.
API Version: 2019-12-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] backend_id: Identifier of the Backend entity. Must be unique in the current API Management service instance.
:param pulumi.Input[pulumi.InputType['BackendCredentialsContractArgs']] credentials: Backend Credentials Contract Properties
:param pulumi.Input[str] description: Backend Description.
:param pulumi.Input[pulumi.InputType['BackendPropertiesArgs']] properties: Backend Properties contract
:param pulumi.Input[Union[str, 'BackendProtocol']] protocol: Backend communication protocol.
:param pulumi.Input[pulumi.InputType['BackendProxyContractArgs']] proxy: Backend Proxy Contract Properties
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] resource_id: Management Uri of the Resource in External System. This url can be the Arm Resource Id of Logic Apps, Function Apps or Api Apps.
:param pulumi.Input[str] service_name: The name of the API Management service.
:param pulumi.Input[str] title: Backend Title.
:param pulumi.Input[pulumi.InputType['BackendTlsPropertiesArgs']] tls: Backend TLS Properties
:param pulumi.Input[str] url: Runtime Url of the Backend.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if backend_id is None and not opts.urn:
raise TypeError("Missing required property 'backend_id'")
__props__['backend_id'] = backend_id
__props__['credentials'] = credentials
__props__['description'] = description
__props__['properties'] = properties
if protocol is None and not opts.urn:
raise TypeError("Missing required property 'protocol'")
__props__['protocol'] = protocol
__props__['proxy'] = proxy
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['resource_id'] = resource_id
if service_name is None and not opts.urn:
raise TypeError("Missing required property 'service_name'")
__props__['service_name'] = service_name
__props__['title'] = title
__props__['tls'] = tls
if url is None and not opts.urn:
raise TypeError("Missing required property 'url'")
__props__['url'] = url
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement/latest:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20160707:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20161010:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20170301:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180101:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180601preview:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20190101:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201preview:Backend"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20200601preview:Backend")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Backend, __self__).__init__(
'azure-nextgen:apimanagement:Backend',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Backend':
"""
Get an existing Backend resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return Backend(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def credentials(self) -> pulumi.Output[Optional['outputs.BackendCredentialsContractResponse']]:
"""
Backend Credentials Contract Properties
"""
return pulumi.get(self, "credentials")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Backend Description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.BackendPropertiesResponse']:
"""
Backend Properties contract
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def protocol(self) -> pulumi.Output[str]:
"""
Backend communication protocol.
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter
def proxy(self) -> pulumi.Output[Optional['outputs.BackendProxyContractResponse']]:
"""
Backend Proxy Contract Properties
"""
return pulumi.get(self, "proxy")
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> pulumi.Output[Optional[str]]:
"""
Management Uri of the Resource in External System. This url can be the Arm Resource Id of Logic Apps, Function Apps or Api Apps.
"""
return pulumi.get(self, "resource_id")
@property
@pulumi.getter
def title(self) -> pulumi.Output[Optional[str]]:
"""
Backend Title.
"""
return pulumi.get(self, "title")
@property
@pulumi.getter
def tls(self) -> pulumi.Output[Optional['outputs.BackendTlsPropertiesResponse']]:
"""
Backend TLS Properties
"""
return pulumi.get(self, "tls")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type for API Management resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def url(self) -> pulumi.Output[str]:
"""
Runtime Url of the Backend.
"""
return pulumi.get(self, "url")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
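# Illustrative sketch (not part of the generated provider module): declaring this resource
# from a Pulumi program. The names and values below are placeholders, and the snippet only
# runs under the Pulumi engine (e.g. during `pulumi up`), not as a standalone script.
#
#   import pulumi
#   import pulumi_azure_nextgen.apimanagement as apimanagement
#
#   backend = apimanagement.Backend(
#       "example-backend",
#       backend_id="example-backend",
#       protocol="http",
#       resource_group_name="example-rg",
#       service_name="example-apim",
#       url="https://backend.example.com",
#   )
#   pulumi.export("backend_name", backend.name)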
|
PypiClean
|
/azure-cli-2.51.0.tar.gz/azure-cli-2.51.0/azure/cli/command_modules/network/aaz/2017_03_09_profile/network/nic/ip_config/_list.py
|
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"network nic ip-config list",
)
class List(AAZCommand):
"""List the IP configurations of an NIC.
:example: List the IP configurations of an NIC.
az network nic ip-config list -g MyResourceGroup --nic-name MyNic
"""
_aaz_info = {
"version": "2015-06-15",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/networkinterfaces/{}", "2015-06-15", "properties.ipConfigurations"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
self.SubresourceSelector(ctx=self.ctx, name="subresource")
self._execute_operations()
return self._output()
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.nic_name = AAZStrArg(
options=["--nic-name"],
help="Name of the network interface (NIC).",
required=True,
)
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
self.NetworkInterfacesGet(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.selectors.subresource.required(), client_flatten=True)
return result
class SubresourceSelector(AAZJsonSelector):
def _get(self):
result = self.ctx.vars.instance
return result.properties.ipConfigurations
def _set(self, value):
result = self.ctx.vars.instance
result.properties.ipConfigurations = value
return
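    # Illustrative note (not part of the generated command): the selector above narrows the
    # full NIC payload fetched by NetworkInterfacesGet down to properties.ipConfigurations,
    # which is what the command returns. A hypothetical invocation and output shape:
    #
    #   az network nic ip-config list -g MyResourceGroup --nic-name MyNic -o json
    #   # => [{"name": "ipconfig1", "properties": {"privateIPAddress": "10.0.0.4", ...}}]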
class NetworkInterfacesGet(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "MgmtErrorFormat"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"networkInterfaceName", self.ctx.args.nic_name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2015-06-15",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_ListHelper._build_schema_network_interface_read(cls._schema_on_200)
return cls._schema_on_200
class _ListHelper:
"""Helper class for List"""
_schema_ip_configuration_read = None
@classmethod
def _build_schema_ip_configuration_read(cls, _schema):
if cls._schema_ip_configuration_read is not None:
_schema.etag = cls._schema_ip_configuration_read.etag
_schema.id = cls._schema_ip_configuration_read.id
_schema.name = cls._schema_ip_configuration_read.name
_schema.properties = cls._schema_ip_configuration_read.properties
return
cls._schema_ip_configuration_read = _schema_ip_configuration_read = AAZObjectType()
ip_configuration_read = _schema_ip_configuration_read
ip_configuration_read.etag = AAZStrType()
ip_configuration_read.id = AAZStrType()
ip_configuration_read.name = AAZStrType()
ip_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_ip_configuration_read.properties
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
properties.public_ip_address = AAZObjectType(
serialized_name="publicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.public_ip_address)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
_schema.etag = cls._schema_ip_configuration_read.etag
_schema.id = cls._schema_ip_configuration_read.id
_schema.name = cls._schema_ip_configuration_read.name
_schema.properties = cls._schema_ip_configuration_read.properties
_schema_network_interface_ip_configuration_read = None
@classmethod
def _build_schema_network_interface_ip_configuration_read(cls, _schema):
if cls._schema_network_interface_ip_configuration_read is not None:
_schema.etag = cls._schema_network_interface_ip_configuration_read.etag
_schema.id = cls._schema_network_interface_ip_configuration_read.id
_schema.name = cls._schema_network_interface_ip_configuration_read.name
_schema.properties = cls._schema_network_interface_ip_configuration_read.properties
return
cls._schema_network_interface_ip_configuration_read = _schema_network_interface_ip_configuration_read = AAZObjectType()
network_interface_ip_configuration_read = _schema_network_interface_ip_configuration_read
network_interface_ip_configuration_read.etag = AAZStrType()
network_interface_ip_configuration_read.id = AAZStrType()
network_interface_ip_configuration_read.name = AAZStrType()
network_interface_ip_configuration_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_network_interface_ip_configuration_read.properties
properties.load_balancer_backend_address_pools = AAZListType(
serialized_name="loadBalancerBackendAddressPools",
)
properties.load_balancer_inbound_nat_rules = AAZListType(
serialized_name="loadBalancerInboundNatRules",
)
properties.primary = AAZBoolType()
properties.private_ip_address = AAZStrType(
serialized_name="privateIPAddress",
)
properties.private_ip_allocation_method = AAZStrType(
serialized_name="privateIPAllocationMethod",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
properties.public_ip_address = AAZObjectType(
serialized_name="publicIPAddress",
)
cls._build_schema_public_ip_address_read(properties.public_ip_address)
properties.subnet = AAZObjectType()
cls._build_schema_subnet_read(properties.subnet)
load_balancer_backend_address_pools = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools
load_balancer_backend_address_pools.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element
_element.etag = AAZStrType()
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties
properties.backend_ip_configurations = AAZListType(
serialized_name="backendIPConfigurations",
flags={"read_only": True},
)
properties.load_balancing_rules = AAZListType(
serialized_name="loadBalancingRules",
flags={"read_only": True},
)
properties.outbound_nat_rule = AAZObjectType(
serialized_name="outboundNatRule",
)
cls._build_schema_sub_resource_read(properties.outbound_nat_rule)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
backend_ip_configurations = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.backend_ip_configurations
backend_ip_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_ip_configuration_read(backend_ip_configurations.Element)
load_balancing_rules = _schema_network_interface_ip_configuration_read.properties.load_balancer_backend_address_pools.Element.properties.load_balancing_rules
load_balancing_rules.Element = AAZObjectType()
cls._build_schema_sub_resource_read(load_balancing_rules.Element)
load_balancer_inbound_nat_rules = _schema_network_interface_ip_configuration_read.properties.load_balancer_inbound_nat_rules
load_balancer_inbound_nat_rules.Element = AAZObjectType()
_element = _schema_network_interface_ip_configuration_read.properties.load_balancer_inbound_nat_rules.Element
_element.etag = AAZStrType()
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_network_interface_ip_configuration_read.properties.load_balancer_inbound_nat_rules.Element.properties
properties.backend_ip_configuration = AAZObjectType(
serialized_name="backendIPConfiguration",
)
cls._build_schema_network_interface_ip_configuration_read(properties.backend_ip_configuration)
properties.backend_port = AAZIntType(
serialized_name="backendPort",
)
properties.enable_floating_ip = AAZBoolType(
serialized_name="enableFloatingIP",
)
properties.frontend_ip_configuration = AAZObjectType(
serialized_name="frontendIPConfiguration",
)
cls._build_schema_sub_resource_read(properties.frontend_ip_configuration)
properties.frontend_port = AAZIntType(
serialized_name="frontendPort",
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.protocol = AAZStrType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
_schema.etag = cls._schema_network_interface_ip_configuration_read.etag
_schema.id = cls._schema_network_interface_ip_configuration_read.id
_schema.name = cls._schema_network_interface_ip_configuration_read.name
_schema.properties = cls._schema_network_interface_ip_configuration_read.properties
_schema_network_interface_read = None
@classmethod
def _build_schema_network_interface_read(cls, _schema):
if cls._schema_network_interface_read is not None:
_schema.etag = cls._schema_network_interface_read.etag
_schema.id = cls._schema_network_interface_read.id
_schema.location = cls._schema_network_interface_read.location
_schema.name = cls._schema_network_interface_read.name
_schema.properties = cls._schema_network_interface_read.properties
_schema.tags = cls._schema_network_interface_read.tags
_schema.type = cls._schema_network_interface_read.type
return
cls._schema_network_interface_read = _schema_network_interface_read = AAZObjectType()
network_interface_read = _schema_network_interface_read
network_interface_read.etag = AAZStrType()
network_interface_read.id = AAZStrType()
network_interface_read.location = AAZStrType()
network_interface_read.name = AAZStrType(
flags={"read_only": True},
)
network_interface_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_interface_read.tags = AAZDictType()
network_interface_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_interface_read.properties
properties.dns_settings = AAZObjectType(
serialized_name="dnsSettings",
)
properties.enable_ip_forwarding = AAZBoolType(
serialized_name="enableIPForwarding",
)
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
)
properties.mac_address = AAZStrType(
serialized_name="macAddress",
)
properties.network_security_group = AAZObjectType(
serialized_name="networkSecurityGroup",
)
cls._build_schema_network_security_group_read(properties.network_security_group)
properties.primary = AAZBoolType()
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
)
properties.virtual_machine = AAZObjectType(
serialized_name="virtualMachine",
)
cls._build_schema_sub_resource_read(properties.virtual_machine)
dns_settings = _schema_network_interface_read.properties.dns_settings
dns_settings.applied_dns_servers = AAZListType(
serialized_name="appliedDnsServers",
)
dns_settings.dns_servers = AAZListType(
serialized_name="dnsServers",
)
dns_settings.internal_dns_name_label = AAZStrType(
serialized_name="internalDnsNameLabel",
)
dns_settings.internal_fqdn = AAZStrType(
serialized_name="internalFqdn",
)
applied_dns_servers = _schema_network_interface_read.properties.dns_settings.applied_dns_servers
applied_dns_servers.Element = AAZStrType()
dns_servers = _schema_network_interface_read.properties.dns_settings.dns_servers
dns_servers.Element = AAZStrType()
ip_configurations = _schema_network_interface_read.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
cls._build_schema_network_interface_ip_configuration_read(ip_configurations.Element)
tags = _schema_network_interface_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_network_interface_read.etag
_schema.id = cls._schema_network_interface_read.id
_schema.location = cls._schema_network_interface_read.location
_schema.name = cls._schema_network_interface_read.name
_schema.properties = cls._schema_network_interface_read.properties
_schema.tags = cls._schema_network_interface_read.tags
_schema.type = cls._schema_network_interface_read.type
_schema_network_security_group_read = None
@classmethod
def _build_schema_network_security_group_read(cls, _schema):
if cls._schema_network_security_group_read is not None:
_schema.etag = cls._schema_network_security_group_read.etag
_schema.id = cls._schema_network_security_group_read.id
_schema.location = cls._schema_network_security_group_read.location
_schema.name = cls._schema_network_security_group_read.name
_schema.properties = cls._schema_network_security_group_read.properties
_schema.tags = cls._schema_network_security_group_read.tags
_schema.type = cls._schema_network_security_group_read.type
return
cls._schema_network_security_group_read = _schema_network_security_group_read = AAZObjectType()
network_security_group_read = _schema_network_security_group_read
network_security_group_read.etag = AAZStrType()
network_security_group_read.id = AAZStrType()
network_security_group_read.location = AAZStrType()
network_security_group_read.name = AAZStrType(
flags={"read_only": True},
)
network_security_group_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
network_security_group_read.tags = AAZDictType()
network_security_group_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_network_security_group_read.properties
properties.default_security_rules = AAZListType(
serialized_name="defaultSecurityRules",
)
properties.network_interfaces = AAZListType(
serialized_name="networkInterfaces",
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
)
properties.security_rules = AAZListType(
serialized_name="securityRules",
)
properties.subnets = AAZListType()
default_security_rules = _schema_network_security_group_read.properties.default_security_rules
default_security_rules.Element = AAZObjectType()
cls._build_schema_security_rule_read(default_security_rules.Element)
network_interfaces = _schema_network_security_group_read.properties.network_interfaces
network_interfaces.Element = AAZObjectType()
cls._build_schema_network_interface_read(network_interfaces.Element)
security_rules = _schema_network_security_group_read.properties.security_rules
security_rules.Element = AAZObjectType()
cls._build_schema_security_rule_read(security_rules.Element)
subnets = _schema_network_security_group_read.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_subnet_read(subnets.Element)
tags = _schema_network_security_group_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_network_security_group_read.etag
_schema.id = cls._schema_network_security_group_read.id
_schema.location = cls._schema_network_security_group_read.location
_schema.name = cls._schema_network_security_group_read.name
_schema.properties = cls._schema_network_security_group_read.properties
_schema.tags = cls._schema_network_security_group_read.tags
_schema.type = cls._schema_network_security_group_read.type
_schema_public_ip_address_read = None
@classmethod
def _build_schema_public_ip_address_read(cls, _schema):
if cls._schema_public_ip_address_read is not None:
_schema.etag = cls._schema_public_ip_address_read.etag
_schema.id = cls._schema_public_ip_address_read.id
_schema.location = cls._schema_public_ip_address_read.location
_schema.name = cls._schema_public_ip_address_read.name
_schema.properties = cls._schema_public_ip_address_read.properties
_schema.tags = cls._schema_public_ip_address_read.tags
_schema.type = cls._schema_public_ip_address_read.type
return
cls._schema_public_ip_address_read = _schema_public_ip_address_read = AAZObjectType()
public_ip_address_read = _schema_public_ip_address_read
public_ip_address_read.etag = AAZStrType()
public_ip_address_read.id = AAZStrType()
public_ip_address_read.location = AAZStrType()
public_ip_address_read.name = AAZStrType(
flags={"read_only": True},
)
public_ip_address_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
public_ip_address_read.tags = AAZDictType()
public_ip_address_read.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_public_ip_address_read.properties
properties.dns_settings = AAZObjectType(
serialized_name="dnsSettings",
)
properties.idle_timeout_in_minutes = AAZIntType(
serialized_name="idleTimeoutInMinutes",
)
properties.ip_address = AAZStrType(
serialized_name="ipAddress",
)
properties.ip_configuration = AAZObjectType(
serialized_name="ipConfiguration",
)
cls._build_schema_ip_configuration_read(properties.ip_configuration)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
properties.public_ip_allocation_method = AAZStrType(
serialized_name="publicIPAllocationMethod",
)
properties.resource_guid = AAZStrType(
serialized_name="resourceGuid",
)
dns_settings = _schema_public_ip_address_read.properties.dns_settings
dns_settings.domain_name_label = AAZStrType(
serialized_name="domainNameLabel",
)
dns_settings.fqdn = AAZStrType()
dns_settings.reverse_fqdn = AAZStrType(
serialized_name="reverseFqdn",
)
tags = _schema_public_ip_address_read.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_public_ip_address_read.etag
_schema.id = cls._schema_public_ip_address_read.id
_schema.location = cls._schema_public_ip_address_read.location
_schema.name = cls._schema_public_ip_address_read.name
_schema.properties = cls._schema_public_ip_address_read.properties
_schema.tags = cls._schema_public_ip_address_read.tags
_schema.type = cls._schema_public_ip_address_read.type
_schema_security_rule_read = None
@classmethod
def _build_schema_security_rule_read(cls, _schema):
if cls._schema_security_rule_read is not None:
_schema.etag = cls._schema_security_rule_read.etag
_schema.id = cls._schema_security_rule_read.id
_schema.name = cls._schema_security_rule_read.name
_schema.properties = cls._schema_security_rule_read.properties
return
cls._schema_security_rule_read = _schema_security_rule_read = AAZObjectType()
security_rule_read = _schema_security_rule_read
security_rule_read.etag = AAZStrType()
security_rule_read.id = AAZStrType()
security_rule_read.name = AAZStrType()
security_rule_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_security_rule_read.properties
properties.access = AAZStrType(
flags={"required": True},
)
properties.description = AAZStrType()
properties.destination_address_prefix = AAZStrType(
serialized_name="destinationAddressPrefix",
flags={"required": True},
)
properties.destination_port_range = AAZStrType(
serialized_name="destinationPortRange",
)
properties.direction = AAZStrType(
flags={"required": True},
)
properties.priority = AAZIntType()
properties.protocol = AAZStrType(
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
properties.source_address_prefix = AAZStrType(
serialized_name="sourceAddressPrefix",
flags={"required": True},
)
properties.source_port_range = AAZStrType(
serialized_name="sourcePortRange",
)
_schema.etag = cls._schema_security_rule_read.etag
_schema.id = cls._schema_security_rule_read.id
_schema.name = cls._schema_security_rule_read.name
_schema.properties = cls._schema_security_rule_read.properties
_schema_sub_resource_read = None
@classmethod
def _build_schema_sub_resource_read(cls, _schema):
if cls._schema_sub_resource_read is not None:
_schema.id = cls._schema_sub_resource_read.id
return
cls._schema_sub_resource_read = _schema_sub_resource_read = AAZObjectType()
sub_resource_read = _schema_sub_resource_read
sub_resource_read.id = AAZStrType()
_schema.id = cls._schema_sub_resource_read.id
_schema_subnet_read = None
@classmethod
def _build_schema_subnet_read(cls, _schema):
if cls._schema_subnet_read is not None:
_schema.etag = cls._schema_subnet_read.etag
_schema.id = cls._schema_subnet_read.id
_schema.name = cls._schema_subnet_read.name
_schema.properties = cls._schema_subnet_read.properties
return
cls._schema_subnet_read = _schema_subnet_read = AAZObjectType()
subnet_read = _schema_subnet_read
subnet_read.etag = AAZStrType()
subnet_read.id = AAZStrType()
subnet_read.name = AAZStrType()
subnet_read.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_subnet_read.properties
properties.address_prefix = AAZStrType(
serialized_name="addressPrefix",
)
properties.ip_configurations = AAZListType(
serialized_name="ipConfigurations",
flags={"read_only": True},
)
properties.network_security_group = AAZObjectType(
serialized_name="networkSecurityGroup",
)
cls._build_schema_network_security_group_read(properties.network_security_group)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
properties.route_table = AAZObjectType(
serialized_name="routeTable",
)
ip_configurations = _schema_subnet_read.properties.ip_configurations
ip_configurations.Element = AAZObjectType()
cls._build_schema_ip_configuration_read(ip_configurations.Element)
route_table = _schema_subnet_read.properties.route_table
route_table.etag = AAZStrType()
route_table.id = AAZStrType()
route_table.location = AAZStrType()
route_table.name = AAZStrType(
flags={"read_only": True},
)
route_table.properties = AAZObjectType(
flags={"client_flatten": True},
)
route_table.tags = AAZDictType()
route_table.type = AAZStrType(
flags={"read_only": True},
)
properties = _schema_subnet_read.properties.route_table.properties
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
properties.routes = AAZListType()
properties.subnets = AAZListType()
routes = _schema_subnet_read.properties.route_table.properties.routes
routes.Element = AAZObjectType()
_element = _schema_subnet_read.properties.route_table.properties.routes.Element
_element.etag = AAZStrType()
_element.id = AAZStrType()
_element.name = AAZStrType()
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
properties = _schema_subnet_read.properties.route_table.properties.routes.Element.properties
properties.address_prefix = AAZStrType(
serialized_name="addressPrefix",
)
properties.next_hop_ip_address = AAZStrType(
serialized_name="nextHopIpAddress",
)
properties.next_hop_type = AAZStrType(
serialized_name="nextHopType",
flags={"required": True},
)
properties.provisioning_state = AAZStrType(
serialized_name="provisioningState",
)
subnets = _schema_subnet_read.properties.route_table.properties.subnets
subnets.Element = AAZObjectType()
cls._build_schema_subnet_read(subnets.Element)
tags = _schema_subnet_read.properties.route_table.tags
tags.Element = AAZStrType()
_schema.etag = cls._schema_subnet_read.etag
_schema.id = cls._schema_subnet_read.id
_schema.name = cls._schema_subnet_read.name
_schema.properties = cls._schema_subnet_read.properties
__all__ = ["List"]
|
PypiClean
|
/dna_audit_new3-0.1-py3-none-any.whl/aws_services/redshift/redshift_service.py
|
import collections
import itertools
import boto3
import botocore
from common.constants import application_constants
from common.utils import helper
from common.utils.initialize_logger import logger
class Redshift:
"""A class that checks the security settings of Amazon Redshift Cluster."""
def __init__(self):
"""
        Initializes Redshift and EC2 client objects with the specified maximum number of retries in the specified region.
"""
try:
configuration = helper.get_configuration()
self.service = collections.defaultdict(dict)
self.redshift_client = boto3.client('redshift', config=configuration)
self.ec2_client = boto3.client('ec2', config=configuration)
self.resource_list = []
except Exception as ex:
logger.error("Error occurred while initializing Redshift and EC2 client objects: %s", str(ex))
raise ex
def list_resources(self):
"""
Returns a list of all redshift clusters associated with the boto client.
Parameters:
- None
Returns:
- A list of Redshift cluster Identifiers.
Raises:
- botocore.exceptions.ClientError: if there is an error communicating with AWS.
"""
try:
paginator_db_instances = self.redshift_client.get_paginator('describe_clusters')
for page in paginator_db_instances.paginate(PaginationConfig={'PageSize': 20}):
self.resource_list += [cluster['ClusterIdentifier'] for cluster in page['Clusters']]
return self.resource_list
except botocore.exceptions.ClientError as ex:
logger.error("Error occurred when listing RDS resources: %s", str(ex))
raise
def check_encryption_at_rest(self, cluster_identifier):
"""
        Check if encryption at rest is enabled for the given Redshift cluster.
        Args:
            cluster_identifier (str): Cluster Identifier of the Redshift Cluster to check.
        Returns:
            check_result: The result status of the check (PASSED, FAILED or UNKNOWN).
        Raises:
            ClientError: if there is an error communicating with AWS.
            ClusterNotFoundFault: if the requested cluster is not found.
Exception: If any error occurs during the check.
"""
logger.info("gathering the encryption settings for %s", cluster_identifier)
try:
cluster = self.redshift_client.describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters'][0]
if cluster['Encrypted']:
check_result = application_constants.ResultStatus.PASSED
else:
check_result = application_constants.ResultStatus.FAILED
except (
self.redshift_client.exceptions.ClientError,
self.redshift_client.exceptions.ClusterNotFoundFault) as ex:
logger.error("error while gathering the encryption settings for %s: %s", cluster_identifier, str(ex))
check_result = application_constants.ResultStatus.UNKNOWN
except Exception as ex:
logger.error("error while gathering the encryption settings for %s: %s", cluster_identifier, str(ex))
raise ex
logger.info("Completed fetching encryption settings for redshift cluster : %s", cluster_identifier)
return check_result
def check_redshift_public_accessibility(self, cluster_identifier):
"""
        Check if the given Redshift cluster is publicly accessible.
        Args:
            cluster_identifier (str): Cluster Identifier of the Redshift Cluster to check.
        Returns:
            check_result: The result status of the check (PASSED, FAILED or UNKNOWN).
        Raises:
            ClientError: if there is an error communicating with AWS.
            ClusterNotFoundFault: if the requested cluster is not found.
Exception: If any error occurs during the check.
"""
logger.info("gathering the public accessibility settings for %s", cluster_identifier)
try:
cluster = self.redshift_client.describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters'][0]
if cluster['PubliclyAccessible']:
check_result = application_constants.ResultStatus.FAILED
else:
check_result = application_constants.ResultStatus.PASSED
except (self.redshift_client.exceptions.ClientError,
self.redshift_client.exceptions.ClusterNotFoundFault) as ex:
logger.error("error while gathering the public accessibility settings for %s: %s", cluster_identifier,
str(ex))
check_result = application_constants.ResultStatus.UNKNOWN
except Exception as ex:
logger.error("error while gathering the public accessibility settings for %s: %s", cluster_identifier,
str(ex))
raise ex
logger.info("Completed fetching public accessibility settings for redshift cluster : %s", cluster_identifier)
return check_result
def check_redshift_private_subnet(self, cluster_identifier):
"""
        Check if the given Redshift cluster is deployed only in private subnets.
        Args:
            cluster_identifier (str): Cluster Identifier of the Redshift Cluster to check.
        Returns:
            check_result: The result status of the check (PASSED, FAILED or UNKNOWN).
        Raises:
            ClientError: if there is an error communicating with AWS.
            ClusterNotFoundFault: if the requested cluster is not found.
            ClusterSubnetGroupNotFoundFault: if the subnet group of the given cluster is not found.
Exception: If any error occurs during the check.
"""
logger.info("gathering the private subnet data check for %s", cluster_identifier)
private_subnet = []
try:
subnet_group_name = \
self.redshift_client.describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters'][0][
'ClusterSubnetGroupName']
subnet_groups = \
self.redshift_client.describe_cluster_subnet_groups(ClusterSubnetGroupName=subnet_group_name)[
'ClusterSubnetGroups']
subnets = list(itertools.chain(*[subnet_group['Subnets'] for subnet_group in subnet_groups]))
for subnet in subnets:
private_subnet.append(helper.check_subnet_has_igw(subnet_id=subnet['SubnetIdentifier']))
if any(private_subnet):
check_result = application_constants.ResultStatus.FAILED
else:
check_result = application_constants.ResultStatus.PASSED
except (self.redshift_client.exceptions.ClientError, self.redshift_client.exceptions.ClusterNotFoundFault,
self.redshift_client.exceptions.ClusterSubnetGroupNotFoundFault) as ex:
logger.error("error while performing private subnet data check for %s: %s", cluster_identifier, str(ex))
except Exception as ex:
logger.error("performing the private subnet data check for %s: %s", cluster_identifier, str(ex))
raise ex
logger.info("Completed fetching private subnet data check %s", cluster_identifier)
return check_result
def check_redshift_dedicated_security_group(self, cluster_identifier):
"""
        Check if the given Redshift cluster has a dedicated security group.
        Args:
            cluster_identifier (str): Cluster Identifier of the Redshift Cluster to check.
        Returns:
            check_result: The result status of the check (PASSED, FAILED or UNKNOWN).
        Raises:
            ClientError: if there is an error communicating with AWS.
            ClusterNotFoundFault: if the requested cluster is not found.
Exception: If any error occurs during the check.
"""
logger.info("gathering the dedicated vpc security group settings for %s", cluster_identifier)
try:
security_groups = \
self.redshift_client.describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters'][0][
'VpcSecurityGroups']
if len(security_groups):
security_group_ids = [security_group['VpcSecurityGroupId'] for security_group in security_groups]
paginator = self.ec2_client.get_paginator('describe_network_interfaces')
pages = paginator.paginate(Filters=[{
'Name': 'group-id',
'Values': security_group_ids
}], PaginationConfig={'PageSize': 10})
network_interfaces = [page['NetworkInterfaces'] for page in pages]
network_interfaces = list(itertools.chain(*network_interfaces))
if len(network_interfaces) == 1:
check_result = application_constants.ResultStatus.PASSED
else:
check_result = application_constants.ResultStatus.FAILED
else:
check_result = application_constants.ResultStatus.FAILED
except (self.redshift_client.exceptions.ClientError, self.redshift_client.exceptions.ClusterNotFoundFault) as ex:
logger.error("error while gathering the dedicated vpc settings for %s: %s", cluster_identifier, str(ex))
check_result = application_constants.ResultStatus.UNKNOWN
except Exception as ex:
check_result = application_constants.ResultStatus.UNKNOWN
logger.error("gathering the dedicated vpc settings for %s: %s", cluster_identifier, str(ex))
raise ex
logger.info("Completed fetching dedicated vpc settings for redshift cluster %s", cluster_identifier)
return check_result
def check_redshift_ingress_egress(self, cluster_identifier):
"""
        Check if the security groups attached to the given Redshift cluster follow least-privilege ingress and egress rules.
        Args:
            cluster_identifier (str): Cluster Identifier of the Redshift Cluster to check.
        Returns:
            check_result: The result status of the check (PASSED, FAILED or UNKNOWN).
        Raises:
            ClientError: if there is an error communicating with AWS.
            ClusterNotFoundFault: if the requested cluster is not found.
Exception: If any error occurs during the check.
"""
logger.info("gathering the ingress engress settings for %s", cluster_identifier)
try:
cluster = self.redshift_client.describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters'][0]
security_groups = [security_group['VpcSecurityGroupId'] for security_group in cluster['VpcSecurityGroups']]
security_group_details = self.ec2_client.describe_security_groups(GroupIds=security_groups)[
'SecurityGroups']
sg_rules = []
for security_group in security_group_details:
sg_rules += security_group['IpPermissions']
least_privilege = helper.is_least_privilege_sg(sg_rules)
check_result = application_constants.ResultStatus.PASSED if least_privilege else application_constants.ResultStatus.FAILED
except (
self.redshift_client.exceptions.ClientError,
self.redshift_client.exceptions.ClusterNotFoundFault) as ex:
logger.error("error while gathering the ingress egress settings for %s: %s", cluster_identifier, str(ex))
check_result = application_constants.ResultStatus.UNKNOWN
except Exception as ex:
logger.error("error while gathering the ingress egress settings for %s: %s", cluster_identifier, str(ex))
raise ex
logger.info("Completed fetching ingress egress settings for redshift cluster %s", cluster_identifier)
return check_result
def check_redshift_tags(self, cluster_identifier, required_tags=None):
"""
        Checks if the specified Redshift cluster has the required tags.
        Args:
            cluster_identifier (str): Cluster Identifier of the Redshift Cluster to check.
            required_tags (list, optional): Tag keys to require; defaults to application_constants.Generic.REQUIRED_TAGS.
        Returns:
            check_result: The result status of the check (PASSED, FAILED or UNKNOWN).
"""
if required_tags is None:
required_tags = application_constants.Generic.REQUIRED_TAGS
if not required_tags:
check_result = application_constants.ResultStatus.PASSED
else:
logger.info("Checking the tags for %s", cluster_identifier)
try:
tags = self.redshift_client.describe_clusters(ClusterIdentifier=cluster_identifier)['Clusters'][0][
'Tags']
missing_tags = [tag for tag in required_tags if tag not in [t['Key'] for t in tags]]
check_result = application_constants.ResultStatus.PASSED if not any(
missing_tags) else application_constants.ResultStatus.FAILED
except Exception as ex:
logger.exception(f"An error occurred while checking the tags for {cluster_identifier}: {str(ex)}")
check_result = application_constants.ResultStatus.UNKNOWN
logger.info(f"Completed checking the tags for resource : {cluster_identifier}")
return check_result
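# Hypothetical usage sketch (not part of the original module): it assumes AWS
# credentials are configured and that the surrounding `common.*` helpers resolve.
# It simply runs every check defined above against each discovered cluster.
if __name__ == "__main__":
    auditor = Redshift()
    for cluster_id in auditor.list_resources():
        results = {
            "encryption_at_rest": auditor.check_encryption_at_rest(cluster_id),
            "public_accessibility": auditor.check_redshift_public_accessibility(cluster_id),
            "private_subnet": auditor.check_redshift_private_subnet(cluster_id),
            "dedicated_security_group": auditor.check_redshift_dedicated_security_group(cluster_id),
            "ingress_egress": auditor.check_redshift_ingress_egress(cluster_id),
            "tags": auditor.check_redshift_tags(cluster_id),
        }
        logger.info("Audit results for %s: %s", cluster_id, results)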
|
PypiClean
|
/wyze_sdk-2.0.0-py3-none-any.whl/wyze_sdk/api/devices/thermostats.py
|
from collections import defaultdict
from collections.abc import MutableMapping
from datetime import datetime
import itertools
import json
from typing import Optional, Sequence, Tuple, Union
from wyze_sdk.api.base import BaseClient
from wyze_sdk.models.devices import DeviceModels, DeviceProp, Thermostat
from wyze_sdk.models.devices.thermostats import (ThermostatFanMode, ThermostatProps,
ThermostatScenarioType, RoomSensor, RoomSensorProps,
ThermostatSystemMode)
from wyze_sdk.service import WyzeResponse
class ThermostatsClient(BaseClient):
"""A Client that services Wyze thermostats.
"""
def list(self, **kwargs) -> Sequence[Thermostat]:
"""Lists all thermostats available to a Wyze account.
:rtype: Sequence[Thermostat]
"""
return [Thermostat(**device) for device in self._list_thermostats()]
def _list_thermostats(self, **kwargs) -> Sequence[dict]:
return [device for device in super()._list_devices(
) if device["product_model"] in DeviceModels.THERMOSTAT]
def info(self, *, device_mac: str, **kwargs) -> Optional[Thermostat]:
"""Retrieves details of a thermostat.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:rtype: Optional[Thermostat]
"""
thermostats = [_thermostat for _thermostat in self._list_thermostats() if _thermostat['mac'] == device_mac]
if len(thermostats) == 0:
return None
thermostat = thermostats[0]
iot_prop = super()._earth_client().get_iot_prop(did=device_mac, keys=[prop_def.pid for prop_def in Thermostat.props().values()])
if "data" in iot_prop.data and "props" in iot_prop.data["data"]:
thermostat.update(iot_prop.data["data"]["props"])
device_info = super()._earth_client().get_device_info(did=device_mac, keys=[prop_def.pid for prop_def in Thermostat.device_info_props().values()])
if "data" in device_info.data and "settings" in device_info.data["data"]:
thermostat.update(device_info.data["data"]["settings"])
return Thermostat(**thermostat)
def get_sensors(self, *, device_mac: str, device_model: str, **kwargs) -> Sequence[RoomSensor]:
"""Retrieves room sensors associated with a thermostat.
Args:
:param str device_mac: The device mac. e.g. ``ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:rtype: Sequence[RoomSensor]
"""
# reading through the code in `com.wyze.earth.activity.home.EarthSensorsActivity`,
# the data flow seems to be:
# initView() sets up a refresh handler
# when the view needs refreshing, call getSensors()
# * triggers call to /get_sub_device
# * gathers the sub-device (sensor) IDs
# * calls mergeDate()
# * parses the properties into a coherent format (SensorEntity)
# * calls requestDeviceInfo()
# triggers call to /device_info/batch with did of all sensors and key device_name
# * calls getTempData()
# triggers call to /get_iot_prop/batch with did of all sensors and keys temperature,humidity,temperature_unit,battery
# * triggers call to /get_iot_prop with keys sensor_state,sensor_using,sensor_template,sensor_weight,threshold_temper
# * calls mergeDate()
# * triggers call to getThermostat()
# calls /get_iot_prop on thermostat with keys temperature,humidity,iot_state,auto_comfort
_sensors = [_sub_device for _sub_device in super()._earth_client().get_sub_device(did=device_mac).data["data"]]
if len(_sensors) == 0:
return None
_dids = list(map(lambda _sensor: _sensor['device_id'], _sensors))
_device_info_batch = super()._earth_client().get_device_info(did=_dids, parent_did=device_mac, model=device_model, keys=[prop_def.pid for prop_def in RoomSensor.device_info_props()])
_iot_prop_batch = super()._earth_client().get_iot_prop(did=_dids, parent_did=device_mac, model=device_model, keys=[prop_def.pid for prop_def in RoomSensor.props()])
_iot_prop = super()._earth_client().get_iot_prop(did=device_mac, keys=[prop_def.pid for prop_def in Thermostat.sensor_props().values()])
for _sensor in _sensors:
if "data" in _device_info_batch.data:
_sensor_device_info = next(filter(lambda _data: _data['deviceId'] == _sensor['device_id'], _device_info_batch["data"]))
if "settings" in _sensor_device_info:
_sensor.update(**{"device_setting": _sensor_device_info["settings"]})
if "data" in _iot_prop_batch.data:
_sensor_iot_prop = next(filter(lambda _data: _data['did'] == _sensor['device_id'], _iot_prop_batch["data"]))
if "props" in _sensor_iot_prop:
_sensor.update(**{"device_params": _sensor_iot_prop["props"]})
if "data" in _iot_prop.data and "props" in _iot_prop.data["data"]:
for _prop, _sensor_list in dict(_iot_prop.data["data"]["props"]).items():
_sensor_list = json.loads(_sensor_list)
if isinstance(_sensor_list, MutableMapping):
if _sensor['device_id'] in _sensor_list.keys():
_sensor.update({_prop: _sensor_list.get(_sensor['device_id'])})
return [RoomSensor(**_sensor) for _sensor in _sensors]
def set_system_mode(self, *, device_mac: str, device_model: str, system_mode: ThermostatSystemMode, **kwargs) -> WyzeResponse:
"""Sets the system mode of the thermostat.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param ThermostatSystemMode system_mode: The new system mode. e.g. ``ThermostatSystemMode.AUTO``
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, DeviceProp(definition=Thermostat.props()["system_mode"], value=system_mode.codes))
def set_fan_mode(self, *, device_mac: str, device_model: str, fan_mode: ThermostatFanMode, **kwargs) -> WyzeResponse:
"""Sets the fan mode of the thermostat.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param ThermostatFanMode fan_mode: The new fan mode. e.g. ``ThermostatFanMode.CYCLE``
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, DeviceProp(definition=Thermostat.props()["fan_mode"], value=fan_mode.codes))
def set_mode(self, *, device_mac: str, device_model: str, system_mode: ThermostatSystemMode, fan_mode: ThermostatFanMode, **kwargs) -> WyzeResponse:
"""Sets the system and fan modes of the thermostat.
.. note:: Fan mode and system mode cannot be set independently via this method.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param ThermostatSystemMode system_mode: The new system mode. e.g. ``ThermostatSystemMode.AUTO``
:param ThermostatFanMode fan_mode: The new fan mode. e.g. ``ThermostatFanMode.CYCLE``
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, [
DeviceProp(definition=Thermostat.props()["fan_mode"], value=fan_mode.codes),
DeviceProp(definition=Thermostat.props()["system_mode"], value=system_mode.codes),
])
def set_current_scenario(self, *, device_mac: str, device_model: str, scenario: ThermostatScenarioType, **kwargs) -> WyzeResponse:
"""Sets the current scenario of the thermostat.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param ThermostatScenarioType scenario: The new scenario. e.g. ``ThermostatScenarioType.HOME``
:rtype: WyzeResponse
"""
return self._set_thermostat_property(device_mac, device_model, DeviceProp(definition=Thermostat.props()["current_scenario"], value=scenario.codes))
def set_heating_setpoint(self, *, device_mac: str, device_model: str, heating_setpoint: int, **kwargs) -> WyzeResponse:
"""Sets the heating setpoint of the thermostat.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param int heating_setpoint: The new heating setpoint. e.g. ``68``
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, DeviceProp(definition=Thermostat.props()["heating_setpoint"], value=heating_setpoint))
def set_cooling_setpoint(self, *, device_mac: str, device_model: str, cooling_setpoint: int, **kwargs) -> WyzeResponse:
"""Sets the cooling setpoint of the thermostat.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param int cooling_setpoint: The new cooling setpoint. e.g. ``72``
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, DeviceProp(definition=Thermostat.props()["cooling_setpoint"], value=cooling_setpoint))
def set_temperature(self, *, device_mac: str, device_model: str, cooling_setpoint: int, heating_setpoint: int, **kwargs) -> WyzeResponse:
"""Sets the heating and cooling setpoints of the thermostat.
.. note:: Heating and cooling setpoints cannot be set independently via this method.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param int cooling_setpoint: The new cooling setpoint. e.g. ``72``
:param int heating_setpoint: The new heating setpoint. e.g. ``68``
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, [
DeviceProp(definition=Thermostat.props()["cooling_setpoint"], value=cooling_setpoint),
DeviceProp(definition=Thermostat.props()["heating_setpoint"], value=heating_setpoint),
])
def _set_thermostat_property(self, device_mac: str, device_model: str, prop: DeviceProp) -> WyzeResponse:
return super()._earth_client().set_iot_prop(did=device_mac, model=device_model, key=prop.definition.pid, value=str(prop.value))
def _set_thermostat_properties(self, device_mac: str, device_model: str, props: Union[DeviceProp, Sequence[DeviceProp]]) -> WyzeResponse:
if not isinstance(props, (list, Tuple)):
props = [props]
the_props = {}
for prop in props:
the_props[prop.definition.pid] = str(prop.api_value)
return super()._earth_client().set_iot_prop_by_topic(
did=device_mac, model=device_model, props=the_props)
def clear_hold(self, *, device_mac: str, device_model: str, **kwargs) -> WyzeResponse:
"""Clears any existing hold on the thermostat and resumes "smart" operations.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, [
DeviceProp(definition=Thermostat.props()["asw_hold"], value=0),
DeviceProp(definition=Thermostat.props()["device_hold"], value=0),
DeviceProp(definition=Thermostat.props()["device_hold_time"], value=0),
])
def hold(self, *, device_mac: str, device_model: str, until: datetime, **kwargs) -> WyzeResponse:
"""Holds the current thermostat settings until a certain date/time.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param datetime until: The new end date/time of the hold.
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, [
DeviceProp(definition=Thermostat.props()["device_hold"], value=1),
DeviceProp(definition=Thermostat.props()["device_hold_time"], value=until.timestamp()),
])
def set_lock(self, *, device_mac: str, device_model: str, locked: Union[bool, int], **kwargs) -> WyzeResponse:
"""Sets the device lock for a thermostat.
If set, the thermostat can only be updated via the app and not by using the physical controls.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
        :param locked: The new locked state as a bool or int; ``True``/``1`` locks the physical controls. e.g. ``1``
:rtype: WyzeResponse
"""
if not isinstance(locked, bool):
locked = True if locked == 1 else False
return self._set_thermostat_properties(device_mac, device_model, DeviceProp(definition=Thermostat.props()["locked"], value=locked))
def set_behavior(self, *, device_mac: str, device_model: str, behavior: int, **kwargs) -> WyzeResponse:
"""Sets the comfort balance behavior for a thermostat.
This setting allows the user to toggle between preset behaviors for weighing cost savings vs.
climate comfort. An update to this property will modify the device's scenario setpoints.
:param str device_mac: The device mac. e.g. ``CO_EA1_ABCDEF1234567890``
:param str device_model: The device model. e.g. ``CO_EA1``
:param int behavior: The new behavior. e.g. ``1``
:rtype: WyzeResponse
"""
return self._set_thermostat_properties(device_mac, device_model, DeviceProp(definition=Thermostat.props()["save_comfort_balance"], value=behavior))
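# Hypothetical usage sketch, not part of the original module. It assumes the
# package's top-level Client facade exposes this ThermostatsClient as
# `client.thermostats` and that the credentials/keys required by your SDK
# version are supplied; the mac and model values below are placeholders.
if __name__ == "__main__":
    from wyze_sdk import Client

    client = Client(email="[email protected]", password="change-me")
    client.thermostats.set_temperature(
        device_mac="CO_EA1_ABCDEF1234567890",
        device_model="CO_EA1",
        cooling_setpoint=72,
        heating_setpoint=68,
    )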
|
PypiClean
|
/pyplis-1.4.3.tar.gz/pyplis-1.4.3/scripts/ex01_analysis_setup.py
|
from __future__ import (absolute_import, division)
from SETTINGS import check_version, IMG_DIR, OPTPARSE
import pyplis as pyplis
from datetime import datetime
from matplotlib.pyplot import show, close
# Check script version
check_version()
# SCRIPT FUNCTION DEFINITIONS
def create_dataset():
"""Initialize measurement setup and creates dataset from that."""
start = datetime(2015, 9, 16, 7, 6, 00)
stop = datetime(2015, 9, 16, 7, 22, 00)
# Define camera (here the default ecII type is used)
cam_id = "ecII"
# the camera filter setup
filters = [pyplis.utils.Filter(type="on", acronym="F01"),
pyplis.utils.Filter(type="off", acronym="F02")]
# camera location and viewing direction (altitude will be retrieved
# automatically)
geom_cam = {"lon": 15.1129,
"lat": 37.73122,
"elev": 20.0,
"elev_err": 5.0,
"azim": 270.0,
"azim_err": 10.0,
"alt_offset": 15.0,
"focal_length": 25e-3} # altitude offset (above topography)
# Create camera setup
# the camera setup includes information about the filename convention in
# order to identify different image types (e.g. on band, off band, dark..)
# it furthermore includes information about the detector specifics (e.g.
# dimension, pixel size, focal length). Measurement specific parameters
    # (e.g. lon, lat, elev, azim) were defined in the dictionary above and
# can be passed as additional keyword dictionary using **geom_cam
# Alternatively, they could also be passed directly, e.g.:
# cam = pyplis.setup.Camera(cam_id, filter_list=filters, lon=15.1129,
# lat=37.73122)
cam = pyplis.setupclasses.Camera(cam_id, filter_list=filters,
**geom_cam)
# Load default information for Etna. This information is stored in
    # the source_info.txt file shipped with pyplis. You may also retrieve
    # information about any volcano online from the NOAA database using the
    # method pyplis.inout.get_source_info_online(source_id).
source = pyplis.setupclasses.Source("etna")
# Provide wind direction
wind_info = {"dir": 0.0,
"dir_err": 1.0}
# "dir_err" : 15.0}
# Create BaseSetup object (which creates the MeasGeometry object)
stp = pyplis.setupclasses.MeasSetup(IMG_DIR, start, stop, camera=cam,
source=source,
wind_info=wind_info)
print(stp.LINK_OFF_TO_ON)
# Create analysis object (from BaseSetup)
    # The Dataset object takes care of finding all valid images for this setup
return pyplis.Dataset(stp)
# SCRIPT MAIN FUNCTION
if __name__ == "__main__":
close("all")
ds = create_dataset()
# get on-band image list
on_list = ds.get_list("on")
on_list.goto_next()
off_list = ds.get_list("off")
# activate dark correction in both lists. Dark and offset image lists are
# automatically assigned to plume on and off-band image lists on initiation
# of the dataset object
on_list.darkcorr_mode = True
off_list.darkcorr_mode = True
print("On-band list contains %d images, current image index: %d"
% (on_list.nof, on_list.cfn))
img = on_list.current_img()
# plume distance image retrieved from MeasGeometry class...
plume_dists = on_list.plume_dists
# ...these may be overwritten or set manually if desired
on_list.plume_dists = 10000
# The same applies for the integration step lengths for emission rate
# retrievals
step_lengths = on_list.integration_step_length
on_list.integration_step_length = 1.8 # m
img_shift = img.duplicate()
# images can be shifted using the scipy.ndimage.interpolation.shift method
# this may be required for image registration in dual camera systems.
# Whether this is supposed to be done automatically can be specified using
# the REG_SHIFT_OFF option in a MeasSetup class. It may also be specified
# directly for your cam in the custom camera definition file cam_info.txt
    # using io_opts:REG_SHIFT_OFF=1 (see e.g. definition of camera with ID
    # "usgs"). A default registration offset can also be defined there.
img_shift.shift(dx_abs=-30, dy_abs=55)
img_shift.show(tit="Shifted")
# Set pixel intensities below 2000 to 0 (method of Img class)
img.set_val_below_thresh(val=0, threshold=2000)
# show modified image
img.show()
print(str(img)) # image object has an informative string representation
# IMPORTANT STUFF FINISHED (Below follow tests and display options)
# Import script options
(options, args) = OPTPARSE.parse_args()
# If applicable, do some tests. This is done only if TESTMODE is active:
# testmode can be activated globally (see SETTINGS.py) or can also be
# activated from the command line when executing the script using the
# option --test 1
if int(options.test):
import numpy.testing as npt
from os.path import basename
actual = [plume_dists.mean(), plume_dists.std(),
on_list.get_dark_image().mean()]
npt.assert_allclose(actual=actual,
desired=[10909.873427010458, 221.48844132471388,
190.56119],
rtol=1e-7)
npt.assert_array_equal([418, 2, 2368, 1, 1, 0,
20150916070600,
20150916072200],
[on_list.nof + off_list.nof,
on_list.this.is_darkcorr +
off_list.this.is_darkcorr,
sum(on_list.this.shape),
on_list.cfn,
off_list.cfn,
sum(img.img[img.img < 2000]),
int(ds.setup.start.strftime("%Y%m%d%H%M%S")),
int(ds.setup.stop.strftime("%Y%m%d%H%M%S"))])
print("All tests passed in script: %s" % basename(__file__))
try:
if int(options.show) == 1:
show()
except BaseException:
print("Use option --show 1 if you want the plots to be displayed")
|
PypiClean
|
/uthreads-1.0.tar.gz/uthreads-1.0/examples/support-uthreaded.py
|
# based on the example "chatserver.py" from http://twistedmatrix.com/projects/core/documentation/examples/chatserver.py
from twisted.protocols import basic
from twisted.internet import reactor
from twisted.internet.defer import Deferred
from twisted.python import failure
import datetime
import uthreads
def is_day_366():
today = datetime.date.today()
    if (today - datetime.date(today.year, 1, 1)).days == 365:
return True
class ConnectionLost(Exception): pass
class MyChat(basic.LineReceiver):
def __init__(self):
self.lines = []
self.line_deferred = None
def connectionMade(self):
print "Got new caller!"
uthreads.spawn(self.handleConnection())
def connectionLost(self, reason):
print "Lost a caller!"
if self.line_deferred:
self.line_deferred.errback(failure.Failure(ConnectionLost()))
def lineReceived(self, line):
print "received", repr(line)
self.lines.append(line)
if self.line_deferred:
print "found line"
d = self.line_deferred
self.line_deferred = None
d.callback(self.lines.pop(0))
def getLine(self):
assert self.line_deferred is None, "multiple simultaneous getLine calls!"
if self.lines:
return self.lines.pop(0)
self.line_deferred = Deferred()
return self.line_deferred
@uthreads.uthreaded
def handleConnection(self):
try:
yield self.handleCall()
except ConnectionLost:
print "connection lost!"
finally:
self.transport.loseConnection()
def handleCall(self):
self.transport.write(">> Hello, Thank you for contacting Zune technical support.\n")
self.transport.write(">> Please enter your name.\n")
name = (yield self.getLine()).strip()
self.transport.write(">> Welcome, %s!\n" % name)
self.transport.write(">> Please state your problem.\n")
problem = (yield self.getLine()).strip()
self.transport.write(">> Thank you.\n")
if is_day_366():
self.transport.write(">> Due to the overwhelming demand for the new DRM+ Zune, we are experiencing a heavy call volume. Do you want to stay on the line?\n")
yn = (yield self.getLine()).strip()
if yn == "no":
return
while True:
self.transport.write(">> Have you tried hard-resetting your Zune?\n")
            thatsnice = (yield self.getLine()).strip()
if thatsnice == "OPERATOR!":
self.transport.write(">> have a nice day!\n")
return
self.transport.write(">> Let me run some tests..\n")
yield uthreads.sleep(1)
from twisted.internet import protocol
from twisted.application import service, internet
factory = protocol.ServerFactory()
factory.protocol = MyChat
factory.clients = []
application = service.Application("chatserver")
internet.TCPServer(1025, factory).setServiceParent(application)
|
PypiClean
|
/silabs_mltk-0.18.0-1691186878-cp37-cp37m-win_amd64.whl/silabs_mltk-0.18.0.data/purelib/mltk/models/shared/kws_streaming/models/att_rnn.py
|
"""BiRNN model with attention."""
from kws_streaming.layers import modes
from kws_streaming.layers import speech_features
from kws_streaming.layers.compat import tf
import kws_streaming.models.model_utils as utils
def model_parameters(parser_nn):
"""BiRNN attention model parameters."""
parser_nn.add_argument(
'--cnn_filters',
type=str,
default='10,1',
help='Number of output filters in the convolution layers',
)
parser_nn.add_argument(
'--cnn_kernel_size',
type=str,
default='(5,1),(5,1)',
help='Heights and widths of the 2D convolution window',
)
parser_nn.add_argument(
'--cnn_act',
type=str,
default="'relu','relu'",
help='Activation function in the convolution layers',
)
parser_nn.add_argument(
'--cnn_dilation_rate',
type=str,
default='(1,1),(1,1)',
help='Dilation rate to use for dilated convolutions',
)
parser_nn.add_argument(
'--cnn_strides',
type=str,
default='(1,1),(1,1)',
help='Strides of the convolution layers along the height and width',
)
parser_nn.add_argument(
'--rnn_layers',
type=int,
default=2,
help='Number of RNN layers (each RNN is wrapped by Bidirectional)',
)
parser_nn.add_argument(
'--rnn_type',
type=str,
default='gru',
help='RNN type: it can be gru or lstm',
)
parser_nn.add_argument(
'--rnn_units',
type=int,
default=128,
help='Units number in RNN cell',
)
parser_nn.add_argument(
'--dropout1',
type=float,
default=0.1,
help='Percentage of data dropped',
)
parser_nn.add_argument(
'--units2',
type=str,
default='64,32',
help='Number of units in the last set of hidden layers',
)
parser_nn.add_argument(
'--act2',
type=str,
default="'relu','linear'",
help='Activation function of the last set of hidden layers',
)
def model(flags):
"""BiRNN attention model.
It is based on paper:
A neural attention model for speech command recognition
https://arxiv.org/pdf/1808.08929.pdf
Depending on parameter rnn_type, model can be biLSTM or biGRU
Args:
flags: data/model parameters
Returns:
Keras model for training
"""
rnn_types = {'lstm': tf.keras.layers.LSTM, 'gru': tf.keras.layers.GRU}
if flags.rnn_type not in rnn_types:
    raise ValueError('not supported RNN type ', flags.rnn_type)
rnn = rnn_types[flags.rnn_type]
input_audio = tf.keras.layers.Input(
shape=modes.get_input_data_shape(flags, modes.Modes.TRAINING),
batch_size=flags.batch_size)
net = input_audio
if flags.preprocess == 'raw':
# it is a self contained model, user need to feed raw audio only
net = speech_features.SpeechFeatures(
speech_features.SpeechFeatures.get_params(flags))(
net)
net = tf.keras.backend.expand_dims(net)
for filters, kernel_size, activation, dilation_rate, strides in zip(
utils.parse(flags.cnn_filters), utils.parse(flags.cnn_kernel_size),
utils.parse(flags.cnn_act), utils.parse(flags.cnn_dilation_rate),
utils.parse(flags.cnn_strides)):
net = tf.keras.layers.Conv2D(
filters=filters,
kernel_size=kernel_size,
activation=activation,
dilation_rate=dilation_rate,
strides=strides,
padding='same')(
net)
net = tf.keras.layers.BatchNormalization()(net)
shape = net.shape
# input net dimension: [batch, time, feature, channels]
# reshape dimension: [batch, time, feature * channels]
# so that GRU/RNN can process it
net = tf.keras.layers.Reshape((-1, shape[2] * shape[3]))(net)
# dims: [batch, time, feature]
for _ in range(flags.rnn_layers):
net = tf.keras.layers.Bidirectional(
rnn(flags.rnn_units, return_sequences=True, unroll=True))(
net)
feature_dim = net.shape[-1]
middle = net.shape[1] // 2 # index of middle point of sequence
# feature vector at middle point [batch, feature]
mid_feature = net[:, middle, :]
# apply one projection layer with the same dim as input feature
query = tf.keras.layers.Dense(feature_dim)(mid_feature)
# attention weights [batch, time]
att_weights = tf.keras.layers.Dot(axes=[1, 2])([query, net])
att_weights = tf.keras.layers.Softmax(name='attSoftmax')(att_weights)
# apply attention weights [batch, feature]
net = tf.keras.layers.Dot(axes=[1, 1])([att_weights, net])
net = tf.keras.layers.Dropout(rate=flags.dropout1)(net)
for units, activation in zip(
utils.parse(flags.units2), utils.parse(flags.act2)):
net = tf.keras.layers.Dense(units=units, activation=activation)(net)
net = tf.keras.layers.Dense(units=flags.label_count)(net)
if flags.return_softmax:
net = tf.keras.layers.Activation('softmax')(net)
return tf.keras.Model(input_audio, net)
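# Hypothetical usage sketch (not part of the original module): it only wires the
# flag definitions above into a plain argparse parser. Building the full Keras
# model additionally needs the framework-wide flags (preprocess, batch_size,
# label_count, return_softmax, ...) that kws_streaming normally provides.
if __name__ == '__main__':
  import argparse

  parser = argparse.ArgumentParser(description='BiRNN attention model flags')
  model_parameters(parser)
  flags, _ = parser.parse_known_args()
  print(flags.rnn_type, flags.rnn_units, flags.cnn_filters)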
|
PypiClean
|
/aws_sagemaker_remote-0.0.71-py3-none-any.whl/aws_sagemaker_remote/util/cloudformation.py
|
from aws_sagemaker_remote.util.logging_util import print_err
import time
def get_cloudformation_output(cloudformation, stack_name, output_key):
stack = get_cloudformation(
cloudformation=cloudformation, stack_name=stack_name)
if isinstance(output_key, str):
return get_stack_output(stack, output_key)
elif isinstance(output_key, (list, tuple)):
return tuple(get_stack_output(stack, k) for k in output_key)
else:
raise ValueError(
"Parameter `output_key` should be string, list, or tuple, got {}".format(type(output_key)))
def delete_stack(cloudformation, stack_name):
response = cloudformation.delete_stack(
StackName=stack_name,
# RetainResources=[
# 'string',
# ],
# RoleARN='string',
# ClientRequestToken='string'
)
def get_stack_output(stack, output_key):
if stack and 'Outputs' in stack:
for output in stack['Outputs']:
if output['OutputKey'] == output_key:
return output['OutputValue']
return None
def stack_exists(cloudformation, stack_name):
return get_cloudformation(cloudformation, stack_name) is not None
def stack_ready(cloudformation, stack_name):
stack = get_cloudformation(cloudformation, stack_name)
if not stack:
return False
status = stack['StackStatus']
if status in ['UPDATE_COMPLETE', 'CREATE_COMPLETE']:
return True
if status in ['ROLLBACK_COMPLETE']:
print_err(
f"Stack {stack_name} is in status [{status}]. Deleting stack."
)
delete_stack(cloudformation, stack_name)
return stack_ready(cloudformation, stack_name)
if status.endswith('PROGRESS'):
while status.endswith('PROGRESS'):
print_err(
f"Stack {stack_name} is in status [{status}]. Waiting.",
)
time.sleep(10)
stack = get_cloudformation(cloudformation, stack_name)
if not stack:
return False
status = stack['StackStatus']
return stack_ready(cloudformation, stack_name)
else:
print_err(f"Stack {stack_name} is in status [{status}].")
return False
def get_cloudformation(cloudformation, stack_name):
try:
response = cloudformation.describe_stacks(
StackName=stack_name
)
# print(response)
    except Exception:
response = None
if response:
response = response['Stacks']
if len(response) > 0:
return response[0]
return None
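# Hypothetical usage sketch (not part of the original module): it assumes AWS
# credentials are configured and that a stack named "my-stack" exposes an
# output key "BucketName"; both names are placeholders.
if __name__ == "__main__":
    import boto3

    cloudformation = boto3.client("cloudformation")
    if stack_ready(cloudformation, "my-stack"):
        print(get_cloudformation_output(
            cloudformation=cloudformation,
            stack_name="my-stack",
            output_key="BucketName",
        ))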
|
PypiClean
|
/plaid-python-15.5.0.tar.gz/plaid-python-15.5.0/plaid/model/investments_auth_get_numbers.py
|
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from plaid.exceptions import ApiAttributeError
def lazy_import():
from plaid.model.numbers_acats import NumbersACATS
from plaid.model.numbers_aton import NumbersATON
globals()['NumbersACATS'] = NumbersACATS
globals()['NumbersATON'] = NumbersATON
class InvestmentsAuthGetNumbers(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'acats': ([NumbersACATS],), # noqa: E501
'aton': ([NumbersATON],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'acats': 'acats', # noqa: E501
'aton': 'aton', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""InvestmentsAuthGetNumbers - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
acats ([NumbersACATS]): [optional] # noqa: E501
aton ([NumbersATON]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""InvestmentsAuthGetNumbers - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
acats ([NumbersACATS]): [optional] # noqa: E501
aton ([NumbersATON]): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/TimeEval-1.2.10-py3-none-any.whl/timeeval_experiments/algorithms/sarima.py
|
from durations import Duration
from typing import Any, Dict, Optional
from timeeval import Algorithm, TrainingType, InputDimensionality
from timeeval.adapters import DockerAdapter
from timeeval.params import ParameterConfig
_sarima_parameters: Dict[str, Dict[str, Any]] = {
"exhaustive_search": {
"defaultValue": "False",
"description": "Performs full grid search to find optimal SARIMA-model without considering statistical tests on the data --> SLOW! but finds the optimal model.",
"name": "exhaustive_search",
"type": "boolean"
},
"max_iter": {
"defaultValue": 20,
"description": "The maximum number of function evaluations. smaller = faster, but might not converge.",
"name": "max_iter",
"type": "int"
},
"max_lag": {
"defaultValue": None,
"description": "Refit SARIMA model after that number of points (only helpful if fixed_orders=None)",
"name": "max_lag",
"type": "int"
},
"n_jobs": {
"defaultValue": 1,
"description": "The number of parallel jobs to run for grid search. If ``-1``, then the number of jobs is set to the number of CPU cores.",
"name": "n_jobs",
"type": "int"
},
"period": {
"defaultValue": 1,
"description": "Periodicity (number of periods in season), often it is 4 for quarterly data or 12 for monthly data. Default is no seasonal effect (==1). Must be >= 1.",
"name": "period",
"type": "int"
},
"prediction_window_size": {
"defaultValue": 10,
"description": "Number of points to forecast in one go; smaller = slower, but more accurate.",
"name": "prediction_window_size",
"type": "int"
},
"random_state": {
"defaultValue": 42,
"description": "Seed for random number generation.",
"name": "random_state",
"type": "int"
},
"train_window_size": {
"defaultValue": 500,
"description": "Number of points from the beginning of the series to build model on.",
"name": "train_window_size",
"type": "int"
}
}
def sarima(params: Optional[ParameterConfig] = None, skip_pull: bool = False, timeout: Optional[Duration] = None) -> Algorithm:
return Algorithm(
name="SARIMA",
main=DockerAdapter(
image_name="registry.gitlab.hpi.de/akita/i/sarima",
skip_pull=skip_pull,
timeout=timeout,
group_privileges="akita",
),
preprocess=None,
postprocess=None,
param_schema=_sarima_parameters,
param_config=params or ParameterConfig.defaults(),
data_as_file=True,
training_type=TrainingType.UNSUPERVISED,
input_dimensionality=InputDimensionality("univariate")
)
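# Usage sketch: build the SARIMA algorithm wrapper with its default parameter
# configuration. With skip_pull=True no Docker image is pulled here; this only
# assembles the Algorithm metadata defined above. Duration("1h") is an
# illustrative timeout value.
if __name__ == "__main__":
    algo = sarima(skip_pull=True, timeout=Duration("1h"))
    print(algo.name, algo.training_type, algo.input_dimensionality)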
|
PypiClean
|
/libervia_desktop-0.8.0b1-py3-none-any.whl/cagou/plugins/plugin_transfer_file.py
|
# Cagou: desktop/mobile frontend for Salut à Toi XMPP client
# Copyright (C) 2016-2021 Jérôme Poisson ([email protected])
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import threading
import sys
from functools import partial
from sat.core import log as logging
from sat.core.i18n import _
from kivy.uix.boxlayout import BoxLayout
from kivy import properties
from kivy.clock import Clock
from plyer import filechooser, storagepath
log = logging.getLogger(__name__)
PLUGIN_INFO = {
"name": _("file"),
"main": "FileTransmitter",
"description": _("transmit a local file"),
"icon_medium": "{media}/icons/muchoslava/png/fichier_50.png",
}
class FileChooserBox(BoxLayout):
callback = properties.ObjectProperty()
cancel_cb = properties.ObjectProperty()
default_path = properties.StringProperty()
class FileTransmitter(BoxLayout):
callback = properties.ObjectProperty()
cancel_cb = properties.ObjectProperty()
native_filechooser = True
default_path = storagepath.get_home_dir()
def __init__(self, *args, **kwargs):
if sys.platform == 'android':
self.native_filechooser = False
self.default_path = storagepath.get_downloads_dir()
super(FileTransmitter, self).__init__(*args, **kwargs)
if self.native_filechooser:
thread = threading.Thread(target=self._nativeFileChooser)
thread.start()
else:
self.add_widget(FileChooserBox(default_path = self.default_path,
callback=self.onFiles,
cancel_cb=partial(self.cancel_cb, self)))
def _nativeFileChooser(self, *args, **kwargs):
title=_("Please select a file to upload")
files = filechooser.open_file(title=title,
path=self.default_path,
multiple=False,
preview=True)
# we want to leave the thread when calling onFiles, so we use Clock
Clock.schedule_once(lambda *args: self.onFiles(files=files), 0)
def onFiles(self, files):
if files:
self.callback(files[0])
else:
self.cancel_cb(self)
|
PypiClean
|
/bubblewrap_cli-1.0.0-py3-none-any.whl/bubblewrap.py
|
class Bubbles:
def __init__(self):
pass
def get_rich_bubble(self, txt, bg_color="blue", fg_color="white", rich=True):
circle_style = f"[{bg_color}]"
circle_close = f"{circle_style[:1]}/{circle_style[1:]}"
body_open = f"[{fg_color} on {bg_color}]"
body_close = f"{body_open[:1]}/{body_open[1:]}"
return f"{circle_style}{circle_close}{body_open}{txt}{body_close}{circle_style}{circle_close}"
def get_ansi_bubbles(self, txt, circle_style, txt_style, reset):
bubble = f"{circle_style}{reset}{txt_style}{txt}{reset}{circle_style}{reset}"
return bubble
def get_rich_chain(self, txt, bg_color="blue", fg_color="white", divider=""):
return Link(txt, bg_color, fg_color, divider)
def get_ansi_chain(self, txt, txt_style, reset, divider=""):
return ANSILink(txt, txt_style, reset, divider)
class Link:
base_str = ""
bg_color = ""
divider = ""
def __init__(self, txt, bg_color, fg_color, divider, prev_link=None):
self.bg_color = bg_color
self.divider = divider
body_open, body_close = get_tags(f"{fg_color} on {bg_color}")
pre_txt = ""
if prev_link is not None:
pre_txt = prev_link.base_str
# divider fg = prev link bg
# divider bg = current link bg
divider_open, divider_close = get_tags(f"{prev_link.bg_color} on {bg_color}")
pre_txt += f"{divider_open}{divider}{divider_close}"
self.base_str = f"{pre_txt}{body_open} {txt} {body_close}"
def end(self):
ending_open, ending_close = get_tags(self.bg_color)
self.base_str += f"{ending_open}{self.divider}{ending_close}"
return self.base_str
def link(self, txt, bg_color, fg_color):
return Link(txt, bg_color, fg_color, self.divider, self)
class ANSILink:
base_str = ""
txt_style = ""
divider = ""
reset = ""
def __init__(self, txt, txt_style, reset, divider, prev_link=None):
self.txt_style = txt_style
self.divider = divider
self.reset = reset
pre_txt = ""
if prev_link is not None:
pre_txt = prev_link.base_str
# divider fg = prev link bg
# divider bg = current link bg
divider_fg = str(int(prev_link.txt_style[-3:-1]) - 10)
divider_bg = str(int(txt_style[-3:-1]))
divider_style = f"\033[{divider_fg};{divider_bg}m"
pre_txt += f"{divider_style}{divider}{reset}"
self.base_str = f"{pre_txt}{txt_style} {txt} {reset}"
def end(self):
divider_fg = str(int(self.txt_style[-3:-1]) - 10)
self.base_str += f"\033[{divider_fg}m{self.divider}{self.reset}"
return self.base_str
def link(self, txt, txt_style):
return ANSILink(txt, txt_style, self.reset, self.divider, self)
def get_tags(style):
open_tag = f"[{style}]"
close_tag = f"{open_tag[:1]}/{open_tag[1:]}"
return open_tag, close_tag
def cli() -> None:
from rich.console import Console
b = Bubbles()
c = Console()
print()
c.print(b.get_rich_bubble("Hello World", bg_color="purple"), end=" ")
c.print(b.get_rich_bubble("I love bubblewrap", bg_color="red"), end=" ")
c.print(b.get_rich_bubble("try it now", bg_color="blue"))
print()
c.print(b.get_rich_bubble("Bubble up words", bg_color="dark_green", fg_color="grey66"), end=" ")
c.print(b.get_rich_bubble("or entire sentences", bg_color="blue"), end=" ")
c.print(b.get_rich_bubble("try it now", bg_color="purple"))
print()
c.print(b.get_rich_bubble("Use Rich, or ANSI colors of your choosing", bg_color="purple"))
print()
c.print(b.get_rich_bubble("Fully control", bg_color="blue"), end=" ")
c.print(b.get_rich_bubble("background", bg_color="dark_green"), end=" ")
c.print(b.get_rich_bubble("and", bg_color="purple"), end=" ")
c.print(b.get_rich_bubble("foreground", bg_color="blue", fg_color="dark_red"), end=" ")
c.print(b.get_rich_bubble("colors", bg_color="grey66", fg_color="grey3"))
print()
c.print(b.get_rich_bubble("Create beautiful CLI applications, with ease!", bg_color="deep_pink4"))
print()
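# Usage sketch for the chain API defined above (the ">" divider is illustrative;
# any single-character separator, e.g. a powerline glyph, works the same way):
#
#   from rich.console import Console
#   chain = Bubbles().get_rich_chain("status", bg_color="blue", divider=">")
#   Console().print(chain.link("ok", bg_color="green", fg_color="white").end())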
|
PypiClean
|
/kde-material-you-colors-1.6.0b2.tar.gz/kde-material-you-colors-1.6.0b2/src/kde_material_you_colors/utils/color_utils.py
|
import operator
import numpy
import colorsys
import re
from . import math_utils
from material_color_utilities_python.utils.theme_utils import *
def hex2rgb(hex):
hex = hex.lstrip("#")
rgb = tuple(int(hex[i : i + 2], 16) for i in (0, 2, 4))
# print(f'{rgb} {type(rgb)}')
return rgb
def rgb2hex(r, g, b):
hex = "#{:02x}{:02x}{:02x}".format(r, g, b)
return hex
# Blend RGB colors using Oklab
# Adapted from https://github.com/ProtonAOSP/android_frameworks_base/commit/28cc1ae1b1436120f111f1e21ca62e1fc9e0a7df
def cube(x):
x = float(x)
return x * x * x
# Linear -> sRGB
def srgbTransfer(x):
x = float(x)
if x >= 0.0031308:
return 1.055 * float(numpy.power(x, (1.0 / 2.4)) - 0.055)
else:
return 12.92 * x
# sRGB -> Linear
def srgbTransferInv(x):
x = float(x)
if x >= 0.04045:
return float(numpy.power(((x + 0.055) / 1.055), 2.4))
else:
return x / 12.92
def srgbRed(redInt):
return srgbTransferInv(redInt / 255.0)
def srgbGreen(greenInt):
return srgbTransferInv(greenInt / 255.0)
def srgbBlue(blueInt):
return srgbTransferInv(blueInt / 255.0)
def srgbTransferToInt(c):
c = float(c)
res = numpy.round(srgbTransfer(c) * 255.0)
if res < 0:
return 0
elif res > 255:
return 255
else:
return res
def rgbToOklabLp(r, g, b):
r = float(r)
g = float(g)
b = float(b)
return float(numpy.cbrt(0.4122214708 * r + 0.5363325363 * g + 0.0514459929 * b))
def rgbToOklabMp(r, g, b):
r = float(r)
g = float(g)
b = float(b)
return float(numpy.cbrt(0.2119034982 * r + 0.6806995451 * g + 0.1073969566 * b))
def rgbToOklabSp(r, g, b):
r = float(r)
g = float(g)
b = float(b)
return float(numpy.cbrt(0.0883024619 * r + 0.2817188376 * g + 0.6299787005 * b))
def blendColors(colorA, colorB, ratio):
inverseRatio = 1 - ratio
[r1, g1, b1] = hex2rgb(colorA)
[r2, g2, b2] = hex2rgb(colorB)
r1 = srgbRed(r1)
g1 = srgbRed(g1)
b1 = srgbRed(b1)
lp1 = rgbToOklabLp(r1, g1, b1)
mp1 = rgbToOklabMp(r1, g1, b1)
sp1 = rgbToOklabSp(r1, g1, b1)
r2 = srgbRed(r2)
g2 = srgbRed(g2)
b2 = srgbRed(b2)
lp2 = rgbToOklabLp(r2, g2, b2)
mp2 = rgbToOklabMp(r2, g2, b2)
sp2 = rgbToOklabSp(r2, g2, b2)
l = cube(lp1 * inverseRatio + lp2 * ratio)
m = cube(mp1 * inverseRatio + mp2 * ratio)
s = cube(sp1 * inverseRatio + sp2 * ratio)
r = int(srgbTransferToInt(+4.0767416621 * l - 3.3077115913 * m + 0.2309699292 * s))
g = int(srgbTransferToInt(-1.2684380046 * l + 2.6097574011 * m - 0.3413193965 * s))
b = int(srgbTransferToInt(-0.0041960863 * l - 0.7034186147 * m + 1.7076147010 * s))
return rgb2hex(r, g, b)
def hex2alpha(solid_color, opacity):
opacity = int(numpy.ceil(opacity * 2.55))
aa = f"{opacity:x}"
if len(aa) == 1:
aa = f"0{aa}"
argb = solid_color.replace("#", f"#{aa}")
return argb
def rgb2alpha(rgb, opacity):
opacity = int(numpy.ceil(opacity * 2.55))
rgba = (rgb[0], rgb[1], rgb[2], opacity)
return rgba
def hex2rgba(hex, opacity):
hex = hex.lstrip("#")
rgb = tuple(int(hex[i : i + 2], 16) for i in (0, 2, 4))
rgba = rgb2alpha(rgb, opacity)
return rgba
def color_luminance(color):
r, g, b = hex2rgb(color)
lum = 0.2126 * srgbRed(r) + 0.7152 * srgbGreen(g) + 0.0722 * srgbBlue(b)
# print("Luminance:", lum)
return (color, lum)
def sort_colors_luminance(colors, reverse=False):
first_color = colors[0]
all_colors = colors
# print(f"Sorting colors by luminance: {colors}")
colors_with_luminance = []
sorted_colors = ()
for color in colors:
colors_with_luminance.append(color_luminance(color))
colors_with_luminance.sort(key=operator.itemgetter(1), reverse=reverse)
for color in colors_with_luminance:
# print(color[0], color[1])
sorted_colors += (color[0],)
# print(colors_with_luminance)
# print(sorted_colors)
# print(f"Sorted colors: {sorted_colors}")
return sorted_colors
def contrast_ratio(lighter_color, darker_color):
l1 = float(color_luminance(lighter_color)[1])
l2 = float(color_luminance(darker_color)[1])
contrast_ratio = (l1 + 0.05) / (l2 + 0.05)
return contrast_ratio
def blend2contrast(
lighter_color, darker_color, blend_color, min_contrast, blend_step, dark=True
):
# print(f"Test blend2contrast {lighter_color} {darker_color} {blend_color} 4.5 0.1")
if dark:
contrast = contrast_ratio(lighter_color, darker_color)
else:
contrast = contrast_ratio(darker_color, lighter_color)
if contrast < min_contrast:
blend_ratio = 0.0
while contrast < min_contrast:
blend_ratio += blend_step
if dark:
new = blendColors(lighter_color, blend_color, blend_ratio)
contrast = contrast_ratio(new, darker_color)
else:
new = blendColors(lighter_color, blend_color, blend_ratio)
contrast = contrast_ratio(darker_color, new)
# print(f"new: {new} vs {darker_color} blend: {blend_ratio} contrast: {contrast}")
return new
else:
return blendColors(lighter_color, blend_color, 0.12)
def scale_lightness(hex_color, amount):
r, g, b = hex2rgb(hex_color)
# convert rgb to hsv
h, s, v = colorsys.rgb_to_hsv(r, g, b)
# manipulate value and convert back to rgb
r, g, b = colorsys.hsv_to_rgb(h, s, amount)
o_hex = rgb2hex(int(r), int(g), int(b))
# print(f"scale_lightness color: {hex_color} * amount: {amount} = {o_hex}")
return o_hex
def lighteen_color(hex_color, min, blend):
current_luminance = color_luminance(hex_color)[1]
# print(f"original luminance: {current_luminance}")
if current_luminance < min:
new_lightness = 255.0 * (1.0 - current_luminance)
# print(f"increase lightness to {new_lightness}")
new_color = scale_lightness(hex_color, new_lightness)
else:
new_color = hex_color
o = blendColors(new_color, blend, 0.2)
# print(f"result after blend: {o}")
return o
def scale_saturation(hex_color, amount):
r, g, b = hex2rgb(hex_color)
# convert rgb to hsv
h, s, v = colorsys.rgb_to_hsv(r, g, b)
# manipulate saturation and convert back to rgb
r, g, b = colorsys.hsv_to_rgb(h, amount, v)
o_hex = rgb2hex(int(r), int(g), int(b))
# print(f"scale_lightness color: {hex_color} * amount: {amount} = {o_hex}")
return o_hex
def multiply_saturation(hex_color, amount):
r, g, b = hex2rgb(hex_color)
# convert rgb to hsv
h, s, v = colorsys.rgb_to_hsv(r, g, b)
# manipulate saturation and convert back to rgb
amount = math_utils.clip(s * amount, 0, 1, s)
r, g, b = colorsys.hsv_to_rgb(h, amount, v)
o_hex = rgb2hex(int(r), int(g), int(b))
# print(f"scale_lightness color: {hex_color} * amount: {amount} = {o_hex}")
return o_hex
def multiply_lightness(hex_color, amount):
r, g, b = hex2rgb(hex_color)
# convert rgb to hsv
h, s, v = colorsys.rgb_to_hsv(r, g, b)
# manipulate value and convert back to rgb
amount = math_utils.clip(v * amount, 0, 255, v)
r, g, b = colorsys.hsv_to_rgb(h, s, amount)
o_hex = rgb2hex(int(r), int(g), int(b))
# print(f"scale_lightness color: {hex_color} * amount: {amount} = {o_hex}")
return o_hex
def validate_color(color):
"""check if a color is either a valid hex or rgb format
Args:
color (str): Hex or rgb color
Returns:
int: color type rgb(1) or hex(2)
None: for invalid color
"""
is_hex = re.search(r"^#(?:[0-9a-fA-F]{3}){1,2}$", color)
is_rgb = re.search(r"^(?:(?:^|,\s*)([01]?\d\d?|2[0-4]\d|25[0-5])){3}$", color)
if is_rgb:
return 1
elif is_hex:
return 2
else:
return None
def color2hex(color):
format = validate_color(color)
if format == 1:
r, g, b = [int(c) for c in color.split(",")]
return rgb2hex(r, g, b)
elif format == 2:
return color
# Tests
if __name__ == "__main__":
# Test color blend
print("> Test color blend #ff0000 , #00ff00")
print(blendColors("#ff0000", "#00ff00", 0.01))
print(blendColors("#ff0000", "#00ff00", 0.25))
print(blendColors("#ff0000", "#00ff00", 0.5))
print(blendColors("#ff0000", "#00ff00", 0.75))
print(blendColors("#ff0000", "#00ff00", 0.99))
print("> Test color hex2alpha '#ff0000',50")
print(hex2alpha("#ff0000", 50))
color1hex = "#082523"
color1rgb = hex2rgb(color1hex)
color1rgb_alpha = rgb2alpha(color1rgb, 200)
print("> Test color rgb2alpha")
print(color1rgb_alpha)
print("> Test color hex2rgba")
color1rgba = hex2rgba(color1hex, 200)
print(color1rgba)
print("> Test color_luminance")
print(color_luminance(color1hex))
colors_list = (
"#f96767",
"#222250",
"#ff8400",
"#ffd500",
"#00fffb",
"#c1f7fb",
"#00eeff",
)
print(
"> Test sort_colors_luminance '#f96767','#222250','#ff8400','#ffd500','#00fffb','#c1f7fb','#00eeff'"
)
print(sort_colors_luminance(colors_list))
print("> Test contrast_ratio '#475AC6','#1A1A22'")
print(contrast_ratio("#475AC6", "#1A1A22"))
print("> Test blend2contrast '#475AC6','#1A1A22','#c1f7fb',4.5 ,0.1, True")
print(blend2contrast("#475AC6", "#1A1A22", "#c1f7fb", 4.5, 0.1, True))
print("> Test blend2contrast '#e1ffb4','#FEFCF5','#060605', 4.5, 0.01, False")
print(blend2contrast("#e1ffb4", "#FEFCF5", "#060605", 4.5, 0.01, False))
print("> Oklab vs cam16 blend '#ff0000', '#0000ff', .5")
print(f"oklab: {blendColors('#ff0000', '#0000ff', .5)}")
print(
f"cam16: {hexFromArgb(Blend.cam16Ucs(argbFromHex('#ff0000'),argbFromHex('#0000ff'),0.5))}"
)
print("> lighteen_color '#b70708',.15,'#ffffff'")
print(lighteen_color("#b70708", 0.15, "#ffffff"))
test_colors = [
"#000000",
"#4141a6",
"#1dc136",
"#bbb13c",
"#ed19cd",
"#e40f0f",
"#fe6c0b",
"#fff000",
"#36e5d3",
"#131aed",
"#ff0000",
"#00ff00",
"#0000ff",
"#ffffff",
]
for color in test_colors:
print(color_luminance(color))
|
PypiClean
|
/Products.PDFtoOCR-1.1.tar.gz/Products.PDFtoOCR-1.1/README.txt
|
Introduction
============
PDFtoOCR processes text in PDF documents using OCR. This is needed
when text cannot be extracted from a (scanned) PDF. PDFtoOCR uses content rules to
schedule the OCR processing. The processing cannot be done on the fly, for
example with a custom TextIndexNG plugin, because processing large PDF documents
using OCR is a time- and processor-consuming task.
Configuration
=============
On the operating system
-----------------------
PDF to OCR uses three tools that are available under Linux. The
integration with these tools has only been tested on Debian, but it will probably
work in other \*NIX environments.
Install requirements, PDF to OCR uses the following programs:
- pdftotext, checks if OCR processing is necessary
- ghostscript, converts the pdf documents to tiff images
- tesseract, does the OCR processing (make sure you've got all language packs!*)
Set the environment variables as shown in the example below:
- The environment variable *$GS* must be set and point to the ghostscript binary.
- The environment variable *$TESSERACT* must be set and point to the tesseract binary.
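For example, in a typical Debian setup (the paths below are illustrative; point
them to wherever your binaries live)::
  export GS=/usr/bin/gs
  export TESSERACT=/usr/bin/tesseract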
On the Plone site
-----------------
Add a content rule
- Event trigger: Object modified and object added
- Condition: Content type is file
- Actions: Store OCR output from a PDF in searchable text
Assign content rule to a Plone site or a folder
Install cron4plone and add the following cronjob: portal/@@do_pdf_ocr_index
Usage
=====
Just add a file with a PDF document. Optionally you can select the language so the OCR
engine can use dictionaries when indexing. Only a limited number of languages is
supported by Tesseract.
An overview of indexed documents is found in the control panel, 'PDF to OCR status'.
In this status page (re)indexing of documents is possible.
PDF Processing
==============
Each time a file is added or modified the unique id (uid) of the file is added
to a queue. This queue is persistent and serves two functions: indexing and reindexing.
The indexing function uses the queue to process the documents. When reindexing is used all
files in the queue history are processed.
If the text of a PDF document can be extracted using pdftotext, no OCR is done. Otherwise
the OCR engine extracts the text and stores it in the content type File. The ATFile is patched
with an extra field to accommodate the extracted text and the language of the PDF.
Page views:
- @@do_pdf_ocr_index - indexes documents in the queue
- @@do_pdf_ocr_reindex - reindexes all pdf documents in the Plone site
- @@pdf_ocr_status - Show the queue and a history 10 documents
Further reading
===============
http://plone.org/documentation/how-to/ocr-in-plone-using-tesseract-ocr/
http://code.google.com/p/tesseract-ocr/
* Make sure you don't have any empty language files in /usr/local/share/tessdata/
Maybe a good alternative in the future; it uses tesseract but is hard to set up and
still very much beta:
http://sites.google.com/site/ocropus/
|
PypiClean
|
/cpg-hail-0.2.90.tar.gz/cpg-hail-0.2.90/hail/ggplot/scale.py
|
import abc
from .geoms import FigureAttribute
from hail.context import get_reference
from .utils import categorical_strings_to_colors, continuous_nums_to_colors
class Scale(FigureAttribute):
def __init__(self, aesthetic_name):
self.aesthetic_name = aesthetic_name
@abc.abstractmethod
def transform_data(self, field_expr):
pass
def create_local_transformer(self, groups_of_dfs, parent):
return lambda x: x
@abc.abstractmethod
def is_discrete(self):
pass
@abc.abstractmethod
def is_continuous(self):
pass
class PositionScale(Scale):
def __init__(self, aesthetic_name, name, breaks, labels):
super().__init__(aesthetic_name)
self.name = name
self.breaks = breaks
self.labels = labels
def update_axis(self, fig):
if self.aesthetic_name == "x":
return fig.update_xaxes
elif self.aesthetic_name == "y":
return fig.update_yaxes
# What else do discrete and continuous scales have in common?
def apply_to_fig(self, parent, fig_so_far):
if self.name is not None:
self.update_axis(fig_so_far)(title=self.name)
if self.breaks is not None:
self.update_axis(fig_so_far)(tickvals=self.breaks)
if self.labels is not None:
self.update_axis(fig_so_far)(ticktext=self.labels)
class PositionScaleGenomic(PositionScale):
def __init__(self, aesthetic_name, reference_genome, name=None):
super().__init__(aesthetic_name, name, None, None)
if isinstance(reference_genome, str):
reference_genome = get_reference(reference_genome)
self.reference_genome = reference_genome
def apply_to_fig(self, parent, fig_so_far):
contig_offsets = dict(list(self.reference_genome.global_positions_dict.items())[:24])
breaks = list(contig_offsets.values())
labels = list(contig_offsets.keys())
self.update_axis(fig_so_far)(tickvals=breaks, ticktext=labels)
def transform_data(self, field_expr):
return field_expr.global_position()
def is_discrete(self):
return False
def is_continuous(self):
return False
class PositionScaleContinuous(PositionScale):
def __init__(self, axis=None, name=None, breaks=None, labels=None, transformation="identity"):
super().__init__(axis, name, breaks, labels)
self.transformation = transformation
def apply_to_fig(self, parent, fig_so_far):
super().apply_to_fig(parent, fig_so_far)
if self.transformation == "identity":
pass
elif self.transformation == "log10":
self.update_axis(fig_so_far)(type="log")
elif self.transformation == "reverse":
self.update_axis(fig_so_far)(autorange="reversed")
else:
raise ValueError(f"Unrecognized transformation {self.transformation}")
def transform_data(self, field_expr):
return field_expr
def is_discrete(self):
return False
def is_continuous(self):
return True
class PositionScaleDiscrete(PositionScale):
def __init__(self, axis=None, name=None, breaks=None, labels=None):
super().__init__(axis, name, breaks, labels)
def apply_to_fig(self, parent, fig_so_far):
super().apply_to_fig(parent, fig_so_far)
def transform_data(self, field_expr):
return field_expr
def is_discrete(self):
return True
def is_continuous(self):
return False
class ScaleContinuous(Scale):
def __init__(self, aesthetic_name):
super().__init__(aesthetic_name)
def transform_data(self, field_expr):
return field_expr
def is_discrete(self):
return False
def is_continuous(self):
return True
class ScaleDiscrete(Scale):
def __init__(self, aesthetic_name):
super().__init__(aesthetic_name)
def transform_data(self, field_expr):
return field_expr
def is_discrete(self):
return True
def is_continuous(self):
return False
class ScaleColorDiscrete(ScaleDiscrete):
def create_local_transformer(self, groups_of_dfs, parent):
categorical_strings = set()
for group_of_dfs in groups_of_dfs:
for df in group_of_dfs:
if self.aesthetic_name in df.attrs:
categorical_strings.add(df.attrs[self.aesthetic_name])
unique_color_mapping = categorical_strings_to_colors(categorical_strings, parent)
def transform(df):
df.attrs[f"{self.aesthetic_name}_legend"] = df.attrs[self.aesthetic_name]
df.attrs[self.aesthetic_name] = unique_color_mapping[df.attrs[self.aesthetic_name]]
return df
return transform
class ScaleColorContinuous(ScaleContinuous):
def create_local_transformer(self, groups_of_dfs, parent):
overall_min = None
overall_max = None
for group_of_dfs in groups_of_dfs:
for df in group_of_dfs:
if self.aesthetic_name in df.columns:
series = df[self.aesthetic_name]
series_min = series.min()
series_max = series.max()
if overall_min is None:
overall_min = series_min
else:
overall_min = min(series_min, overall_min)
if overall_max is None:
overall_max = series_max
else:
overall_max = max(series_max, overall_max)
color_mapping = continuous_nums_to_colors(overall_min, overall_max, parent.continuous_color_scale)
def transform(df):
df[self.aesthetic_name] = df[self.aesthetic_name].map(lambda i: color_mapping(i))
return df
return transform
# Legend names messed up for scale color identity
class ScaleColorDiscreteIdentity(ScaleDiscrete):
pass
def scale_x_log10(name=None):
"""Transforms x axis to be log base 10 scaled.
Parameters
----------
name: :class:`str`
The label to show on x-axis
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleContinuous("x", name=name, transformation="log10")
def scale_y_log10(name=None):
"""Transforms y-axis to be log base 10 scaled.
Parameters
----------
name: :class:`str`
The label to show on y-axis
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleContinuous("y", name=name, transformation="log10")
def scale_x_reverse(name=None):
"""Transforms x-axis to be vertically reversed.
Parameters
----------
name: :class:`str`
The label to show on x-axis
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleContinuous("x", name=name, transformation="reverse")
def scale_y_reverse(name=None):
"""Transforms y-axis to be vertically reversed.
Parameters
----------
name: :class:`str`
The label to show on y-axis
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleContinuous("y", name=name, transformation="reverse")
def scale_x_continuous(name=None, breaks=None, labels=None, trans="identity"):
"""The default continuous x scale.
Parameters
----------
name: :class:`str`
The label to show on x-axis
breaks: :class:`list` of :class:`float`
The locations to draw ticks on the x-axis.
labels: :class:`list` of :class:`str`
The labels of the ticks on the axis.
trans: :class:`str`
The transformation to apply to the x-axis. Supports "identity", "reverse", "log10".
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleContinuous("x", name=name, breaks=breaks, labels=labels, transformation=trans)
def scale_y_continuous(name=None, breaks=None, labels=None, trans="identity"):
"""The default continuous y scale.
Parameters
----------
name: :class:`str`
The label to show on y-axis
breaks: :class:`list` of :class:`float`
The locations to draw ticks on the y-axis.
labels: :class:`list` of :class:`str`
The labels of the ticks on the axis.
trans: :class:`str`
The transformation to apply to the y-axis. Supports "identity", "reverse", "log10".
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleContinuous("y", name=name, breaks=breaks, labels=labels, transformation=trans)
def scale_x_discrete(name=None, breaks=None, labels=None):
"""The default discrete x scale.
Parameters
----------
name: :class:`str`
The label to show on x-axis
breaks: :class:`list` of :class:`str`
The locations to draw ticks on the x-axis.
labels: :class:`list` of :class:`str`
The labels of the ticks on the axis.
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleDiscrete("x", name=name, breaks=breaks, labels=labels)
def scale_y_discrete(name=None, breaks=None, labels=None):
"""The default discrete y scale.
Parameters
----------
name: :class:`str`
The label to show on y-axis
breaks: :class:`list` of :class:`str`
The locations to draw ticks on the y-axis.
labels: :class:`list` of :class:`str`
The labels of the ticks on the axis.
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleDiscrete("y", name=name, breaks=breaks, labels=labels)
def scale_x_genomic(reference_genome, name=None):
"""The default genomic x scale. This is used when the ``x`` aesthetic corresponds to a :class:`.LocusExpression`.
Parameters
----------
reference_genome:
The reference genome being used.
name: :class:`str`
The label to show on x-axis
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return PositionScaleGenomic("x", reference_genome, name=name)
def scale_color_discrete():
"""The default discrete color scale. This maps each discrete value to a color.
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return ScaleColorDiscrete("color")
def scale_color_continuous():
"""The default continuous color scale. This linearly interpolates colors between the min and max observed values.
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return ScaleColorContinuous("color")
def scale_color_identity():
"""A color scale that assumes the expression specified in the ``color`` aesthetic can be used as a color.
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return ScaleColorDiscreteIdentity("color")
def scale_fill_discrete():
"""The default discrete fill scale. This maps each discrete value to a fill color.
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return ScaleColorDiscrete("fill")
def scale_fill_continuous():
"""The default discrete fill scale. This linearly interpolates colors between the min and max observed values.
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return ScaleColorContinuous("fill")
def scale_fill_identity():
"""A color scale that assumes the expression specified in the ``fill`` aesthetic can be used as a fill color.
Returns
-------
:class:`.FigureAttribute`
The scale to be applied.
"""
return ScaleColorDiscreteIdentity("fill")
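# Usage sketch (assumes a Hail Table `ht` with numeric fields `x` and `y`, and that
# `ggplot`, `aes` and `geom_point` are exported by hail.ggplot alongside these scales):
#
#   from hail.ggplot import ggplot, aes, geom_point, scale_x_log10
#   fig = ggplot(ht, aes(x=ht.x, y=ht.y)) + geom_point() + scale_x_log10(name="x (log10)")
#   fig.show()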
|
PypiClean
|
/jupyros-0.7.0a0.tar.gz/jupyros-0.7.0a0/js/node_modules/caniuse-lite/data/regions/PE.js
|
module.exports={C:{"2":0,"3":0,"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0,"39":0,"40":0.00483,"41":0.00483,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0.00483,"53":0,"54":0,"55":0,"56":0,"57":0,"58":0,"59":0,"60":0,"61":0,"62":0,"63":0,"64":0,"65":0,"66":0.00483,"67":0,"68":0,"69":0,"70":0,"71":0,"72":0,"73":0,"74":0,"75":0,"76":0,"77":0,"78":0,"79":0,"80":0,"81":0,"82":0,"83":0,"84":0.00483,"85":0,"86":0,"87":0,"88":0.00966,"89":0,"90":0,"91":0,"92":0,"93":0,"94":0,"95":0,"96":0,"97":0.01932,"98":0,"99":0,"100":0,"101":0,"102":0.00483,"103":0.00483,"104":0.02897,"105":0.00483,"106":0.00966,"107":0.07244,"108":0.2656,"109":0.17867,"110":0,"111":0,"3.5":0,"3.6":0},D:{"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0,"37":0,"38":0.02415,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0.00483,"48":0,"49":0.01449,"50":0,"51":0,"52":0,"53":0.00966,"54":0,"55":0,"56":0,"57":0,"58":0,"59":0,"60":0,"61":0,"62":0,"63":0,"64":0,"65":0.00483,"66":0.00483,"67":0,"68":0.01449,"69":0.00483,"70":0.00483,"71":0,"72":0,"73":0,"74":0.00483,"75":0,"76":0,"77":0.00483,"78":0.00483,"79":0.17384,"80":0.01932,"81":0.01932,"83":0.00966,"84":0.00483,"85":0.00966,"86":0.00483,"87":0.02897,"88":0.00966,"89":0.00483,"90":0.00483,"91":0.05312,"92":0.02897,"93":0.01932,"94":0.02897,"95":0.01932,"96":0.0338,"97":0.02897,"98":0.01449,"99":0.0338,"100":0.02897,"101":0.02897,"102":0.02897,"103":0.09175,"104":0.05795,"105":0.07244,"106":0.05312,"107":0.13521,"108":8.63908,"109":8.89985,"110":0.00483,"111":0.00483,"112":0},F:{"9":0,"11":0,"12":0,"15":0,"16":0,"17":0,"18":0,"19":0,"20":0,"21":0,"22":0,"23":0,"24":0,"25":0,"26":0,"27":0,"28":0.00483,"29":0,"30":0,"31":0,"32":0,"33":0,"34":0,"35":0,"36":0.00483,"37":0,"38":0,"39":0,"40":0,"41":0,"42":0,"43":0,"44":0,"45":0,"46":0,"47":0,"48":0,"49":0,"50":0,"51":0,"52":0,"53":0,"54":0,"55":0,"56":0,"57":0,"58":0,"60":0,"62":0,"63":0,"64":0,"65":0,"66":0,"67":0,"68":0,"69":0,"70":0,"71":0,"72":0,"73":0.00483,"74":0,"75":0,"76":0,"77":0,"78":0,"79":0,"80":0,"81":0,"82":0,"83":0,"84":0,"85":0.00483,"86":0,"87":0,"88":0,"89":0,"90":0,"91":0,"92":0.00483,"93":0.8499,"94":0.60845,"9.5-9.6":0,"10.0-10.1":0,"10.5":0,"10.6":0,"11.1":0,"11.5":0,"11.6":0,"12.1":0},B:{"12":0.00483,"13":0,"14":0,"15":0,"16":0,"17":0,"18":0.00483,"79":0,"80":0,"81":0,"83":0,"84":0,"85":0,"86":0,"87":0,"88":0,"89":0,"90":0,"91":0,"92":0.00483,"93":0,"94":0,"95":0,"96":0,"97":0,"98":0,"99":0,"100":0.00483,"101":0,"102":0,"103":0.00483,"104":0,"105":0.00483,"106":0.00483,"107":0.02415,"108":0.66157,"109":0.67123},E:{"4":0,"5":0,"6":0,"7":0,"8":0,"9":0,"10":0,"11":0,"12":0,"13":0,"14":0.01932,"15":0.00483,_:"0","3.1":0,"3.2":0,"5.1":0,"6.1":0,"7.1":0,"9.1":0,"10.1":0,"11.1":0,"12.1":0,"13.1":0.00966,"14.1":0.02415,"15.1":0.00483,"15.2-15.3":0.00483,"15.4":0.00966,"15.5":0.02415,"15.6":0.07726,"16.0":0.01932,"16.1":0.06278,"16.2":0.07244,"16.3":0.00483},G:{"8":0,"3.2":0,"4.0-4.1":0,"4.2-4.3":0.00201,"5.0-5.1":0.0067,"6.0-6.1":0.00067,"7.0-7.1":0.00268,"8.1-8.4":0,"9.0-9.2":0.00201,"9.3":0.01875,"10.0-10.2":0,"10.3":0.01406,"11.0-11.2":0.00603,"11.3-11.4":0.00402,"12.0-12.1":0.00803,"12.2-12.5":0.18612,"13.0
-13.1":0.00536,"13.2":0.00536,"13.3":0.01138,"13.4-13.7":0.02946,"14.0-14.4":0.08034,"14.5-14.8":0.24437,"15.0-15.1":0.06159,"15.2-15.3":0.09976,"15.4":0.15064,"15.5":0.2919,"15.6":0.83621,"16.0":0.94132,"16.1":1.84382,"16.2":1.31558,"16.3":0.11114},P:{"4":0.32158,"5.0-5.4":0.01005,"6.2-6.4":0,"7.2-7.4":0.09044,"8.2":0,"9.2":0.01005,"10.1":0,"11.1-11.2":0.0402,"12.0":0,"13.0":0.0402,"14.0":0.03015,"15.0":0.0201,"16.0":0.07035,"17.0":0.07035,"18.0":0.09044,"19.0":0.80394},I:{"0":0,"3":0,"4":0,"2.1":0,"2.2":0,"2.3":0,"4.1":0.01017,"4.2-4.3":0.00508,"4.4":0,"4.4.3-4.4.4":0.1144},K:{_:"0 10 11 12 11.1 11.5 12.1"},A:{"6":0,"7":0,"8":0,"9":0,"10":0,"11":0.02897,"5.5":0},J:{"7":0,"10":0},N:{"10":0,"11":0},R:{_:"0"},M:{"0":0.10342},Q:{"13.1":0},O:{"0":0.02068},H:{"0":0.20072},L:{"0":67.87162},S:{"2.5":0}};
|
PypiClean
|
/tulipcore-0.1.0a2.tar.gz/tulipcore-0.1.0a2/README.rst
|
tulipcore
=========
tulipcore_ is an alternative gevent_ core loop. It is based on asyncio_ a.k.a.
tulip, the async library for Python 3. With tulipcore_, you can run gevent_
code on top of asyncio_.
tulipcore_ is written and maintained by `Fantix King`_ and is licensed under
MIT license.
Install tulipcore
-----------------
Install Python 3.4 or newer, the greenlet_ extension and the gevent_ library. Note
that if you are running on Python 3.3, you also need to install the asyncio_ library.
Please note that, at this point (mid 2014), mainline gevent_ is still in the process
of fully supporting Python 3. So if you want to give it a try right now, you can
install my gevent fork:
.. code:: sh
pip install git+git://github.com/fantix/gevent.git
Install tulipcore:
.. code:: sh
pip install git+git://github.com/decentfox/tulipcore.git
Use tulipcore
-------------
Add this environment variable, it will tell gevent_ to use tulipcore_:
.. code:: sh
GEVENT_LOOP=tulipcore.Loop
For example, you can run the gevent_ test suite with tulipcore_ installed:
.. code:: sh
cd gevent/greentest
GEVENT_LOOP=tulipcore.Loop python testrunner.py
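To check the setup, any small gevent program should behave the same as with the
default loop; a minimal sketch:
.. code:: python
    import gevent
    def worker(n):
        gevent.sleep(0.1)
        print('greenlet', n, 'done')
    gevent.joinall([gevent.spawn(worker, i) for i in range(3)])
Run it as ``GEVENT_LOOP=tulipcore.Loop python example.py`` (the file name is just an
example) to exercise the asyncio_-based loop.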
History
-------
This project was originally called gevent3_, which was a wrapper of asyncio_
for Python 3 offering a gevent-compatible API. It was developed in the wrong
direction and I decided to abandon it.
.. _gevent: http://www.gevent.org
.. _gevent3: https://github.com/decentfox/tulipcore/tree/gevent3
.. _asyncio: https://docs.python.org/3/library/asyncio.html
.. _greenlet: https://github.com/python-greenlet/greenlet
.. _Fantix King: http://about.me/fantix
.. _tulipcore: https://github.com/decentfox/tulipcore
|
PypiClean
|
/nuradiomc-2.1.8.tar.gz/nuradiomc-2.1.8/NuRadioReco/modules/iftElectricFieldReconstructor/iftElectricFieldReconstructor.py
|
import numpy as np
from NuRadioReco.utilities import units, fft, trace_utilities, bandpass_filter
import NuRadioReco.utilities.trace_utilities
import NuRadioReco.detector.antennapattern
import NuRadioReco.detector.RNO_G.analog_components
import NuRadioReco.detector.ARIANNA.analog_components
from NuRadioReco.framework.parameters import electricFieldParameters as efp
from NuRadioReco.framework.parameters import channelParameters as chp
import NuRadioReco.modules.iftElectricFieldReconstructor.operators
import NuRadioReco.framework.base_trace
import NuRadioReco.framework.electric_field
import scipy
import nifty5 as ift
import matplotlib.pyplot as plt
import scipy.signal
import radiotools.helper
class IftElectricFieldReconstructor:
"""
Module that uses Information Field Theory to reconstruct the electric field.
A description of how this method works can be found at https://arxiv.org/abs/2102.00258
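Usage sketch (assuming an event, a station, a detector and an electric field
template are already available in the calling script; the channel IDs below are
placeholders)::
    reconstructor = IftElectricFieldReconstructor()
    reconstructor.begin(electric_field_template, passband=[130. * units.MHz, 500. * units.MHz])
    reconstructor.run(event, station, detector, channel_ids=[0, 1, 2, 3], efield_scaling=False)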
"""
def __init__(self):
self.__antenna_pattern_provider = NuRadioReco.detector.antennapattern.AntennaPatternProvider()
self.__passband = None
self.__filter_type = None
self.__debug = False
self.__efield_scaling = None
self.__amp_dct = None
self.__phase_dct = None
self.__used_channel_ids = None
self.__trace_samples = None
self.__fft_operator = None
self.__n_shifts = None
self.__trace_start_times = None
self.__n_iterations = None
self.__n_samples = None
self.__polarization = None
self.__electric_field_template = None
self.__convergence_level = None
self.__relative_tolerance = None
self.__use_sim = False
self.__pulse_time_prior = None
self.__pulse_time_uncertainty = None
self.__phase_slope = None
self.__slope_passbands = None
self.__energy_fluence_passbands = None
return
def begin(
self,
electric_field_template,
passband=None,
filter_type='butter',
amp_dct=None,
pulse_time_prior=20. * units.ns,
pulse_time_uncertainty=5. * units.ns,
n_iterations=5,
n_samples=20,
polarization='pol',
relative_tolerance=1.e-7,
convergence_level=3,
energy_fluence_passbands=None,
slope_passbands=None,
phase_slope='both',
debug=False
):
"""
Define settings for the reconstruction.
Parameters
----------
electric_field_template: NuRadioReco.framework.base_trace.BaseTrace object
BaseTrace (or child class) object containing an electric field template
that is used to determine the position of the radio pulse in the channel
waveforms.
passband: list of floats or None
Lower and upper bound of the filter that should be applied to the channel
waveforms and the IFT model. If None is passed, no filter is applied
filter_type: string
Name of the filter type to be used. Has to be implemented in the NuRadioReco.utilities.
bandpass_filter.get_filter_response function. Only used if passband is not None
amp_dct: dictionary
Dictionary containing the prior settings for the electric field spectrum
pulse_time_prior: float
Expected pulse time relative to the trace start time. Note that this is the time of the
electric field pulse, not the voltage pulse
pulse_time_uncertainty: float
Uncertainty on the pulse time
n_iterations: integer
Number of times the IFT minimizer iterates. More iterations lead to better results, but
increase run time.
n_samples: integer
Number of prior samples the IFT minimizer uses to find the maximum prior. Also the number of
samples used to estimate uncertainties
polarization: string
Polarization of the reconstructed radio signal. If set to "theta" or "phi", only that
component of the electric field is reconstructed. If set to "pol", both components
are reconstructed.
relative_tolerance: float
Relative improvement for the minimizer in a cycle for the optimization to finish.
convergence_level: integer
Number of cycles the relative improvement of the minimizer has to be below relative_tolerance
for the optimization to finish.
energy_fluence_passbands: list of floats
List of passbands for which the energy fluence is calculated
slope_passbands: list of floats
List of passbands to calculate the ratio of the energy fluences in different passbands.
phase_slope: string
Specifies the sign of the slope of the linear function describing the phase of the electric field.
Options are "negative", "positive" and "both". If "both" is selected, positive and negative slopes
are used and the best fit is selected.
debug: bool
If true, debug plots are drawn.
"""
self.__passband = passband
self.__filter_type = filter_type
self.__debug = debug
self.__n_iterations = n_iterations
self.__n_samples = n_samples
self.__trace_samples = len(electric_field_template.get_times())
self.__polarization = polarization
self.__electric_field_template = electric_field_template
self.__convergence_level = convergence_level
self.__relative_tolerance = relative_tolerance
self.__pulse_time_prior = pulse_time_prior
self.__pulse_time_uncertainty = pulse_time_uncertainty
if phase_slope not in ['both', 'negative', 'positive']:
raise ValueError('Phase slope has to be either both, negative or positive.')
self.__phase_slope = phase_slope
if slope_passbands is None:
self.__slope_passbands = [
[
(130. * units.MHz, 200 * units.MHz),
(200. * units.MHz, 350. * units.MHz)
]
]
if energy_fluence_passbands is None:
self.__energy_fluence_passbands = [
(130. * units.MHz, 500. * units.MHz)
]
else:
self.__slope_passbands = slope_passbands
if amp_dct is None:
self.__amp_dct = {
'n_pix': 64, # spectral bins
# Spectral smoothness (affects Gaussian process part)
'a': .01,
'k0': 2.,
# Power-law part of spectrum:
'sm': -4.9,
'sv': .5,
'im': 2.,
'iv': .5
}
else:
self.__amp_dct = amp_dct
return
def make_priors_plot(self, event, station, detector, channel_ids):
"""
Plots samples from the prior distribution of the electric field.
Parameters
----------
event: NuRadioReco.framework.event.Event object
station: NuRadioReco.framework.station.Station object
detector: NuRadioReco.detector.detector.Detector object or child object
channel_ids: list of integers
IDs of the channels to use for the electric field reconstruction
"""
self.__used_channel_ids = []
self.__efield_scaling = False
self.__used_channel_ids = channel_ids
self.__prepare_traces(event, station, detector)
ref_channel = station.get_channel(self.__used_channel_ids[0])
sampling_rate = ref_channel.get_sampling_rate()
time_domain = ift.RGSpace(self.__trace_samples)
frequency_domain = time_domain.get_default_codomain()
self.__fft_operator = ift.FFTOperator(frequency_domain.get_default_codomain())
amp_operators, filter_operator = self.__get_detector_operators(
station,
detector,
frequency_domain,
sampling_rate,
)
self.__draw_priors(event, station, frequency_domain)
def run(self, event, station, detector, channel_ids, efield_scaling, use_sim=False):
"""
Run the electric field reconstruction
Parameters
----------
event: NuRadioReco.framework.event.Event object
station: NuRadioReco.framework.station.Station object
detector: NuRadioReco.detector.detector.Detector object or child object
channel_ids: list of integers
IDs of the channels to be used for the electric field reconstruction
efield_scaling: boolean
If true, a small variation in the amplitude between channels is included
in the IFT model.
use_sim: boolean
If true, the simChannels are used to identify the position of the radio pulse.
"""
self.__used_channel_ids = [] # only use channels with associated E-field and zenith
self.__efield_scaling = efield_scaling
self.__used_channel_ids = channel_ids
self.__use_sim = use_sim
self.__prepare_traces(event, station, detector)
ref_channel = station.get_channel(self.__used_channel_ids[0])
sampling_rate = ref_channel.get_sampling_rate()
time_domain = ift.RGSpace(self.__trace_samples)
frequency_domain = time_domain.get_default_codomain()
large_frequency_domain = ift.RGSpace(self.__trace_samples * 2, harmonic=True)
self.__fft_operator = ift.FFTOperator(frequency_domain.get_default_codomain())
amp_operators, filter_operator = self.__get_detector_operators(
station,
detector,
frequency_domain,
sampling_rate,
)
final_KL = None
positive_reco_KL = None
negative_reco_KL = None
# Run Positive Phase Slope #
if self.__phase_slope == 'both' or self.__phase_slope == 'positive':
phase_slope = 2. * np.pi * self.__pulse_time_prior * self.__electric_field_template.get_sampling_rate() / self.__trace_samples
phase_uncertainty = 2. * np.pi * self.__pulse_time_uncertainty * self.__electric_field_template.get_sampling_rate() / self.__trace_samples
self.__phase_dct = {
'sm': phase_slope,
'sv': phase_uncertainty,
'im': 0.,
'iv': 10.
}
likelihood = self.__get_likelihood_operator(
frequency_domain,
large_frequency_domain,
amp_operators,
filter_operator
)
self.__draw_priors(event, station, frequency_domain)
ic_sampling = ift.GradientNormController(1E-8, iteration_limit=min(1000, likelihood.domain.size))
H = ift.StandardHamiltonian(likelihood, ic_sampling)
ic_newton = ift.DeltaEnergyController(name='newton',
iteration_limit=200,
tol_rel_deltaE=self.__relative_tolerance,
convergence_level=self.__convergence_level)
minimizer = ift.NewtonCG(ic_newton)
median = ift.MultiField.full(H.domain, 0.)
min_energy = None
best_reco_KL = None
for k in range(self.__n_iterations):
print('----------->>> {} <<<-----------'.format(k))
KL = ift.MetricGaussianKL(median, H, self.__n_samples, mirror_samples=True)
KL, convergence = minimizer(KL)
median = KL.position
if min_energy is None or KL.value < min_energy:
min_energy = KL.value
print('New min Energy', KL.value)
best_reco_KL = KL
if self.__phase_slope == 'both':
suffix = '_positive_phase'
else:
suffix = ''
if self.__debug:
self.__draw_reconstruction(
event,
station,
KL,
suffix
)
positive_reco_KL = best_reco_KL
final_KL = best_reco_KL
# Run Negative Phase Slope ###
if self.__phase_slope == 'both' or self.__phase_slope == 'negative':
phase_slope = 2. * np.pi * (self.__pulse_time_prior * self.__electric_field_template.get_sampling_rate() - self.__trace_samples) / self.__trace_samples
phase_uncertainty = 2. * np.pi * self.__pulse_time_uncertainty * self.__electric_field_template.get_sampling_rate() / self.__trace_samples
self.__phase_dct = {
'sm': phase_slope,
'sv': phase_uncertainty,
'im': 0.,
'iv': 10.
}
likelihood = self.__get_likelihood_operator(
frequency_domain,
large_frequency_domain,
amp_operators,
filter_operator
)
# self.__draw_priors(event, station, frequency_domain)
ic_sampling = ift.GradientNormController(1E-8, iteration_limit=min(1000, likelihood.domain.size))
H = ift.StandardHamiltonian(likelihood, ic_sampling)
ic_newton = ift.DeltaEnergyController(name='newton',
iteration_limit=200,
tol_rel_deltaE=self.__relative_tolerance,
convergence_level=self.__convergence_level)
minimizer = ift.NewtonCG(ic_newton)
median = ift.MultiField.full(H.domain, 0.)
min_energy = None
best_reco_KL = None
for k in range(self.__n_iterations):
print('----------->>> {} <<<-----------'.format(k))
KL = ift.MetricGaussianKL(median, H, self.__n_samples, mirror_samples=True)
KL, convergence = minimizer(KL)
median = KL.position
if min_energy is None or KL.value < min_energy:
min_energy = KL.value
print('New min Energy', KL.value)
best_reco_KL = KL
if self.__phase_slope == 'both':
suffix = '_negative_phase'
else:
suffix = ''
if self.__debug:
self.__draw_reconstruction(
event,
station,
KL,
suffix
)
negative_reco_KL = best_reco_KL
final_KL = best_reco_KL
if self.__phase_slope == 'both':
if negative_reco_KL.value < positive_reco_KL.value:
final_KL = negative_reco_KL
else:
final_KL = positive_reco_KL
self.__store_reconstructed_efields(
event, station, final_KL
)
if self.__debug:
self.__draw_reconstruction(
event,
station,
final_KL,
''
)
return True
def __prepare_traces(
self,
event,
station,
det
):
"""
Prepares the channel waveforms for the reconstruction by correcting
for time differences between channels, cutting them to the
right size and locating the radio pulse.
"""
if self.__debug:
plt.close('all')
fig1 = plt.figure(figsize=(18, 12))
ax1_1 = fig1.add_subplot(len(self.__used_channel_ids), 2, (1, 2 * len(self.__used_channel_ids) - 1))
fig2 = plt.figure(figsize=(18, 12))
self.__noise_levels = np.zeros(len(self.__used_channel_ids))
self.__n_shifts = np.zeros_like(self.__used_channel_ids)
self.__trace_start_times = np.zeros(len(self.__used_channel_ids))
self.__data_traces = np.zeros((len(self.__used_channel_ids), self.__trace_samples))
max_channel_length = 0
passband = [100. * units.MHz, 200 * units.MHz]
sim_channel_traces = []
for channel_id in self.__used_channel_ids:
channel = station.get_channel(channel_id)
if self.__use_sim:
sim_channel_sum = NuRadioReco.framework.base_trace.BaseTrace()
sim_channel_sum.set_trace(np.zeros_like(channel.get_trace()), channel.get_sampling_rate())
sim_channel_sum.set_trace_start_time(channel.get_trace_start_time())
for sim_channel in station.get_sim_station().get_channels_by_channel_id(channel_id):
sim_channel_sum += sim_channel
if sim_channel_sum.get_number_of_samples() > max_channel_length:
max_channel_length = sim_channel_sum.get_number_of_samples()
sim_channel_traces.append(sim_channel_sum)
else:
if channel.get_number_of_samples() > max_channel_length:
max_channel_length = channel.get_number_of_samples()
correlation_sum = np.zeros(self.__electric_field_template.get_number_of_samples() + max_channel_length)
if self.__debug:
plt.close('all')
fig1 = plt.figure(figsize=(16, 8))
ax1_1 = fig1.add_subplot(121)
ax1_1.grid()
ax1_2 = fig1.add_subplot(122)
ax1_2.grid()
fig2 = plt.figure(figsize=(12, 12))
channel_trace_templates = np.zeros((len(self.__used_channel_ids), len(self.__electric_field_template.get_trace())))
for i_channel, channel_id in enumerate(self.__used_channel_ids):
channel = station.get_channel(channel_id)
amp_response = det.get_amplifier_response(station.get_id(), channel_id, self.__electric_field_template.get_frequencies())
antenna_orientation = det.get_antenna_orientation(station.get_id(), channel_id)
antenna_pattern = self.__antenna_pattern_provider.load_antenna_pattern(det.get_antenna_model(station.get_id(), channel_id))
antenna_response = antenna_pattern.get_antenna_response_vectorized(
self.__electric_field_template.get_frequencies(),
channel.get_parameter(chp.signal_receiving_zenith),
0.,
antenna_orientation[0],
antenna_orientation[1],
antenna_orientation[2],
antenna_orientation[3]
)
channel_spectrum_template = fft.time2freq(
self.__electric_field_template.get_filtered_trace(passband, filter_type='butterabs'),
self.__electric_field_template.get_sampling_rate()
) * amp_response * (antenna_response['theta'] + antenna_response['phi'])
channel_trace_template = fft.freq2time(channel_spectrum_template, self.__electric_field_template.get_sampling_rate())
channel_trace_templates[i_channel] = channel_trace_template
channel.apply_time_shift(-channel.get_parameter(chp.signal_time_offset), True)
if self.__use_sim:
sim_channel_traces[i_channel].apply_time_shift(-channel.get_parameter(chp.signal_time_offset), True)
channel_trace = sim_channel_traces[i_channel].get_filtered_trace(passband, filter_type='butterabs')
else:
channel_trace = channel.get_filtered_trace(passband, filter_type='butterabs')
if self.__use_sim:
correlation = radiotools.helper.get_normalized_xcorr(np.abs(scipy.signal.hilbert(channel_trace_template)), np.abs(scipy.signal.hilbert(channel_trace)))
else:
correlation = radiotools.helper.get_normalized_xcorr(channel_trace_template, channel_trace)
correlation = np.abs(correlation)
correlation_sum[:len(correlation)] += correlation
toffset = -(np.arange(0, correlation.shape[0]) - len(channel_trace)) / channel.get_sampling_rate() # - propagation_times[i_channel, i_solution] - channel.get_trace_start_time()
if self.__use_sim:
sim_channel_traces[i_channel].apply_time_shift(channel.get_parameter(chp.signal_time_offset), True)
# else:
# channel.apply_time_shift(channel.get_parameter(chp.signal_time_offset), True)
if self.__debug:
ax1_1.plot(toffset, correlation)
for i_channel, channel_id in enumerate(self.__used_channel_ids):
channel = station.get_channel(channel_id)
time_offset = channel.get_parameter(chp.signal_time_offset)
channel_trace = channel.get_filtered_trace(passband, filter_type='butterabs')
toffset = -(np.arange(0, correlation_sum.shape[0]) - len(channel_trace)) / channel.get_sampling_rate()
if self.__debug:
ax2_1 = fig2.add_subplot(len(self.__used_channel_ids), 2, 2 * i_channel + 1)
ax2_1.grid()
ax2_1.plot(channel.get_times(), channel_trace / units.mV, c='C0', alpha=1.)
ax2_1.set_title('Channel {}'.format(channel_id))
ax2_1.plot(self.__electric_field_template.get_times() + channel.get_trace_start_time() + toffset[np.argmax(correlation_sum)], channel_trace_templates[i_channel] / np.max(channel_trace_templates[i_channel]) * np.max(channel_trace) / units.mV, c='C1')
sim_channel_sum = None
for sim_channel in station.get_sim_station().iter_channels():
if sim_channel.get_id() == channel_id:
if sim_channel_sum is None:
sim_channel_sum = sim_channel
else:
sim_channel_sum += sim_channel
if sim_channel_sum is not None:
sim_channel_sum.apply_time_shift(-channel.get_parameter(chp.signal_time_offset), True)
ax2_1.plot(sim_channel_sum.get_times(), sim_channel_sum.get_filtered_trace(passband, filter_type='butterabs') / units.mV, c='k', alpha=.5)
ax2_1.set_xlim([sim_channel_sum.get_trace_start_time() - 50, sim_channel_sum.get_times()[-1] + 50])
sim_channel_sum.apply_time_shift(channel.get_parameter(chp.signal_time_offset), True)
channel.apply_time_shift(-toffset[np.argmax(correlation_sum)])
self.__data_traces[i_channel] = channel.get_trace()[:self.__trace_samples]
self.__noise_levels[i_channel] = np.sqrt(np.mean(channel.get_trace()[self.__trace_samples + 1:]**2))
self.__n_shifts[i_channel] = int((toffset[np.argmax(correlation_sum)] + time_offset) * channel.get_sampling_rate())
self.__trace_start_times[i_channel] = channel.get_trace_start_time() + (toffset[np.argmax(correlation_sum)] + time_offset)
if self.__debug:
ax2_2 = fig2.add_subplot(len(self.__used_channel_ids), 2, 2 * i_channel + 2)
ax2_2.grid()
ax2_2.plot(np.arange(len(self.__data_traces[i_channel])) / channel.get_sampling_rate(), self.__data_traces[i_channel])
channel.apply_time_shift(channel.get_parameter(chp.signal_time_offset) + toffset[np.argmax(correlation_sum)], True)
self.__scaling_factor = np.max(self.__data_traces)
self.__data_traces /= self.__scaling_factor
self.__noise_levels /= self.__scaling_factor
if self.__debug:
ax1_2.plot(correlation_sum)
fig2.tight_layout()
fig2.savefig('{}_{}_traces.png'.format(event.get_run_number(), event.get_id()))
def __get_detector_operators(
self,
station,
detector,
frequency_domain,
sampling_rate
):
"""
Creates the operators to simulate the detector response.
"""
amp_operators = []
self.__gain_scaling = []
self.__classic_efield_recos = []
frequencies = frequency_domain.get_k_length_array().val / self.__trace_samples * sampling_rate
hardware_responses = np.zeros((len(self.__used_channel_ids), 2, len(frequencies)), dtype=complex)
if self.__passband is not None:
b, a = scipy.signal.butter(10, self.__passband, 'bandpass', analog=True)
w, h = scipy.signal.freqs(b, a, frequencies)
filter_field = ift.Field(ift.DomainTuple.make(frequency_domain), np.abs(h))
filter_operator = ift.DiagonalOperator(filter_field, frequency_domain)
if self.__filter_type == 'butter':
filter_phase = np.unwrap(np.angle(h))
else:
filter_phase = 0
else:
filter_operator = ift.ScalingOperator(1., frequency_domain)
filter_phase = 0
for i_channel, channel_id in enumerate(self.__used_channel_ids):
channel = station.get_channel(channel_id)
receiving_zenith = channel.get_parameter(chp.signal_receiving_zenith)
if channel.has_parameter(chp.signal_receiving_azimuth):
receive_azimuth = channel.get_parameter(chp.signal_receiving_azimuth)
else:
receive_azimuth = 0.
antenna_response = NuRadioReco.utilities.trace_utilities.get_efield_antenna_factor(station, frequencies, [channel_id], detector, receiving_zenith, receive_azimuth, self.__antenna_pattern_provider)[0]
amp_response = detector.get_amplifier_response(station.get_id(), channel_id, frequencies)
amp_gain = np.abs(amp_response)
amp_phase = np.unwrap(np.angle(amp_response))
total_gain = np.abs(amp_gain) * np.abs(antenna_response)
total_phase = np.unwrap(np.angle(antenna_response)) + amp_phase + filter_phase
total_phase[:, total_phase.shape[1] // 2:] *= -1
total_phase[:, total_phase.shape[1] // 2 + 1] = 0
total_phase *= -1
hardware_responses[i_channel, 0] = (total_gain * np.exp(1.j * total_phase))[0]
hardware_responses[i_channel, 1] = (total_gain * np.exp(1.j * total_phase))[1]
max_gain = np.max(np.abs(hardware_responses))
self.__gain_scaling = max_gain
hardware_responses /= max_gain
for i_channel, channel_id in enumerate(self.__used_channel_ids):
amp_field_theta = ift.Field(ift.DomainTuple.make(frequency_domain), hardware_responses[i_channel][0])
amp_field_phi = ift.Field(ift.DomainTuple.make(frequency_domain), hardware_responses[i_channel][1])
amp_operators.append([ift.DiagonalOperator(amp_field_theta), ift.DiagonalOperator(amp_field_phi)])
return amp_operators, filter_operator
def __get_likelihood_operator(
self,
frequency_domain,
large_frequency_domain,
hardware_operators,
filter_operator
):
"""
Creates the IFT model from which the maximum posterior is calculated
"""
power_domain = ift.RGSpace(large_frequency_domain.get_default_codomain().shape[0], harmonic=True)
power_space = ift.PowerSpace(power_domain)
self.__amp_dct['target'] = power_space
A = ift.SLAmplitude(**self.__amp_dct)
self.__power_spectrum_operator = A
correlated_field = ift.CorrelatedField(large_frequency_domain.get_default_codomain(), A)
realizer = ift.Realizer(self.__fft_operator.domain)
realizer2 = ift.Realizer(self.__fft_operator.target)
inserter = NuRadioReco.modules.iftElectricFieldReconstructor.operators.Inserter(realizer.target)
large_sp = correlated_field.target
small_sp = ift.RGSpace(large_sp.shape[0] // 2, large_sp[0].distances)
zero_padder = ift.FieldZeroPadder(small_sp, large_sp.shape, central=False)
domain_flipper = NuRadioReco.modules.iftElectricFieldReconstructor.operators.DomainFlipper(zero_padder.domain, target=ift.RGSpace(small_sp.shape, harmonic=True))
mag_S_h = (domain_flipper @ zero_padder.adjoint @ correlated_field)
mag_S_h = NuRadioReco.modules.iftElectricFieldReconstructor.operators.SymmetrizingOperator(mag_S_h.target) @ mag_S_h
subtract_one = ift.Adder(ift.Field(mag_S_h.target, -6))
mag_S_h = realizer2.adjoint @ (subtract_one @ mag_S_h).exp()
fft_operator = ift.FFTOperator(frequency_domain.get_default_codomain())
scaling_domain = ift.UnstructuredDomain(1)
add_one = ift.Adder(ift.Field(inserter.domain, 1))
polarization_domain = ift.UnstructuredDomain(1)
likelihood = None
self.__efield_trace_operators = []
self.__efield_spec_operators = []
self.__channel_trace_operators = []
self.__channel_spec_operators = []
polarization_inserter = NuRadioReco.modules.iftElectricFieldReconstructor.operators.Inserter(mag_S_h.target)
polarization_field = realizer2 @ polarization_inserter @ (2. * ift.FieldAdapter(polarization_domain, 'pol'))
for i_channel, channel_id in enumerate(self.__used_channel_ids):
phi_S_h = NuRadioReco.modules.iftElectricFieldReconstructor.operators.SlopeSpectrumOperator(frequency_domain.get_default_codomain(), self.__phase_dct['sm'], self.__phase_dct['im'], self.__phase_dct['sv'], self.__phase_dct['iv'])
phi_S_h = realizer2.adjoint @ phi_S_h
scaling_field = (inserter @ add_one @ (.1 * ift.FieldAdapter(scaling_domain, 'scale{}'.format(i_channel))))
if self.__polarization == 'theta':
efield_spec_operator_theta = ((filter_operator @ (mag_S_h * (1.j * phi_S_h).exp())))
efield_spec_operator_phi = None
channel_spec_operator = (hardware_operators[i_channel][0] @ efield_spec_operator_theta)
elif self.__polarization == 'phi':
efield_spec_operator_theta = None
efield_spec_operator_phi = ((filter_operator @ (mag_S_h * (1.j * phi_S_h).exp())))
channel_spec_operator = (hardware_operators[i_channel][1] @ efield_spec_operator_phi)
elif self.__polarization == 'pol':
efield_spec_operator_theta = ((filter_operator @ ((mag_S_h * polarization_field.cos()) * (1.j * phi_S_h).exp())))
efield_spec_operator_phi = ((filter_operator @ ((mag_S_h * polarization_field.sin()) * (1.j * phi_S_h).exp())))
channel_spec_operator = (hardware_operators[i_channel][0] @ efield_spec_operator_theta) + (hardware_operators[i_channel][1] @ efield_spec_operator_phi)
else:
raise ValueError('Unrecognized polarization setting {}. Possible values are theta, phi and pol'.format(self.__polarization))
efield_spec_operators = [
efield_spec_operator_theta,
efield_spec_operator_phi
]
efield_trace_operator = []
if self.__efield_scaling:
for efield_spec_operator in efield_spec_operators:
if efield_spec_operator is not None:
efield_trace_operator.append(((realizer @ fft_operator.inverse @ efield_spec_operator)) * scaling_field)
else:
efield_trace_operator.append(None)
channel_trace_operator = ((realizer @ fft_operator.inverse @ (channel_spec_operator))) * scaling_field
else:
for efield_spec_operator in efield_spec_operators:
if efield_spec_operator is not None:
efield_trace_operator.append(((realizer @ fft_operator.inverse @ efield_spec_operator)))
else:
efield_trace_operator.append(None)
channel_trace_operator = ((realizer @ fft_operator.inverse @ (channel_spec_operator)))
noise_operator = ift.ScalingOperator(self.__noise_levels[i_channel]**2, frequency_domain.get_default_codomain())
data_field = ift.Field(ift.DomainTuple.make(frequency_domain.get_default_codomain()), self.__data_traces[i_channel])
self.__efield_spec_operators.append(efield_spec_operators)
self.__efield_trace_operators.append(efield_trace_operator)
self.__channel_spec_operators.append(channel_spec_operator)
self.__channel_trace_operators.append(channel_trace_operator)
if likelihood is None:
likelihood = ift.GaussianEnergy(mean=data_field, inverse_covariance=noise_operator.inverse)(self.__channel_trace_operators[i_channel])
else:
likelihood += ift.GaussianEnergy(mean=data_field, inverse_covariance=noise_operator.inverse)(self.__channel_trace_operators[i_channel])
return likelihood
def __store_reconstructed_efields(
self,
event,
station,
KL
):
"""
Adds electric fields containing the reconstruction results to the station.
"""
if self.__efield_scaling:
for i_channel, channel_id in enumerate(self.__used_channel_ids):
efield = self.__get_reconstructed_efield(KL, i_channel)
station.add_electric_field(efield)
else:
# The reconstructed electric field is the same for all channels, so it does not matter what we pick for
# i_channel
efield = self.__get_reconstructed_efield(KL, 0)
efield.set_channel_ids(self.__used_channel_ids)
station.add_electric_field(efield)
def __get_reconstructed_efield(
self,
KL,
i_channel
):
"""
Creates an electric field object containing the reconstruction results.
"""
median = KL.position
efield_stat_calculators = [ift.StatCalculator(), ift.StatCalculator()]
polarization_stat_calculator = ift.StatCalculator()
energy_fluence_stat_calculator = ift.StatCalculator()
slope_parameter_stat_calculator = ift.StatCalculator()
rec_efield = np.zeros((3, self.__electric_field_template.get_number_of_samples()))
sampling_rate = self.__electric_field_template.get_sampling_rate()
times = np.arange(self.__data_traces.shape[1]) / sampling_rate
freqs = np.fft.rfftfreq(rec_efield.shape[1], 1. / sampling_rate)
for sample in KL.samples:
efield_sample_pol = np.zeros_like(rec_efield)
if self.__efield_trace_operators[i_channel][0] is not None:
efield_sample_theta = self.__efield_trace_operators[i_channel][0].force(median + sample).val
efield_stat_calculators[0].add(efield_sample_theta)
efield_sample_pol[1] = efield_sample_theta
if self.__efield_trace_operators[i_channel][1] is not None:
efield_sample_phi = self.__efield_trace_operators[i_channel][1].force(median + sample).val
efield_stat_calculators[1].add(efield_sample_phi)
efield_sample_pol[2] = efield_sample_phi
if self.__polarization == 'pol':
energy_fluences = trace_utilities.get_electric_field_energy_fluence(
efield_sample_pol,
times
)
polarization_stat_calculator.add(np.arctan(np.sqrt(energy_fluences[2]) / np.sqrt(energy_fluences[1])))
e_fluences = np.zeros((len(self.__energy_fluence_passbands), 3))
for i_passband, passband in enumerate(self.__energy_fluence_passbands):
filter_response = bandpass_filter.get_filter_response(freqs, passband, 'butter', 10)
e_fluence = trace_utilities.get_electric_field_energy_fluence(
fft.freq2time(fft.time2freq(efield_sample_pol, sampling_rate) * filter_response, sampling_rate) * self.__scaling_factor / self.__gain_scaling,
times
)
e_fluence[0] = np.sum(np.abs(e_fluence))
e_fluences[i_passband] = e_fluence
energy_fluence_stat_calculator.add(e_fluences)
slopes = np.zeros((len(self.__slope_passbands), 3))
for i_passband, passbands in enumerate(self.__slope_passbands):
filter_response_1 = bandpass_filter.get_filter_response(freqs, passbands[0], 'butter', 10)
e_fluence_1 = trace_utilities.get_electric_field_energy_fluence(
fft.freq2time(fft.time2freq(efield_sample_pol, sampling_rate) * filter_response_1, sampling_rate) * self.__scaling_factor / self.__gain_scaling,
times
)
e_fluence_1[0] = np.sum(np.abs(e_fluence_1))
filter_response_2 = bandpass_filter.get_filter_response(freqs, passbands[1], 'butter', 10)
e_fluence_2 = trace_utilities.get_electric_field_energy_fluence(
fft.freq2time(fft.time2freq(efield_sample_pol, sampling_rate) * filter_response_2, sampling_rate) * self.__scaling_factor / self.__gain_scaling,
times
)
e_fluence_2[0] = np.sum(np.abs(e_fluence_2))
if self.__polarization == 'pol':
slopes[i_passband] = e_fluence_1[0] / e_fluence_2[0]
elif self.__polarization == 'theta':
slopes[i_passband] = e_fluence_1[1] / e_fluence_2[1]
else:
slopes[i_passband] = e_fluence_1[2] / e_fluence_2[2]
slope_parameter_stat_calculator.add(slopes)
if self.__efield_trace_operators[i_channel][0] is not None:
rec_efield[1] = efield_stat_calculators[0].mean * self.__scaling_factor / self.__gain_scaling
if self.__efield_trace_operators[i_channel][1] is not None:
rec_efield[2] = efield_stat_calculators[1].mean * self.__scaling_factor / self.__gain_scaling
efield = NuRadioReco.framework.electric_field.ElectricField([self.__used_channel_ids[i_channel]])
efield.set_trace(rec_efield, self.__electric_field_template.get_sampling_rate())
if self.__polarization == 'pol':
efield.set_parameter(efp.polarization_angle, polarization_stat_calculator.mean)
efield.set_parameter_error(efp.polarization_angle, np.sqrt(polarization_stat_calculator.var))
energy_fluence_dict = {}
slope_dict = {}
for i_passband, passband in enumerate(self.__energy_fluence_passbands):
energy_fluence_dict['{:.0f}-{:.0f}'.format(passband[0] / units.MHz, passband[1] / units.MHz)] = energy_fluence_stat_calculator.mean[i_passband]
for i_passband, passbands in enumerate(self.__slope_passbands):
slope_dict['{:.0f}-{:.0f}, {:.0f}-{:.0f}'.format(passbands[0][0], passbands[0][1], passbands[1][0], passbands[1][1])] = slope_parameter_stat_calculator.mean[i_passband]
energy_fluence_error = np.sqrt(energy_fluence_stat_calculator.var)
efield.set_parameter(efp.signal_energy_fluence, energy_fluence_dict)
efield.set_parameter_error(efp.signal_energy_fluence, energy_fluence_error)
efield.set_parameter(efp.energy_fluence_ratios, slope_dict)
efield.set_parameter_error(efp.energy_fluence_ratios, np.sqrt(slope_parameter_stat_calculator.var))
return efield
def __draw_priors(
self,
event,
station,
freq_space
):
"""
Draws samples from the prior distribution of the electric field spectrum.
"""
plt.close('all')
fig1 = plt.figure(figsize=(12, 8))
ax1_0 = fig1.add_subplot(3, 2, (1, 2))
ax1_1 = fig1.add_subplot(323)
ax1_2 = fig1.add_subplot(324)
ax1_3 = fig1.add_subplot(325)
ax1_4 = fig1.add_subplot(326)
sampling_rate = station.get_channel(self.__used_channel_ids[0]).get_sampling_rate()
times = np.arange(self.__data_traces.shape[1]) / sampling_rate
freqs = freq_space.get_k_length_array().val / self.__data_traces.shape[1] * sampling_rate
alpha = .8
for i in range(8):
x = ift.from_random('normal', self.__efield_trace_operators[0][0].domain)
efield_spec_sample = self.__efield_spec_operators[0][0].force(x)
ax1_1.plot(freqs / units.MHz, np.abs(efield_spec_sample.val) / np.max(np.abs(efield_spec_sample.val)), c='C{}'.format(i), alpha=alpha)
efield_trace_sample = self.__efield_trace_operators[0][0].force(x)
ax1_2.plot(times, efield_trace_sample.val / np.max(np.abs(efield_trace_sample.val)))
channel_spec_sample = self.__channel_spec_operators[0].force(x)
ax1_3.plot(freqs / units.MHz, np.abs(channel_spec_sample.val)) # / np.max(np.abs(channel_spec_sample.val)), c='C{}'.format(i), alpha=alpha)
channel_trace_sample = self.__channel_trace_operators[0].force(x)
ax1_4.plot(times, channel_trace_sample.val / np.max(np.abs(channel_trace_sample.val)), c='C{}'.format(i), alpha=alpha)
a = self.__power_spectrum_operator.force(x).val
power_freqs = self.__power_spectrum_operator.target[0].k_lengths / self.__data_traces.shape[1] * sampling_rate
ax1_0.plot(power_freqs, a, c='C{}'.format(i), alpha=alpha)
ax1_0.grid()
ax1_0.set_xscale('log')
ax1_0.set_yscale('log')
ax1_0.set_title('Power Spectrum')
ax1_0.set_xlabel('k')
ax1_0.set_ylabel('A')
ax1_1.grid()
ax1_1.set_xlim([50, 750])
ax1_2.grid()
ax1_2.set_xlabel('t [ns]')
ax1_2.set_ylabel('E [a.u.]')
ax1_2.set_title('E-Field Trace')
ax1_3.grid()
ax1_3.set_xlim([50, 750])
ax1_4.grid()
ax1_4.set_xlim([0, 150])
ax1_1.set_xlabel('f [MHz]')
# ax1_2.set_xlabel('t [ns]')
ax1_3.set_xlabel('f [MHz]')
ax1_4.set_xlabel('t [ns]')
ax1_1.set_ylabel('E [a.u.]')
# ax1_2.set_ylabel('E [a.u.]')
ax1_3.set_ylabel('U [a.u.]')
ax1_4.set_ylabel('U [a.u.]')
ax1_1.set_title('E-Field Spectrum')
# ax1_2.set_title('E-Field Trace')
ax1_3.set_title('Channel Spectrum')
ax1_4.set_title('Channel Trace')
fig1.tight_layout()
fig1.savefig('priors_{}_{}.png'.format(event.get_id(), event.get_run_number()))
def __draw_reconstruction(
self,
event,
station,
KL,
suffix=''
):
"""
Draw plots showing the results of the reconstruction.
"""
plt.close('all')
fontsize = 16
n_channels = len(self.__used_channel_ids)
median = KL.position
sampling_rate = station.get_channel(self.__used_channel_ids[0]).get_sampling_rate()
fig1 = plt.figure(figsize=(16, 4 * n_channels))
fig2 = plt.figure(figsize=(16, 4 * n_channels))
freqs = np.fft.rfftfreq(self.__data_traces.shape[1], 1. / sampling_rate)
classic_mean_efield_spec = np.zeros_like(freqs)
classic_mean_efield_spec /= len(self.__used_channel_ids)
for i_channel, channel_id in enumerate(self.__used_channel_ids):
times = np.arange(self.__data_traces.shape[1]) / sampling_rate + self.__trace_start_times[i_channel]
trace_stat_calculator = ift.StatCalculator()
amp_trace_stat_calculator = ift.StatCalculator()
efield_stat_calculators = [ift.StatCalculator(), ift.StatCalculator()]
amp_efield_stat_calculators = [ift.StatCalculator(), ift.StatCalculator()]
if self.__polarization == 'pol':
ax1_1 = fig1.add_subplot(n_channels, 3, 3 * i_channel + 1)
ax1_2 = fig1.add_subplot(n_channels, 3, 3 * i_channel + 2)
ax1_3 = fig1.add_subplot(n_channels, 3, 3 * i_channel + 3, sharey=ax1_2)
ax1_2.set_title(r'$\theta$ component', fontsize=fontsize)
ax1_3.set_title(r'$\varphi$ component', fontsize=fontsize)
else:
ax1_1 = fig1.add_subplot(n_channels, 2, 2 * i_channel + 1)
ax1_2 = fig1.add_subplot(n_channels, 2, 2 * i_channel + 2)
ax2_1 = fig2.add_subplot(n_channels, 1, i_channel + 1)
for sample in KL.samples:
for i_pol, efield_stat_calculator in enumerate(efield_stat_calculators):
channel_sample_trace = self.__channel_trace_operators[i_channel].force(median + sample).val
trace_stat_calculator.add(channel_sample_trace)
amp_trace = np.abs(fft.time2freq(channel_sample_trace, sampling_rate))
amp_trace_stat_calculator.add(amp_trace)
ax2_1.plot(times, channel_sample_trace * self.__scaling_factor / units.mV, c='k', alpha=.2)
ax1_1.plot(freqs / units.MHz, amp_trace * self.__scaling_factor / units.mV, c='k', alpha=.2)
if self.__efield_trace_operators[i_channel][i_pol] is not None:
efield_sample_trace = self.__efield_trace_operators[i_channel][i_pol].force(median + sample).val
efield_stat_calculator.add(efield_sample_trace)
amp_efield = np.abs(fft.time2freq(efield_sample_trace, sampling_rate))
amp_efield_stat_calculators[i_pol].add(amp_efield)
if self.__polarization == 'pol':
if i_pol == 0:
ax1_2.plot(freqs / units.MHz, amp_efield * self.__scaling_factor / self.__gain_scaling / (units.mV / units.m / units.GHz), c='k', alpha=.2)
else:
ax1_3.plot(freqs / units.MHz, amp_efield * self.__scaling_factor / self.__gain_scaling / (units.mV / units.m / units.GHz), c='k', alpha=.2)
else:
ax1_2.plot(freqs / units.MHz, amp_efield * self.__scaling_factor / self.__gain_scaling / (units.mV / units.m / units.GHz), c='k', alpha=.2)
ax1_1.plot(freqs / units.MHz, np.abs(fft.time2freq(self.__data_traces[i_channel], sampling_rate)) * self.__scaling_factor / units.mV, c='C0', label='data')
sim_efield_max = None
channel_snr = None
if station.has_sim_station():
sim_station = station.get_sim_station()
n_drawn_sim_channels = 0
for ray_tracing_id in sim_station.get_ray_tracing_ids():
sim_channel_sum = None
for sim_channel in sim_station.get_channels_by_ray_tracing_id(ray_tracing_id):
if sim_channel.get_id() == channel_id:
if sim_channel_sum is None:
sim_channel_sum = sim_channel
else:
sim_channel_sum += sim_channel
ax1_1.plot(sim_channel.get_frequencies() / units.MHz, np.abs(sim_channel.get_frequency_spectrum()) / units.mV, c='C1', linestyle='--', alpha=.5)
ax2_1.plot(sim_channel.get_times(), sim_channel.get_trace() / units.mV, c='C1', linewidth=6, zorder=1, linestyle='--', alpha=.5)
if sim_channel_sum is not None:
if n_drawn_sim_channels == 0:
sim_channel_label = 'MC truth'
else:
sim_channel_label = None
snr = .5 * (np.max(sim_channel_sum.get_trace()) - np.min(sim_channel_sum.get_trace())) / (self.__noise_levels[i_channel] * self.__scaling_factor)
if channel_snr is None or snr > channel_snr:
channel_snr = snr
ax1_1.plot(
sim_channel_sum.get_frequencies() / units.MHz,
np.abs(sim_channel_sum.get_frequency_spectrum()) / units.mV,
c='C1',
label=sim_channel_label,
alpha=.8,
linewidth=2
)
ax2_1.plot(
sim_channel_sum.get_times(),
sim_channel_sum.get_trace() / units.mV,
c='C1',
linewidth=6,
zorder=1,
label=sim_channel_label
)
n_drawn_sim_channels += 1
efield_sum = None
for efield in station.get_sim_station().get_electric_fields_for_channels([channel_id]):
if efield.get_ray_tracing_solution_id() == ray_tracing_id:
if self.__polarization == 'theta':
ax1_2.plot(efield.get_frequencies() / units.MHz, np.abs(efield.get_frequency_spectrum()[1]) / (units.mV / units.m / units.GHz), c='C1', alpha=.2, linestyle='--')
if self.__polarization == 'phi':
ax1_2.plot(efield.get_frequencies() / units.MHz, np.abs(efield.get_frequency_spectrum()[2]) / (units.mV / units.m / units.GHz), c='C1', alpha=.2, linestyle='--')
if self.__polarization == 'pol':
ax1_2.plot(efield.get_frequencies() / units.MHz, np.abs(efield.get_frequency_spectrum()[1]) / (units.mV / units.m / units.GHz), c='C1', alpha=.2, linestyle='--')
ax1_3.plot(efield.get_frequencies() / units.MHz, np.abs(efield.get_frequency_spectrum()[2]) / (units.mV / units.m / units.GHz), c='C1', alpha=.2, linestyle='--')
if efield_sum is None:
efield_sum = efield
else:
efield_sum += efield
if efield_sum is not None:
if self.__polarization == 'theta':
ax1_2.plot(efield_sum.get_frequencies() / units.MHz, np.abs(efield_sum.get_frequency_spectrum()[1]) / (units.mV / units.m / units.GHz), c='C1', alpha=1.)
if self.__polarization == 'phi':
ax1_2.plot(efield_sum.get_frequencies() / units.MHz, np.abs(efield_sum.get_frequency_spectrum()[2]) / (units.mV / units.m / units.GHz), c='C1', alpha=1.)
if self.__polarization == 'pol':
ax1_2.plot(efield_sum.get_frequencies() / units.MHz, np.abs(efield_sum.get_frequency_spectrum()[1]) / (units.mV / units.m / units.GHz), c='C1', alpha=1.)
ax1_3.plot(efield_sum.get_frequencies() / units.MHz, np.abs(efield_sum.get_frequency_spectrum()[2]) / (units.mV / units.m / units.GHz), c='C1', alpha=1.)
if sim_efield_max is None or np.max(np.abs(efield_sum.get_frequency_spectrum())) > sim_efield_max:
sim_efield_max = np.max(np.abs(efield_sum.get_frequency_spectrum()))
else:
channel_snr = .5 * (np.max(station.get_channel(channel_id).get_trace()) - np.min(station.get_channel(channel_id).get_trace())) / (self.__noise_levels[i_channel] * self.__scaling_factor)
ax2_1.plot(times, self.__data_traces[i_channel] * self.__scaling_factor / units.mV, c='C0', alpha=1., zorder=5, label='data')
ax1_1.plot(freqs / units.MHz, amp_trace_stat_calculator.mean * self.__scaling_factor / units.mV, c='C2', label='IFT reco', linewidth=3, alpha=.6)
ax2_1.plot(times, trace_stat_calculator.mean * self.__scaling_factor / units.mV, c='C2', linestyle='-', zorder=2, linewidth=4, label='IFT reconstruction')
ax2_1.set_xlim([times[0], times[-1]])
if channel_snr is not None:
textbox = dict(boxstyle='round', facecolor='white', alpha=.5)
ax2_1.text(.9, .05, 'SNR={:.1f}'.format(channel_snr), transform=ax2_1.transAxes, bbox=textbox, fontsize=18)
if self.__polarization == 'theta':
ax1_2.plot(freqs / units.MHz, amp_efield_stat_calculators[0].mean * self.__scaling_factor / self.__gain_scaling / (units.mV / units.m / units.GHz), c='C2', alpha=.6)
if self.__polarization == 'phi':
ax1_2.plot(freqs / units.MHz, amp_efield_stat_calculators[1].mean * self.__scaling_factor / self.__gain_scaling / (units.mV / units.m / units.GHz), c='C2', alpha=.6)
if self.__polarization == 'pol':
ax1_2.plot(freqs / units.MHz, np.abs(fft.time2freq(efield_stat_calculators[0].mean, sampling_rate)) * self.__scaling_factor / self.__gain_scaling / (units.mV / units.m / units.GHz), c='C2', alpha=.6)
ax1_3.plot(freqs / units.MHz, np.abs(fft.time2freq(efield_stat_calculators[1].mean, sampling_rate)) * self.__scaling_factor / self.__gain_scaling / (units.mV / units.m / units.GHz), c='C2', alpha=.6)
ax1_1.axvline(self.__passband[0] / units.MHz, c='k', alpha=.5, linestyle=':')
ax1_1.axvline(self.__passband[1] / units.MHz, c='k', alpha=.5, linestyle=':')
ax1_2.axvline(self.__passband[0] / units.MHz, c='k', alpha=.5, linestyle=':')
ax1_2.axvline(self.__passband[1] / units.MHz, c='k', alpha=.5, linestyle=':')
ax1_1.grid()
ax1_2.grid()
ax2_1.grid()
if self.__polarization == 'pol':
ax1_3.axvline(self.__passband[0] / units.MHz, c='k', alpha=.5, linestyle=':')
ax1_3.axvline(self.__passband[1] / units.MHz, c='k', alpha=.5, linestyle=':')
ax1_3.grid()
ax1_3.set_xlim([0, 750])
ax1_3.set_xlabel('f [MHz]')
if i_channel == 0:
ax2_1.legend(fontsize=fontsize)
ax1_1.legend(fontsize=fontsize)
ax1_1.set_xlim([0, 750])
ax1_2.set_xlim([0, 750])
ax1_1.set_title('Channel {}'.format(channel_id), fontsize=fontsize)
ax2_1.set_title('Channel {}'.format(channel_id), fontsize=fontsize)
ax1_1.set_xlabel('f [MHz]', fontsize=fontsize)
ax1_2.set_xlabel('f [MHz]', fontsize=fontsize)
ax1_1.set_ylabel('channel voltage [mV/GHz]', fontsize=fontsize)
ax1_2.set_ylabel('E-Field [mV/m/GHz]', fontsize=fontsize)
ax2_1.set_xlabel('t [ns]', fontsize=fontsize)
ax2_1.set_ylabel('U [mV]', fontsize=fontsize)
ax2_1_dummy = ax2_1.twiny()
ax2_1_dummy.set_xlim(ax2_1.get_xlim())
ax2_1_dummy.set_xticks(np.arange(times[0], times[-1], 10))
def get_ticklabels(ticks):
return ['{:.0f}'.format(tick - times[0]) for tick in ticks]
ax2_1_dummy.set_xticklabels(get_ticklabels(np.arange(times[0], times[-1], 10)), fontsize=fontsize)
ax1_1.tick_params(axis='both', labelsize=fontsize)
ax1_2.tick_params(axis='both', labelsize=fontsize)
ax2_1.tick_params(axis='both', labelsize=fontsize)
if self.__polarization == 'pol':
ax1_3.tick_params(axis='both', labelsize=fontsize)
if sim_efield_max is not None:
ax1_2.set_ylim([0, 1.2 * sim_efield_max / (units.mV / units.m / units.GHz)])
fig1.tight_layout()
fig1.savefig('{}_{}_spec_reco{}.png'.format(event.get_run_number(), event.get_id(), suffix))
fig2.tight_layout()
fig2.savefig('{}_{}_trace_reco{}.png'.format(event.get_run_number(), event.get_id(), suffix))
/Eggsac-0.1.0.tar.gz/Eggsac-0.1.0/eggsac/utils.py
from __future__ import absolute_import
import ConfigParser
import datetime
import logging
import os
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile
import virtualenv
import zipfile
_devnull = os.open(os.devnull, os.O_RDWR)
PYTHON_VERSION = sys.version[:3] # e.g., "2.7"
is_win = (sys.platform == 'win32')
def run_command(
cmd, env=os.environ,
logger=None, loggerName=None, log_errors=True,
**kwargs):
"""
Run the given command, possibly modifying it by inserting some
convenient options, with the given environment. Returns a list of
output lines (stdout followed by stderr) on success; raises a
subprocess.CalledProcessError on failure.
"""
logger = logger or logging.getLogger(loggerName)
# Ignore these passed-in keywords: we know better.
kwargs = kwargs.copy()
for kw in ['stdout', 'stderr', 'universal_newlines', 'env']:
if kw in kwargs:
logger.warn("run_command: Ignoring keyword %s", kw)
del kwargs[kw]
child = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
env=env,
**kwargs)
# TODO: poll child's stdout and stderr so that the parent
# can echo output more or less in real time. It's disconcerting
# with a slow child process, where (apparently) nothing happens
# for several minutes, then thousands of lines of output
# are spewed all at once.
# See http://znasibov.info/blog/html/2010/04/24/inside_python_subprocess_communication.html
(stdout, stderr) = child.communicate()
retcode = child.wait()
if retcode != 0:
for line in stderr.splitlines():
if log_errors:
logger.error("%s", line)
else:
logger.debug("%s", line)
raise subprocess.CalledProcessError(retcode, cmd)
rv = stdout.splitlines() + stderr.splitlines()
logger.debug("in %s, %s => %s",
kwargs.get('cwd', os.getcwd()),
cmd, rv)
# TODO: return (retcode, stdout, stderr)
return rv
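# Usage sketch (the command and working directory are only examples):
#     lines = run_command(["git", "status", "--porcelain"], cwd="/path/to/checkout")
#     for line in lines:
#         print line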
class VCS(object):
def __init__(self, root):
self.root = root
def __repr__(self):
return "<%s root=%r>" % (self.__class__.__name__, self.root)
def command(self, params, one_liner=True, *args, **kwargs):
rv = run_command(self.Executable + params, cwd=self.root, *args, **kwargs)
return rv[0] if rv and one_liner else rv
class Git(VCS):
Executable = ["git"]
@classmethod
def find_root(cls, dir):
try:
relroot = subprocess.check_output(
["git", "rev-parse", "--show-cdup"], stderr=_devnull, cwd=dir)
return os.path.normpath(os.path.join(dir, relroot.strip() or "."))
except subprocess.CalledProcessError:
return None
def sha(self):
return self.command(["rev-parse", "--short", "HEAD"])
def revision(self):
return int(self.command(["rev-list", "--count", "HEAD"]))
def current_branch(self):
return self.command(["rev-parse", "--abbrev-ref", "HEAD"])
class Mercurial(VCS):
Executable = ["hg"]
@classmethod
def find_root(cls, dir):
try:
return subprocess.check_output(["hg", "root"], stderr=_devnull, cwd=dir).strip()
except subprocess.CalledProcessError:
return None
def sha(self):
return self.command(["identify", "--id"])
def revision(self):
return int(self.command(["identify", "--num"]))
def current_branch(self):
return self.command(["branch"])
def find_vcs(dir):
for vcs in (Git, Mercurial):
root = vcs.find_root(dir)
if root:
return vcs(root)
return None
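# Usage sketch (assumes the current directory is inside a Git or Mercurial checkout):
#     vcs = find_vcs(os.getcwd())
#     if vcs is not None:
#         print vcs.sha(), vcs.revision(), vcs.current_branch()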
class NullContext:
"""
A context manager that can be used in a `with` statement
when nothing needs to be done; e.g., with(foo if bar else NullContext())
"""
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
class FilePush(object):
"""Context Manager that temporarily creates a file at `path` with `contents`,
which is removed on exit. If there was already a file called `path`,
it is preserved."""
def __init__(self, path, content):
self.path, self.content, self.old_path = path, content, None
def __enter__(self):
if os.path.exists(self.path):
# Possible race condition where some other process
# grabs `old_path`, but I don't care.
self.old_path = tempfile.mktemp()
shutil.move(self.path, self.old_path)
with open(self.path, 'wb') as f:
f.write(self.content)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
os.remove(self.path)
if self.old_path:
shutil.move(self.old_path, self.path)
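# Usage sketch (path and contents are hypothetical):
#     with FilePush("setup.cfg", "[easy_install]\nindex_url=http://pypi.example/simple\n"):
#         ...  # commands run here see the temporary setup.cfg; any original is restored on exit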
def replace_lines(lines, replacements):
    # Apply every (pattern, substitution) pair to each line; one output line per input line.
    replacements = [(re.compile(p), r) for p, r in replacements]
    return [reduce(lambda line, pr: pr[0].sub(pr[1], line), replacements, line) for line in lines]
def file_replace_lines(filename, replacements):
"""Replaces lines in `filename` with `replacements`,
a sequence of (uncompiled regex, substitution) pairs."""
with open(filename, 'rb') as f:
lines = f.read().decode('utf-8').splitlines()
lines = replace_lines(lines, replacements)
with open(filename, 'wb') as f:
f.write('\n'.join(lines).encode('utf-8'))
def create_virtualenv(venv_dir, use_distribute=False, unzip_setuptools=True, **kwargs):
venv_copyfile = virtualenv.copyfile
try:
# Monkeypatch virtualenv to force stdlibs to be copied, not symlinked
virtualenv.copyfile = lambda src, dst, symlink=True: venv_copyfile(
src, dst, symlink=False)
virtualenv.create_environment(
venv_dir,
unzip_setuptools=unzip_setuptools,
# use_distribute=use_distribute, # Only for virtualenv <= 1.9
never_download=True,
**kwargs)
finally:
virtualenv.copyfile = venv_copyfile
def valid_virtualenv_dir(venv_dir):
activate = ("Scripts", "activate.bat") if is_win else ("bin", "activate")
return venv_dir and os.path.exists(os.path.join(venv_dir, *activate))
def virtualenv_python_exe(venv_dir):
python_exe = ("Scripts", "python.exe") if is_win else ("bin", "python")
return os.path.join(venv_dir, *python_exe)
def cleanup_virtualenv(venv_dir):
# Clean up some large stuff we won't need
for dir in [
"include",
os.path.join("lib", "python"+PYTHON_VERSION, "config"),
]:
subdir = os.path.join(venv_dir, dir)
if os.path.exists(subdir):
shutil.rmtree(subdir)
def check_virtualenv(venv_dir):
if not venv_dir:
venv_dir = os.getenv('VIRTUAL_ENV')
if not venv_dir:
# print "Must supply virtualenv or run under one"
return None
if not valid_virtualenv_dir(venv_dir):
# Using virtualenvwrapper?
if os.getenv("WORKON_HOME"):
venv_dir = os.path.join(os.getenv("WORKON_HOME"), venv_dir)
if not valid_virtualenv_dir(venv_dir):
# print "Can't find virtualenv", venv_dir
return None
else:
return venv_dir
def fake_virtualenv(venv_dir):
"""Fake the effect of 'source activate' for use with subprocess.Popen"""
# Note: Can't use
# activate_this = os.path.join(venv_dir, "bin", "activate_this.py")
# execfile(activate_this, dict(__file__=activate_this))
# as that affects the *current* Python
bin_path = r"\Scripts;" if is_win else "/bin:"
env = os.environ.copy()
env['VIRTUAL_ENV'] = venv_dir
env['PATH'] = venv_dir + bin_path + env['PATH']
if 'PYTHONHOME' in env:
del env['PYTHONHOME']
# Don't bother with PS1 or PROMPT, as this is for a non-interactive shell
return env
_interesting_env_vars = ('PATH', 'PYTHONPATH', 'VIRTUALENV')
def site_packages_path(venv_dir, python_version=None):
return os.path.join(
venv_dir or '',
"lib",
"python%s" % python_version or PYTHON_VERSION,
"site-packages")
def find_egg(venv_dir, package_name, relative=False, python_version=None):
python_version = python_version or PYTHON_VERSION
egg_suffix = "-py%s.egg" % python_version
packages_path = site_packages_path(venv_dir, python_version)
site_packages = os.listdir(packages_path)
match = [egg for egg in site_packages
if egg.startswith(package_name) and egg.endswith(egg_suffix)]
if not match:
raise ValueError("Can't find '%s*%s' in %s" %
(package_name, egg_suffix, packages_path))
elif len(match) > 1:
raise ValueError("Too many matches for %s in %s: %r" %
(package_name, packages_path, match))
else:
return os.path.join(
site_packages_path('' if relative else venv_dir, python_version),
match[0])
def make_activate_relative(venv_dir):
# In bin/activate, replace hard-coded
# VIRTUAL_ENV="/path/to/virtualenv"
# with location-agnostic
# VIRTUAL_ENV="$(cd $(dirname "$BASH_SOURCE")/..; pwd)"
# For zsh, use $0 instead of $BASH_SOURCE
file_replace_lines(
os.path.join(venv_dir, "bin", "activate"),
[(r'^VIRTUAL_ENV=".*"$',
'VIRTUAL_ENV="$(cd $(dirname "$BASH_SOURCE")/..; pwd)"')]
)
def setup_cfg_context_manager(index_url, dir):
"""Temporarily create dir/setup.cfg, setting index_url to speed up package download"""
if index_url:
contents = "[easy_install]\nindex_url=%s\n" % urlify(index_url)
return FilePush(os.path.join(dir, "setup.cfg"), contents)
else:
return NullContext()
def abspath(path):
return os.path.abspath(os.path.expanduser(path))
def urlify(path):
if (path.startswith("http://")
or path.startswith("https://")
or path.startswith("file://")):
return path
else:
return "file://" + abspath(path)
def tar_compression_mode(filename):
if filename.endswith(".tar.gz") or filename.endswith(".tgz"):
return ":gz"
elif filename.endswith(".tar.bz2") or filename.endswith(".tbz2"):
return ":bz2"
else:
return ""
def make_tarfile(output_filename, source_dir):
with tarfile.open(output_filename, "w"+tar_compression_mode(output_filename)) as tar:
tar.add(source_dir, arcname=os.path.basename(source_dir))
def zip_add_directory(zip, source_dir):
"""Recursively add a directory tree to `zip`, an archive."""
relroot = abspath(os.path.join(source_dir, ".."))
for root, dirs, files in os.walk(source_dir):
# add directory `root` (needed for empty dirs)
zip.write(root, os.path.relpath(root, relroot))
for file in files:
filename = os.path.join(root, file)
if os.path.isfile(filename): # regular files only
arcname = os.path.join(os.path.relpath(root, relroot), file)
zip.write(filename, arcname)
def make_zipfile(output_filename, source_dir):
with zipfile.ZipFile(output_filename, "w", zipfile.ZIP_DEFLATED) as zip:
zip_add_directory(zip, source_dir)
def make_dir(target_dir):
if os.path.exists(target_dir):
if not os.path.isdir(target_dir):
raise ValueError("'%s' is not a directory" % target_dir)
else:
os.makedirs(target_dir)
def move_file_to_dir(source_file, target_dir, target_basename=None):
make_dir(target_dir)
target_dir = abspath(target_dir)
target_file = os.path.join(
target_dir, target_basename or os.path.basename(source_file))
if os.path.exists(target_file):
os.remove(target_file)
shutil.move(source_file, target_file)
return target_file
def timestamp(dt=None, precision=3):
dt = dt or datetime.datetime.now()
return dt.strftime("%H:%M:%S.%f")[:-precision]
def read_ini(package_dir, ini_filename, required=True):
ini_path = os.path.join(package_dir, ini_filename)
cfg = ConfigParser.ConfigParser()
read_ok = cfg.read(ini_path)
if required and not read_ok:
raise ValueError("Could not read '%s'" % ini_path)
return cfg
def read_list(cfg, section, option):
multiline = cfg.get(section, option)
return filter(None, [x.strip() for x in multiline.splitlines()])
/jupyterhub-sdp-0.9.0.1.tar.gz/jupyterhub-sdp-0.9.0.1/share/jupyterhub/static/components/moment/moment.d.ts
declare function moment(inp?: moment.MomentInput, format?: moment.MomentFormatSpecification, strict?: boolean): moment.Moment;
declare function moment(inp?: moment.MomentInput, format?: moment.MomentFormatSpecification, language?: string, strict?: boolean): moment.Moment;
declare namespace moment {
type RelativeTimeKey = 's' | 'ss' | 'm' | 'mm' | 'h' | 'hh' | 'd' | 'dd' | 'M' | 'MM' | 'y' | 'yy';
type CalendarKey = 'sameDay' | 'nextDay' | 'lastDay' | 'nextWeek' | 'lastWeek' | 'sameElse' | string;
type LongDateFormatKey = 'LTS' | 'LT' | 'L' | 'LL' | 'LLL' | 'LLLL' | 'lts' | 'lt' | 'l' | 'll' | 'lll' | 'llll';
interface Locale {
calendar(key?: CalendarKey, m?: Moment, now?: Moment): string;
longDateFormat(key: LongDateFormatKey): string;
invalidDate(): string;
ordinal(n: number): string;
preparse(inp: string): string;
postformat(inp: string): string;
relativeTime(n: number, withoutSuffix: boolean,
key: RelativeTimeKey, isFuture: boolean): string;
pastFuture(diff: number, absRelTime: string): string;
set(config: Object): void;
months(): string[];
months(m: Moment, format?: string): string;
monthsShort(): string[];
monthsShort(m: Moment, format?: string): string;
monthsParse(monthName: string, format: string, strict: boolean): number;
monthsRegex(strict: boolean): RegExp;
monthsShortRegex(strict: boolean): RegExp;
week(m: Moment): number;
firstDayOfYear(): number;
firstDayOfWeek(): number;
weekdays(): string[];
weekdays(m: Moment, format?: string): string;
weekdaysMin(): string[];
weekdaysMin(m: Moment): string;
weekdaysShort(): string[];
weekdaysShort(m: Moment): string;
weekdaysParse(weekdayName: string, format: string, strict: boolean): number;
weekdaysRegex(strict: boolean): RegExp;
weekdaysShortRegex(strict: boolean): RegExp;
weekdaysMinRegex(strict: boolean): RegExp;
isPM(input: string): boolean;
meridiem(hour: number, minute: number, isLower: boolean): string;
}
interface StandaloneFormatSpec {
format: string[];
standalone: string[];
isFormat?: RegExp;
}
interface WeekSpec {
dow: number;
doy: number;
}
type CalendarSpecVal = string | ((m?: MomentInput, now?: Moment) => string);
interface CalendarSpec {
sameDay?: CalendarSpecVal;
nextDay?: CalendarSpecVal;
lastDay?: CalendarSpecVal;
nextWeek?: CalendarSpecVal;
lastWeek?: CalendarSpecVal;
sameElse?: CalendarSpecVal;
// any additional properties might be used with moment.calendarFormat
[x: string]: CalendarSpecVal | void; // undefined
}
type RelativeTimeSpecVal = (
string |
((n: number, withoutSuffix: boolean,
key: RelativeTimeKey, isFuture: boolean) => string)
);
type RelativeTimeFuturePastVal = string | ((relTime: string) => string);
interface RelativeTimeSpec {
future: RelativeTimeFuturePastVal;
past: RelativeTimeFuturePastVal;
s: RelativeTimeSpecVal;
ss: RelativeTimeSpecVal;
m: RelativeTimeSpecVal;
mm: RelativeTimeSpecVal;
h: RelativeTimeSpecVal;
hh: RelativeTimeSpecVal;
d: RelativeTimeSpecVal;
dd: RelativeTimeSpecVal;
M: RelativeTimeSpecVal;
MM: RelativeTimeSpecVal;
y: RelativeTimeSpecVal;
yy: RelativeTimeSpecVal;
}
interface LongDateFormatSpec {
LTS: string;
LT: string;
L: string;
LL: string;
LLL: string;
LLLL: string;
// let's forget for a sec that any upper/lower permutation will also work
lts?: string;
lt?: string;
l?: string;
ll?: string;
lll?: string;
llll?: string;
}
type MonthWeekdayFn = (momentToFormat: Moment, format?: string) => string;
type WeekdaySimpleFn = (momentToFormat: Moment) => string;
interface LocaleSpecification {
months?: string[] | StandaloneFormatSpec | MonthWeekdayFn;
monthsShort?: string[] | StandaloneFormatSpec | MonthWeekdayFn;
weekdays?: string[] | StandaloneFormatSpec | MonthWeekdayFn;
weekdaysShort?: string[] | StandaloneFormatSpec | WeekdaySimpleFn;
weekdaysMin?: string[] | StandaloneFormatSpec | WeekdaySimpleFn;
meridiemParse?: RegExp;
meridiem?: (hour: number, minute:number, isLower: boolean) => string;
isPM?: (input: string) => boolean;
longDateFormat?: LongDateFormatSpec;
calendar?: CalendarSpec;
relativeTime?: RelativeTimeSpec;
invalidDate?: string;
ordinal?: (n: number) => string;
ordinalParse?: RegExp;
week?: WeekSpec;
// Allow anything: in general any property that is passed as locale spec is
// put in the locale object so it can be used by locale functions
[x: string]: any;
}
interface MomentObjectOutput {
years: number;
/* One digit */
months: number;
/* Day of the month */
date: number;
hours: number;
minutes: number;
seconds: number;
milliseconds: number;
}
interface Duration {
clone(): Duration;
humanize(withSuffix?: boolean): string;
abs(): Duration;
as(units: unitOfTime.Base): number;
get(units: unitOfTime.Base): number;
milliseconds(): number;
asMilliseconds(): number;
seconds(): number;
asSeconds(): number;
minutes(): number;
asMinutes(): number;
hours(): number;
asHours(): number;
days(): number;
asDays(): number;
weeks(): number;
asWeeks(): number;
months(): number;
asMonths(): number;
years(): number;
asYears(): number;
add(inp?: DurationInputArg1, unit?: DurationInputArg2): Duration;
subtract(inp?: DurationInputArg1, unit?: DurationInputArg2): Duration;
locale(): string;
locale(locale: LocaleSpecifier): Duration;
localeData(): Locale;
toISOString(): string;
toJSON(): string;
/**
* @deprecated since version 2.8.0
*/
lang(locale: LocaleSpecifier): Moment;
/**
* @deprecated since version 2.8.0
*/
lang(): Locale;
/**
* @deprecated
*/
toIsoString(): string;
}
interface MomentRelativeTime {
future: any;
past: any;
s: any;
ss: any;
m: any;
mm: any;
h: any;
hh: any;
d: any;
dd: any;
M: any;
MM: any;
y: any;
yy: any;
}
interface MomentLongDateFormat {
L: string;
LL: string;
LLL: string;
LLLL: string;
LT: string;
LTS: string;
l?: string;
ll?: string;
lll?: string;
llll?: string;
lt?: string;
lts?: string;
}
interface MomentParsingFlags {
empty: boolean;
unusedTokens: string[];
unusedInput: string[];
overflow: number;
charsLeftOver: number;
nullInput: boolean;
invalidMonth: string | void; // null
invalidFormat: boolean;
userInvalidated: boolean;
iso: boolean;
parsedDateParts: any[];
meridiem: string | void; // null
}
interface MomentParsingFlagsOpt {
empty?: boolean;
unusedTokens?: string[];
unusedInput?: string[];
overflow?: number;
charsLeftOver?: number;
nullInput?: boolean;
invalidMonth?: string;
invalidFormat?: boolean;
userInvalidated?: boolean;
iso?: boolean;
parsedDateParts?: any[];
meridiem?: string;
}
interface MomentBuiltinFormat {
__momentBuiltinFormatBrand: any;
}
type MomentFormatSpecification = string | MomentBuiltinFormat | (string | MomentBuiltinFormat)[];
namespace unitOfTime {
type Base = (
"year" | "years" | "y" |
"month" | "months" | "M" |
"week" | "weeks" | "w" |
"day" | "days" | "d" |
"hour" | "hours" | "h" |
"minute" | "minutes" | "m" |
"second" | "seconds" | "s" |
"millisecond" | "milliseconds" | "ms"
);
type _quarter = "quarter" | "quarters" | "Q";
type _isoWeek = "isoWeek" | "isoWeeks" | "W";
type _date = "date" | "dates" | "D";
type DurationConstructor = Base | _quarter;
type DurationAs = Base;
type StartOf = Base | _quarter | _isoWeek | _date;
type Diff = Base | _quarter;
type MomentConstructor = Base | _date;
type All = Base | _quarter | _isoWeek | _date |
"weekYear" | "weekYears" | "gg" |
"isoWeekYear" | "isoWeekYears" | "GG" |
"dayOfYear" | "dayOfYears" | "DDD" |
"weekday" | "weekdays" | "e" |
"isoWeekday" | "isoWeekdays" | "E";
}
interface MomentInputObject {
years?: number;
year?: number;
y?: number;
months?: number;
month?: number;
M?: number;
days?: number;
day?: number;
d?: number;
dates?: number;
date?: number;
D?: number;
hours?: number;
hour?: number;
h?: number;
minutes?: number;
minute?: number;
m?: number;
seconds?: number;
second?: number;
s?: number;
milliseconds?: number;
millisecond?: number;
ms?: number;
}
interface DurationInputObject extends MomentInputObject {
quarters?: number;
quarter?: number;
Q?: number;
weeks?: number;
week?: number;
w?: number;
}
interface MomentSetObject extends MomentInputObject {
weekYears?: number;
weekYear?: number;
gg?: number;
isoWeekYears?: number;
isoWeekYear?: number;
GG?: number;
quarters?: number;
quarter?: number;
Q?: number;
weeks?: number;
week?: number;
w?: number;
isoWeeks?: number;
isoWeek?: number;
W?: number;
dayOfYears?: number;
dayOfYear?: number;
DDD?: number;
weekdays?: number;
weekday?: number;
e?: number;
isoWeekdays?: number;
isoWeekday?: number;
E?: number;
}
interface FromTo {
from: MomentInput;
to: MomentInput;
}
type MomentInput = Moment | Date | string | number | (number | string)[] | MomentInputObject | void; // null | undefined
type DurationInputArg1 = Duration | number | string | FromTo | DurationInputObject | void; // null | undefined
type DurationInputArg2 = unitOfTime.DurationConstructor;
type LocaleSpecifier = string | Moment | Duration | string[] | boolean;
interface MomentCreationData {
input: MomentInput;
format?: MomentFormatSpecification;
locale: Locale;
isUTC: boolean;
strict?: boolean;
}
interface Moment extends Object {
format(format?: string): string;
startOf(unitOfTime: unitOfTime.StartOf): Moment;
endOf(unitOfTime: unitOfTime.StartOf): Moment;
add(amount?: DurationInputArg1, unit?: DurationInputArg2): Moment;
/**
* @deprecated reverse syntax
*/
add(unit: unitOfTime.DurationConstructor, amount: number|string): Moment;
subtract(amount?: DurationInputArg1, unit?: DurationInputArg2): Moment;
/**
* @deprecated reverse syntax
*/
subtract(unit: unitOfTime.DurationConstructor, amount: number|string): Moment;
calendar(time?: MomentInput, formats?: CalendarSpec): string;
clone(): Moment;
/**
* @return Unix timestamp in milliseconds
*/
valueOf(): number;
// current date/time in local mode
local(keepLocalTime?: boolean): Moment;
isLocal(): boolean;
// current date/time in UTC mode
utc(keepLocalTime?: boolean): Moment;
isUTC(): boolean;
/**
* @deprecated use isUTC
*/
isUtc(): boolean;
parseZone(): Moment;
isValid(): boolean;
invalidAt(): number;
hasAlignedHourOffset(other?: MomentInput): boolean;
creationData(): MomentCreationData;
parsingFlags(): MomentParsingFlags;
year(y: number): Moment;
year(): number;
/**
* @deprecated use year(y)
*/
years(y: number): Moment;
/**
* @deprecated use year()
*/
years(): number;
quarter(): number;
quarter(q: number): Moment;
quarters(): number;
quarters(q: number): Moment;
month(M: number|string): Moment;
month(): number;
/**
* @deprecated use month(M)
*/
months(M: number|string): Moment;
/**
* @deprecated use month()
*/
months(): number;
day(d: number|string): Moment;
day(): number;
days(d: number|string): Moment;
days(): number;
date(d: number): Moment;
date(): number;
/**
* @deprecated use date(d)
*/
dates(d: number): Moment;
/**
* @deprecated use date()
*/
dates(): number;
hour(h: number): Moment;
hour(): number;
hours(h: number): Moment;
hours(): number;
minute(m: number): Moment;
minute(): number;
minutes(m: number): Moment;
minutes(): number;
second(s: number): Moment;
second(): number;
seconds(s: number): Moment;
seconds(): number;
millisecond(ms: number): Moment;
millisecond(): number;
milliseconds(ms: number): Moment;
milliseconds(): number;
weekday(): number;
weekday(d: number): Moment;
isoWeekday(): number;
isoWeekday(d: number|string): Moment;
weekYear(): number;
weekYear(d: number): Moment;
isoWeekYear(): number;
isoWeekYear(d: number): Moment;
week(): number;
week(d: number): Moment;
weeks(): number;
weeks(d: number): Moment;
isoWeek(): number;
isoWeek(d: number): Moment;
isoWeeks(): number;
isoWeeks(d: number): Moment;
weeksInYear(): number;
isoWeeksInYear(): number;
dayOfYear(): number;
dayOfYear(d: number): Moment;
from(inp: MomentInput, suffix?: boolean): string;
to(inp: MomentInput, suffix?: boolean): string;
fromNow(withoutSuffix?: boolean): string;
toNow(withoutPrefix?: boolean): string;
diff(b: MomentInput, unitOfTime?: unitOfTime.Diff, precise?: boolean): number;
toArray(): number[];
toDate(): Date;
toISOString(keepOffset?: boolean): string;
inspect(): string;
toJSON(): string;
unix(): number;
isLeapYear(): boolean;
/**
* @deprecated in favor of utcOffset
*/
zone(): number;
zone(b: number|string): Moment;
utcOffset(): number;
utcOffset(b: number|string, keepLocalTime?: boolean): Moment;
isUtcOffset(): boolean;
daysInMonth(): number;
isDST(): boolean;
zoneAbbr(): string;
zoneName(): string;
isBefore(inp?: MomentInput, granularity?: unitOfTime.StartOf): boolean;
isAfter(inp?: MomentInput, granularity?: unitOfTime.StartOf): boolean;
isSame(inp?: MomentInput, granularity?: unitOfTime.StartOf): boolean;
isSameOrAfter(inp?: MomentInput, granularity?: unitOfTime.StartOf): boolean;
isSameOrBefore(inp?: MomentInput, granularity?: unitOfTime.StartOf): boolean;
isBetween(a: MomentInput, b: MomentInput, granularity?: unitOfTime.StartOf, inclusivity?: "()" | "[)" | "(]" | "[]"): boolean;
/**
* @deprecated as of 2.8.0, use locale
*/
lang(language: LocaleSpecifier): Moment;
/**
* @deprecated as of 2.8.0, use locale
*/
lang(): Locale;
locale(): string;
locale(locale: LocaleSpecifier): Moment;
localeData(): Locale;
/**
* @deprecated no reliable implementation
*/
isDSTShifted(): boolean;
// NOTE(constructor): Same as moment constructor
/**
* @deprecated as of 2.7.0, use moment.min/max
*/
max(inp?: MomentInput, format?: MomentFormatSpecification, strict?: boolean): Moment;
/**
* @deprecated as of 2.7.0, use moment.min/max
*/
max(inp?: MomentInput, format?: MomentFormatSpecification, language?: string, strict?: boolean): Moment;
// NOTE(constructor): Same as moment constructor
/**
* @deprecated as of 2.7.0, use moment.min/max
*/
min(inp?: MomentInput, format?: MomentFormatSpecification, strict?: boolean): Moment;
/**
* @deprecated as of 2.7.0, use moment.min/max
*/
min(inp?: MomentInput, format?: MomentFormatSpecification, language?: string, strict?: boolean): Moment;
get(unit: unitOfTime.All): number;
set(unit: unitOfTime.All, value: number): Moment;
set(objectLiteral: MomentSetObject): Moment;
toObject(): MomentObjectOutput;
}
export var version: string;
export var fn: Moment;
// NOTE(constructor): Same as moment constructor
export function utc(inp?: MomentInput, format?: MomentFormatSpecification, strict?: boolean): Moment;
export function utc(inp?: MomentInput, format?: MomentFormatSpecification, language?: string, strict?: boolean): Moment;
export function unix(timestamp: number): Moment;
export function invalid(flags?: MomentParsingFlagsOpt): Moment;
export function isMoment(m: any): m is Moment;
export function isDate(m: any): m is Date;
export function isDuration(d: any): d is Duration;
/**
* @deprecated in 2.8.0
*/
export function lang(language?: string): string;
/**
* @deprecated in 2.8.0
*/
export function lang(language?: string, definition?: Locale): string;
export function locale(language?: string): string;
export function locale(language?: string[]): string;
export function locale(language?: string, definition?: LocaleSpecification | void): string; // null | undefined
export function localeData(key?: string | string[]): Locale;
export function duration(inp?: DurationInputArg1, unit?: DurationInputArg2): Duration;
// NOTE(constructor): Same as moment constructor
export function parseZone(inp?: MomentInput, format?: MomentFormatSpecification, strict?: boolean): Moment;
export function parseZone(inp?: MomentInput, format?: MomentFormatSpecification, language?: string, strict?: boolean): Moment;
export function months(): string[];
export function months(index: number): string;
export function months(format: string): string[];
export function months(format: string, index: number): string;
export function monthsShort(): string[];
export function monthsShort(index: number): string;
export function monthsShort(format: string): string[];
export function monthsShort(format: string, index: number): string;
export function weekdays(): string[];
export function weekdays(index: number): string;
export function weekdays(format: string): string[];
export function weekdays(format: string, index: number): string;
export function weekdays(localeSorted: boolean): string[];
export function weekdays(localeSorted: boolean, index: number): string;
export function weekdays(localeSorted: boolean, format: string): string[];
export function weekdays(localeSorted: boolean, format: string, index: number): string;
export function weekdaysShort(): string[];
export function weekdaysShort(index: number): string;
export function weekdaysShort(format: string): string[];
export function weekdaysShort(format: string, index: number): string;
export function weekdaysShort(localeSorted: boolean): string[];
export function weekdaysShort(localeSorted: boolean, index: number): string;
export function weekdaysShort(localeSorted: boolean, format: string): string[];
export function weekdaysShort(localeSorted: boolean, format: string, index: number): string;
export function weekdaysMin(): string[];
export function weekdaysMin(index: number): string;
export function weekdaysMin(format: string): string[];
export function weekdaysMin(format: string, index: number): string;
export function weekdaysMin(localeSorted: boolean): string[];
export function weekdaysMin(localeSorted: boolean, index: number): string;
export function weekdaysMin(localeSorted: boolean, format: string): string[];
export function weekdaysMin(localeSorted: boolean, format: string, index: number): string;
export function min(moments: Moment[]): Moment;
export function min(...moments: Moment[]): Moment;
export function max(moments: Moment[]): Moment;
export function max(...moments: Moment[]): Moment;
/**
* Returns unix time in milliseconds. Overwrite for profit.
*/
export function now(): number;
export function defineLocale(language: string, localeSpec: LocaleSpecification | void): Locale; // null
export function updateLocale(language: string, localeSpec: LocaleSpecification | void): Locale; // null
export function locales(): string[];
export function normalizeUnits(unit: unitOfTime.All): string;
export function relativeTimeThreshold(threshold: string): number | boolean;
export function relativeTimeThreshold(threshold: string, limit: number): boolean;
export function relativeTimeRounding(fn: (num: number) => number): boolean;
export function relativeTimeRounding(): (num: number) => number;
export function calendarFormat(m: Moment, now: Moment): string;
export function parseTwoDigitYear(input: string): number;
/**
* Constant used to enable explicit ISO_8601 format parsing.
*/
export var ISO_8601: MomentBuiltinFormat;
export var RFC_2822: MomentBuiltinFormat;
export var defaultFormat: string;
export var defaultFormatUtc: string;
export var HTML5_FMT: {
DATETIME_LOCAL: string,
DATETIME_LOCAL_SECONDS: string,
DATETIME_LOCAL_MS: string,
DATE: string,
TIME: string,
TIME_SECONDS: string,
TIME_MS: string,
WEEK: string,
MONTH: string
};
}
export = moment;
/Transcrypt-3.7.16.tar.gz/Transcrypt-3.7.16/transcrypt/demos/parcel_demo/node_modules/snapdragon/lib/compiler.js
'use strict';
var use = require('use');
var define = require('define-property');
var debug = require('debug')('snapdragon:compiler');
var utils = require('./utils');
/**
* Create a new `Compiler` with the given `options`.
* @param {Object} `options`
*/
function Compiler(options, state) {
debug('initializing', __filename);
this.options = utils.extend({source: 'string'}, options);
this.state = state || {};
this.compilers = {};
this.output = '';
this.set('eos', function(node) {
return this.emit(node.val, node);
});
this.set('noop', function(node) {
return this.emit(node.val, node);
});
this.set('bos', function(node) {
return this.emit(node.val, node);
});
use(this);
}
/**
* Prototype methods
*/
Compiler.prototype = {
/**
* Throw an error message with details including the cursor position.
* @param {String} `msg` Message to use in the Error.
*/
error: function(msg, node) {
var pos = node.position || {start: {column: 0}};
var message = this.options.source + ' column:' + pos.start.column + ': ' + msg;
var err = new Error(message);
err.reason = msg;
err.column = pos.start.column;
err.source = this.pattern;
if (this.options.silent) {
this.errors.push(err);
} else {
throw err;
}
},
/**
* Define a non-enumerable property on the `Compiler` instance.
*
* ```js
* compiler.define('foo', 'bar');
* ```
* @name .define
* @param {String} `key` property name
* @param {any} `val` property value
* @return {Object} Returns the Compiler instance for chaining.
* @api public
*/
define: function(key, val) {
define(this, key, val);
return this;
},
/**
* Emit `node.val`
*/
emit: function(str, node) {
this.output += str;
return str;
},
/**
* Add a compiler `fn` with the given `name`
*/
set: function(name, fn) {
this.compilers[name] = fn;
return this;
},
/**
* Get compiler `name`.
*/
get: function(name) {
return this.compilers[name];
},
/**
* Get the previous AST node.
*/
prev: function(n) {
return this.ast.nodes[this.idx - (n || 1)] || { type: 'bos', val: '' };
},
/**
* Get the next AST node.
*/
next: function(n) {
return this.ast.nodes[this.idx + (n || 1)] || { type: 'eos', val: '' };
},
/**
* Visit `node`.
*/
visit: function(node, nodes, i) {
var fn = this.compilers[node.type];
this.idx = i;
if (typeof fn !== 'function') {
throw this.error('compiler "' + node.type + '" is not registered', node);
}
return fn.call(this, node, nodes, i);
},
/**
* Map visit over array of `nodes`.
*/
mapVisit: function(nodes) {
if (!Array.isArray(nodes)) {
throw new TypeError('expected an array');
}
var len = nodes.length;
var idx = -1;
while (++idx < len) {
this.visit(nodes[idx], nodes, idx);
}
return this;
},
/**
* Compile `ast`.
*/
compile: function(ast, options) {
var opts = utils.extend({}, this.options, options);
this.ast = ast;
this.parsingErrors = this.ast.errors;
this.output = '';
// source map support
if (opts.sourcemap) {
var sourcemaps = require('./source-maps');
sourcemaps(this);
this.mapVisit(this.ast.nodes);
this.applySourceMaps();
this.map = opts.sourcemap === 'generator' ? this.map : this.map.toJSON();
return this;
}
this.mapVisit(this.ast.nodes);
return this;
}
};
/**
* Expose `Compiler`
*/
module.exports = Compiler;
/atsim.potentials-0.4.1.tar.gz/atsim.potentials-0.4.1/atsim/potentials/_potential.py
from ._util import gradient
class Potential(object):
"""Class used to describe a potential to the :func:`~atsim.potentials.writePotentials()` function.
Potential objects encapsulate a python function or callable which is used by
the :meth:`~atsim.potentials.Potential.energy` method to calculate potential energy.
The :meth:`~atsim.potentials.Potential.force` method returns :math:`\\frac{-dU}{dr}`\ . If the energy callable
provides `.deriv()` and `.deriv2()` methods, these are used for evaluating the first
and second derivatives of energy with respect to separation. This allows analytical
derivatives to be defined for the Potential object. When not defined, numerical
derivatives are used instead.
The :func:`gradient` function is used to wrap the energy callable so that
the correct derivative implementation is used.
"""
def __init__(self, speciesA, speciesB, potentialFunction, h = 1e-6):
"""Create a Potential object from a python function or callable that returns energy at a given separation.
:param speciesA: Label of first species in the potential pair
:type speciesA: str
:param speciesB: Label of second species in the potential pair
:type speciesB: str
:param potentialFunction: Python callable which accepts a single parameter (separation) and returns energy for that separation.
:param h: Distance increment used when calculating numerical derivative of energy to calculate forces in .force() method
(if potentialFunction doesn't supply an analytical derivative through its .deriv() method)."""
self._speciesA = speciesA
self._speciesB = speciesB
self._potentialFunction = potentialFunction
self._derivFunction = gradient(self._potentialFunction, h)
@property
def speciesA(self):
return self._speciesA
@property
def speciesB(self):
return self._speciesB
@property
def potentialFunction(self):
return self._potentialFunction
def energy(self, r):
""":param r: Separation
:return: Energy for given separation"""
return self._potentialFunction(r)
def force(self, r):
"""Calculate force for this potential at a given separation.
If this object's potentialFunction has a .deriv() method this will be used to calculate force (allowing analytical derivatives
to be specified).
If potentialFunction doesn't have a deriv method then a numerical derivative of the potential function will be returned instead.
:param r: Separation
:type r: float
:return: -dU/dr at given separation
:rtype: float"""
return -self._derivFunction(r)
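if __name__ == "__main__":
    # Minimal usage sketch with invented Lennard-Jones parameters (not taken
    # from the package): wrap a plain callable in Potential and query energy
    # and force.
    def _lj(r):
        epsilon, sigma = 0.1, 2.5
        return 4.0 * epsilon * ((sigma / r) ** 12 - (sigma / r) ** 6)
    example = Potential("Ar", "Ar", _lj)
    print("U(3.0) =", example.energy(3.0))  # energy at separation r = 3.0
    print("F(3.0) =", example.force(3.0))   # -dU/dr via the numerical gradient wrapper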
|
PypiClean
|
/roboticstoolbox_python-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl/roboticstoolbox/mobile/ParticleFilter.py
|
from collections import namedtuple
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from matplotlib import animation
import spatialmath.base as smb
"""
Monte-Carlo localisation for estimating vehicle pose from
odometry and observations of known landmarks.
"""
# TODO: refactor this and EKF, RNG, history, common plots, animation, movie
class ParticleFilter:
def __init__(
self,
robot,
sensor,
R,
L,
nparticles=500,
seed=0,
x0=None,
verbose=False,
animate=False,
history=True,
workspace=None,
):
"""
Particle filter
:param robot: robot motion model
:type robot: :class:`VehicleBase` subclass,
:param sensor: vehicle mounted sensor model
:type sensor: :class:`SensorBase` subclass
:param R: covariance of the zero-mean Gaussian noise added to the particles at each step (diffusion)
:type R: ndarray(3,3)
:param L: covariance used in the sensor likelihood model
:type L: ndarray(2,2)
:param nparticles: number of particles, defaults to 500
:type nparticles: int, optional
:param seed: random number seed, defaults to 0
:type seed: int, optional
:param x0: initial state; if not given, particles are distributed uniformly over the workspace
:type x0: array_like(3), optional
:param verbose: display extra debug information, defaults to False
:type verbose: bool, optional
:param animate: display an animation of the particles during simulation, defaults to False
:type animate: bool, optional
:param history: retain step-by-step history, defaults to True
:type history: bool, optional
:param workspace: dimension of workspace, see :func:`~spatialmath.base.graphics.expand_dims`
:type workspace: scalar, array_like(2), array_like(4)
This class implements a Monte-Carlo estimator or particle filter for
vehicle state, based on odometry, a landmark map, and landmark
observations. The state of each particle is a possible vehicle
configuration :math:`(x,y,\theta)`. Bootstrap particle resampling is
used.
The working area is defined by ``workspace`` or inherited from the
landmark map attached to the ``sensor`` (see
:func:`~spatialmath.base.graphics.expand_dims`):
============== ======= =======
``workspace`` x-range y-range
============== ======= =======
A (scalar) -A:A -A:A
[A, B] A:B A:B
[A, B, C, D] A:B C:D
============== ======= =======
Particles are initially distributed uniformly at random over this area.
Example::
V = np.diag([0.02, np.radians(0.5)]) ** 2
robot = Bicycle(covar=V, animation="car", workspace=10)
robot.control = RandomPath(workspace=robot)
map = LandmarkMap(nlandmarks=20, workspace=robot.workspace)
W = np.diag([0.1, np.radians(1)]) ** 2
sensor = RangeBearingSensor(robot, map, covar=W, plot=True)
R = np.diag([0.1, 0.1, np.radians(1)]) ** 2
L = np.diag([0.1, 0.1])
pf = ParticleFilter(robot, sensor, R, L, nparticles=1000)
pf.run(T=10)
map.plot()
robot.plot_xy()
pf.plot_xy()
plt.plot(pf.get_std()[:100,:])
.. note:: Set ``seed=None`` to get different behaviour from run to run; the default ``seed=0`` gives repeatable runs.
:seealso: :meth:`run`
"""
self._robot = robot
self._sensor = sensor
self.R = R
self.L = L
self.nparticles = nparticles
self._animate = animate
# self.dim = sensor.map.dim
self._history = []
self.x = ()
self.weight = ()
self.w0 = 0.05
self._x0 = x0
# create a private random number stream if required
self._random = np.random.default_rng(seed)
self._seed = seed
self._keep_history = history # keep history
self._htuple = namedtuple("PFlog", "t odo xest std weights")
if workspace is not None:
self._dim = smb.expand_dims(workspace)
else:
self._dim = sensor.map.workspace
self._workspace = self.robot.workspace
# self._init()
def __str__(self):
# ParticleFilter.char Convert to string
#
# PF.char() is a string representing the state of the ParticleFilter
# object in human-readable form.
#
# See also ParticleFilter.display.
def indent(s, n=2):
spaces = " " * n
return s.replace("\n", "\n" + spaces)
s = f"ParticleFilter object: {self.nparticles} particles"
s += "\nR: " + smb.array2str(self.R)
s += "\nL: " + smb.array2str(self.L)
if self.robot is not None:
s += indent("\nrobot: " + str(self.robot))
if self.sensor is not None:
s += indent("\nsensor: " + str(self.sensor))
return s
@property
def robot(self):
"""
Get robot object
:return: robot used in simulation
:rtype: :class:`VehicleBase` subclass
"""
return self._robot
@property
def sensor(self):
"""
Get sensor object
:return: sensor used in simulation
:rtype: :class:`SensorBase` subclass
"""
return self._sensor
@property
def map(self):
"""
Get map object
:return: map used in simulation
:rtype: :class:`LandmarkMap` subclass
"""
return self._map
@property
def verbose(self):
"""
Get verbosity state
:return: verbosity
:rtype: bool
"""
return self._verbose
@property
def history(self):
"""
Get simulation history
:return: simulation history
:rtype: list of namedtuples
At each simulation timestep a namedtuple is appended to the history
list. It contains, for that time step, the time, odometry, estimated
state, standard deviation of the particle cloud, and particle weights.
:seealso: :meth:`get_t` :meth:`get_xyt` :meth:`get_std`
"""
return self._history
@property
def workspace(self):
"""
Size of robot workspace
:return: workspace bounds [xmin, xmax, ymin, ymax]
:rtype: ndarray(4)
Returns the bounds of the workspace as specified by constructor
option ``workspace``
"""
return self._workspace
@property
def random(self):
"""
Get private random number generator
:return: NumPy random number generator
:rtype: :class:`numpy.random.Generator`
Has methods including:
- ``integers(low, high, size, endpoint)``
- ``random(size)``
- ``uniform``
- ``normal(mean, std, size)``
- ``multivariate_normal(mean, covar, size)``
The generator is initialized with the seed provided at constructor
time every time ``init`` is called.
"""
return self._random
def _init(self, x0=None, animate=False, ax=None):
# ParticleFilter.init Initialize the particle filter
#
# PF.init() initializes the particle distribution and clears the
# history.
#
# Notes::
# - If initial particle states were given to the constructor the states are
# set to this value, else a random distribution over the map is used.
# - Invoked by the run() method.
self.robot.init()
self.sensor.init()
# clear the history
self._history = []
# create a new private random number generator
if self._seed is not None:
self._random = np.random.default_rng(self._seed)
self._t = 0
# initialize particles
if x0 is None:
x0 = self._x0
if x0 is None:
# create initial particle distribution as uniformly randomly distributed
# over the map workspace and heading angles
x = self.random.uniform(
self.workspace[0], self.workspace[1], size=(self.nparticles,)
)
y = self.random.uniform(
self.workspace[2], self.workspace[3], size=(self.nparticles,)
)
t = self.random.uniform(-np.pi, np.pi, size=(self.nparticles,))
self.x = np.c_[x, y, t]
if animate:
# display the initial particles
(self.h,) = ax.plot(
self.x[:, 0],
self.x[:, 1],
"go",
zorder=0,
markersize=3,
markeredgecolor="none",
alpha=0.3,
label="particle",
)
self.weight = np.ones((self.nparticles,))
def run(self, T=10, x0=None):
"""
Run the particle filter simulation
:param T: maximum simulation time in seconds
:type T: float
:param x0: Initial state, defaults to the value given to the ParticleFilter constructor
:type x0: array_like(3) or array_like(2)
Simulates the motion of a vehicle (under the control of a driving agent)
and the particle-filter estimator. The steps are:
- initialize the filter, vehicle and vehicle driver agent, sensor
- for each time step:
- step the vehicle and its driver agent, obtain odometry
- take a sensor reading
- execute the particle filter
- save information as a namedtuple to the history list for later display
:seealso: :meth:`history` :meth:`get_xyt` :meth:`get_t` :meth:`get_std`
:meth:`plot_xy`
"""
self._init(x0=x0)
# anim = Animate(opt.movie)
# display the initial particles
ax = smb.axes_logic(None, 2)
if self._animate:
(self.h,) = ax.plot(
self.x[:, 0],
self.x[:, 1],
"go",
zorder=0,
markersize=3,
markeredgecolor="none",
alpha=0.3,
label="particle",
)
# set(self.h, 'Tag', 'particles')
# self.robot.plot()
# iterate over time
import time
for i in range(round(T / self.robot.dt)):
self._step()
# time.sleep(0.2)
plt.pause(0.2)
# plt.draw()
# anim.add()
# anim.close()
def run_animation(self, T=10, x0=None, format=None, file=None):
"""
Run the particle filter simulation
:param T: maximum simulation time in seconds
:type T: float
:param x0: Initial state, defaults to the value given to the ParticleFilter constructor
:type x0: array_like(3) or array_like(2)
:param format: Output format
:type format: str, optional
:param file: File name
:type file: str, optional
:return: Matplotlib animation object
:rtype: :meth:`matplotlib.animation.FuncAnimation`
Simulates the motion of a vehicle (under the control of a driving agent)
and the particle-filter estimator and returns an animation
in various formats::
============ ========= =================================
``format``   ``file``  description
============ ========= =================================
``"html"``   str, None return HTML5 video
``"jshtml"`` str, None return JS+HTML video
``"gif"``    str       return animated GIF
``"mp4"``    str       return MP4/H264 video
``None``               return a ``FuncAnimation`` object
============ ========= =================================
The allowable types for ``file`` are given in the second column. A str
value is the file name. Where ``None`` is allowed and no file name is given, the video is returned as a string.
For the last case, a reference to the animation object must be held if used for
animation in a Jupyter cell::
anim = robot.run_animation(T=20)
The steps are:
- initialize the filter, vehicle and vehicle driver agent, sensor
- for each time step:
- step the vehicle and its driver agent, obtain odometry
- take a sensor reading
- execute the particle filter
- save information as a namedtuple to the history list for later display
:seealso: :meth:`history` :meth:`get_xyt` :meth:`get_t` :meth:`get_std`
:meth:`plot_xy`
"""
fig, ax = plt.subplots()
nframes = round(T / self.robot.dt)
anim = animation.FuncAnimation(
fig=fig,
# func=lambda i: self._step(animate=True, pause=False),
# init_func=lambda: self._init(animate=True),
func=lambda i: self._step(),
init_func=lambda: self._init(ax=ax, animate=True),
frames=nframes,
interval=self.robot.dt * 1000,
blit=False,
repeat=False,
)
# anim._interval = self.dt*1000/2
# anim._repeat = True
ret = None
if format == "html":
ret = anim.to_html5_video() # convert to embeddable HTML5 animation
elif format == "jshtml":
ret = anim.to_jshtml() # convert to embeddable Javascript/HTML animation
elif format == "gif":
anim.save(
file, writer=animation.PillowWriter(fps=1 / self.robot.dt)
) # convert to GIF
ret = None
elif format == "mp4":
anim.save(
file, writer=animation.FFMpegWriter(fps=1 / self.robot.dt)
) # convert to mp4/H264
ret = None
elif format is None:
# return the anim object
return anim
else:
raise ValueError("unknown format")
if ret is not None and file is not None:
with open(file, "w") as f:
f.write(ret)
ret = None
plt.close(fig)
return ret
# self._init(x0=x0)
# # anim = Animate(opt.movie)
# # display the initial particles
# ax = smb.axes_logic(None, 2)
# if self._animate:
# (self.h,) = ax.plot(
# self.x[:, 0],
# self.x[:, 1],
# "go",
# zorder=0,
# markersize=3,
# markeredgecolor="none",
# alpha=0.3,
# label="particle",
# )
# # set(self.h, 'Tag', 'particles')
# # self.robot.plot()
# # iterate over time
# import time
# for i in range(round(T / self.robot.dt)):
# self._step()
# # time.sleep(0.2)
# plt.pause(0.2)
# # plt.draw()
# # anim.add()
# # anim.close()
def _step(self):
# fprintf('---- step\n')
odo = self.robot.step(animate=self._animate) # move the robot
# update the particles based on odometry
self._predict(odo)
# get a sensor reading
z, lm_id = self.sensor.reading()
if z is not None:
self._observe(z, lm_id)
# fprintf(' observe beacon #d\n', lm_id)
self._select()
# our estimate is simply the mean of the particles
x_est = self.x.mean(axis=0)
std_est = self.x.std(axis=0)
# std is more complex for angles, need to account for 2pi wrap
std_est[2] = np.sqrt(np.sum(smb.angdiff(self.x[:, 2], x_est[2]) ** 2)) / (
self.nparticles - 1
)
# display the updated particles
# set(self.h, 'Xdata', self.x(:,1), 'Ydata', self.x(:,2), 'Zdata', self.x(:,3))
if self._animate:
self.h.set_xdata(self.x[:, 0])
self.h.set_ydata(self.x[:, 1])
# if ~isempty(self.anim)
# self.anim.add()
if self._keep_history:
hist = self._htuple(
self.robot._t, odo.copy(), x_est, std_est, self.weight.copy()
)
self._history.append(hist)
def plot_pdf(self):
"""
Plot particle PDF
Displays a discrete PDF of vehicle position. Creates a 3D plot where
the x- and y-axes are the estimated vehicle position and the z-axis is
the particle weight. Each particle is represented by a a vertical line
segment of height equal to particle weight.
"""
ax = smb.plotvol3()
for (x, y, t), weight in zip(self.x, self.weight):
# ax.plot([x, x], [y, y], [0, weight], 'r')
ax.plot([x, x], [y, y], [0, weight], "skyblue", linewidth=3)
ax.plot(x, y, weight, "k.", markersize=6)
ax.grid(True)
ax.set_xlabel("X")
ax.set_ylabel("Y")
ax.set_xlim()
ax.set_zlabel("particle weight")
ax.view_init(29, 59)
def _predict(self, odo):
# step 2
# update the particle state based on odometry and a random perturbation
# Straightforward code:
#
# for i=1:self.nparticles
# x = self.robot.f( self.x(i,:), odo)' + sqrt(self.R)*self.randn[2,0]
# x[2] = angdiff(x[2])
# self.x(i,:) = x
#
# Vectorized code:
self.x = self.robot.f(self.x, odo) + self.random.multivariate_normal(
(0, 0, 0), self.R, size=self.nparticles
)
self.x[:, 2] = smb.angdiff(self.x[:, 2])
def _observe(self, z, lm_id):
# step 3
# predict observation and score the particles
# Straightforward code:
#
# for p = 1:self.nparticles
# # what do we expect observation to be for this particle?
# # use the sensor model h(.)
# z_pred = self.sensor.h( self.x(p,:), lm_id)
#
# # how different is it
# innov[0] = z[0] - z_pred[0]
# innov[1] = angdiff(z[1], z_pred[1])
#
# # get likelihood (new importance). Assume Gaussian but any PDF works!
# # If predicted obs is very different from actual obs this score will be low
# # ie. this particle is not very good at predicting the observation.
# # A lower score means it is less likely to be selected for the next generation...
# # The weight is never zero.
# self.weight(p) = exp(-0.5*innov'*inv(self.L)*innov) + 0.05
# end
#
# Vectorized code:
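        # The three-column product below evaluates -0.5 * innov.T @ inv(L) @ innov
        # for every particle at once: LL packs the unique entries of -0.5 * inv(L),
        # with the off-diagonal entry carrying a factor of 2 because inv(L) is
        # symmetric. Adding w0 afterwards keeps every weight strictly positive.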
invL = np.linalg.inv(self.L)
z_pred = self.sensor.h(self.x, lm_id)
z_pred[:, 0] = z[0] - z_pred[:, 0]
z_pred[:, 1] = smb.angdiff(z[1], z_pred[:, 1])
LL = -0.5 * np.r_[invL[0, 0], invL[1, 1], 2 * invL[0, 1]]
e = (
np.c_[z_pred[:, 0] ** 2, z_pred[:, 1] ** 2, z_pred[:, 0] * z_pred[:, 1]]
@ LL
)
self.weight = np.exp(e) + self.w0
def _select(self):
# step 4
# select particles based on their weights
#
# particles with large weights will occupy a greater percentage of the
# y axis in a cumulative plot
cdf = np.cumsum(self.weight) / self.weight.sum()
# so randomly (uniform) choosing y values is more likely to correspond to
# better particles...
iselect = self.random.uniform(0, 1, size=(self.nparticles,))
# find the particle that corresponds to each y value (just a look up)
interpfun = sp.interpolate.interp1d(
cdf,
np.arange(self.nparticles),
assume_sorted=True,
kind="nearest",
fill_value="extrapolate",
)
inextgen = interpfun(iselect).astype(int)
# copy selected particles for next generation..
self.x = self.x[inextgen, :]
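        # Note: this is bootstrap (multinomial) resampling by inverse-transform
        # sampling of the empirical CDF of the weights, so heavily weighted
        # particles are selected, and duplicated, more often.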
def get_t(self):
"""
Get time from simulation
:return: simulation time vector
:rtype: ndarray(n)
Return simulation time vector, starts at zero. The timestep is an
attribute of the ``robot`` object.
"""
return np.array([h.t for h in self._history])
def get_xyt(self):
r"""
Get estimated vehicle trajectory
:return: vehicle trajectory where each row is configuration :math:`(x, y, \theta)`
:rtype: ndarray(n,3)
:seealso: :meth:`plot_xy` :meth:`run` :meth:`history`
"""
return np.array([h.xest for h in self._history])
def get_std(self):
r"""
Get standard deviation of particles
:return: standard deviation of the vehicle state estimate
:rtype: ndarray(n,3)
Return the standard deviation :math:`(\sigma_x, \sigma_y, \sigma_\theta)` of the
particle cloud at each time step.
:seealso: :meth:`get_xyt`
"""
return np.array([h.std for h in self._history])
def plot_xy(self, block=None, **kwargs):
r"""
Plot estimated vehicle position
:param args: position arguments passed to :meth:`~matplotlib.axes.Axes.plot`
:param kwargs: keywords arguments passed to :meth:`~matplotlib.axes.Axes.plot`
:param block: hold plot until figure is closed, defaults to None
:type block: bool, optional
Plot the estimated vehicle path in the xy-plane.
:seealso: :meth:`get_xyt`
"""
xyt = self.get_xyt()
plt.plot(xyt[:, 0], xyt[:, 1], **kwargs)
if block is not None:
plt.show(block=block)
if __name__ == "__main__":
from roboticstoolbox import *
map = LandmarkMap(20, workspace=10)
V = np.diag([0.02, np.deg2rad(0.5)]) ** 2
robot = Bicycle(covar=V, animation="car", workspace=map)
robot.control = RandomPath(workspace=map)
W = np.diag([0.1, np.deg2rad(1)]) ** 2
sensor = RangeBearingSensor(robot, map, covar=W, plot=True)
R = np.diag([0.1, 0.1, np.deg2rad(1)]) ** 2
L = np.diag([0.1, 0.1])
pf = ParticleFilter(robot, sensor=sensor, R=R, L=L, nparticles=1000, animate=True)
pf.run(T=10)
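    # Follow-up sketch mirroring the plotting calls from the class docstring
    # example: overlay the landmark map, the true path and the estimated path.
    map.plot()
    robot.plot_xy()
    pf.plot_xy()
    plt.show(block=True)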
|
PypiClean
|
/azure_eventhub-5.11.4-py3-none-any.whl/azure/eventhub/_transport/_base.py
|
from __future__ import annotations
from typing import Tuple, Union, TYPE_CHECKING
from abc import ABC, abstractmethod
if TYPE_CHECKING:
try:
from uamqp import types as uamqp_types
except ImportError:
uamqp_types = None
class AmqpTransport(ABC): # pylint: disable=too-many-public-methods
"""
Abstract class that defines a set of common methods needed by producer and consumer.
"""
KIND: str
# define constants
MAX_FRAME_SIZE_BYTES: int
MAX_MESSAGE_LENGTH_BYTES: int
TIMEOUT_FACTOR: int
CONNECTION_CLOSING_STATES: Tuple
TRANSPORT_IDENTIFIER: str
# define symbols
PRODUCT_SYMBOL: Union[uamqp_types.AMQPSymbol, str, bytes]
VERSION_SYMBOL: Union[uamqp_types.AMQPSymbol, str, bytes]
FRAMEWORK_SYMBOL: Union[uamqp_types.AMQPSymbol, str, bytes]
PLATFORM_SYMBOL: Union[uamqp_types.AMQPSymbol, str, bytes]
USER_AGENT_SYMBOL: Union[uamqp_types.AMQPSymbol, str, bytes]
PROP_PARTITION_KEY_AMQP_SYMBOL: Union[uamqp_types.AMQPSymbol, str, bytes]
@staticmethod
@abstractmethod
def build_message(**kwargs):
"""
Creates a uamqp.Message or pyamqp.Message with given arguments.
:rtype: uamqp.Message or pyamqp.Message
"""
@staticmethod
@abstractmethod
def build_batch_message(**kwargs):
"""
Creates a uamqp.BatchMessage or pyamqp.BatchMessage with given arguments.
:rtype: uamqp.BatchMessage or pyamqp.BatchMessage
"""
@staticmethod
@abstractmethod
def to_outgoing_amqp_message(annotated_message):
"""
Converts an AmqpAnnotatedMessage into an Amqp Message.
:param AmqpAnnotatedMessage annotated_message: AmqpAnnotatedMessage to convert.
:rtype: uamqp.Message or pyamqp.Message
"""
@staticmethod
@abstractmethod
def update_message_app_properties(message, key, value):
"""
Adds the given key/value to the application properties of the message.
:param uamqp.Message or pyamqp.Message message: Message.
:param str key: Key to set in application properties.
:param str value: Value to set for key in application properties.
:rtype: uamqp.Message or pyamqp.Message
"""
@staticmethod
@abstractmethod
def get_message_encoded_size(message):
"""
Gets the message encoded size given an underlying Message.
:param uamqp.Message or pyamqp.Message message: Message to get encoded size of.
:rtype: int
"""
@staticmethod
@abstractmethod
def get_remote_max_message_size(handler):
"""
Returns max peer message size.
:param AMQPClient handler: Client to get remote max message size on link from.
:rtype: int
"""
@staticmethod
@abstractmethod
def create_retry_policy(config):
"""
Creates the error retry policy.
:param ~azure.eventhub._configuration.Configuration config: Configuration.
"""
@staticmethod
@abstractmethod
def create_link_properties(link_properties):
"""
Creates and returns the link properties.
:param dict[bytes, int] link_properties: The dict of symbols and corresponding values.
:rtype: dict
"""
@staticmethod
@abstractmethod
def create_connection(**kwargs):
"""
Creates and returns the uamqp Connection object.
:keyword str host: The hostname, used by uamqp.
:keyword JWTTokenAuth auth: The auth, used by uamqp.
:keyword str endpoint: The endpoint, used by pyamqp.
:keyword str container_id: Required.
:keyword int max_frame_size: Required.
:keyword int channel_max: Required.
:keyword int idle_timeout: Required.
:keyword Dict properties: Required.
:keyword int remote_idle_timeout_empty_frame_send_ratio: Required.
:keyword error_policy: Required.
:keyword bool debug: Required.
:keyword str encoding: Required.
"""
@staticmethod
@abstractmethod
def close_connection(connection):
"""
Closes existing connection.
:param connection: uamqp or pyamqp Connection.
"""
@staticmethod
@abstractmethod
def get_connection_state(connection):
"""
Gets connection state.
:param connection: uamqp or pyamqp Connection.
"""
@staticmethod
@abstractmethod
def create_send_client(*, config, **kwargs):
"""
Creates and returns the send client.
:param ~azure.eventhub._configuration.Configuration config: The configuration.
:keyword str target: Required. The target.
:keyword JWTTokenAuth auth: Required.
:keyword int idle_timeout: Required.
:keyword network_trace: Required.
:keyword retry_policy: Required.
:keyword keep_alive_interval: Required.
:keyword str client_name: Required.
:keyword dict link_properties: Required.
:keyword properties: Required.
"""
@staticmethod
@abstractmethod
def send_messages(producer, timeout_time, last_exception, logger):
"""
Handles sending of event data messages.
:param ~azure.eventhub._producer.EventHubProducer producer: The producer with handler to send messages.
:param int timeout_time: Timeout time.
:param last_exception: Exception to raise if message timed out. Only used by uamqp transport.
:param logger: Logger.
"""
@staticmethod
@abstractmethod
def set_message_partition_key(message, partition_key, **kwargs):
"""Set the partition key as an annotation on a uamqp message.
:param message: The message to update.
:param str partition_key: The partition key value.
:rtype: None
"""
@staticmethod
@abstractmethod
def add_batch(event_data_batch, outgoing_event_data, event_data):
"""
Add EventData to the data body of the BatchMessage.
:param event_data_batch: BatchMessage to add data to.
:param outgoing_event_data: Transformed EventData for sending.
:param event_data: EventData to add to internal batch events. uamqp use only.
:rtype: None
"""
@staticmethod
@abstractmethod
def create_source(source, offset, selector):
"""
Creates and returns the Source.
:param str source: Required.
:param int offset: Required.
:param bytes selector: Required.
"""
@staticmethod
@abstractmethod
def create_receive_client(*, config, **kwargs):
"""
Creates and returns the receive client.
:param ~azure.eventhub._configuration.Configuration config: The configuration.
:keyword Source source: Required. The source.
:keyword JWTTokenAuth auth: Required.
:keyword int idle_timeout: Required.
:keyword network_trace: Required.
:keyword retry_policy: Required.
:keyword str client_name: Required.
:keyword dict link_properties: Required.
:keyword properties: Required.
:keyword link_credit: Required. The prefetch.
:keyword keep_alive_interval: Required. Missing in pyamqp.
:keyword desired_capabilities: Required.
:keyword streaming_receive: Required.
:keyword message_received_callback: Required.
:keyword timeout: Required.
"""
@staticmethod
@abstractmethod
def open_receive_client(*, handler, client, auth):
"""
Opens the receive client.
:param ReceiveClient handler: The receive client.
:param ~azure.eventhub.EventHubConsumerClient client: The consumer client.
"""
@staticmethod
@abstractmethod
def check_link_stolen(consumer, exception):
"""
Checks if link stolen and handles exception.
:param consumer: The EventHubConsumer.
:param exception: Exception to check.
"""
@staticmethod
@abstractmethod
def create_token_auth(auth_uri, get_token, token_type, config, **kwargs):
"""
Creates the JWTTokenAuth.
:param str auth_uri: The auth uri to pass to JWTTokenAuth.
:param get_token: The callback function used for getting and refreshing
tokens. It should return a valid jwt token each time it is called.
:param bytes token_type: Token type.
:param ~azure.eventhub._configuration.Configuration config: EH config.
:keyword bool update_token: Whether to update token. If not updating token,
then pass 300 to refresh_window. Only used by uamqp.
"""
@staticmethod
@abstractmethod
def create_mgmt_client(address, mgmt_auth, config):
"""
Creates and returns the mgmt AMQP client.
:param _Address address: Required. The Address.
:param JWTTokenAuth mgmt_auth: Auth for client.
:param ~azure.eventhub._configuration.Configuration config: The configuration.
"""
@staticmethod
@abstractmethod
def get_updated_token(mgmt_auth):
"""
Return updated auth token.
:param mgmt_auth: Auth.
"""
@staticmethod
@abstractmethod
def mgmt_client_request(mgmt_client, mgmt_msg, **kwargs):
"""
Send mgmt request.
:param AMQP Client mgmt_client: Client to send request with.
:param str mgmt_msg: Message.
:keyword bytes operation: Operation.
:keyword operation_type: Op type.
:keyword status_code_field: mgmt status code.
:keyword description_fields: mgmt status desc.
"""
@staticmethod
@abstractmethod
def get_error(status_code, description):
"""
Gets error corresponding to status code.
:param status_code: Status code.
:param str description: Description of error.
"""
@staticmethod
@abstractmethod
def check_timeout_exception(base, exception):
"""
Checks if timeout exception.
:param base: ClientBase.
:param exception: Exception to check.
"""
|
PypiClean
|
/alipay_sdk_python-3.6.740-py3-none-any.whl/alipay/aop/api/domain/ContractOrderSendLogDTO.py
|
import json
from alipay.aop.api.constant.ParamConstants import *
class ContractOrderSendLogDTO(object):
def __init__(self):
self._files = None
self._log_id = None
self._remark = None
self._send_time = None
self._sender = None
self._status = None
@property
def files(self):
return self._files
@files.setter
def files(self, value):
self._files = value
@property
def log_id(self):
return self._log_id
@log_id.setter
def log_id(self, value):
self._log_id = value
@property
def remark(self):
return self._remark
@remark.setter
def remark(self, value):
self._remark = value
@property
def send_time(self):
return self._send_time
@send_time.setter
def send_time(self, value):
self._send_time = value
@property
def sender(self):
return self._sender
@sender.setter
def sender(self, value):
self._sender = value
@property
def status(self):
return self._status
@status.setter
def status(self, value):
self._status = value
def to_alipay_dict(self):
params = dict()
if self.files:
if hasattr(self.files, 'to_alipay_dict'):
params['files'] = self.files.to_alipay_dict()
else:
params['files'] = self.files
if self.log_id:
if hasattr(self.log_id, 'to_alipay_dict'):
params['log_id'] = self.log_id.to_alipay_dict()
else:
params['log_id'] = self.log_id
if self.remark:
if hasattr(self.remark, 'to_alipay_dict'):
params['remark'] = self.remark.to_alipay_dict()
else:
params['remark'] = self.remark
if self.send_time:
if hasattr(self.send_time, 'to_alipay_dict'):
params['send_time'] = self.send_time.to_alipay_dict()
else:
params['send_time'] = self.send_time
if self.sender:
if hasattr(self.sender, 'to_alipay_dict'):
params['sender'] = self.sender.to_alipay_dict()
else:
params['sender'] = self.sender
if self.status:
if hasattr(self.status, 'to_alipay_dict'):
params['status'] = self.status.to_alipay_dict()
else:
params['status'] = self.status
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = ContractOrderSendLogDTO()
if 'files' in d:
o.files = d['files']
if 'log_id' in d:
o.log_id = d['log_id']
if 'remark' in d:
o.remark = d['remark']
if 'send_time' in d:
o.send_time = d['send_time']
if 'sender' in d:
o.sender = d['sender']
if 'status' in d:
o.status = d['status']
return o
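if __name__ == "__main__":
    # Small round-trip sketch using only the accessors defined above; the field
    # values are invented for illustration.
    log = ContractOrderSendLogDTO()
    log.log_id = "LOG-001"
    log.sender = "ops"
    log.status = "SENT"
    payload = log.to_alipay_dict()
    print(payload)  # {'log_id': 'LOG-001', 'sender': 'ops', 'status': 'SENT'}
    restored = ContractOrderSendLogDTO.from_alipay_dict(payload)
    print(restored.log_id)  # LOG-001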
|
PypiClean
|
/ztfy.jqueryui-0.7.12.tar.gz/ztfy.jqueryui-0.7.12/src/ztfy/jqueryui/resources/js/lang/calendar-el.js
|
/*
* ?
*/
Calendar._DN = new Array
("Κυριακή",
"Δευτέρα",
"Τρίτη",
"Τετάρτη",
"Πέμπτη",
"Παρασκευή",
"Σάββατο",
"Κυριακή");
Calendar._SDN = new Array
("Κυ",
"Δε",
"Tρ",
"Τε",
"Πε",
"Πα",
"Σα",
"Κυ");
Calendar._MN = new Array
("Ιανουάριος",
"Φεβρουάριος",
"Μάρτιος",
"Απρίλιος",
"Μάϊος",
"Ιούνιος",
"Ιούλιος",
"Αύγουστος",
"Σεπτέμβριος",
"Οκτώβριος",
"Νοέμβριος",
"Δεκέμβριος");
Calendar._SMN = new Array
("Ιαν",
"Φεβ",
"Μαρ",
"Απρ",
"Μαι",
"Ιουν",
"Ιουλ",
"Αυγ",
"Σεπ",
"Οκτ",
"Νοε",
"Δεκ");
// tooltips
Calendar._TT = {};
Calendar._TT["INFO"] = "Για το ημερολόγιο";
Calendar._TT["ABOUT"] =
"Επιλογέας ημερομηνίας/ώρας σε DHTML\n" +
"(c) dynarch.com 2002-2005 / Author: Mihai Bazon\n" + // don't translate this this ;-)
"Για τελευταία έκδοση: http://www.dynarch.com/projects/calendar/\n" +
"Distributed under GNU LGPL. See http://gnu.org/licenses/lgpl.html for details." +
"\n\n" +
"Επιλογή ημερομηνίας:\n" +
"- Χρησιμοποιείστε τα κουμπιά \xab, \xbb για επιλογή έτους\n" +
"- Χρησιμοποιείστε τα κουμπιά " + String.fromCharCode(0x2039) + ", " + String.fromCharCode(0x203a) + " για επιλογή μήνα\n" +
"- Κρατήστε κουμπί ποντικού πατημένο στα παραπάνω κουμπιά για πιο γρήγορη επιλογή.";
Calendar._TT["ABOUT_TIME"] = "\n\n" +
"Επιλογή ώρας:\n" +
"- Κάντε κλικ σε ένα από τα μέρη της ώρας για αύξηση\n" +
"- ή Shift-κλικ για μείωση\n" +
"- ή κλικ και μετακίνηση για πιο γρήγορη επιλογή.";
Calendar._TT["TOGGLE"] = "Μπάρα πρώτης ημέρας της εβδομάδας";
Calendar._TT["PREV_YEAR"] = "Προηγ. έτος (κρατήστε για το μενού)";
Calendar._TT["PREV_MONTH"] = "Προηγ. μήνας (κρατήστε για το μενού)";
Calendar._TT["GO_TODAY"] = "Σήμερα";
Calendar._TT["NEXT_MONTH"] = "Επόμενος μήνας (κρατήστε για το μενού)";
Calendar._TT["NEXT_YEAR"] = "Επόμενο έτος (κρατήστε για το μενού)";
Calendar._TT["SEL_DATE"] = "Επιλέξτε ημερομηνία";
Calendar._TT["DRAG_TO_MOVE"] = "Σύρτε για να μετακινήσετε";
Calendar._TT["PART_TODAY"] = " (σήμερα)";
Calendar._TT["MON_FIRST"] = "Εμφάνιση Δευτέρας πρώτα";
Calendar._TT["SUN_FIRST"] = "Εμφάνιση Κυριακής πρώτα";
Calendar._TT["CLOSE"] = "Κλείσιμο";
Calendar._TT["TODAY"] = "Σήμερα";
Calendar._TT["TIME_PART"] = "(Shift-)κλικ ή μετακίνηση για αλλαγή";
// date formats
Calendar._TT["DEF_DATE_FORMAT"] = "dd-mm-y";
Calendar._TT["TT_DATE_FORMAT"] = "D, d M";
Calendar._TT["WK"] = "εβδ";
|
PypiClean
|
/arun_test_package_1961-0.0.1.tar.gz/arun_test_package_1961-0.0.1/src/model_asset_hierarchy.py
|
import json
import time
from pathlib import Path
from typing import Dict
import pandas as pd
from cognite.client import CogniteClient
from cognite.client.data_classes import Asset, AssetUpdate
from pandas import DataFrame
import src.constants as const
from src.model.config import Config
from src.model.project import Project
from src.util import cdf
AIR_INFRA = "airInfra"
AIR_MODELS = "airModels"
AIR_SOURCE = "AIR Application"
AIR_ROOT_ID = "AIR_root_asset"
class ModelAssetHierarchy:
def __init__(self, config: Dict[Path, Config], project: Project):
self.config = config
project_info = const.PROJECT_TO_API_KEYS[project]
print("Project: " + project_info.name)
self.client = CogniteClient(
api_key=project_info.get_deployment_key(),
project=project_info.name,
client_name="AIR Model Asset Client",
base_url=project_info.base_url,
)
self._hierarchy: Dict[str, Dict] = {}
self._existing_hierarchy: DataFrame = pd.DataFrame()
self._data_set_id: int = 0
@property
def hierarchy(self) -> Dict[str, Dict]:
return self._hierarchy if self._hierarchy else self._get_asset_hierarchy()
@property
def existing_hierarchy(self) -> DataFrame:
return self._existing_hierarchy if self._existing_hierarchy.shape[0] else self._get_existing_asset_hierarchy()
@property
def data_set_id(self) -> int:
return self._data_set_id if self._data_set_id else cdf.data_set_id(self.client, "AIR")
def _get_asset_hierarchy(self) -> Dict[str, Dict]:
hierarchy: Dict[str, Dict] = {AIR_MODELS: {}, AIR_INFRA: {}}
for key, value in self.config.items():
if value.air_infrastructure:
hierarchy[AIR_INFRA].update({key: value.to_dict()})
else:
hierarchy[AIR_MODELS].update({key: value.to_dict()})
self._hierarchy = hierarchy
return hierarchy
def _get_existing_asset_hierarchy(self) -> DataFrame:
assets = self.client.assets.list(data_set_ids=[self.data_set_id], limit=-1).to_pandas()
if assets.shape[0] == 0:
existing_hierarchy = pd.DataFrame()
else:
air_asset_id = assets.loc[assets["name"] == "AIR", "id"].iloc[0]
existing_hierarchy = assets[assets["rootId"] == air_asset_id]
self._existing_hierarchy = existing_hierarchy
return existing_hierarchy
def _asset_exists(self, model_name: str) -> bool:
return self.existing_hierarchy.shape[0] and model_name in self.existing_hierarchy["name"].tolist()
def _create_structure_if_not_exist(self):
update = False
if (self.existing_hierarchy.shape[0] == 0) or ("AIR" not in self.existing_hierarchy["name"].tolist()):
self._create_asset(AIR_ROOT_ID, "", name="AIR")
update = True
if (self.existing_hierarchy.shape[0] == 0) or (AIR_MODELS not in self.existing_hierarchy["name"].tolist()):
self._create_asset(AIR_MODELS, "All custom models are stored here.", parent_external_id=AIR_ROOT_ID)
update = True
if (self.existing_hierarchy.shape[0] == 0) or (AIR_INFRA not in self.existing_hierarchy["name"].tolist()):
self._create_asset(
AIR_INFRA, "All AIR infrastructure models are stored here.", parent_external_id=AIR_ROOT_ID
)
update = True
if update:
assets = self.client.assets.list(data_set_ids=self.data_set_id, limit=-1).to_pandas()
while (assets.shape[0] == 0) or (
not all([i in assets["name"].tolist() for i in ["AIR", AIR_MODELS, AIR_INFRA]])
):
time.sleep(1)
assets = self.client.assets.list(data_set_ids=self.data_set_id, limit=-1).to_pandas()
return self._get_existing_asset_hierarchy()
return self.existing_hierarchy
def _create_asset(
self, ext_id: str, desc: str, *, name: str = "", parent_external_id: str = None, meta: Dict = None,
) -> None:
self.client.assets.create(
Asset(
external_id=ext_id,
name=name if name else ext_id,
description=desc,
data_set_id=self.data_set_id,
source=AIR_SOURCE,
parent_external_id=parent_external_id,
metadata=meta,
)
)
def _create_air_asset(self, model_type: str, model_name: str, model_config: Dict) -> None:
self._create_asset(
model_name,
model_config["description"],
meta=self._clean_metadata(model_config),
parent_external_id=self.existing_hierarchy.loc[
self.existing_hierarchy["name"] == model_type, "externalId"
].iloc[0],
)
def _update_asset(self, model_name: str, model_config: Dict) -> None:
model_asset = model_config
metadata = self._clean_metadata(model_config)
cdf_model = self.existing_hierarchy[self.existing_hierarchy["name"] == model_name]
updated_asset = AssetUpdate(id=cdf_model["id"].iloc[0])
update = False
if model_asset["description"] != cdf_model["description"].iloc[0]:
updated_asset = updated_asset.description.set(model_asset["description"])
update = True
if metadata != cdf_model["metadata"].iloc[0]:
updated_asset = updated_asset.metadata.set(metadata)
update = True
if update:
self.client.assets.update(updated_asset)
def create_or_update(self):
self._create_structure_if_not_exist()
for model_type, model_paths in self.hierarchy.items():
for path, model_config in model_paths.items():
model_name = path.name
if self._asset_exists(model_name):
self._update_asset(model_name, model_config)
else:
self._create_air_asset(model_type, model_name, model_config)
@staticmethod
def _clean_metadata(model_config) -> Dict:
if model_config.get("deploy"):
model_config.pop("deploy")
if model_config:
model_version = model_config.get("modelVersion")
if model_version:
model_version = ".".join(model_version.split(".")[:-1])
model_config.update({"modelVersion": model_version})
else:
model_config.update({"modelVersion": ""})
# metadata.update({"schedule": str(model_config.get("schedule"))})
model_config = {
key: json.dumps(value) if isinstance(value, dict) or isinstance(value, list) else str(value)
for key, value in model_config.items()
}
else:
model_config = {}
return model_config
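if __name__ == "__main__":
    # Illustrative call of the staticmethod above with an invented config dict
    # (assuming the module's own imports resolve): "deploy" is dropped, the
    # build number is trimmed from "modelVersion", and nested values are
    # JSON-encoded.
    sample = {
        "deploy": True,
        "description": "demo model",
        "modelVersion": "1.2.3.4",
        "schedule": {"cron": "0 * * * *"},
    }
    print(ModelAssetHierarchy._clean_metadata(sample))
    # -> {'description': 'demo model', 'modelVersion': '1.2.3', 'schedule': '{"cron": "0 * * * *"}'}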
|
PypiClean
|
/genie.libs.conf-23.8-py3-none-any.whl/genie/libs/conf/interface/iosxe/yang/interface.py
|
__all__ = (
'Interface',
'PhysicalInterface',
'VirtualInterface',
'LoopbackInterface',
'EthernetInterface'
)
import re
import contextlib
import abc
import weakref
import string
from enum import Enum
from genie.decorator import managedattribute
from genie.conf.base import ConfigurableBase
from genie.conf.base.exceptions import UnknownInterfaceTypeError
from genie.conf.base.attributes import SubAttributes, KeyedSubAttributes, SubAttributesDict,\
AttributesHelper
from genie.conf.base.cli import CliConfigBuilder
from genie.conf.base.config import YangConfig
from genie.libs.conf.base import \
MAC, \
IPv4Address, IPv4Interface, \
IPv6Address, IPv6Interface
from genie.libs.conf.l2vpn import PseudowireNeighbor
from genie.libs.conf.l2vpn.pseudowire import EncapsulationType
import genie.libs.conf.interface
try:
from ydk.models.ned import ned
from ydk.types import DELETE, Empty
from ydk.services import CRUDService
from ydk.services import CodecService
from ydk.providers import CodecServiceProvider
# patch a netconf provider
from ydk.providers import NetconfServiceProvider as _NetconfServiceProvider
from ydk.providers._provider_plugin import _ClientSPPlugin
class NetconfServiceProvider(_NetconfServiceProvider):
def __init__(self, device):
if 'yang' not in device.mapping:
# Want it, but don't have a connection?
raise Exception("Missing connection of "
"type 'yang' in the device "
"mapping '{map}'".format(map=device.mapping))
alias = device.mapping['yang']
dev = device.connectionmgr.connections[alias]
super().__init__(address=str(dev.connection_info.ip),
port=dev.connection_info.port,
username=dev.connection_info.username,
password=dev.connection_info.password,
protocol = 'ssh')
self.sp_instance = _ClientSPPlugin(self.timeout,
use_native_client=False)
self.sp_instance._nc_manager = dev
def _connect(self, *args, **kwargs): pass
except Exception:
pass
class ConfigurableInterfaceNamespace(ConfigurableBase):
def __init__(self, interface=None):
assert interface
self._interface = interface
_interface = None
@property
def interface(self):
return self._interface
@property
def testbed(self):
return self.interface.testbed
@property
def device(self):
return self.interface.device
class Interface(genie.libs.conf.interface.Interface):
""" base Interface class for IOS-XE devices
"""
def __new__(cls, *args, **kwargs):
factory_cls = cls
if cls is Interface:
try:
name = kwargs['name']
except KeyError:
raise TypeError('\'name\' argument missing')
d_parsed = genie.libs.conf.interface.ParsedInterfaceName(
name, kwargs.get('device', None))
try:
factory_cls = cls._name_to_class_map[d_parsed.type]
except KeyError:
pass
if factory_cls is not cls:
self = factory_cls.__new__(factory_cls, *args, **kwargs)
elif super().__new__ is object.__new__:
self = super().__new__(factory_cls)
else:
self = super().__new__(factory_cls, *args, **kwargs)
return self
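    # Note: the factory dispatch above means Interface(device=dev,
    # name='GigabitEthernet0/0/0') transparently constructs a
    # GigabitEthernetInterface (similarly for Loopback/Ethernet names); types
    # not found in _name_to_class_map simply fall through to the base class.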
bandwidth = managedattribute(
name='bandwidth',
default=None,
type=(None, managedattribute.test_istype(int)))
description = managedattribute(
name='description',
default=None,
type=(None, managedattribute.test_istype(str)))
mtu = managedattribute(
name='mtu',
default=None,
type=(None, managedattribute.test_istype(int)))
shutdown = managedattribute(
name='shutdown',
default=False,
type=(None, managedattribute.test_istype(bool)))
def build_config(self, apply=True, attributes=None, unconfig=False,
**kwargs):
assert not kwargs
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
with self._build_config_create_interface_submode_context(configurations):
self._build_config_interface_submode(configurations=configurations, attributes=attributes, unconfig=unconfig)
if apply:
if configurations:
self.device.configure(configurations, fail_invalid=True)
else:
return str(configurations)
def build_unconfig(self, apply=True, attributes=None, **kwargs):
return self.build_config(apply=apply,
attributes=attributes,
unconfig=True, **kwargs)
@abc.abstractmethod
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class PhysicalInterface(Interface, genie.libs.conf.interface.PhysicalInterface):
@abc.abstractmethod
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class VirtualInterface(Interface, genie.libs.conf.interface.VirtualInterface):
@abc.abstractmethod
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class LoopbackInterface(VirtualInterface, genie.libs.conf.interface.LoopbackInterface):
_interface_name_types = (
'Loopback',
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def build_config(self, apply=True, attributes=None, unconfig=False,
**kwargs):
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
ydk_obj = ned.Native.Interface.Loopback()
# name is a mandatory arguments
keep = string.digits
ydk_obj.name = int(''.join(i for i in attributes.value('name') if i in keep))
if unconfig and attributes.iswildcard:
pass
else:
ipv4 = attributes.value('ipv4')
if ipv4:
ydk_obj.ip.address.primary.address = str(ipv4.ip)
ydk_obj.ip.address.primary.mask = str(ipv4.netmask)
vrf = attributes.value('vrf')
if vrf:
ydk_obj.vrf.forwarding = vrf.name
# instantiate crud service
crud_service = CRUDService()
if apply:
# create netconf connection
ncp = NetconfServiceProvider(self.device)
if unconfig:
crud_service.delete(ncp, ydk_obj)
else:
crud_service.create(ncp, ydk_obj)
else:
if unconfig:
return YangConfig(device=self.device, unconfig=unconfig,
ncp=NetconfServiceProvider,
ydk_obj=ydk_obj,
crud_service=crud_service.delete)
else:
return YangConfig(device=self.device, unconfig=unconfig,
ncp=NetconfServiceProvider,
ydk_obj=ydk_obj,
crud_service=crud_service.create)
class EthernetInterface(PhysicalInterface, genie.libs.conf.interface.EthernetInterface):
_interface_name_types = (
'Ethernet', # TODO verify
'FastEthernet',
# TODO more?
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def build_config(self, apply=True, attributes=None, unconfig=False,
**kwargs):
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
ydk_obj = ned.Native.Interface.Gigabitethernet()
ydk_obj.name = self.name
if unconfig and attributes.iswildcard:
ydk_obj = DELETE()
else:
shutdown = attributes.value('shutdown')
if shutdown is not None:
if unconfig:
# Special case: unconfiguring always applies shutdown
ydk_obj.shutdown = Empty()
elif shutdown:
ydk_obj.shutdown = Empty()
else:
ydk_obj.shutdown = DELETE()
ipv4 = attributes.value('ipv4')
if ipv4:
ydk_obj.ip.address.primary.address = str(ipv4.ip)
ydk_obj.ip.address.primary.mask = str(ipv4.netmask)
vrf = attributes.value('vrf')
if vrf:
ydk_obj.vrf.forwarding = vrf.name
# instantiate crud service
crud_service = CRUDService()
if apply:
# create netconf connection
ncp = NetconfServiceProvider(self.device)
return crud_service.create(ncp, ydk_obj)
else:
return YangConfig(device=self.device, unconfig=unconfig,
ncp=NetconfServiceProvider,
ydk_obj=ydk_obj,
crud_service=crud_service.create)
class GigabitEthernetInterface(PhysicalInterface):
_interface_name_types = (
'GigabitEthernet',
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def build_config(self, apply=True, attributes=None, unconfig=False,
**kwargs):
attributes = AttributesHelper(self, attributes)
configurations = CliConfigBuilder(unconfig=unconfig)
ydk_obj = ned.Native.Interface.Gigabitethernet()
keep = string.digits + '//'
ydk_obj.name = ''.join(i for i in attributes.value('name') if i in keep)
shutdown = attributes.value('shutdown')
if shutdown is not None:
if unconfig:
# Special case: unconfiguring always applies shutdown
ydk_obj.shutdown = Empty()
elif shutdown:
ydk_obj.shutdown = Empty()
else:
ydk_obj.shutdown = DELETE()
ipv4 = attributes.value('ipv4')
if ipv4:
if unconfig:
ydk_obj.ip.address.primary.address = DELETE()
ydk_obj.ip.address.primary.mask = DELETE()
else:
ydk_obj.ip.address.primary.address = str(ipv4.ip)
ydk_obj.ip.address.primary.mask = str(ipv4.netmask)
vrf = attributes.value('vrf')
if vrf:
if unconfig:
ydk_obj.vrf.forwarding = DELETE()
else:
ydk_obj.vrf.forwarding = vrf.name
# instantiate crud service
crud_service = CRUDService()
if apply:
# create netconf connection
ncp = NetconfServiceProvider(self.device)
crud_service.create(ncp, ydk_obj)
else:
return YangConfig(device=self.device,
ydk_obj=ydk_obj,
ncp=NetconfServiceProvider,
crud_service=crud_service.create)
Interface._build_name_to_class_map()
|
PypiClean
|
/criteo_api_marketingsolutions_sdk-2023.7.0.230831-py3-none-any.whl/criteo_api_marketingsolutions_v2023_07/model/transparency_report_entity_message.py
|
import re # noqa: F401
import sys # noqa: F401
from criteo_api_marketingsolutions_v2023_07.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from criteo_api_marketingsolutions_v2023_07.exceptions import ApiAttributeError
def lazy_import():
from criteo_api_marketingsolutions_v2023_07.model.transparency_report_attributes import TransparencyReportAttributes
globals()['TransparencyReportAttributes'] = TransparencyReportAttributes
class TransparencyReportEntityMessage(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'type': (str,), # noqa: E501
'attributes': (TransparencyReportAttributes,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'type': 'type', # noqa: E501
'attributes': 'attributes', # noqa: E501
}
read_only_vars = {
'type', # noqa: E501
}
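    # Note: 'type' is server-populated. _from_openapi_data() may set it when
    # deserializing API responses, but __init__() treats it as read-only and
    # raises ApiAttributeError if a caller passes it directly.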
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, type, attributes, *args, **kwargs): # noqa: E501
"""TransparencyReportEntityMessage - a model defined in OpenAPI
Args:
type (str):
attributes (TransparencyReportAttributes):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', True)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.type = type
self.attributes = attributes
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, attributes, *args, **kwargs): # noqa: E501
"""TransparencyReportEntityMessage - a model defined in OpenAPI
attributes (TransparencyReportAttributes):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
for arg in args:
if isinstance(arg, dict):
kwargs.update(arg)
else:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.attributes = attributes
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
|
PypiClean
|
/au_nz_jobs-0.1.2.tar.gz/au_nz_jobs-0.1.2/au_nz_jobs/downloader/downloader.py
|
import requests
import json
import pandas as pd
import numpy as np
import re
import time
# naming convention:
# variables and fields in df and database: snake_case
# define the Job class: detailed information of a certain job
class Job:
SEEK_API_URL = "https://www.seek.com.au/api/chalice-search/search"
SEEK_API_URL_JOB = "https://chalice-experience-api.cloud.seek.com.au/job"
def __init__(self, job_id: str):
"""
:param job_id: job id
"""
self.job_id = job_id
# define a function to download the job information
def download(self):
# initiate the url
url = f"{self.SEEK_API_URL_JOB}/{self.job_id}"
# api request
r = requests.get(url=url)
# convert to json
r = r.json()
# get expiryDate, salaryType, hasRoleRequirements, roleRequirements, jobAdDetails, contactMatches;
# use .get() to avoid KeyError
expiry_date = r.get("expiryDate")
salary_type = r.get("salaryType")
has_role_requirements = r.get("hasRoleRequirements")
role_requirements = r.get("roleRequirements")
job_ad_details = r.get("jobAdDetails")
contact_matches = r.get("contactMatches")
# define the function to get the email and phone number from a single contact
def get_contact(contact):
contact_dict = {'Email': [], 'Phone': []}
for j in range(len(contact)):
if contact[j]['type'] == 'Email':
contact_dict['Email'].append(contact[j]['value'])
elif contact[j]['type'] == 'Phone':
contact_dict['Phone'].append(contact[j]['value'])
# define the regex for email
email_regex = re.compile(r'[\w\.-]+@[\w\.-]+')
# loop through the email list
for j in range(len(contact_dict['Email'])):
# trim the space
contact_dict['Email'][j] = contact_dict['Email'][j].replace(' ', '')
# remove the nbsp
contact_dict['Email'][j] = contact_dict['Email'][j].replace(' ', '')
# get the email
contact_dict['Email'][j] = email_regex.findall(contact_dict['Email'][j])[0]
# strip punctuation characters from the beginning and end
contact_dict['Email'][j] = contact_dict['Email'][j].strip('!@#$%^&*()_+-=,./<>?;:\'"[]{}\\|`~')
# remove the special characters
contact_dict['Email'][j] = ''.join(
[x for x in contact_dict['Email'][j] if x.isalnum() or x in ['@', '.', '_', '-']])
# loop through the phone list to trim all the space and only keep the numbers and +
for j in range(len(contact_dict['Phone'])):
contact_dict['Phone'][j] = contact_dict['Phone'][j].replace(' ', '')
contact_dict['Phone'][j] = ''.join([x for x in contact_dict['Phone'][j] if x.isalnum() or x in ['+']])
# replace [] with None
if not contact_dict['Email']:
contact_dict['Email'] = None
if not contact_dict['Phone']:
contact_dict['Phone'] = None
return contact_dict
# get the contact from contact_matches
email = get_contact(contact_matches)['Email']
phone = get_contact(contact_matches)['Phone']
# get companyOverallRating,companyProfileUrl,companyName,companyId under companyReview
# check for companyReview, if not exist, set to None
if r.get("companyReview") is None:
company_overall_rating = None
company_profile_url = None
company_name_review = None
company_id = None
else:
company_overall_rating = r.get("companyReview").get("companyOverallRating")
company_profile_url = r.get("companyReview").get("companyProfileUrl")
company_name_review = r.get("companyReview").get("companyName")
company_id = r.get("companyReview").get("companyId")
# build a dictionary including job_id and all the information above
job_details = {"id": self.job_id, "expiry_date": expiry_date, "salary_type": salary_type,
"has_role_requirements": has_role_requirements, "role_requirements": role_requirements,
"job_ad_details": job_ad_details, "email": email, "phone": phone,
"company_overall_rating": company_overall_rating, "company_profile_url": company_profile_url,
"company_name_review": company_name_review, "company_id": company_id}
# write to attribute
self.job_details = job_details
# return the dictionary
return job_details
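# A minimal usage sketch of the Job class (not part of the original script); the
# job id below is a placeholder, and Job.download performs a live request to the
# SEEK job endpoint, so it only succeeds for a real, currently advertised job id.
def _example_single_job(job_id="12345678"):
    job = Job(job_id=job_id)
    details = job.download()
    return details["email"], details["phone"]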
# define Jobs class: basic information of the jobs to search
class Jobs:
SEEK_API_URL = "https://www.seek.com.au/api/chalice-search/search"
SEEK_API_URL_JOB = "https://chalice-experience-api.cloud.seek.com.au/job"
def __init__(self, keywords: list, locations: list, work_type: list = None, check_words: list = None):
"""
:param keywords: list of keywords to search
:param locations: list of locations to search
:param work_type: list of work types to search, default to None which means all work types
options: ['full_time', 'part_time', 'contract', 'casual']
:param check_words: optional list of words to flag in job titles and teasers when downloading details
"""
self.keywords = keywords
self.locations = locations
self.work_type = work_type
self.check_words = check_words
self.if_downloaded = False
self.if_download_details = False
# work_type id dictionary
self.work_type_dict = {
'full_time': 242,
'part_time': 243,
'contract': 244,
'casual': 245
}
options = ['full_time', 'part_time', 'contract', 'casual']
# check if the work_type is valid and convert work_type to work_type_id
if self.work_type is not None:
for i in self.work_type:
if i not in options:
raise ValueError(f"Invalid work_type: {i}, please choose from {options}")
self.work_type_id = [self.work_type_dict[i] for i in self.work_type]
else:
self.work_type_id = list(self.work_type_dict.values())
def download(self, date_range: int = 31, sort_mode: str = 'date'):
"""
:param date_range: number of days back from today to search, default to 31
:param sort_mode: sort mode, default to 'date'
options: ['relevance', 'date']
:return: a pandas DataFrame of jobs (empty if nothing was found)
"""
# convert sort_mode
sort_mode_dict = {
'relevance': 'KeywordRelevance',
'date': 'ListedDate'
}
# check if the sort_mode is valid
if sort_mode not in sort_mode_dict.keys():
raise ValueError(f"Invalid sort_mode: {sort_mode}, please choose from {sort_mode_dict.keys()}")
sort_mode = sort_mode_dict[sort_mode]
# define a function to download for a single pair of keyword and location
def _download_jobs(keyword, location):
# start timer
start_time = time.time()
# initiate the parameters
params = dict(
siteKey="AU-Main",
sourcesystem="houston",
page="1",
seekSelectAllPages="true",
sortmode=sort_mode,
dateRange=date_range,
# unpack the work_type_id and join with comma
worktype=','.join([str(i) for i in self.work_type_id]),
keywords=keyword,
where=location
)
# api request
resp = requests.get(url=self.SEEK_API_URL, params=params)
# convert the response to json
json_resp = resp.json()
# get the total number job count
total_job_count = json_resp.get('totalCount')
# if total_job_count is 0, return empty list
if total_job_count == 0:
print(f"No jobs found for keyword: {keyword}, location: {location} in the last {date_range} days.")
print("You can try again with longer date range or different keywords/location.")
return []
# convert job count to number of pages
if total_job_count % 20 == 0:
pages = total_job_count // 20
else:
pages = total_job_count // 20 + 1
# initiate the jobs list
jobs = []
# loop through the pages
for page in range(1, pages + 1):
# update the page number
params['page'] = page
# api request
resp = requests.get(url=self.SEEK_API_URL, params=params)
# convert the response to json
json_resp = resp.json()
# get the jobs
jobs += json_resp.get('data')
# end timer
end_time = time.time()
print(
f"Downloaded {len(jobs)} jobs for keyword: {keyword}, location: {location} in {(end_time - start_time):.2f} seconds.")
# return the jobs
return jobs
# initiate the jobs list
jobs = []
# loop through the keywords and locations
for keyword in self.keywords:
for location in self.locations:
# download the jobs
jobs += _download_jobs(keyword, location)
# check if the jobs is empty, if yes, return empty dataframe, write if_downloaded to True
if len(jobs) == 0:
print("No jobs found for all keyword/location combination in given date_range.")
print("Please try again with different keywords/locations/date_range.")
self.if_downloaded = True
return pd.DataFrame()
# convert the jobs to dataframe
jobs = pd.DataFrame(jobs)
# define a function to clean the jobs dataframe
def _clean_jobs(df):
# drop the duplicate jobs based on id
df.drop_duplicates(subset=['id'], inplace=True)
# drop the unnecessary columns: logo, isStandOut, automaticInclusion, displayType, templateFileName,
# tracking, solMetadata, branding, categories
df.drop(
columns=['logo', 'isStandOut', 'automaticInclusion', 'displayType', 'templateFileName', 'tracking',
'solMetadata', 'branding', 'categories'], inplace=True)
# drop the columns with names in numbers
df.drop(columns=[i for i in df.columns if i.isdigit()], inplace=True)
# rename all camel case columns to snake case
df.rename(columns={i: re.sub(r'(?<!^)(?=[A-Z])', '_', i).lower() for i in df.columns}, inplace=True)
# convert area_id, suburb_id to Int64
df['area_id'] = df['area_id'].astype('Int64')
df['suburb_id'] = df['suburb_id'].astype('Int64')
# split the advertiser column, example: {'description': 'Seek Limited', 'id': '20242373'}
df['advertiser_title'] = df['advertiser'].apply(lambda x: x['description'])
df['advertiser_id'] = df['advertiser'].apply(lambda x: x['id'])
df.drop(columns=['advertiser'], inplace=True)
# rename the advertiser_title column to advertiser
df.rename(columns={'advertiser_title': 'advertiser'}, inplace=True)
# split the classification column, example: { 'id': '6304', 'description': 'Information & Communication
# Technology'}
df['classification_title'] = df['classification'].apply(lambda x: x['description'])
df['classification_id'] = df['classification'].apply(lambda x: x['id'])
df.drop(columns=['classification'], inplace=True)
# rename the classification_title column to classification
df.rename(columns={'classification_title': 'classification'}, inplace=True)
# split the sub_classification column, example: {'id': '6311', 'description': 'Database Development'}
df['sub_classification_title'] = df['sub_classification'].apply(lambda x: x['description'])
df['sub_classification_id'] = df['sub_classification'].apply(lambda x: x['id'])
df.drop(columns=['sub_classification'], inplace=True)
# rename the sub_classification_title column to sub_classification
df.rename(columns={'sub_classification_title': 'sub_classification'}, inplace=True)
# return the jobs dataframe
return df
# clean the jobs dataframe
jobs = _clean_jobs(jobs)
# write to attribute
self.jobs_df = jobs
self.if_downloaded = True
print(f"After cleaning, download {len(jobs)} jobs in total.")
# return the jobs dataframe
return jobs
# define a function to get classification_df
def _classification_df(self):
"""
:return: a dataframe of classification
"""
# check if the jobs_df is downloaded
if not self.if_downloaded:
raise ValueError("Please download the jobs_df first")
# get the classification dataframe
classification_df = self.jobs_df[['classification', 'classification_id']].drop_duplicates()
# reset the index
classification_df.reset_index(drop=True, inplace=True)
# drop null rows
classification_df.dropna(inplace=True)
# write to attribute
self.classification_df = classification_df
# return the classification dataframe
return classification_df
# define a function to get sub_classification_df
def _sub_classification_df(self):
"""
:return: a dataframe of sub_classification
"""
# check if the jobs_df is downloaded
if not self.if_downloaded:
raise ValueError("Please download the jobs_df first")
# get the sub_classification dataframe
sub_classification_df = self.jobs_df[['sub_classification', 'sub_classification_id']].drop_duplicates()
# reset the index
sub_classification_df.reset_index(drop=True, inplace=True)
# drop null rows
sub_classification_df.dropna(inplace=True)
# write to attribute
self.sub_classification_df = sub_classification_df
# return the sub_classification dataframe
return sub_classification_df
# define a function to get location_df
def _location_df(self):
"""
:return: a dataframe of location
"""
# check if the jobs_df is downloaded
if not self.if_downloaded:
raise ValueError("Please download the jobs_df first")
# get the location dataframe
location_df = self.jobs_df[['location', 'location_id']].drop_duplicates()
# reset the index
location_df.reset_index(drop=True, inplace=True)
# drop null rows
location_df.dropna(inplace=True, how='all')
# write to attribute
self.location_df = location_df
# return the location dataframe
return location_df
# define a function to get area_df
def _area_df(self):
"""
:return: a dataframe of area
"""
# check if the jobs_df is downloaded
if not self.if_downloaded:
raise ValueError("Please download the jobs_df first")
# get the area dataframe
area_df = self.jobs_df[['area', 'area_id']].drop_duplicates()
# reset the index
area_df.reset_index(drop=True, inplace=True)
# drop null rows
area_df.dropna(inplace=True)
# write to attribute
self.area_df = area_df
# return the area dataframe
return area_df
# define a function to get the advertiser_df
def _advertiser_df(self):
"""
:return: a dataframe of advertiser
"""
# check if the jobs_df is downloaded
if not self.if_downloaded:
raise ValueError("Please download the jobs_df first")
# get the advertiser dataframe
advertiser_df = self.jobs_df[['advertiser', 'advertiser_id']].drop_duplicates()
# reset the index
advertiser_df.reset_index(drop=True, inplace=True)
# drop null rows
advertiser_df.dropna(inplace=True)
# write to attribute
self.advertiser_df = advertiser_df
# return the advertiser dataframe
return advertiser_df
# define a function to get the cleaned jobs dataframe
def _jobs_cleaned_df(self):
"""
:return: a dataframe of jobs
"""
# check if the jobs_df is downloaded
if not self.if_downloaded:
raise ValueError("Please download the jobs_df first")
# copy the jobs dataframe
jobs_cleaned_df = self.jobs_df.copy()
# drop the unnecessary columns: classification, sub_classification, location, area, advertiser,
# location_where_value, area_where_value, suburb_where_value
jobs_cleaned_df.drop(
columns=['classification', 'sub_classification', 'location', 'area', 'advertiser', 'location_where_value',
'area_where_value', 'suburb_where_value'], inplace=True)
# drop null rows
jobs_cleaned_df.dropna(inplace=True, how='all')
# write to attribute
self.jobs_cleaned_df = jobs_cleaned_df
# return the jobs_cleaned_df
return jobs_cleaned_df
# define a function to do check_words
def _check_words(self, jobs, check_words):
# check for if check_words is None, if so, return the jobs dataframe
if check_words is None:
return jobs
# print the row number of jobs dataframe
print(f"Before checking words, there are {len(jobs)} jobs in total.")
# extract check_words from teaser, create a new column called "check_words_found_teaser", each keyword
# should be a single word, ignore case
import re
jobs["check_words_found_teaser"] = jobs.teaser.apply(
lambda x: re.findall(r"\b(" + "|".join(check_words) + r")\b", x, flags=re.IGNORECASE))
# similar to title
jobs["check_words_found_title"] = jobs.title.apply(
lambda x: re.findall(r"\b(" + "|".join(check_words) + r")\b", x, flags=re.IGNORECASE))
# create a new column called "check_words_found", which is the combination of "check_words_found_teaser"
# and "check_words_found_title"
jobs["check_words_found"] = jobs["check_words_found_teaser"] + jobs["check_words_found_title"]
# lower the words in "check_words_found"
jobs["check_words_found"] = jobs["check_words_found"].apply(lambda x: [i.lower() for i in x])
# eliminate the duplicated value in check_words_found
jobs["check_words_found"] = jobs["check_words_found"].apply(lambda x: list(set(x)))
# create a new column called "check_words_count", which is the length of "check_words_found"
jobs["check_words_count"] = jobs["check_words_found"].apply(lambda x: len(x))
# create a new column called "check_words_checked", which is True if "check_words_count" > 1, otherwise
# False
jobs["check_words_checked"] = jobs["check_words_count"].apply(lambda x: True if x > 0 else False)
# drop the unnecessary columns: check_words_found_teaser, check_words_found_title, check_words_count
jobs.drop(columns=["check_words_found_teaser", "check_words_found_title", "check_words_count"], inplace=True)
# print the row number which check_words_checked is True
print(f"After checking, there are {len(jobs[jobs.check_words_checked])} jobs with check words.")
# return the jobs dataframe
return jobs
# define a function to download job details
def _download_details(self, check_words=None):
# check if the jobs_df is downloaded
if not self.if_downloaded:
raise ValueError("Please download the jobs_df first")
# clean the jobs_df
jobs_cleaned_df = self._jobs_cleaned_df()
# initialize a list to store the jobs_to_download
jobs_to_download = []
# if check_words is None, jobs_to_download is the id column of jobs_cleaned_df
if check_words is None:
jobs_to_download = jobs_cleaned_df.id.tolist()
# if check_words is not None, call the _check_words function
else:
jobs_checked_df = self._check_words(jobs_cleaned_df, check_words)
# the jobs_to_download is the id column of jobs_checked_df where check_words_checked is True
jobs_to_download = jobs_checked_df[jobs_checked_df.check_words_checked].id.tolist()
# check if the jobs_to_download is empty, print the message and return jobs_cleaned_df
# write to attribute: n_jobs_details_downloaded with 0
if len(jobs_to_download) == 0:
print("There is no job to download the details.")
self.n_jobs_details_downloaded = 0
return jobs_cleaned_df
# start timer
start_time = time.time()
# initialize a list to store the jobs_details
jobs_details = []
# loop through the jobs_to_download, create Job class for each job, and download the job details
for job_id in jobs_to_download:
job = Job(job_id=job_id)
job.download()
jobs_details.append(job.job_details)
# convert the jobs_details to a dataframe
jobs_details_df = pd.DataFrame(jobs_details)
# left join the job_cleaned_df and jobs_details_df on id
jobs_details_df = jobs_cleaned_df.merge(jobs_details_df, on="id", how="left")
# write to attribute
self.jobs_details_df = jobs_details_df
self.n_jobs_details_downloaded = len(jobs_details_df)
# print the time taken
print(f"Job details download finished, time taken: {(time.time() - start_time):.2f} seconds")
# return the jobs_details_df
return jobs_details_df
# define a function to get the company dataframe
def _company_review_df(self):
# check if the n_jobs_details_downloaded is 0, if yes, return blank dataframe
if self.n_jobs_details_downloaded == 0:
return pd.DataFrame()
# get the jobs_details_df
jobs_details_df = self.jobs_details_df
# get the company_review_df: company_overall_rating, company_profile_url,
# company_name_review, company_id
company_review_df = jobs_details_df[["company_overall_rating", "company_profile_url",
"company_name_review", "company_id"]].copy()
# drop the duplicated rows and the null rows based on company_id
company_review_df.drop_duplicates(subset="company_id", inplace=True)
company_review_df.dropna(subset=["company_id"], inplace=True)
# write to attribute
self.company_review_df = company_review_df
# return the company_review_df
return company_review_df
# define a function to get all the dataframes
def get_all_dfs(self, date_range=31, sort_mode='date', check_words=None, if_download_details=True):
"""
:return: a dict of dataframes: classification, sub_classification, location, area, advertiser, jobs, company_review, jobs_wide (or None if no jobs were found)
"""
# if not downloaded, get the jobs dataframe
if not self.if_downloaded:
self.download(date_range=date_range, sort_mode=sort_mode)
# check if the jobs_cleaned_df is empty, if yes, return
if len(self._jobs_cleaned_df()) == 0:
return
# get the classification dataframe
classification_df = self._classification_df()
# get the sub_classification dataframe
sub_classification_df = self._sub_classification_df()
# get the location dataframe
location_df = self._location_df()
# get the area dataframe
area_df = self._area_df()
# get the advertiser dataframe
advertiser_df = self._advertiser_df()
# get the cleaned jobs dataframe
jobs_cleaned_df = self._jobs_cleaned_df()
# if if_download_details is True, download the job details
if if_download_details:
# get the jobs_details dataframe
jobs_details_df = self._download_details(check_words=check_words)
# set jobs to jobs_details_df
jobs = jobs_details_df
else:
jobs = jobs_cleaned_df
# get the company_review dataframe
company_review_df = self._company_review_df()
# final cleaning for jobs dataframe
# remove the company_overall_rating, company_profile_url, company_name_review columns if found in jobs
if "company_overall_rating" in jobs.columns:
jobs.drop(columns=["company_overall_rating", "company_profile_url", "company_name_review"], inplace=True)
# listing_date, expiry_date to datetime
jobs.listing_date = pd.to_datetime(jobs.listing_date)
jobs.expiry_date = pd.to_datetime(jobs.expiry_date)
# has_role_requirements to boolean
jobs.has_role_requirements = jobs.has_role_requirements.astype(bool)
# advertiser_id, classification_id, sub_classification_id to int
jobs.advertiser_id = jobs.advertiser_id.astype(int)
jobs.classification_id = jobs.classification_id.astype(int)
jobs.sub_classification_id = jobs.sub_classification_id.astype(int)
# rename company_id to review_company_id
jobs.rename(columns={"company_id": "review_company_id"}, inplace=True)
# rename id to job_id
jobs.rename(columns={"id": "job_id"}, inplace=True)
# replace empty placeholders ('[]', '{}', empty string, None) with np.nan
jobs.replace({'[]': np.nan, '{}': np.nan, '': np.nan, None: np.nan}, inplace=True)
# for company_review_df, rename company_id to review_company_id
if len(company_review_df) > 0:
company_review_df.rename(columns={"company_id": "review_company_id"}, inplace=True)
# for the dfs other than jobs, cast the xxx_id columns to the same dtypes used in jobs (note the nullable Int64 for area_id)
classification_df.classification_id = classification_df.classification_id.astype(int)
sub_classification_df.sub_classification_id = sub_classification_df.sub_classification_id.astype(int)
location_df.location_id = location_df.location_id.astype(int)
area_df.area_id = area_df.area_id.astype('Int64')
advertiser_df.advertiser_id = advertiser_df.advertiser_id.astype(int)
# join all dfs to a single df jobs_wide
jobs_wide = jobs.merge(classification_df, on="classification_id", how="left")
jobs_wide = jobs_wide.merge(sub_classification_df, on="sub_classification_id", how="left")
jobs_wide = jobs_wide.merge(location_df, on="location_id", how="left")
jobs_wide = jobs_wide.merge(area_df, on="area_id", how="left")
jobs_wide = jobs_wide.merge(advertiser_df, on="advertiser_id", how="left")
# if company_review_df is not empty, join it to jobs_wide
if len(company_review_df) > 0:
jobs_wide = jobs_wide.merge(company_review_df, on="review_company_id", how="left")
# generate the dataframes dictionary
df_dict = {'classification': classification_df, 'sub_classification': sub_classification_df,
'location': location_df, 'area': area_df, 'advertiser': advertiser_df,
'jobs': jobs, 'company_review': company_review_df, 'jobs_wide': jobs_wide}
# return the dataframes dictionary
return df_dict
# test the Jobs class
if __name__ == '__main__':
# define the search keywords list
keywords_list = ['data analyst']
# define the search location list
locations_list = ['All New Zealand']
# define the date range
date_range = 3
# define the check words
check_words = ["data", "analyst", "science", "engineer", "engineering", "scientist", "analytics",
"business intelligence", "business intelligence", "business analyst", "power bi", "powerbi",
"power-bi", "tableau", "python", "R", "machine learning", "ai",
"artificial intelligence", "BI"]
# instantiate the Jobs class
data_jobs = Jobs(keywords_list, locations_list)
# get all the dataframes
df_dict = data_jobs.get_all_dfs(date_range=date_range, check_words=check_words, if_download_details=True)
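# A minimal persistence sketch (not part of the original script). The module header
# only says that dataframe/database fields use snake_case; the storage step itself is
# not shown, so this assumes a local SQLite file with the hypothetical name
# seek_jobs.db and writes each dataframe in df_dict to a table of the same name,
# stringifying list/dict cells first so sqlite can store them.
if df_dict is not None:
    import json
    import sqlite3
    with sqlite3.connect("seek_jobs.db") as conn:
        for table_name, df in df_dict.items():
            safe_df = df.applymap(
                lambda v: json.dumps(v) if isinstance(v, (list, dict)) else v)
            safe_df.to_sql(table_name, conn, if_exists="replace", index=False)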
|
PypiClean
|
/pakit-0.2.5.tar.gz/pakit-0.2.5/completion/pakit-bash.sh
|
word_in_array() {
local found needle word words
needle="$1"
shift
words=( ${@} )
found=0
for word in ${words[@]} ; do
if [ "$needle" = "$word" ]; then
found=1
break
fi
done
echo $found
}
_pakit() {
local cur prev prog split=false
cur=$(_get_cword "=")
prev="${COMP_WORDS[COMP_CWORD-1]}"
prog="${COMP_WORDS[0]}"
COMPREPLY=()
_expand || return 0
local available opts subcoms
opts="-c -h -v --conf --help --version"
subcoms="install remove update display list available search relink"
if [ "${__COMP_CACHE_PAKIT}x" = "x" ]; then
available=$($prog available --short 2>/dev/null)
__COMP_CACHE_PAKIT=( "$available" )
export __COMP_CACHE_PAKIT
else
available="${__COMP_CACHE_PAKIT[0]}"
fi
_split_longopt && split=true
case "${prev}" in
# file completion
-c|--conf)
_filedir
return 0
;;
# require args, no completion
-h|-v|--help|--version)
return 0
;;
esac
# Subcommand case, have to scan for more than just $prev
local subopts="-h --help"
if [ "$(word_in_array "install" "${COMP_WORDS[@]}")" = "1" ] ||
[ "$(word_in_array "display" "${COMP_WORDS[@]}")" = "1" ]; then
COMPREPLY=( $(compgen -W "${subopts} ${available}" -- "${cur}") )
return 0
elif [ "$(word_in_array "remove" "${COMP_WORDS[@]}")" = "1" ] ||
[ "$(word_in_array "update" "${COMP_WORDS[@]}")" = "1" ]; then
local installed=$($prog list --short 2>/dev/null)
COMPREPLY=( $(compgen -W "${subopts} ${installed}" -- "${cur}") )
return 0
elif [ "$(word_in_array "list" "${COMP_WORDS[@]}")" = "1" ] ||
[ "$(word_in_array "available" "${COMP_WORDS[@]}")" = "1" ]; then
COMPREPLY=( $(compgen -W "${subopts} --short" -- "${cur}") )
return 0
elif [ "$(word_in_array "search" "${COMP_WORDS[@]}")" = "1" ]; then
local search_flags="--case --names"
COMPREPLY=( $(compgen -W "${subopts} ${search_flags}" -- "${cur}") )
return 0
elif [ "$(word_in_array "purge" "${COMP_WORDS[@]}")" = "1" ] ||
[ "$(word_in_array "relink" "${COMP_WORDS[@]}")" = "1" ]; then
COMPREPLY=( $(compgen -W "${subopts}" -- "${cur}") )
return 0
fi
$split && return 0
case "${cur}" in
*)
COMPREPLY=( $(compgen -W "${opts} ${subcoms}" -- "${cur}") )
return 0
;;
esac
}
have pakit && complete -F _pakit ${nospace} pakit
# vim:set ft=sh ts=2 sts=2 sw=2:
|
PypiClean
|
/collective.megaphone-2.1.5.zip/collective.megaphone-2.1.5/src/collective/megaphone/portlets/calltoaction.py
|
from zope.interface import implements
from zope.component import getMultiAdapter
from zope.cachedescriptors.property import Lazy as lazy_property
from zope.annotation.interfaces import IAnnotations
from plone.portlets.interfaces import IPortletDataProvider
from plone.app.portlets.portlets import base
from zope import schema
from zope.formlib import form
from Products.CMFCore.utils import _checkPermission
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from plone.app.vocabularies.catalog import SearchableTextSourceBinder
from plone.app.form.widgets.uberselectionwidget import UberSelectionWidget
from collective.megaphone.config import ANNOTATION_KEY, VIEW_SIGNATURES_PERMISSION
from collective.megaphone.interfaces import IMegaphone
from collective.megaphone.utils import MegaphoneMessageFactory as _
from collective.megaphone.config import DEFAULT_SIG_PORTLET_MIN_COUNT
class ICallToActionPortlet(IPortletDataProvider):
"""A portlet which prompts the user to sign a Megaphone letter or petition.
"""
megaphone_path = schema.Choice(
title=_(u"Megaphone"),
description=_(u"Find the Megaphone you want to display a call to action for."),
required=True,
source=SearchableTextSourceBinder({'object_provides' : IMegaphone.__identifier__},
default_query='path:'))
class Assignment(base.Assignment):
implements(ICallToActionPortlet)
megaphone_path = None
def __init__(self, megaphone_path=None):
self.megaphone_path = megaphone_path
@property
def title(self):
return 'Megaphone: /%s' % (self.megaphone_path or '')
class Renderer(base.Renderer):
render = ViewPageTemplateFile('calltoaction.pt')
@lazy_property
def megaphone(self):
path = self.data.megaphone_path or ''
if path.startswith('/'):
path = path[1:]
if not path:
return None
portal_state = getMultiAdapter((self.context, self.request), name=u'plone_portal_state')
portal = portal_state.portal()
return portal.restrictedTraverse(path, default=None)
@lazy_property
def settings(self):
return IAnnotations(self.megaphone).get(ANNOTATION_KEY, {}).get('signers', {})
@lazy_property
def signers_listing(self):
return self.megaphone.restrictedTraverse('@@signers')
def rendered_signers(self):
batch_size = self.settings.get('sig_portlet_batch_size', 3)
return list(self.signers_listing.rendered_signers(template_id='sig_portlet_template', limit=batch_size))
@property
def megaphone_url(self):
return self.megaphone.absolute_url()
@property
def at_megaphone(self):
return self.request['ACTUAL_URL'].startswith(self.megaphone_url)
@property
def has_min_count(self):
return self.signers_listing.count > self.settings.get('sig_portlet_min_count', DEFAULT_SIG_PORTLET_MIN_COUNT)
def render_text(self):
return self.settings.get('sig_portlet_text', '').replace('\n', '<br/>')
@property
def available(self):
context_state = getMultiAdapter((self.context, self.request), name=u'plone_context_state')
if not context_state.is_view_template():
return False
if self.megaphone is None:
return False
if not self.has_min_count:
return False
return True
@property
def can_view_signatures(self):
return _checkPermission(VIEW_SIGNATURES_PERMISSION, self.megaphone)
class AddForm(base.AddForm):
form_fields = form.Fields(ICallToActionPortlet)
form_fields['megaphone_path'].custom_widget = UberSelectionWidget
label = _(u"Add Megaphone Portlet")
description = _(u"This portlet promotes a Megaphone action letter or petition.")
def create(self, data):
return Assignment(**data)
class EditForm(base.EditForm):
form_fields = form.Fields(ICallToActionPortlet)
form_fields['megaphone_path'].custom_widget = UberSelectionWidget
label = _(u"Edit Megaphone Portlet")
description = _(u"This portlet promotes a Megaphone action letter or petition.")
|
PypiClean
|
/nqm.iotdatabase-1.1.5.tar.gz/nqm.iotdatabase-1.1.5/nqm/iotdatabase/_datasetdata.py
|
import typing
import collections.abc
# ignore too-many-ancestors. We inherit from MutableMapping, which has tons.
# ignore invalid-name. We are copying the name from the JavaScript API.
#pylint: disable=locally-disabled, too-many-ancestors, invalid-name
class Object(collections.abc.MutableMapping):
"""An ``dict`` that can be used like a JavaScript object with dot notation
Example:
>>> obj = Object()
>>> obj["example"] = "hello world"
>>> obj.example
"hello world"
>>> obj.example = "hello users"
>>> obj["example"]
"hello users"
"""
def __init__(self, *args, **kwargs):
self.update(dict(*args, **kwargs))
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, value):
self.__dict__[key] = value
def __delitem__(self, key):
del self.__dict__[key]
def __iter__(self):
return iter(self.__dict__)
def __len__(self):
return len(self.__dict__)
def __str__(self):
return str(self.__dict__)
def __repr__(self):
cls = self.__class__
return f"{cls.__module__}.{cls.__name__}({str(self)})"
class MetaData(Object):
"""Stores the dataset metadata"""
description: typing.Text = ""
id: typing.Text = ""
name: typing.Text = ""
parents: typing.Iterable[typing.Text] = []
schemaDefinition: typing.Mapping[typing.Text, typing.Any] = {}
tags: typing.Iterable[typing.Text] = []
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if "schema" in self.__dict__:
# delete schema value and replace it with schemaDefinition
self.schemaDefinition = self.__dict__.pop("schema")
class DatasetMetaData(Object):
"""Base class for various results.
Attributes:
metaData: The dataset metadata.
"""
metaData: MetaData
def __init__(self, *args, **kwargs):
self.metaData = MetaData()
super().__init__(*args, **kwargs)
class DatasetData(DatasetMetaData):
"""Stores the dataset metadata and data.
"""
data: typing.Iterable[typing.Mapping[typing.Text, typing.Any]] = ()
def __init__(self, *args, **kwargs):
self.data = tuple()
super().__init__(*args, **kwargs)
class DatasetCount(DatasetMetaData):
"""Stores the dataset metadata and data count.
"""
count: int = -1
def __init__(self, count, *args, **kwargs):
self.count = count
super().__init__(*args, **kwargs)
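# A small self-contained usage sketch (not part of the original module) showing the
# dict/attribute duality of Object and the "schema" -> schemaDefinition remapping done
# in MetaData.__init__; the helper name _example_usage is hypothetical.
def _example_usage():
    meta = MetaData(id="dataset-1", name="demo", schema={"fields": []})
    assert meta.schemaDefinition == {"fields": []}  # the "schema" key was remapped
    assert meta["name"] == meta.name == "demo"      # item access and dot access agree
    counted = DatasetCount(count=3, metaData=meta)
    return counted.count, counted.metaData.id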
|
PypiClean
|
/tb-rest-client-3.5.tar.gz/tb-rest-client-3.5/tb_rest_client/models/models_pe/blob_entity_with_customer_info.py
|
# Copyright 2023. ThingsBoard
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pprint
import re # noqa: F401
import six
class BlobEntityWithCustomerInfo(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'BlobEntityId',
'created_time': 'int',
'tenant_id': 'TenantId',
'customer_id': 'CustomerId',
'owner_id': 'EntityId',
'name': 'str',
'type': 'str',
'content_type': 'str',
'additional_info': 'JsonNode',
'customer_title': 'str',
'customer_is_public': 'object'
}
attribute_map = {
'id': 'id',
'created_time': 'createdTime',
'tenant_id': 'tenantId',
'customer_id': 'customerId',
'owner_id': 'ownerId',
'name': 'name',
'type': 'type',
'content_type': 'contentType',
'additional_info': 'additionalInfo',
'customer_title': 'customerTitle',
'customer_is_public': 'customerIsPublic'
}
def __init__(self, id=None, created_time=None, tenant_id=None, customer_id=None, owner_id=None, name=None, type=None, content_type=None, additional_info=None, customer_title=None, customer_is_public=None): # noqa: E501
"""BlobEntityWithCustomerInfo - a model defined in Swagger""" # noqa: E501
self._id = None
self._created_time = None
self._tenant_id = None
self._customer_id = None
self._owner_id = None
self._name = None
self._type = None
self._content_type = None
self._additional_info = None
self._customer_title = None
self._customer_is_public = None
self.discriminator = None
if id is not None:
self.id = id
if created_time is not None:
self.created_time = created_time
if tenant_id is not None:
self.tenant_id = tenant_id
if customer_id is not None:
self.customer_id = customer_id
if owner_id is not None:
self.owner_id = owner_id
if name is not None:
self.name = name
if type is not None:
self.type = type
if content_type is not None:
self.content_type = content_type
if additional_info is not None:
self.additional_info = additional_info
if customer_title is not None:
self.customer_title = customer_title
if customer_is_public is not None:
self.customer_is_public = customer_is_public
@property
def id(self):
"""Gets the id of this BlobEntityWithCustomerInfo. # noqa: E501
:return: The id of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: BlobEntityId
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this BlobEntityWithCustomerInfo.
:param id: The id of this BlobEntityWithCustomerInfo. # noqa: E501
:type: BlobEntityId
"""
self._id = id
@property
def created_time(self):
"""Gets the created_time of this BlobEntityWithCustomerInfo. # noqa: E501
Timestamp of the blob entity creation, in milliseconds # noqa: E501
:return: The created_time of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: int
"""
return self._created_time
@created_time.setter
def created_time(self, created_time):
"""Sets the created_time of this BlobEntityWithCustomerInfo.
Timestamp of the blob entity creation, in milliseconds # noqa: E501
:param created_time: The created_time of this BlobEntityWithCustomerInfo. # noqa: E501
:type: int
"""
self._created_time = created_time
@property
def tenant_id(self):
"""Gets the tenant_id of this BlobEntityWithCustomerInfo. # noqa: E501
:return: The tenant_id of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: TenantId
"""
return self._tenant_id
@tenant_id.setter
def tenant_id(self, tenant_id):
"""Sets the tenant_id of this BlobEntityWithCustomerInfo.
:param tenant_id: The tenant_id of this BlobEntityWithCustomerInfo. # noqa: E501
:type: TenantId
"""
self._tenant_id = tenant_id
@property
def customer_id(self):
"""Gets the customer_id of this BlobEntityWithCustomerInfo. # noqa: E501
:return: The customer_id of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: CustomerId
"""
return self._customer_id
@customer_id.setter
def customer_id(self, customer_id):
"""Sets the customer_id of this BlobEntityWithCustomerInfo.
:param customer_id: The customer_id of this BlobEntityWithCustomerInfo. # noqa: E501
:type: CustomerId
"""
self._customer_id = customer_id
@property
def owner_id(self):
"""Gets the owner_id of this BlobEntityWithCustomerInfo. # noqa: E501
:return: The owner_id of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: EntityId
"""
return self._owner_id
@owner_id.setter
def owner_id(self, owner_id):
"""Sets the owner_id of this BlobEntityWithCustomerInfo.
:param owner_id: The owner_id of this BlobEntityWithCustomerInfo. # noqa: E501
:type: EntityId
"""
self._owner_id = owner_id
@property
def name(self):
"""Gets the name of this BlobEntityWithCustomerInfo. # noqa: E501
blob entity name # noqa: E501
:return: The name of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this BlobEntityWithCustomerInfo.
blob entity name # noqa: E501
:param name: The name of this BlobEntityWithCustomerInfo. # noqa: E501
:type: str
"""
self._name = name
@property
def type(self):
"""Gets the type of this BlobEntityWithCustomerInfo. # noqa: E501
blob entity type # noqa: E501
:return: The type of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this BlobEntityWithCustomerInfo.
blob entity type # noqa: E501
:param type: The type of this BlobEntityWithCustomerInfo. # noqa: E501
:type: str
"""
self._type = type
@property
def content_type(self):
"""Gets the content_type of this BlobEntityWithCustomerInfo. # noqa: E501
blob content type # noqa: E501
:return: The content_type of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: str
"""
return self._content_type
@content_type.setter
def content_type(self, content_type):
"""Sets the content_type of this BlobEntityWithCustomerInfo.
blob content type # noqa: E501
:param content_type: The content_type of this BlobEntityWithCustomerInfo. # noqa: E501
:type: str
"""
allowed_values = ["application/pdf", "image/jpeg", "image/png"] # noqa: E501
if content_type not in allowed_values:
raise ValueError(
"Invalid value for `content_type` ({0}), must be one of {1}" # noqa: E501
.format(content_type, allowed_values)
)
self._content_type = content_type
@property
def additional_info(self):
"""Gets the additional_info of this BlobEntityWithCustomerInfo. # noqa: E501
:return: The additional_info of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: JsonNode
"""
return self._additional_info
@additional_info.setter
def additional_info(self, additional_info):
"""Sets the additional_info of this BlobEntityWithCustomerInfo.
:param additional_info: The additional_info of this BlobEntityWithCustomerInfo. # noqa: E501
:type: JsonNode
"""
self._additional_info = additional_info
@property
def customer_title(self):
"""Gets the customer_title of this BlobEntityWithCustomerInfo. # noqa: E501
Title of the customer # noqa: E501
:return: The customer_title of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: str
"""
return self._customer_title
@customer_title.setter
def customer_title(self, customer_title):
"""Sets the customer_title of this BlobEntityWithCustomerInfo.
Title of the customer # noqa: E501
:param customer_title: The customer_title of this BlobEntityWithCustomerInfo. # noqa: E501
:type: str
"""
self._customer_title = customer_title
@property
def customer_is_public(self):
"""Gets the customer_is_public of this BlobEntityWithCustomerInfo. # noqa: E501
Parameter that specifies if customer is public # noqa: E501
:return: The customer_is_public of this BlobEntityWithCustomerInfo. # noqa: E501
:rtype: object
"""
return self._customer_is_public
@customer_is_public.setter
def customer_is_public(self, customer_is_public):
"""Sets the customer_is_public of this BlobEntityWithCustomerInfo.
Parameter that specifies if customer is public # noqa: E501
:param customer_is_public: The customer_is_public of this BlobEntityWithCustomerInfo. # noqa: E501
:type: object
"""
self._customer_is_public = customer_is_public
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(BlobEntityWithCustomerInfo, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, BlobEntityWithCustomerInfo):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
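# A minimal usage sketch (not part of the generated model): only keyword arguments
# that are not None get assigned in __init__, content_type is validated against the
# allowed values in its setter, and to_dict() returns a plain dict keyed by the
# snake_case attribute names; the field values below are made up for illustration.
def _example_blob_entity():
    entity = BlobEntityWithCustomerInfo(
        name="monthly report", type="report", content_type="application/pdf",
        customer_title="Example Customer", created_time=1600000000000)
    as_dict = entity.to_dict()
    assert as_dict["content_type"] == "application/pdf"
    assert as_dict["id"] is None  # never set, so it stays None
    return as_dict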
|
PypiClean
|
/augustus_quant-1.0.0-py3-none-any.whl/augustus/system/components/match_engine.py
|
from collections import defaultdict, deque
import pandas as pd
from augustus.constants import ActionType
from augustus.system.metabase_env import augustusEnvBase
from augustus.system.models.base_log import TradeLogBase
from augustus.system.models.signals import SignalByTrigger
from augustus.utils.memo_for_cache import memo
class MatchEngine(augustusEnvBase):
def __init__(self, trade_log):
self.long_log_pure = defaultdict(deque)
self.long_log_with_trigger = defaultdict(deque)
self.short_log_pure = defaultdict(deque)
self.short_log_with_trigger = defaultdict(deque)
self.finished_log = []
self.trade_log: TradeLogBase = trade_log
self.left_trade_settled = False
def _append_finished(self, buy_order, sell_order, size):
log = self.trade_log(buy_order, sell_order, size).generate()
self.finished_log.append(log)
def _search_father(self, order, log_with_trigger):
for i in log_with_trigger:
if i.mkt_id == order.father_mkt_id:
self._append_finished(i, order, order.size)
log_with_trigger.remove(i)
break
def _del_in_mkt_dict(self, mkt_id):
if mkt_id in self.env.orders_child_of_mkt_dict:
del self.env.orders_child_of_mkt_dict[mkt_id]
def _change_order_size_in_pending_mkt_dict(self, mkt_id, track_size):
pending_mkt_dict = self.env.orders_child_of_mkt_dict
if mkt_id in pending_mkt_dict:
for order in pending_mkt_dict[mkt_id]:
order.size = track_size
def _pair_one_by_one(self, order_list, sell_size, order, counteract=False):
buy_order = order_list.popleft()
buy_size = buy_order.track_size
diff = buy_order.track_size = buy_size - sell_size
if diff > 0:
self._append_finished(buy_order, order, sell_size)
order_list.appendleft(buy_order)
if counteract:  # update the order size stored in the pending mkt dict
self._change_order_size_in_pending_mkt_dict(
buy_order.mkt_id, buy_order.track_size)
elif diff == 0:
self._append_finished(buy_order, order, sell_size)
if counteract:
self._del_in_mkt_dict(buy_order.mkt_id)
else:
self._append_finished(buy_order, order, buy_size)
sell_size -= buy_size
if counteract:
self._del_in_mkt_dict(buy_order.mkt_id)
self._pair_one_by_one(order_list, sell_size, order, counteract)
def _pair_order(self, long_or_short, order): # order should be sell or short cover
if long_or_short == 'long':
log_pure = self.long_log_pure[order.ticker]
log_with_trigger = self.long_log_with_trigger[order.ticker]
elif long_or_short == 'short':
log_pure = self.short_log_pure[order.ticker]
log_with_trigger = self.short_log_with_trigger[order.ticker]
sell_size = order.size
if isinstance(order.signal, SignalByTrigger):
self._search_father(order, log_with_trigger)
else:
try:
self._pair_one_by_one(log_pure, sell_size, order)
except IndexError:
self._pair_one_by_one(log_with_trigger, sell_size, order, True)
def match_order(self, order):
if order.action_type == ActionType.Buy:
order.track_size = order.size
if order.is_pure():
self.long_log_pure[order.ticker].append(order)
else:
self.long_log_with_trigger[order.ticker].append(order)
elif order.action_type == ActionType.Short:
order.track_size = order.size
if order.is_pure():
self.short_log_pure[order.ticker].append(order)
else:
self.short_log_with_trigger[order.ticker].append(order)
elif order.action_type == ActionType.Sell:
self._pair_order('long', order)
elif order.action_type == ActionType.Cover:
self._pair_order('short', order)
def append_left_trade_to_log(self):
def settle_left_trade(unfinished_order):
log = self.trade_log(unfinished_order, None,
unfinished_order.track_size).settle_left_trade()
self.finished_log.append(log)
for ticker in self.env.tickers:
for order in self.long_log_pure[ticker]:
settle_left_trade(order)
for order in self.long_log_with_trigger[ticker]:
settle_left_trade(order)
for order in self.short_log_pure[ticker]:
settle_left_trade(order)
for order in self.short_log_with_trigger[ticker]:
settle_left_trade(order)
@memo('trade_log')
def generate_trade_log(self):
if self.left_trade_settled is False:
self.append_left_trade_to_log()
self.left_trade_settled = True
log_dict = defaultdict(list)
for log in self.finished_log:
log_dict['ticker'].append(log.ticker)
log_dict['entry_date'].append(log.entry_date)
log_dict['entry_price'].append(log.entry_price)
log_dict['entry_type'].append(log.entry_type)
log_dict['size'].append(log.size)
log_dict['exit_date'].append(log.exit_date)
log_dict['exit_price'].append(log.exit_price)
log_dict['exit_type'].append(log.exit_type)
log_dict['pl_points'].append(log.pl_points)
log_dict['re_pnl'].append(log.re_pnl)
log_dict['comm'].append(log.commission)
return pd.DataFrame(log_dict)
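# A stripped-down illustration (not part of augustus) of the FIFO pairing idea behind
# _pair_one_by_one, using bare (size, entry_price) tuples instead of order objects:
# a sell of size 4 consumes open long lots from the front of the deque and re-queues
# any partially consumed lot.
def _fifo_pair_sketch():
    from collections import deque
    longs = deque([(3, 100.0), (2, 101.0)])  # open long lots: (size, entry price)
    sell_size, sell_price = 4, 103.0
    matched = []  # (entry price, exit price, matched size) triples
    while sell_size > 0 and longs:
        size, price = longs.popleft()
        fill = min(size, sell_size)
        matched.append((price, sell_price, fill))
        if size > fill:  # a partially consumed lot goes back to the front
            longs.appendleft((size - fill, price))
        sell_size -= fill
    assert matched == [(100.0, 103.0, 3), (101.0, 103.0, 1)]
    return matched, longs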
|
PypiClean
|
/xrt-1.6.0.zip/xrt-1.6.0/examples/withRaycing/09_Gratings/GratingFlexPES.py
|
# -*- coding: utf-8 -*-
__author__ = "Konstantin Klementiev"
__date__ = "08 Mar 2016"
import os, sys; sys.path.append(os.path.join('..', '..', '..')) # analysis:ignore
import numpy as np
import copy
#import matplotlib as mpl
import matplotlib.pyplot as plt
import xrt.backends.raycing as raycing
import xrt.backends.raycing.sources as rs
import xrt.backends.raycing.apertures as ra
import xrt.backends.raycing.oes as roe
import xrt.backends.raycing.run as rr
import xrt.backends.raycing.materials as rm
import xrt.plotter as xrtp
import xrt.runner as xrtr
import xrt.backends.raycing.screens as rsc
showIn3D = False
mGold = rm.Material('Au', rho=19.3)
mGoldenGrating = rm.Material(
'Au', rho=19.3, kind='grating',
efficiency=[(1, 2)], efficiencyFile='efficiency1-LEG.txt')
# efficiency=[(1, 1)], efficiencyFile='efficiency1-LEG.pickle')
# efficiency=[(1, 0.3)])
# )
E0 = 80.
dE = 0.01
#distE = 'lines'
#energies = np.linspace(E0-dE, E0+dE, 5)
distE = 'flat'
energies = E0-dE, E0+dE
#=============================================================================
# Do not scan scanEnergies AND s1openings at the same time, or you may run into
# MemoryError because of the large number of plots. Fix one of the two and scan
# the other.
#=============================================================================
#scanEnergies = np.linspace(E0 - dE*0.75, E0 + dE*0.75, 7)
scanEnergies = E0,
#s1openings = np.linspace(0.01, 0.25, 25)
s1openings = 0.03,
cff = 2.25
pitch = np.radians(2)
fixedExit = 30.
rho = 1221.
class Grating(roe.OE):
def local_g(self, x, y, rho=rho):
return 0, -rho, 0 # constant line spacing along y
def build_beamline(azimuth=0, nrays=raycing.nrays):
beamLine = raycing.BeamLine(azimuth=azimuth, height=0)
rs.GeometricSource(
beamLine, 'MAX-IV',
nrays=nrays, dx=0.187, dz=0.032, dxprime=77e-6, dzprime=70e-6,
distE=distE, energies=energies, polarization='horizontal')
beamLine.fsm0 = rsc.Screen(beamLine, 'FSM0')
beamLine.m1 = roe.ToroidMirror(
beamLine, 'M1', surface=('Au',), material=(mGold,),
limPhysX=(-10., 10.), limPhysY=(-150., 150.), positionRoll=np.pi/2,
R=1e12, alarmLevel=0.2)
beamLine.fsm1 = rsc.Screen(beamLine, 'FSM-M1')
beamLine.m2 = roe.OE(
beamLine, 'M2', surface=('Au',), material=(mGold,),
limPhysX=(-10., 10.), limPhysY=(-150., 150.), alarmLevel=0.2)
beamLine.pg = Grating(
beamLine, 'PlaneGrating', material=mGoldenGrating,
positionRoll=np.pi, limPhysX=(-15., 15.), limPhysY=(-55., 55.),
alarmLevel=0.2)
# beamLine.pg.order = -2,-1,0,1,2,3
beamLine.pg.order = 1
beamLine.fsmPG = rsc.Screen(beamLine, 'FSM-PG')
beamLine.m3 = roe.ToroidMirror(
beamLine, 'M3', material=(mGold,),
positionRoll=-np.pi/2, limPhysX=(-15., 15.), limPhysY=(-150., 150.),
alarmLevel=0.2)
beamLine.fsm3hf = rsc.Screen(beamLine, 'FSM-M3hf')
beamLine.fsm3vf = rsc.Screen(beamLine, 'FSM-M3vf')
beamLine.s1s = [
ra.RectangularAperture(
beamLine, 'vert. slit', [0, 0, fixedExit],
('bottom', 'top'), [-opening/2., +opening/2.])
for opening in s1openings]
beamLine.m4 = roe.ToroidMirror(
beamLine, 'M4', material=(mGold,),
positionRoll=np.pi/2, limPhysX=(-15., 15.), limPhysY=(-150., 150.),
alarmLevel=0.2)
beamLine.fsmExp1 = rsc.Screen(beamLine, 'FSM-Exp1')
beamLine.fsmExp2 = rsc.Screen(beamLine, 'FSM-Exp2')
return beamLine
def run_process(beamLine, shineOnly1stSource=False):
beamSource = beamLine.sources[0].shine()
# beamLine.feFixedMask.propagate(beamSource)
beamFSM0 = beamLine.fsm0.expose(beamSource)
beamM1global, beamM1local = beamLine.m1.reflect(beamSource)
beamFSM1 = beamLine.fsm1.expose(beamM1global)
beamM2global, beamM2local = beamLine.m2.reflect(beamM1global)
beamPGglobal, beamPGlocal = beamLine.pg.reflect(beamM2global)
beamFSMPG = beamLine.fsmPG.expose(beamPGglobal)
beamM3global, beamM3local = beamLine.m3.reflect(beamPGglobal)
beamFSM3hf = beamLine.fsm3hf.expose(beamM3global)
beamFSM3vf = beamLine.fsm3vf.expose(beamM3global)
outDict = {'beamSource': beamSource,
'beamFSM0': beamFSM0,
'beamM1global': beamM1global, 'beamM1local': beamM1local,
'beamFSM1': beamFSM1,
'beamM2global': beamM2global, 'beamM2local': beamM2local,
'beamPGglobal': beamPGglobal, 'beamPGlocal': beamPGlocal,
'beamFSMPG': beamFSMPG,
'beamM3global': beamM3global, 'beamM3local': beamM3local,
'beamFSM3hf': beamFSM3hf, 'beamFSM3vf': beamFSM3vf,
}
for iopening, s1 in enumerate(beamLine.s1s):
if showIn3D:
beamM3globalCopy = beamM3global
else:
beamM3globalCopy = copy.deepcopy(beamM3global)
beamTemp1 = s1.propagate(beamM3globalCopy)
beamFSM3vs = beamLine.fsm3vf.expose(beamM3globalCopy)
beamM4global, beamM4local = beamLine.m4.reflect(beamM3globalCopy)
beamFSMExp1 = beamLine.fsmExp1.expose(beamM4global)
beamFSMExp2 = beamLine.fsmExp2.expose(beamM4global)
sti = '{0:02d}'.format(iopening)
outDict['beamFSM3vsOp'+sti] = beamFSM3vs
outDict['beamM4globalOp'+sti] = beamM4global
outDict['beamM4localOp'+sti] = beamM4local
outDict['beamFSMExp1Op'+sti] = beamFSMExp1
outDict['beamFSMExp2Op'+sti] = beamFSMExp2
if showIn3D:
break
if showIn3D:
beamLine.prepare_flow()
return outDict
rr.run_process = run_process
eps = 1e-5
def align_grating(grating, E, m, cff):
g = grating.local_g(0, 0)
rho = np.dot(g, g)**0.5
order = abs(m) if cff > 1 else -abs(m)
f1 = cff**2 + 1
f2 = cff**2 - 1
if abs(f2) < eps:
raise ValueError('cff is not allowed to be close to 1!')
ml_d = order * rho * rm.ch / E * 1e-7
cosAlpha = np.sqrt(-ml_d**2 * f1 + 2*abs(ml_d) *
np.sqrt(f2**2 + cff**2 * ml_d**2)) / abs(f2)
cosBeta = cff * cosAlpha
alpha = np.arccos(cosAlpha)
beta = -np.arccos(cosBeta)
return alpha, beta
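# A standalone numerical sanity check (not part of the original example) of the cPGM
# relations used in align_grating: cos(beta) = cff*cos(alpha) and
# sin(alpha) + sin(beta) = m*lambda*rho with beta < 0. It assumes cff > 1, a positive
# diffraction order, and approximates the xrt constant rm.ch by h*c ~ 12398.42 eV*A;
# the 1e-7 factor converts Angstrom to mm, matching the line density in lines/mm.
def _check_cpgm_geometry(E=E0, cff_value=cff, line_density=rho, m=1):
    import math
    hc = 12398.42  # eV*Angstrom, assumed value of rm.ch
    ml_d = m * line_density * hc / E * 1e-7  # m*lambda/d, dimensionless
    f1, f2 = cff_value**2 + 1, cff_value**2 - 1
    cos_alpha = math.sqrt(
        -ml_d**2 * f1 + 2 * ml_d * math.sqrt(f2**2 + cff_value**2 * ml_d**2)) / f2
    alpha = math.acos(cos_alpha)
    beta = -math.acos(cff_value * cos_alpha)
    assert abs(math.cos(beta) / math.cos(alpha) - cff_value) < 1e-9
    assert abs(math.sin(alpha) + math.sin(beta) - ml_d) < 1e-9
    return math.degrees(alpha), math.degrees(beta)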
def align_beamline(
beamLine, E0=E0, pitchM1=pitch, cff=cff, fixedExit=fixedExit,
pitchM3=pitch, pitchM4=pitch):
pM1 = 12000.
beamLine.sources[0].center = pM1 * np.sin(2*pitchM1), \
-pM1 * np.cos(2*pitchM1), 0
beamLine.fsm0.center = beamLine.sources[0].center
rM1 = 2. * pM1 * np.sin(pitchM1)
print('M1: r = {0} mm'.format(rM1))
beamLine.m1.center = 0, 0, 0
beamLine.m1.pitch = pitchM1
beamLine.m1.r = rM1
beamLine.fsm1.center = beamLine.m1.center
if isinstance(beamLine.pg.order, int):
m = beamLine.pg.order
else:
m = beamLine.pg.order[0]
alpha, beta = align_grating(beamLine.pg, E0, m=m, cff=cff)
includedAngle = alpha - beta
print('alpha = {0} deg'.format(np.degrees(alpha)))
print('beta = {0} deg'.format(np.degrees(beta)))
print('included angle = {0} deg'.format(np.degrees(includedAngle)))
print('cos(beta)/cos(alpha) = {0}'.format(np.cos(beta)/np.cos(alpha)))
t = -fixedExit / np.tan(includedAngle)
print('t = {0} mm'.format(t))
pPG = 3000.
beamLine.m2.center = 0, pPG - t, 0
beamLine.m2.pitch = (np.pi - includedAngle) / 2.
print('M2 pitch = {0} deg'.format(np.degrees(beamLine.m2.pitch)))
beamLine.m2.yaw = -2 * pitchM1
beamLine.pg.pitch = -(beta + np.pi/2)
print('PG pitch = {0} deg'.format(np.degrees(beamLine.pg.pitch)))
beamLine.pg.center = 0, pPG, fixedExit
beamLine.pg.yaw = -2 * pitchM1
beamLine.fsmPG.center = 0, beamLine.pg.center[1]+1000, fixedExit
pM3 = 1000.
pM3mer = pM1 + pPG + pM3
qM3mer = 5000.
qM3sag = 7000.
sinPitchM3 = np.sin(pitchM3)
rM3 = 2. * sinPitchM3 * qM3sag
RM3 = 2. / sinPitchM3 * (pM3mer*qM3mer) / (pM3mer+qM3mer)
print('M3: r = {0} mm, R = {1} m'.format(rM3, RM3*1e-3))
beamLine.m3.center = 0, pPG + pM3, fixedExit
beamLine.m3.pitch = -2*pitchM1 + pitchM3
beamLine.m3.r = rM3
beamLine.m3.R = RM3
beamLine.fsm3hf.center = -qM3mer * np.sin(2*pitchM3),\
beamLine.m3.center[1] + qM3mer * np.cos(2*pitchM3), fixedExit
beamLine.fsm3vf.center = -qM3sag * np.sin(2*pitchM3),\
beamLine.m3.center[1] + qM3sag * np.cos(2*pitchM3), fixedExit
for s1 in beamLine.s1s:
s1.center = beamLine.fsm3vf.center
pM4mer = 5000.
pM4sag = 3000.
qM4 = 3500.
sinPitchM4 = np.sin(pitchM4)
rM4 = 2. * sinPitchM4 * (pM4sag*qM4) / (pM4sag+qM4)
RM4 = 2. / sinPitchM4 * (pM4mer*qM4) / (pM4mer+qM4)
print('M4: r = {0} mm, R = {1} m'.format(rM4, RM4*1e-3))
dM34 = qM3mer + pM4mer # = qM3sag + pM4sag
beamLine.m4.center = -dM34 * np.sin(2*pitchM3),\
beamLine.m3.center[1] + dM34 * np.cos(2*pitchM3), fixedExit
beamLine.m4.pitch = 2*pitchM1 - 2*pitchM3 + pitchM4
beamLine.m4.r = rM4
beamLine.m4.R = RM4
qFSMExp1 = 1500. # upstream of the focus
beamLine.fsmExp1.center = beamLine.m4.center[0],\
beamLine.m4.center[1] + qM4 - qFSMExp1, fixedExit
beamLine.fsmExp2.center = beamLine.m4.center[0],\
beamLine.m4.center[1] + qM4, fixedExit
def define_plots(beamLine):
plots = []
plotsMono = []
plotsFocus = []
# plot = xrtp.XYCPlot('beamSource', (1,),
# xaxis=xrtp.XYCAxis(r'$x$', 'mm'),
# yaxis=xrtp.XYCAxis(r'$z$', 'mm'))
# plot.xaxis.fwhmFormatStr = '%.3f'
# plot.yaxis.fwhmFormatStr = '%.3f'
# plots.append(plot)
plot = xrtp.XYCPlot(
'beamFSM0', (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm'),
yaxis=xrtp.XYCAxis(r'$z$', 'mm'),
title='00-FSM0')
plot.xaxis.fwhmFormatStr = '%.3f'
plot.yaxis.fwhmFormatStr = '%.3f'
plots.append(plot)
#
# plot = xrtp.XYCPlot(
# 'beamM1local', (1,), aspect='auto',
# xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-4, 4]),
# yaxis=xrtp.XYCAxis(r'$y$', 'mm', limits=[-150, 150]),
# title='01-M1local')
# plots.append(plot)
#
plot = xrtp.XYCPlot(
'beamFSM1', (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-4, 4]),
yaxis=xrtp.XYCAxis(r'$z$', 'mm', limits=[-4, 4]),
title='02-FSM1')
plots.append(plot)
#
# plot = xrtp.XYCPlot(
# 'beamM2local', (1,), aspect='auto',
# xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-4, 4]),
# yaxis=xrtp.XYCAxis(r'$y$', 'mm', limits=[-55, 55]),
# title='03-M2local')
# plots.append(plot)
#
plot = xrtp.XYCPlot(
'beamPGlocal', (1,), aspect='auto',
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-4, 4]),
yaxis=xrtp.XYCAxis(r'$y$', 'mm', limits=[-55, 55]),
title='04-PGlocal')
plots.append(plot)
plot = xrtp.XYCPlot(
'beamFSMPG', (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-4, 4]),
yaxis=xrtp.XYCAxis(r'$z$', 'mm', limits=[-4, 4]),
title='05-FSMPG')
plots.append(plot)
plot = xrtp.XYCPlot(
'beamFSMPG', (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-4, 4]),
yaxis=xrtp.XYCAxis(r'$z$', 'mm', limits=[-4, 4]),
caxis=xrtp.XYCAxis('path', 'mm'),
title='05-FSMPG-P')
plot.caxis.offset = 16000
plots.append(plot)
plot = xrtp.XYCPlot(
'beamM3local', (1,), aspect='auto',
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-4, 4]),
yaxis=xrtp.XYCAxis(r'$y$', 'mm', limits=[-150, 150]),
title='06-M3local')
plots.append(plot)
plot = xrtp.XYCPlot(
'beamFSM3hf', (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-0.5, 0.5]),
yaxis=xrtp.XYCAxis(r'$z$', 'mm',
limits=[-0.5, 0.5]),
title='07-FSM3hf')
plots.append(plot)
for is1, (s1, op) in enumerate(zip(beamLine.s1s, s1openings)):
sti = '{0:02d}'.format(is1)
plot = xrtp.XYCPlot(
'beamFSM3vf', (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-0.5, 0.5]),
yaxis=xrtp.XYCAxis(r'$z$', 'mm',
limits=[-0.5, 0.5]),
title='08-FSM3vfOp'+sti, oe=s1)
plots.append(plot)
plot = xrtp.XYCPlot(
'beamFSM3vsOp'+sti, (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-0.5, 0.5]),
yaxis=xrtp.XYCAxis(r'$z$', 'mm',
limits=[-0.5, 0.5]),
title='09-FSM3vsOp'+sti)
plots.append(plot)
plotsMono.append(plot)
plot = xrtp.XYCPlot(
'beamM4localOp'+sti, (1,), aspect='auto',
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-4, 4]),
yaxis=xrtp.XYCAxis(r'$y$', 'mm', limits=[-150, 150]),
title='10-M4localOp'+sti)
plots.append(plot)
plotsMono.append(plot)
plot = xrtp.XYCPlot(
'beamFSMExp1Op'+sti, (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-1, 1]),
yaxis=xrtp.XYCAxis(r'$z$', 'mm',
limits=[-1, 1]),
title='11-FSMExp1Op'+sti)
plot.xaxis.fwhmFormatStr = '%.3f'
plot.yaxis.fwhmFormatStr = '%.3f'
plots.append(plot)
plotsMono.append(plot)
plot = xrtp.XYCPlot(
'beamFSMExp2Op'+sti, (1,),
xaxis=xrtp.XYCAxis(r'$x$', 'mm', limits=[-0.25, 0.25]),
yaxis=xrtp.XYCAxis(r'$z$', 'mm',
limits=[-0.25, 0.25]),
caxis=xrtp.XYCAxis('energy', 'eV', bins=256, ppb=1),
title='12-FSMExp2Op'+sti)
plot.xaxis.fwhmFormatStr = '%.3f'
plot.yaxis.fwhmFormatStr = '%.3f'
if len(s1openings) > 1:
plot.textPanel = plot.fig.text(
0.8, 0.8, u'slit opening\n{0:.0f} µm'.format(op*1e3),
transform=plot.fig.transFigure, size=14, color='r', ha='left')
plots.append(plot)
plotsMono.append(plot)
plotsFocus.append(plot)
for plot in plots:
if "energy" in plot.caxis.label:
plot.caxis.limits = [E0-dE, E0+dE]
plot.caxis.offset = E0
if plot in plotsMono:
plot.caxis.fwhmFormatStr = '%.4f'
else:
plot.caxis.fwhmFormatStr = None
return plots, plotsMono, plotsFocus
def plot_generator(plots, plotsMono, plotsFocus, beamLine):
for ienergy, energy in enumerate(scanEnergies):
align_beamline(beamLine, E0=energy)
for plot in plots:
plot.saveName = 'FlexPES-{0}-{1}.png'.format(
plot.title, ienergy)
yield
if len(s1openings) > 1:
flux = np.array([plot.intensity for plot in plotsFocus])
dE = np.array([E0/plot.dE*1e-4 for plot in plotsFocus])
op = np.array(s1openings)
fig = plt.figure(figsize=(5, 4), dpi=72)
fig.subplots_adjust(right=0.88, bottom=0.12)
ax1 = fig.add_subplot(111)
ax2 = ax1.twinx()
ax1.set_title('At E = {0:.0f} eV'.format(E0))
ax1.plot(op*1e3, dE, '-r', lw=2)
ax2.plot(op*1e3, flux/max(flux), '-b', lw=2)
ax1.set_xlabel(u'slit opening (µm)')
ax1.set_ylabel(r'energy resolution $E/dE\times10^{4}$', color='r')
ax2.set_ylabel('relative flux', color='b')
fig.savefig('FlexPES-dE.png')
def main():
beamLine = build_beamline(azimuth=-2*pitch, nrays=10000)
align_beamline(beamLine)
if showIn3D:
beamLine.glow(scale=[100, 10, 1000], centerAt='M2')
return
plots, plotsMono, plotsFocus = define_plots(beamLine)
args = [plots, plotsMono, plotsFocus, beamLine]
xrtr.run_ray_tracing(plots, repeats=1, beamLine=beamLine,
generator=plot_generator, generatorArgs=args,
processes='half')
#this is necessary to use multiprocessing in Windows, otherwise the new Python
#contexts cannot be initialized:
if __name__ == '__main__':
main()
|
PypiClean
|
/dictionary_diff-0.0.2-py3-none-any.whl/dictionary_diff/dict_diff.py
|
from dictionary_diff.change import _Remove
def equivalent(dict1: dict, dict2: dict, equivalent_func) -> bool:
"""
:return: True if and only if all members of the dicts are
:func:`~dictionary_diff.diff.equivalent`
:rtype: bool
"""
if not len(dict1) == len(dict2):
return False
for key in dict1:
if key not in dict2 or not equivalent_func(dict1[key], dict2[key]):
return False
return True
def diff(orig: dict, other: dict, equivalent_func, diff_func) -> dict:
"""
:param orig: The original dict
:param other: The dict the diff is taken of
:param equivalent_func: This method is used for determining if two elements
(of any types) are equivalent,
defaults to :func:`~dictionary_diff.diff.equivalent`
:return: The diff, so that :func:`apply_diff(orig, diff) <dictionary_diff.diff.apply_diff>`
returns something :func:`~dictionary_diff.diff.equivalent` to other
:rtype: dict
"""
new_dict = {}
for difference in find_different(orig, other, equivalent_func):
new_dict[difference] = \
diff_func(orig.get(difference, None), other[difference], equivalent_func)
for removed in find_removed(orig, other):
new_dict[removed] = _Remove(orig[removed])
return new_dict
def find_different(orig: dict, other: dict, equivalent_func) -> list:
"""
:return: a list of keys k whose values are not :func:`~dictionary_diff.diff.equivalent`
in orig and other, such that k is a subset of other's keys
:rtype: list
"""
found_keys = []
for key in other:
if key not in orig or not equivalent_func(other[key], orig[key]):
found_keys.append(key)
return found_keys
def find_equivalent(orig: dict, other: dict, equivalent_func) -> list:
"""
:return: a list of keys k whose values are :func:`~dictionary_diff.diff.equivalent`
in orig and other, such that k is a subset of orig's and other's keys
:rtype: list
"""
found_keys = []
for key in other:
if key in orig and equivalent_func(other[key], orig[key]):
found_keys.append(key)
return found_keys
def find_added(orig: dict, other: dict) -> list:
"""
:return: a list of keys k that are in other but not orig,
such that k is a subset of other's keys
:rtype: list
"""
return find_removed(orig=other, other=orig)
def find_removed(orig: dict, other: dict) -> list:
"""
:return: a list of keys k that are in orig but not other, such that k is a subset of orig's keys
:rtype: list
"""
found = []
for key in orig:
if key not in other:
found.append(key)
return found
def apply_diff(orig: dict, difference: dict, apply_diff_func) -> dict:
"""
Applies the diff to orig
:param orig: The original dict
:param difference: The diff to apply
:return: a dict, so that
:func:`apply_diff(something, dict_diff(something, other)) <dictionary_diff.diff.apply_diff>`
is :func:`~dictionary_diff.diff.equivalent` to other
:rtype: dict
"""
applied = orig.copy()
for difference_key in difference:
if isinstance(difference[difference_key], _Remove):
applied.pop(difference_key, None)
else:
applied[difference_key] = \
apply_diff_func(orig.get(difference_key, None), difference[difference_key])
return applied
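# A minimal, self-contained usage sketch. The callables below are hypothetical
# stand-ins for the package's own helpers: plain equality as the equivalence
# test, and "take the new value" as both the per-key diff and apply rules.
if __name__ == "__main__":
    def eq(a, b):
        return a == b

    def take_new(old, new, equivalent_func):
        return new

    def replace(old, delta):
        return delta

    orig = {"a": 1, "b": 2}
    other = {"a": 1, "b": 3, "c": 4}

    delta = diff(orig, other, equivalent_func=eq, diff_func=take_new)
    # delta maps changed/added keys to their new values; keys missing from
    # `other` would map to _Remove wrappers instead.
    patched = apply_diff(orig, delta, apply_diff_func=replace)
    assert equivalent(patched, other, equivalent_func=eq)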
|
PypiClean
|
/flaris-0.1.0-py3-none-any.whl/framework/rendering/font.py
|
from typing import Dict
import freetype
import glm # pytype: disable=import-error
import OpenGL.GL as gl
from framework import resource
from framework.rendering.character import Character
__all__ = ["Font"]
class Font: # pylint: disable=too-few-public-methods
"""Represents a font.
The character map is lazily loaded at runtime.
Attributes:
characters: A dictionary that maps ASCII characters to `Character`
objects.
"""
def __init__(self, path: str, size: int = 100):
"""Save constructor arguments.
Args:
path: Path to a font file relative to the assets directory.
size: The font size in points.
"""
self.path = path
self.size = size
self._characters = {}
@property
def characters(self) -> Dict[str, Character]:
"""Return a `dict` that maps ASCII characters to `Character` objects."""
if self._characters:
return self._characters
gl.glPixelStorei(gl.GL_UNPACK_ALIGNMENT, 1)
print(resource.path(self.path))
face = freetype.Face(resource.path(self.path))
face.set_char_size(self.size * 64)
for i in range(128):
face.load_char(chr(i))
texture = gl.glGenTextures(1)
gl.glBindTexture(gl.GL_TEXTURE_2D, texture)
gl.glTexImage2D(gl.GL_TEXTURE_2D, 0, gl.GL_RED,
face.glyph.bitmap.width, face.glyph.bitmap.rows, 0,
gl.GL_RED, gl.GL_UNSIGNED_BYTE,
face.glyph.bitmap.buffer)
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_S,
gl.GL_CLAMP_TO_EDGE)
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_WRAP_T,
gl.GL_CLAMP_TO_EDGE)
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MIN_FILTER,
gl.GL_LINEAR)
gl.glTexParameteri(gl.GL_TEXTURE_2D, gl.GL_TEXTURE_MAG_FILTER,
gl.GL_LINEAR)
character = Character(
texture,
glm.ivec2(face.glyph.bitmap.width, face.glyph.bitmap.rows),
glm.ivec2(face.glyph.bitmap_left, face.glyph.bitmap_top),
face.glyph.advance.x)
self._characters[chr(i)] = character
return self._characters
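# A minimal usage sketch (comments only): assumes an active OpenGL context and
# a hypothetical "fonts/arial.ttf" file resolvable through framework.resource.
#
#     font = Font("fonts/arial.ttf", size=48)
#     glyph = font.characters["A"]  # first access lazily builds all 128 glyph textures
#     # `glyph` is a Character holding the texture id, size, bearing and advance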
|
PypiClean
|
/nataili-0.3.4-py3-none-any.whl/annotator/uniformer/mmseg/datasets/builder.py
|
import copy
import platform
import random
from functools import partial
import numpy as np
from annotator.uniformer.mmcv.parallel import collate
from annotator.uniformer.mmcv.runner import get_dist_info
from annotator.uniformer.mmcv.utils import Registry, build_from_cfg
from annotator.uniformer.mmcv.utils.parrots_wrapper import DataLoader, PoolDataLoader
from torch.utils.data import DistributedSampler
if platform.system() != 'Windows':
# https://github.com/pytorch/pytorch/issues/973
import resource
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
hard_limit = rlimit[1]
soft_limit = min(4096, hard_limit)
resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))
DATASETS = Registry('dataset')
PIPELINES = Registry('pipeline')
def _concat_dataset(cfg, default_args=None):
"""Build :obj:`ConcatDataset by."""
from .dataset_wrappers import ConcatDataset
img_dir = cfg['img_dir']
ann_dir = cfg.get('ann_dir', None)
split = cfg.get('split', None)
num_img_dir = len(img_dir) if isinstance(img_dir, (list, tuple)) else 1
if ann_dir is not None:
num_ann_dir = len(ann_dir) if isinstance(ann_dir, (list, tuple)) else 1
else:
num_ann_dir = 0
if split is not None:
num_split = len(split) if isinstance(split, (list, tuple)) else 1
else:
num_split = 0
if num_img_dir > 1:
assert num_img_dir == num_ann_dir or num_ann_dir == 0
assert num_img_dir == num_split or num_split == 0
else:
assert num_split == num_ann_dir or num_ann_dir <= 1
num_dset = max(num_split, num_img_dir)
datasets = []
for i in range(num_dset):
data_cfg = copy.deepcopy(cfg)
if isinstance(img_dir, (list, tuple)):
data_cfg['img_dir'] = img_dir[i]
if isinstance(ann_dir, (list, tuple)):
data_cfg['ann_dir'] = ann_dir[i]
if isinstance(split, (list, tuple)):
data_cfg['split'] = split[i]
datasets.append(build_dataset(data_cfg, default_args))
return ConcatDataset(datasets)
def build_dataset(cfg, default_args=None):
"""Build datasets."""
from .dataset_wrappers import ConcatDataset, RepeatDataset
if isinstance(cfg, (list, tuple)):
dataset = ConcatDataset([build_dataset(c, default_args) for c in cfg])
elif cfg['type'] == 'RepeatDataset':
dataset = RepeatDataset(
build_dataset(cfg['dataset'], default_args), cfg['times'])
elif isinstance(cfg.get('img_dir'), (list, tuple)) or isinstance(
cfg.get('split', None), (list, tuple)):
dataset = _concat_dataset(cfg, default_args)
else:
dataset = build_from_cfg(cfg, DATASETS, default_args)
return dataset
def build_dataloader(dataset,
samples_per_gpu,
workers_per_gpu,
num_gpus=1,
dist=True,
shuffle=True,
seed=None,
drop_last=False,
pin_memory=True,
dataloader_type='PoolDataLoader',
**kwargs):
"""Build PyTorch DataLoader.
In distributed training, each GPU/process has a dataloader.
In non-distributed training, there is only one dataloader for all GPUs.
Args:
dataset (Dataset): A PyTorch dataset.
samples_per_gpu (int): Number of training samples on each GPU, i.e.,
batch size of each GPU.
workers_per_gpu (int): How many subprocesses to use for data loading
for each GPU.
num_gpus (int): Number of GPUs. Only used in non-distributed training.
dist (bool): Distributed training/test or not. Default: True.
shuffle (bool): Whether to shuffle the data at every epoch.
Default: True.
seed (int | None): Seed to be used. Default: None.
drop_last (bool): Whether to drop the last incomplete batch in epoch.
Default: False
pin_memory (bool): Whether to use pin_memory in DataLoader.
Default: True
dataloader_type (str): Type of dataloader. Default: 'PoolDataLoader'
kwargs: any keyword argument to be used to initialize DataLoader
Returns:
DataLoader: A PyTorch dataloader.
"""
rank, world_size = get_dist_info()
if dist:
sampler = DistributedSampler(
dataset, world_size, rank, shuffle=shuffle)
shuffle = False
batch_size = samples_per_gpu
num_workers = workers_per_gpu
else:
sampler = None
batch_size = num_gpus * samples_per_gpu
num_workers = num_gpus * workers_per_gpu
init_fn = partial(
worker_init_fn, num_workers=num_workers, rank=rank,
seed=seed) if seed is not None else None
assert dataloader_type in (
'DataLoader',
'PoolDataLoader'), f'unsupported dataloader {dataloader_type}'
if dataloader_type == 'PoolDataLoader':
dataloader = PoolDataLoader
elif dataloader_type == 'DataLoader':
dataloader = DataLoader
data_loader = dataloader(
dataset,
batch_size=batch_size,
sampler=sampler,
num_workers=num_workers,
collate_fn=partial(collate, samples_per_gpu=samples_per_gpu),
pin_memory=pin_memory,
shuffle=shuffle,
worker_init_fn=init_fn,
drop_last=drop_last,
**kwargs)
return data_loader
def worker_init_fn(worker_id, num_workers, rank, seed):
"""Worker init func for dataloader.
    The seed of each worker equals num_workers * rank + worker_id + user_seed
Args:
worker_id (int): Worker id.
num_workers (int): Number of workers.
rank (int): The rank of current process.
seed (int): The random seed to use.
"""
worker_seed = num_workers * rank + worker_id + seed
np.random.seed(worker_seed)
random.seed(worker_seed)
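# A minimal usage sketch (comments only): the dataset config below is
# hypothetical; any registered dataset type with its required fields would do.
#
#     cfg = dict(type='CityscapesDataset', data_root='data/cityscapes',
#                img_dir='leftImg8bit/train', ann_dir='gtFine/train', pipeline=[])
#     dataset = build_dataset(cfg)
#     loader = build_dataloader(dataset, samples_per_gpu=2, workers_per_gpu=2,
#                               num_gpus=1, dist=False, shuffle=True, seed=42,
#                               dataloader_type='DataLoader')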
|
PypiClean
|
/gpcm-0.2.0.tar.gz/gpcm-0.2.0/experiments/paper/comparison_process.py
|
import matplotlib.pyplot as plt
import numpy as np
import wbml.out as out
from wbml.experiment import WorkingDirectory
from wbml.metric import smll, rmse
from wbml.plot import tex, tweak, pdfcrop
# Setup script.
tex()
wd = WorkingDirectory("_experiments", "comparison_process")
wd_results = WorkingDirectory("_experiments", "comparison", observe=True)
def kernel_analysis(data, scheme, model, metric, until=4):
"""Analyse the prediction for a kernel."""
k = wd_results.load(data, "data.pickle")["k"]
t, mean, var = wd_results.load(data, scheme, model, "k_pred.pickle")
inds = t <= until
if metric == "smll":
return smll(mean[inds], var[inds], k[inds])
elif metric == "rmse":
return rmse(mean[inds], k[inds])
else:
raise ValueError(f'Bad metric "{metric}".')
for model, kernel in [("gpcm", "eq"), ("cgpcm", "ceq-1"), ("rgpcm", "matern12")]:
with out.Section(model.upper()):
with out.Section("SMLL"):
out.kv("MF", kernel_analysis(kernel, "mean-field", model, "smll"))
out.kv("S", kernel_analysis(kernel, "structured", model, "smll"))
with out.Section("RMSE"):
out.kv("MF", kernel_analysis(kernel, "mean-field", model, "rmse"))
out.kv("S", kernel_analysis(kernel, "structured", model, "rmse"))
def plot_kernel_predictions(model, data_name, legend=True, first=False):
"""Plot the prediction for a kernel."""
k = wd_results.load(data_name, "data.pickle")["k"]
t, mean1, var1 = wd_results.load(data_name, "structured", model, "k_pred.pickle")
t, mean2, var2 = wd_results.load(data_name, "mean-field", model, "k_pred.pickle")
plt.plot(t, k, label="Truth", style="train")
plt.plot(t, mean1, label="Structured", style="pred")
plt.fill_between(
t,
mean1 - 1.96 * np.sqrt(var1),
mean1 + 1.96 * np.sqrt(var1),
style="pred",
)
plt.plot(t, mean1 + 1.96 * np.sqrt(var1), style="pred", lw=1)
plt.plot(t, mean1 - 1.96 * np.sqrt(var1), style="pred", lw=1)
plt.plot(t, mean2, label="Mean-field", style="pred2")
plt.fill_between(
t,
mean2 - 1.96 * np.sqrt(var2),
mean2 + 1.96 * np.sqrt(var2),
style="pred2",
)
plt.plot(t, mean2 + 1.96 * np.sqrt(var2), style="pred2", lw=1)
plt.plot(t, mean2 - 1.96 * np.sqrt(var2), style="pred2", lw=1)
plt.yticks([0, 0.5, 1])
plt.xticks([0, 2, 4])
plt.xlim(0, 4)
plt.ylim(-0.25, 1.25)
if not first:
plt.gca().set_yticklabels([])
tweak(legend=legend)
plt.figure(figsize=(7.5, 3))
plt.subplot(1, 3, 1)
plt.title("GPCM on EQ")
plot_kernel_predictions("gpcm", "eq", legend=False, first=True)
plt.subplot(1, 3, 2)
plt.title("CGPCM on CEQ")
plot_kernel_predictions("cgpcm", "ceq-1", legend=False)
plt.subplot(1, 3, 3)
plt.title("RGPCM on Matern–$\\frac{1}{2}$")
plot_kernel_predictions("rgpcm", "matern12")
plt.savefig(wd.file("comparison.pdf"))
pdfcrop(wd.file("comparison.pdf"))
plt.show()
|
PypiClean
|
/plone.patternslib-1.3.0-py3-none-any.whl/plone/patternslib/static/components/moment/src/lib/create/from-anything.js
|
import isArray from '../utils/is-array';
import isObject from '../utils/is-object';
import isObjectEmpty from '../utils/is-object-empty';
import isUndefined from '../utils/is-undefined';
import isNumber from '../utils/is-number';
import isDate from '../utils/is-date';
import map from '../utils/map';
import { createInvalid } from './valid';
import { Moment, isMoment } from '../moment/constructor';
import { getLocale } from '../locale/locales';
import { hooks } from '../utils/hooks';
import checkOverflow from './check-overflow';
import { isValid } from './valid';
import { configFromStringAndArray } from './from-string-and-array';
import { configFromStringAndFormat } from './from-string-and-format';
import { configFromString } from './from-string';
import { configFromArray } from './from-array';
import { configFromObject } from './from-object';
function createFromConfig (config) {
var res = new Moment(checkOverflow(prepareConfig(config)));
if (res._nextDay) {
// Adding is smart enough around DST
res.add(1, 'd');
res._nextDay = undefined;
}
return res;
}
export function prepareConfig (config) {
var input = config._i,
format = config._f;
config._locale = config._locale || getLocale(config._l);
if (input === null || (format === undefined && input === '')) {
return createInvalid({nullInput: true});
}
if (typeof input === 'string') {
config._i = input = config._locale.preparse(input);
}
if (isMoment(input)) {
return new Moment(checkOverflow(input));
} else if (isDate(input)) {
config._d = input;
} else if (isArray(format)) {
configFromStringAndArray(config);
} else if (format) {
configFromStringAndFormat(config);
} else {
configFromInput(config);
}
if (!isValid(config)) {
config._d = null;
}
return config;
}
function configFromInput(config) {
var input = config._i;
if (isUndefined(input)) {
config._d = new Date(hooks.now());
} else if (isDate(input)) {
config._d = new Date(input.valueOf());
} else if (typeof input === 'string') {
configFromString(config);
} else if (isArray(input)) {
config._a = map(input.slice(0), function (obj) {
return parseInt(obj, 10);
});
configFromArray(config);
} else if (isObject(input)) {
configFromObject(config);
} else if (isNumber(input)) {
// from milliseconds
config._d = new Date(input);
} else {
hooks.createFromInputFallback(config);
}
}
export function createLocalOrUTC (input, format, locale, strict, isUTC) {
var c = {};
if (locale === true || locale === false) {
strict = locale;
locale = undefined;
}
if ((isObject(input) && isObjectEmpty(input)) ||
(isArray(input) && input.length === 0)) {
input = undefined;
}
// object construction must be done this way.
// https://github.com/moment/moment/issues/1423
c._isAMomentObject = true;
c._useUTC = c._isUTC = isUTC;
c._l = locale;
c._i = input;
c._f = format;
c._strict = strict;
return createFromConfig(c);
}
|
PypiClean
|
/Tailbone-0.9.45.tar.gz/Tailbone-0.9.45/tailbone/views/batch/pricing.py
|
from __future__ import unicode_literals, absolute_import
import six
from rattail.db import model
from rattail.time import localtime
from webhelpers2.html import tags, HTML
from tailbone.views.batch import BatchMasterView
class PricingBatchView(BatchMasterView):
"""
Master view for pricing batches.
"""
model_class = model.PricingBatch
model_row_class = model.PricingBatchRow
default_handler_spec = 'rattail.batch.pricing:PricingBatchHandler'
model_title_plural = "Pricing Batches"
route_prefix = 'batch.pricing'
url_prefix = '/batches/pricing'
template_prefix = '/batch/pricing'
creatable = True
downloadable = True
bulk_deletable = True
rows_editable = True
rows_bulk_deletable = True
configurable = True
labels = {
'min_diff_threshold': "Min $ Diff",
'min_diff_percent': "Min % Diff",
'auto_generate_from_srp_breach': "Automatic (from SRP Breach)",
}
grid_columns = [
'id',
'description',
'start_date',
'created',
'created_by',
'rowcount',
# 'status_code',
'complete',
'executed',
'executed_by',
]
form_fields = [
'id',
'input_filename',
'description',
'start_date',
'min_diff_threshold',
'min_diff_percent',
'calculate_for_manual',
'auto_generate_from_srp_breach',
'notes',
'created',
'created_by',
'rowcount',
'shelved',
'complete',
'executed',
'executed_by',
]
row_labels = {
'upc': "UPC",
'vendor_id': "Vendor ID",
'regular_unit_cost': "Reg. Cost",
'price_diff': "$ Diff",
'price_diff_percent': "% Diff",
'brand_name': "Brand",
'price_markup': "Markup",
'manually_priced': "Manual",
}
row_grid_columns = [
'sequence',
'upc',
'brand_name',
'description',
'size',
'vendor_id',
'discounted_unit_cost',
'old_price',
'new_price',
'price_margin',
'price_diff',
'price_diff_percent',
'manually_priced',
'status_code',
]
row_form_fields = [
'sequence',
'product',
'upc',
'brand_name',
'description',
'size',
'department_number',
'department_name',
'subdepartment_number',
'subdepartment_name',
'family_code',
'report_code',
'alternate_code',
'vendor',
'vendor_item_code',
'regular_unit_cost',
'discounted_unit_cost',
'suggested_price',
'old_price',
'new_price',
'price_diff',
'price_diff_percent',
'price_markup',
'price_margin',
'old_price_margin',
'margin_diff',
'status_code',
'status_text',
]
def allow_future_pricing(self):
return self.batch_handler.allow_future()
def configure_form(self, f):
super(PricingBatchView, self).configure_form(f)
app = self.get_rattail_app()
batch = f.model_instance
if self.creating or self.editing:
if self.allow_future_pricing():
f.set_type('start_date', 'date_jquery')
f.set_helptext('start_date', "Only set this for a \"FUTURE\" batch.")
else:
f.remove('start_date')
else: # viewing or deleting
if not self.allow_future_pricing():
if not batch.start_date:
f.remove('start_date')
f.set_type('min_diff_threshold', 'currency')
# input_filename
if self.creating:
f.set_type('input_filename', 'file')
else:
f.set_readonly('input_filename')
f.set_renderer('input_filename', self.render_downloadable_file)
# auto_generate_from_srp_breach
if self.creating:
f.set_type('auto_generate_from_srp_breach', 'boolean')
else:
f.remove_field('auto_generate_from_srp_breach')
# note, the input file is normally required, but should *not* be if the
# user wants to auto-generate the new batch
if self.request.method == 'POST':
if self.request.POST.get('auto_generate_from_srp_breach') == 'true':
f.set_required('input_filename', False)
def get_batch_kwargs(self, batch, **kwargs):
kwargs = super(PricingBatchView, self).get_batch_kwargs(batch, **kwargs)
kwargs['start_date'] = batch.start_date
kwargs['min_diff_threshold'] = batch.min_diff_threshold
kwargs['min_diff_percent'] = batch.min_diff_percent
kwargs['calculate_for_manual'] = batch.calculate_for_manual
# are we auto-generating from SRP breach?
if self.request.POST.get('auto_generate_from_srp_breach') == 'true':
# assign batch param
params = kwargs.get('params', {})
params['auto_generate_from_srp_breach'] = True
kwargs['params'] = params
# provide default description
if not kwargs.get('description'):
kwargs['description'] = "auto-generated from SRP breach"
return kwargs
def configure_row_grid(self, g):
super(PricingBatchView, self).configure_row_grid(g)
g.set_joiner('vendor_id', lambda q: q.outerjoin(model.Vendor))
g.set_sorter('vendor_id', model.Vendor.id)
g.set_filter('vendor_id', model.Vendor.id)
g.set_renderer('vendor_id', self.render_vendor_id)
g.set_renderer('subdepartment_number', self.render_subdepartment_number)
g.set_type('old_price', 'currency')
g.set_type('new_price', 'currency')
g.set_type('price_diff', 'currency')
g.set_renderer('current_price', self.render_current_price)
g.set_renderer('true_margin', self.render_true_margin)
def render_vendor_id(self, row, field):
vendor = row.vendor
if not vendor:
return
text = vendor.id or "(no id)"
return HTML.tag('span', c=text, title=vendor.name)
def render_subdepartment_number(self, row, field):
if row.subdepartment_number:
if row.subdepartment_name:
return HTML.tag('span', title=row.subdepartment_name,
c=six.text_type(row.subdepartment_number))
return row.subdepartment_number
def render_true_margin(self, row, field):
margin = row.true_margin
if margin:
margin = six.text_type(margin)
else:
            margin = HTML.literal('&nbsp;')
if row.old_true_margin is not None:
title = "WAS: {}".format(row.old_true_margin)
else:
title = "WAS: NULL"
return HTML.tag('span', title=title, c=[margin])
def row_grid_extra_class(self, row, i):
extra_class = None
# primary class comes from row status
if row.status_code in (row.STATUS_PRODUCT_NOT_FOUND,
row.STATUS_CANNOT_CALCULATE_PRICE,
row.STATUS_PRICE_BREACHES_SRP):
extra_class = 'warning'
elif row.status_code in (row.STATUS_PRICE_INCREASE,
row.STATUS_PRICE_DECREASE):
extra_class = 'notice'
# but we want to indicate presence of current price also
if row.current_price:
extra_class = "{} has-current-price".format(extra_class or '')
return extra_class
def render_current_price(self, row, field):
value = row.current_price
if value is None:
return ""
if value < 0:
text = "(${:0,.2f})".format(0 - value)
else:
text = "${:0,.2f}".format(value)
if row.current_price_ends:
ends = localtime(self.rattail_config, row.current_price_ends, from_utc=True)
ends = "ends on {}".format(ends.date())
else:
ends = "never ends"
title = "{}, {}".format(
self.enum.PRICE_TYPE.get(row.current_price_type, "unknown type"),
ends)
return HTML.tag('span', title=title, c=text)
def configure_row_form(self, f):
super(PricingBatchView, self).configure_row_form(f)
# readonly fields
f.set_readonly('product')
f.set_readonly('upc')
f.set_readonly('brand_name')
f.set_readonly('description')
f.set_readonly('size')
f.set_readonly('department_number')
f.set_readonly('department_name')
f.set_readonly('vendor')
# product
f.set_renderer('product', self.render_product)
# currency fields
f.set_type('suggested_price', 'currency')
f.set_type('old_price', 'currency')
f.set_type('new_price', 'currency')
f.set_type('price_diff', 'currency')
# vendor
f.set_renderer('vendor', self.render_vendor)
def render_vendor(self, row, field):
vendor = row.vendor
if not vendor:
return ""
text = "({}) {}".format(vendor.id, vendor.name)
url = self.request.route_url('vendors.view', uuid=vendor.uuid)
return tags.link_to(text, url)
def get_row_csv_fields(self):
fields = super(PricingBatchView, self).get_row_csv_fields()
if 'vendor_uuid' in fields:
i = fields.index('vendor_uuid')
fields.insert(i + 1, 'vendor_id')
fields.insert(i + 2, 'vendor_abbreviation')
fields.insert(i + 3, 'vendor_name')
else:
fields.append('vendor_id')
fields.append('vendor_abbreviation')
fields.append('vendor_name')
return fields
# TODO: this is the same as xlsx row! should merge/share somehow?
def get_row_csv_row(self, row, fields):
csvrow = super(PricingBatchView, self).get_row_csv_row(row, fields)
vendor = row.vendor
if 'vendor_id' in fields:
csvrow['vendor_id'] = (vendor.id or '') if vendor else ''
if 'vendor_abbreviation' in fields:
csvrow['vendor_abbreviation'] = (vendor.abbreviation or '') if vendor else ''
if 'vendor_name' in fields:
csvrow['vendor_name'] = (vendor.name or '') if vendor else ''
return csvrow
# TODO: this is the same as csv row! should merge/share somehow?
def get_row_xlsx_row(self, row, fields):
xlrow = super(PricingBatchView, self).get_row_xlsx_row(row, fields)
vendor = row.vendor
if 'vendor_id' in fields:
xlrow['vendor_id'] = (vendor.id or '') if vendor else ''
if 'vendor_abbreviation' in fields:
xlrow['vendor_abbreviation'] = (vendor.abbreviation or '') if vendor else ''
if 'vendor_name' in fields:
xlrow['vendor_name'] = (vendor.name or '') if vendor else ''
return xlrow
def configure_get_simple_settings(self):
return [
# options
{'section': 'rattail.batch',
'option': 'pricing.allow_future',
'type': bool},
]
def defaults(config, **kwargs):
base = globals()
PricingBatchView = kwargs.get('PricingBatchView', base['PricingBatchView'])
PricingBatchView.defaults(config)
def includeme(config):
defaults(config)
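# A minimal wiring sketch (comments only, hypothetical Pyramid setup; real
# deployments include this module through Tailbone's normal app configuration):
#
#     config.include('tailbone.views.batch.pricing')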
|
PypiClean
|
/uk_postcodes_parsing-1.1.2-py3-none-any.whl/uk_postcodes_parsing/postcode_utils.py
|
import re
from typing import Union
# Tests for district
DISTRICT_SPLIT_REGEX = re.compile(r"^([a-z]{1,2}\d)([a-z])$", re.I)
# Tests for the unit section of a postcode
UNIT_REGEX = re.compile(r"[a-z]{2}$", re.I)
# Tests for the inward code section of a postcode
INCODE_REGEX = re.compile(r"\d[a-z]{2}$", re.I)
# Tests for the outward code section of a postcode
OUTCODE_REGEX = re.compile(r"^[a-z]{1,2}\d[a-z\d]?$", re.I)
# Tests for a valid postcode
POSTCODE_REGEX = re.compile(r"^[a-z]{1,2}\d[a-z\d]?\s*\d[a-z]{2}$", re.I)
# Tests for the area section of a postcode
AREA_REGEX = re.compile(r"^[a-z]{1,2}", re.I)
def sanitize(string: str) -> str:
"""Sanitizes a string by removing whitespace and converting to uppercase.
Args:
        string (str): The string to sanitize
Returns:
str: The sanitized string
"""
return string.replace(" ", "").upper()
def is_valid(postcode: str) -> bool:
"""
Checks if a postcode is valid using the `POSTCODE_REGEX`.
Args:
postcode (str): The postcode to check
Returns:
bool: True if the postcode is valid, False otherwise
"""
return re.match(POSTCODE_REGEX, postcode) is not None
def is_valid_outcode(outcode: str) -> bool:
"""
Checks if a string representing an outcode is valid using the `OUTCODE_REGEX`.
Args:
outcode (str): The postcode to check
Returns:
bool: True if the postcode is valid, False otherwise
"""
return re.match(OUTCODE_REGEX, outcode) is not None
def to_normalised(postcode: str) -> Union[str, None]:
"""
Normalises a postcode by removing whitespace, converting to uppercase, and formatting.
Args:
postcode (str): The postcode to normalise
Returns:
str: The normalised postcode
"""
outcode = to_outcode(postcode)
if outcode is None:
return None
incode = to_incode(postcode)
return None if incode is None else f"{outcode} {incode}"
def to_outcode(postcode: str) -> Union[str, None]:
"""Extract the outcode from a postcode string.
Args:
postcode (str): The postcode to extract the outcode from
Returns:
str: The outcode
"""
if not is_valid(postcode):
return None
return re.sub(INCODE_REGEX, "", sanitize(postcode))
def to_incode(postcode: str) -> Union[str, None]:
"""Extract the incode from a postcode string.
Args:
postcode (str): The postcode to extract the incode from
Returns:
str: The incode
"""
if not is_valid(postcode):
return None
incode = re.findall(INCODE_REGEX, sanitize(postcode))
return incode[0] if incode else None
def to_area(postcode: str) -> Union[str, None]:
"""Extract the area from a postcode string.
Args:
postcode (str): The postcode to extract the area from
Returns:
str: The area
"""
if not is_valid(postcode):
return None
area = re.findall(AREA_REGEX, sanitize(postcode))
return area[0] if area else None
def to_sector(postcode: str) -> Union[str, None]:
"""Extract the sector from a postcode string.
Args:
postcode (str): The postcode to extract the sector from
Returns:
str: The sector
"""
outcode = to_outcode(postcode)
if outcode is None:
return None
incode = to_incode(postcode)
return None if incode is None else f"{outcode} {incode[0]}"
def to_unit(postcode: str) -> Union[str, None]:
"""Extract the unit from a postcode string.
Args:
postcode (str): The postcode to extract the unit from
Returns:
str: The unit
"""
if not is_valid(postcode):
return None
unit = re.findall(UNIT_REGEX, sanitize(postcode))
return unit[0] if unit else None
def to_district(postcode: str) -> Union[str, None]:
"""Extract the district from a postcode string.
Args:
postcode (str): The postcode to extract the district from
Returns:
str: The district
"""
outcode = to_outcode(postcode)
if outcode is None:
return None
district = re.match(DISTRICT_SPLIT_REGEX, outcode)
return district[1] if district else outcode
def to_sub_district(postcode: str) -> Union[str, None]:
"""Extract the sub-district from a postcode string.
Args:
postcode (str): The postcode to extract the sub-district from
Returns:
str: The sub-district
"""
outcode = to_outcode(postcode)
if outcode is None:
return None
split = re.match(DISTRICT_SPLIT_REGEX, outcode)
return None if split is None else outcode
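# A minimal usage sketch; the postcode below is illustrative.
if __name__ == "__main__":
    pc = "sw1a2aa"
    print(to_normalised(pc))   # -> SW1A 2AA
    print(to_outcode(pc))      # -> SW1A
    print(to_incode(pc))       # -> 2AA
    print(to_area(pc))         # -> SW
    print(to_district(pc))     # -> SW1
    print(to_sector(pc))       # -> SW1A 2
    print(to_unit(pc))         # -> AA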
|
PypiClean
|
/bitmovin_api_sdk-1.171.0-py3-none-any.whl/bitmovin_api_sdk/models/streams_video_response.py
|
from enum import Enum
from datetime import datetime
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
from bitmovin_api_sdk.models.streams_ad_config_response import StreamsAdConfigResponse
from bitmovin_api_sdk.models.streams_content_protection_response import StreamsContentProtectionResponse
from bitmovin_api_sdk.models.streams_style_config_response import StreamsStyleConfigResponse
from bitmovin_api_sdk.models.streams_trimming_status import StreamsTrimmingStatus
from bitmovin_api_sdk.models.streams_video_status import StreamsVideoStatus
import pprint
import six
class StreamsVideoResponse(object):
@poscheck_model
def __init__(self,
id_=None,
asset_url=None,
title=None,
description=None,
created_at=None,
status=None,
style_config=None,
encoding_tasks=None,
poster_url=None,
ad_config=None,
content_protection=None,
trimming=None):
# type: (string_types, string_types, string_types, string_types, datetime, StreamsVideoStatus, StreamsStyleConfigResponse, list[StreamsVideoEncodingTask], string_types, StreamsAdConfigResponse, StreamsContentProtectionResponse, StreamsTrimmingStatus) -> None
self._id = None
self._asset_url = None
self._title = None
self._description = None
self._created_at = None
self._status = None
self._style_config = None
self._encoding_tasks = list()
self._poster_url = None
self._ad_config = None
self._content_protection = None
self._trimming = None
self.discriminator = None
if id_ is not None:
self.id = id_
if asset_url is not None:
self.asset_url = asset_url
if title is not None:
self.title = title
if description is not None:
self.description = description
if created_at is not None:
self.created_at = created_at
if status is not None:
self.status = status
if style_config is not None:
self.style_config = style_config
if encoding_tasks is not None:
self.encoding_tasks = encoding_tasks
if poster_url is not None:
self.poster_url = poster_url
if ad_config is not None:
self.ad_config = ad_config
if content_protection is not None:
self.content_protection = content_protection
if trimming is not None:
self.trimming = trimming
@property
def openapi_types(self):
types = {
'id': 'string_types',
'asset_url': 'string_types',
'title': 'string_types',
'description': 'string_types',
'created_at': 'datetime',
'status': 'StreamsVideoStatus',
'style_config': 'StreamsStyleConfigResponse',
'encoding_tasks': 'list[StreamsVideoEncodingTask]',
'poster_url': 'string_types',
'ad_config': 'StreamsAdConfigResponse',
'content_protection': 'StreamsContentProtectionResponse',
'trimming': 'StreamsTrimmingStatus'
}
return types
@property
def attribute_map(self):
attributes = {
'id': 'id',
'asset_url': 'assetUrl',
'title': 'title',
'description': 'description',
'created_at': 'createdAt',
'status': 'status',
'style_config': 'styleConfig',
'encoding_tasks': 'encodingTasks',
'poster_url': 'posterUrl',
'ad_config': 'adConfig',
'content_protection': 'contentProtection',
'trimming': 'trimming'
}
return attributes
@property
def id(self):
# type: () -> string_types
"""Gets the id of this StreamsVideoResponse.
The identifier of the stream
:return: The id of this StreamsVideoResponse.
:rtype: string_types
"""
return self._id
@id.setter
def id(self, id_):
# type: (string_types) -> None
"""Sets the id of this StreamsVideoResponse.
The identifier of the stream
:param id_: The id of this StreamsVideoResponse.
:type: string_types
"""
if id_ is not None:
if not isinstance(id_, string_types):
raise TypeError("Invalid type for `id`, type has to be `string_types`")
self._id = id_
@property
def asset_url(self):
# type: () -> string_types
"""Gets the asset_url of this StreamsVideoResponse.
The asset URL of the stream
:return: The asset_url of this StreamsVideoResponse.
:rtype: string_types
"""
return self._asset_url
@asset_url.setter
def asset_url(self, asset_url):
# type: (string_types) -> None
"""Sets the asset_url of this StreamsVideoResponse.
The asset URL of the stream
:param asset_url: The asset_url of this StreamsVideoResponse.
:type: string_types
"""
if asset_url is not None:
if not isinstance(asset_url, string_types):
raise TypeError("Invalid type for `asset_url`, type has to be `string_types`")
self._asset_url = asset_url
@property
def title(self):
# type: () -> string_types
"""Gets the title of this StreamsVideoResponse.
The title of the stream
:return: The title of this StreamsVideoResponse.
:rtype: string_types
"""
return self._title
@title.setter
def title(self, title):
# type: (string_types) -> None
"""Sets the title of this StreamsVideoResponse.
The title of the stream
:param title: The title of this StreamsVideoResponse.
:type: string_types
"""
if title is not None:
if not isinstance(title, string_types):
raise TypeError("Invalid type for `title`, type has to be `string_types`")
self._title = title
@property
def description(self):
# type: () -> string_types
"""Gets the description of this StreamsVideoResponse.
The description of the stream
:return: The description of this StreamsVideoResponse.
:rtype: string_types
"""
return self._description
@description.setter
def description(self, description):
# type: (string_types) -> None
"""Sets the description of this StreamsVideoResponse.
The description of the stream
:param description: The description of this StreamsVideoResponse.
:type: string_types
"""
if description is not None:
if not isinstance(description, string_types):
raise TypeError("Invalid type for `description`, type has to be `string_types`")
self._description = description
@property
def created_at(self):
# type: () -> datetime
"""Gets the created_at of this StreamsVideoResponse.
Creation timestamp, returned as UTC expressed in ISO 8601 format: YYYY-MM-DDThh:mm:ssZ
:return: The created_at of this StreamsVideoResponse.
:rtype: datetime
"""
return self._created_at
@created_at.setter
def created_at(self, created_at):
# type: (datetime) -> None
"""Sets the created_at of this StreamsVideoResponse.
Creation timestamp, returned as UTC expressed in ISO 8601 format: YYYY-MM-DDThh:mm:ssZ
:param created_at: The created_at of this StreamsVideoResponse.
:type: datetime
"""
if created_at is not None:
if not isinstance(created_at, datetime):
raise TypeError("Invalid type for `created_at`, type has to be `datetime`")
self._created_at = created_at
@property
def status(self):
# type: () -> StreamsVideoStatus
"""Gets the status of this StreamsVideoResponse.
The status of the stream
:return: The status of this StreamsVideoResponse.
:rtype: StreamsVideoStatus
"""
return self._status
@status.setter
def status(self, status):
# type: (StreamsVideoStatus) -> None
"""Sets the status of this StreamsVideoResponse.
The status of the stream
:param status: The status of this StreamsVideoResponse.
:type: StreamsVideoStatus
"""
if status is not None:
if not isinstance(status, StreamsVideoStatus):
raise TypeError("Invalid type for `status`, type has to be `StreamsVideoStatus`")
self._status = status
@property
def style_config(self):
# type: () -> StreamsStyleConfigResponse
"""Gets the style_config of this StreamsVideoResponse.
:return: The style_config of this StreamsVideoResponse.
:rtype: StreamsStyleConfigResponse
"""
return self._style_config
@style_config.setter
def style_config(self, style_config):
# type: (StreamsStyleConfigResponse) -> None
"""Sets the style_config of this StreamsVideoResponse.
:param style_config: The style_config of this StreamsVideoResponse.
:type: StreamsStyleConfigResponse
"""
if style_config is not None:
if not isinstance(style_config, StreamsStyleConfigResponse):
raise TypeError("Invalid type for `style_config`, type has to be `StreamsStyleConfigResponse`")
self._style_config = style_config
@property
def encoding_tasks(self):
# type: () -> list[StreamsVideoEncodingTask]
"""Gets the encoding_tasks of this StreamsVideoResponse.
List of encoding status information
:return: The encoding_tasks of this StreamsVideoResponse.
:rtype: list[StreamsVideoEncodingTask]
"""
return self._encoding_tasks
@encoding_tasks.setter
def encoding_tasks(self, encoding_tasks):
# type: (list) -> None
"""Sets the encoding_tasks of this StreamsVideoResponse.
List of encoding status information
:param encoding_tasks: The encoding_tasks of this StreamsVideoResponse.
:type: list[StreamsVideoEncodingTask]
"""
if encoding_tasks is not None:
if not isinstance(encoding_tasks, list):
raise TypeError("Invalid type for `encoding_tasks`, type has to be `list[StreamsVideoEncodingTask]`")
self._encoding_tasks = encoding_tasks
@property
def poster_url(self):
# type: () -> string_types
"""Gets the poster_url of this StreamsVideoResponse.
Poster URL
:return: The poster_url of this StreamsVideoResponse.
:rtype: string_types
"""
return self._poster_url
@poster_url.setter
def poster_url(self, poster_url):
# type: (string_types) -> None
"""Sets the poster_url of this StreamsVideoResponse.
Poster URL
:param poster_url: The poster_url of this StreamsVideoResponse.
:type: string_types
"""
if poster_url is not None:
if not isinstance(poster_url, string_types):
raise TypeError("Invalid type for `poster_url`, type has to be `string_types`")
self._poster_url = poster_url
@property
def ad_config(self):
# type: () -> StreamsAdConfigResponse
"""Gets the ad_config of this StreamsVideoResponse.
:return: The ad_config of this StreamsVideoResponse.
:rtype: StreamsAdConfigResponse
"""
return self._ad_config
@ad_config.setter
def ad_config(self, ad_config):
# type: (StreamsAdConfigResponse) -> None
"""Sets the ad_config of this StreamsVideoResponse.
:param ad_config: The ad_config of this StreamsVideoResponse.
:type: StreamsAdConfigResponse
"""
if ad_config is not None:
if not isinstance(ad_config, StreamsAdConfigResponse):
raise TypeError("Invalid type for `ad_config`, type has to be `StreamsAdConfigResponse`")
self._ad_config = ad_config
@property
def content_protection(self):
# type: () -> StreamsContentProtectionResponse
"""Gets the content_protection of this StreamsVideoResponse.
:return: The content_protection of this StreamsVideoResponse.
:rtype: StreamsContentProtectionResponse
"""
return self._content_protection
@content_protection.setter
def content_protection(self, content_protection):
# type: (StreamsContentProtectionResponse) -> None
"""Sets the content_protection of this StreamsVideoResponse.
:param content_protection: The content_protection of this StreamsVideoResponse.
:type: StreamsContentProtectionResponse
"""
if content_protection is not None:
if not isinstance(content_protection, StreamsContentProtectionResponse):
raise TypeError("Invalid type for `content_protection`, type has to be `StreamsContentProtectionResponse`")
self._content_protection = content_protection
@property
def trimming(self):
# type: () -> StreamsTrimmingStatus
"""Gets the trimming of this StreamsVideoResponse.
Stream trimming information
:return: The trimming of this StreamsVideoResponse.
:rtype: StreamsTrimmingStatus
"""
return self._trimming
@trimming.setter
def trimming(self, trimming):
# type: (StreamsTrimmingStatus) -> None
"""Sets the trimming of this StreamsVideoResponse.
Stream trimming information
:param trimming: The trimming of this StreamsVideoResponse.
:type: StreamsTrimmingStatus
"""
if trimming is not None:
if not isinstance(trimming, StreamsTrimmingStatus):
raise TypeError("Invalid type for `trimming`, type has to be `StreamsTrimmingStatus`")
self._trimming = trimming
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if value is None:
continue
if isinstance(value, list):
if len(value) == 0:
continue
result[self.attribute_map.get(attr)] = [y.value if isinstance(y, Enum) else y for y in [x.to_dict() if hasattr(x, "to_dict") else x for x in value]]
elif hasattr(value, "to_dict"):
result[self.attribute_map.get(attr)] = value.to_dict()
elif isinstance(value, Enum):
result[self.attribute_map.get(attr)] = value.value
elif isinstance(value, dict):
result[self.attribute_map.get(attr)] = {k: (v.to_dict() if hasattr(v, "to_dict") else v) for (k, v) in value.items()}
else:
result[self.attribute_map.get(attr)] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, StreamsVideoResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
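# A minimal construction sketch (comments only; field values are illustrative,
# not real API data):
#
#     video = StreamsVideoResponse(id_='example-stream-id', title='Example title')
#     video.to_dict()  # -> {'id': 'example-stream-id', 'title': 'Example title'}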
|
PypiClean
|
/cloudreactor-api-client-0.3.1.3.tar.gz/cloudreactor-api-client-0.3.1.3/cloudreactor_api_client/models/aws_ecs_execution_method.py
|
from typing import Any, Dict, List, Optional, Type, TypeVar, Union, cast
import attr
from ..models.aws_ecs_execution_method_tags import AwsEcsExecutionMethodTags
from ..models.aws_ecs_launch_type import AwsEcsLaunchType
from ..types import UNSET, Unset
T = TypeVar("T", bound="AwsEcsExecutionMethod")
@attr.s(auto_attribs=True)
class AwsEcsExecutionMethod:
"""AwsEcsExecutionMethods contain configuration for running Tasks in
AWS ECS.
Attributes:
type (Union[Unset, str]):
task_definition_arn (Union[Unset, str]):
task_definition_infrastructure_website_url (Union[Unset, str]):
allocated_cpu_units (Union[Unset, int]):
allocated_memory_mb (Union[Unset, int]):
tags (Union[Unset, None, AwsEcsExecutionMethodTags]):
subnets (Union[Unset, List[str]]):
subnet_infrastructure_website_urls (Union[Unset, None, List[Optional[str]]]):
security_groups (Union[Unset, List[str]]):
security_group_infrastructure_website_urls (Union[Unset, None, List[Optional[str]]]):
assign_public_ip (Union[Unset, bool]):
task_arn (Union[Unset, str]):
launch_type (Union[Unset, AwsEcsLaunchType]): Default: AwsEcsLaunchType.FARGATE.
cluster_arn (Union[Unset, str]):
cluster_infrastructure_website_url (Union[Unset, str]):
execution_role (Union[Unset, str]):
execution_role_infrastructure_website_url (Union[Unset, str]):
task_role (Union[Unset, str]):
task_role_infrastructure_website_url (Union[Unset, str]):
platform_version (Union[Unset, str]):
"""
type: Union[Unset, str] = UNSET
task_definition_arn: Union[Unset, str] = UNSET
task_definition_infrastructure_website_url: Union[Unset, str] = UNSET
allocated_cpu_units: Union[Unset, int] = UNSET
allocated_memory_mb: Union[Unset, int] = UNSET
tags: Union[Unset, None, AwsEcsExecutionMethodTags] = UNSET
subnets: Union[Unset, List[str]] = UNSET
subnet_infrastructure_website_urls: Union[Unset, None, List[Optional[str]]] = UNSET
security_groups: Union[Unset, List[str]] = UNSET
security_group_infrastructure_website_urls: Union[Unset, None, List[Optional[str]]] = UNSET
assign_public_ip: Union[Unset, bool] = UNSET
task_arn: Union[Unset, str] = UNSET
launch_type: Union[Unset, AwsEcsLaunchType] = AwsEcsLaunchType.FARGATE
cluster_arn: Union[Unset, str] = UNSET
cluster_infrastructure_website_url: Union[Unset, str] = UNSET
execution_role: Union[Unset, str] = UNSET
execution_role_infrastructure_website_url: Union[Unset, str] = UNSET
task_role: Union[Unset, str] = UNSET
task_role_infrastructure_website_url: Union[Unset, str] = UNSET
platform_version: Union[Unset, str] = UNSET
additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)
def to_dict(self) -> Dict[str, Any]:
type = self.type
task_definition_arn = self.task_definition_arn
task_definition_infrastructure_website_url = self.task_definition_infrastructure_website_url
allocated_cpu_units = self.allocated_cpu_units
allocated_memory_mb = self.allocated_memory_mb
tags: Union[Unset, None, Dict[str, Any]] = UNSET
if not isinstance(self.tags, Unset):
tags = self.tags.to_dict() if self.tags else None
subnets: Union[Unset, List[str]] = UNSET
if not isinstance(self.subnets, Unset):
subnets = self.subnets
subnet_infrastructure_website_urls: Union[Unset, None, List[Optional[str]]] = UNSET
if not isinstance(self.subnet_infrastructure_website_urls, Unset):
if self.subnet_infrastructure_website_urls is None:
subnet_infrastructure_website_urls = None
else:
subnet_infrastructure_website_urls = self.subnet_infrastructure_website_urls
security_groups: Union[Unset, List[str]] = UNSET
if not isinstance(self.security_groups, Unset):
security_groups = self.security_groups
security_group_infrastructure_website_urls: Union[Unset, None, List[Optional[str]]] = UNSET
if not isinstance(self.security_group_infrastructure_website_urls, Unset):
if self.security_group_infrastructure_website_urls is None:
security_group_infrastructure_website_urls = None
else:
security_group_infrastructure_website_urls = self.security_group_infrastructure_website_urls
assign_public_ip = self.assign_public_ip
task_arn = self.task_arn
launch_type: Union[Unset, str] = UNSET
if not isinstance(self.launch_type, Unset):
launch_type = self.launch_type.value
cluster_arn = self.cluster_arn
cluster_infrastructure_website_url = self.cluster_infrastructure_website_url
execution_role = self.execution_role
execution_role_infrastructure_website_url = self.execution_role_infrastructure_website_url
task_role = self.task_role
task_role_infrastructure_website_url = self.task_role_infrastructure_website_url
platform_version = self.platform_version
field_dict: Dict[str, Any] = {}
field_dict.update(self.additional_properties)
field_dict.update({})
if type is not UNSET:
field_dict["type"] = type
if task_definition_arn is not UNSET:
field_dict["task_definition_arn"] = task_definition_arn
if task_definition_infrastructure_website_url is not UNSET:
field_dict["task_definition_infrastructure_website_url"] = task_definition_infrastructure_website_url
if allocated_cpu_units is not UNSET:
field_dict["allocated_cpu_units"] = allocated_cpu_units
if allocated_memory_mb is not UNSET:
field_dict["allocated_memory_mb"] = allocated_memory_mb
if tags is not UNSET:
field_dict["tags"] = tags
if subnets is not UNSET:
field_dict["subnets"] = subnets
if subnet_infrastructure_website_urls is not UNSET:
field_dict["subnet_infrastructure_website_urls"] = subnet_infrastructure_website_urls
if security_groups is not UNSET:
field_dict["security_groups"] = security_groups
if security_group_infrastructure_website_urls is not UNSET:
field_dict["security_group_infrastructure_website_urls"] = security_group_infrastructure_website_urls
if assign_public_ip is not UNSET:
field_dict["assign_public_ip"] = assign_public_ip
if task_arn is not UNSET:
field_dict["task_arn"] = task_arn
if launch_type is not UNSET:
field_dict["launch_type"] = launch_type
if cluster_arn is not UNSET:
field_dict["cluster_arn"] = cluster_arn
if cluster_infrastructure_website_url is not UNSET:
field_dict["cluster_infrastructure_website_url"] = cluster_infrastructure_website_url
if execution_role is not UNSET:
field_dict["execution_role"] = execution_role
if execution_role_infrastructure_website_url is not UNSET:
field_dict["execution_role_infrastructure_website_url"] = execution_role_infrastructure_website_url
if task_role is not UNSET:
field_dict["task_role"] = task_role
if task_role_infrastructure_website_url is not UNSET:
field_dict["task_role_infrastructure_website_url"] = task_role_infrastructure_website_url
if platform_version is not UNSET:
field_dict["platform_version"] = platform_version
return field_dict
@classmethod
def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
d = src_dict.copy()
type = d.pop("type", UNSET)
task_definition_arn = d.pop("task_definition_arn", UNSET)
task_definition_infrastructure_website_url = d.pop("task_definition_infrastructure_website_url", UNSET)
allocated_cpu_units = d.pop("allocated_cpu_units", UNSET)
allocated_memory_mb = d.pop("allocated_memory_mb", UNSET)
_tags = d.pop("tags", UNSET)
tags: Union[Unset, None, AwsEcsExecutionMethodTags]
if _tags is None:
tags = None
elif isinstance(_tags, Unset):
tags = UNSET
else:
tags = AwsEcsExecutionMethodTags.from_dict(_tags)
subnets = cast(List[str], d.pop("subnets", UNSET))
subnet_infrastructure_website_urls = cast(
List[Optional[str]], d.pop("subnet_infrastructure_website_urls", UNSET)
)
security_groups = cast(List[str], d.pop("security_groups", UNSET))
security_group_infrastructure_website_urls = cast(
List[Optional[str]], d.pop("security_group_infrastructure_website_urls", UNSET)
)
assign_public_ip = d.pop("assign_public_ip", UNSET)
task_arn = d.pop("task_arn", UNSET)
_launch_type = d.pop("launch_type", UNSET)
launch_type: Union[Unset, AwsEcsLaunchType]
if isinstance(_launch_type, Unset):
launch_type = UNSET
else:
launch_type = AwsEcsLaunchType(_launch_type)
cluster_arn = d.pop("cluster_arn", UNSET)
cluster_infrastructure_website_url = d.pop("cluster_infrastructure_website_url", UNSET)
execution_role = d.pop("execution_role", UNSET)
execution_role_infrastructure_website_url = d.pop("execution_role_infrastructure_website_url", UNSET)
task_role = d.pop("task_role", UNSET)
task_role_infrastructure_website_url = d.pop("task_role_infrastructure_website_url", UNSET)
platform_version = d.pop("platform_version", UNSET)
aws_ecs_execution_method = cls(
type=type,
task_definition_arn=task_definition_arn,
task_definition_infrastructure_website_url=task_definition_infrastructure_website_url,
allocated_cpu_units=allocated_cpu_units,
allocated_memory_mb=allocated_memory_mb,
tags=tags,
subnets=subnets,
subnet_infrastructure_website_urls=subnet_infrastructure_website_urls,
security_groups=security_groups,
security_group_infrastructure_website_urls=security_group_infrastructure_website_urls,
assign_public_ip=assign_public_ip,
task_arn=task_arn,
launch_type=launch_type,
cluster_arn=cluster_arn,
cluster_infrastructure_website_url=cluster_infrastructure_website_url,
execution_role=execution_role,
execution_role_infrastructure_website_url=execution_role_infrastructure_website_url,
task_role=task_role,
task_role_infrastructure_website_url=task_role_infrastructure_website_url,
platform_version=platform_version,
)
aws_ecs_execution_method.additional_properties = d
return aws_ecs_execution_method
@property
def additional_keys(self) -> List[str]:
return list(self.additional_properties.keys())
def __getitem__(self, key: str) -> Any:
return self.additional_properties[key]
def __setitem__(self, key: str, value: Any) -> None:
self.additional_properties[key] = value
def __delitem__(self, key: str) -> None:
del self.additional_properties[key]
def __contains__(self, key: str) -> bool:
return key in self.additional_properties
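# A minimal round-trip sketch (comments only; the dict values are illustrative
# and the 'FARGATE' string assumes that is the enum member's value):
#
#     method = AwsEcsExecutionMethod.from_dict({
#         'launch_type': 'FARGATE',
#         'allocated_cpu_units': 256,
#         'allocated_memory_mb': 512,
#     })
#     method.to_dict()  # serializes back to the snake_case field names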
|
PypiClean
|
/zdppy_requests-0.1.3-py3-none-any.whl/zdppy_requests/user_agent.py
|
import random
agents = [
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.164 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.107 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.1 Safari/605.1.15",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1.2 Safari/605.1.15",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36 OPR/63.0.3368.43",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36 Edge/18.18362",
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; LCTE; rv:11.0) like Gecko",
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/534.54.16 (KHTML, like Gecko) Version/5.1.4 Safari/534.54.16",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3722.400 QQBrowser/10.5.3739.400",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36 QIHU 360SE",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36 QIHU 360EE",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 UBrowser/6.2.3964.2 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr 1.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.98 Safari/537.36 LBBROWSER",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3947.100 Safari/537.36",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.111 Mobile Safari/537.36",
"Mozilla/5.0 (Android 7.1.1; Mobile; rv:68.0) Gecko/68.0 Firefox/68.0",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk Build/NMF26F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Mobile Safari/537.36 OPR/53.0.2569.141117",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.90 Mobile Safari/537.36 EdgA/42.0.2.3819",
"Mozilla/5.0 (Linux; U; Android 7.1.1; zh-cn; OPPO R9sk Build/NMF26F) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/66.0.3359.126 MQQBrowser/9.6 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; U; Android 7.1.1; zh-cn; OPPO R9sk Build/NMF26F) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.80 Mobile Safari/537.36 OppoBrowser/10.5.1.2",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk Build/NMF26F; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/62.0.3202.97 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk Build/NMF26F) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.80 Mobile Safari/537.36 360 Alitephone Browser (1.5.0.90/1.0.100.1078) mso_sdk(1.0.0)",
"Mozilla/5.0 (Linux; U; Android 7.1.1; zh-CN; OPPO R9sk Build/NMF26F) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.108 UCBrowser/12.6.0.1040 Mobile Safari/537.36",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk Build/NMF26F; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.80 Mobile Safari/537.36 LieBaoFast/5.12.3",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk Build/NMF26F; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/48.0.2564.116 Mobile Safari/537.36 T7/9.1 baidubrowser/7.19.13.0 (Baidu; P1 7.1.1)",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk Build/NMF26F; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/68.0.3440.106 Mobile Safari/537.36 AWP/2.0 SogouMSE,SogouMobileBrowser/5.22.8",
"Mozilla/5.0 (Linux; Android 7.1.1; OPPO R9sk Build/NMF26F; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/70.0.3538.80 Mobile Safari/537.36 Mb2345Browser/11.0.1",
"Mozilla/5.0 (iPhone; U; CPU iPhone OS 3_0 like Mac OS X; en-us) AppleWebKit/420.1 (KHTML, like Gecko) Version/3.0 Mobile/1A542a Safari/419.3",
"Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_0 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8A293 Safari/6531.22.7",
"Mozilla/5.0 (iPhone 6s; CPU iPhone OS 11_4_1 like Mac OS X) AppleWebKit/604.3.5 (KHTML, like Gecko) Version/11.0 MQQBrowser/8.3.0 Mobile/15B87 Safari/604.1 MttCustomUA/2 QBWebViewType/1 WKType/1",
"Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B334b Safari/531.21.10",
"Mozilla/5.0 (iPod; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5",
"Mozilla/5.0 (BlackBerry; U; BlackBerry 9800; en) AppleWebKit/534.1+ (KHTML, like Gecko) Version/6.0.0.337 Mobile Safari/534.1+",
"Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.0; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/233.70 Safari/534.6 TouchPad/1.0",
"Mozilla/5.0 (SymbianOS/9.4; Series60/5.0 NokiaN97-1/20.0.019; Profile/MIDP-2.1 Configuration/CLDC-1.1) AppleWebKit/525 (KHTML, like Gecko) BrowserNG/7.1.18124",
"Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0; HTC; Titan)",
]
def random_user_agent(include_header=False):
agent_str = random.choice(agents)
if include_header:
return "User-Agent", agent_str
return agent_str
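# Illustrative usage sketch, not part of the original module. It assumes the
# third-party `requests` library is available; random_user_agent() is the helper
# defined above. With include_header=True the helper returns a
# ("User-Agent", value) tuple, so dict([...]) turns it into a headers mapping.
#
#   import requests
#   headers = dict([random_user_agent(include_header=True)])
#   response = requests.get("https://example.com", headers=headers)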
|
PypiClean
|
/hsbcore-1.0.0.tar.gz/hsbcore-1.0.0/betcore/horse/AA.py
|
import json,logging,re,socket,time
from random import random
from urllib import urlencode
from betcore.exception import UnknownTradeException, DownlineExcpetion
from betcore.browser import BaseBrowser
from betcore.horse.config import COMMENT_CHARS, LANG_MAP, COUNTRY_MAP, TOTE_MAP
from betcore.util import logging_timing, record_trade, getReqBuyNiceTickets,timimng_func,relogin_when_except,getLastWeekDate,get_attr_byres
logger = logging.getLogger(__name__)
class AA_Resource(BaseBrowser):
def __init__(self,account,db=None,host="",login_host="",proxy_enabled=0,proxy_setting="",source_ip=None,loc_map={},cookies={}):
BaseBrowser.__init__(self,"aa",source_ip=source_ip,cookies=cookies)
self.account=account
self.host,self.login_host=host,login_host
self.trans_hosts,self.rc_hosts = [],[]
self.is_first_time_scrap=1
self.timeout=0.2
if db is not None: self.db,self.cursor=db,db.connect()
self.member_profile={}
def do_login(self):
logger.info("开始登陆AA网站%s",self.account)
self.session.headers.pop('Host',None);self.session.headers.pop('Referer',None)
self.get_res_content('http://%s' %self.host)
logger.info("提交登陆信息")
self.session.headers['Content-Type']='application/x-www-form-urlencoded; charset=UTF-8'
rs=self.get_res_by_json('http://%s/web-login/sv/login/' %self.host, {'un':self.account[0],'pw':self.account[1],'cap':'*V_S-DK-l#9LF','':''})
if rs['login']==True:
logger.info("pin码登陆")
rs = self.get_res_by_json('http://%s/web-login/sv/pin/' % self.host, {'pn':self.account[2]})
logger.info(rs)
if rs.get('error',None)=='LOGIN_REQUIRED':
raise DownlineExcpetion(self)
if rs.get("url").find("reset-password")>0:
result=0,"require reset passwd"
else:
self.get_res_content('http://%s%s' % (self.host,rs.get("url")))
# Fetch account info to verify the login succeeded
token=self.load_userid()
if token:
result=1,"login successful"
else:
result=0,"login failed"
else:
raise DownlineExcpetion(self)
return result
def reset_passwd(self,old_sec_code,new_sec_code):
"重置密码"
if old_sec_code==new_sec_code:
result=self.get_res_by_json("http://%s/web-login/sv/extend-password?tocPlusUrl=null" %self.host)
logger.info("延长密码:%s",result)
else:
result=self.init_login(new_sec_code)
return result
def init_login(self,new_sec_code):
logger.info("开始登陆AA网站%s",self.account)
self.session.headers.pop('Host',None);self.session.headers.pop('Referer',None)
self.get_res_content('http://%s' %self.host)
logger.info("提交登陆信息")
self.session.headers['Content-Type']='application/x-www-form-urlencoded; charset=UTF-8'
rs=self.get_res_by_json('http://%s/web-login/sv/login/' %self.host, {'un':self.account[0],'pw':self.account[1],'cap':'*V_S-DK-l#9LF','':''})
if rs['login']==True:
logger.info("pin码登陆")
url='http://%s/web-login/sv/pin/' % self.host
rs=self.get_res_by_json(url, {'pn':self.account[2]})
logger.info(rs)
if rs.get('error',None)=='LOGIN_REQUIRED':
raise DownlineExcpetion(self)
if rs.get("url").find("reset-password")>0:
url = 'http://%s/web-login/sv/reset-password-service' %self.host
rs=self.get_res_by_json(url,{'npi':new_sec_code[1], 'npw':new_sec_code[0], 'cpw':new_sec_code[0], 'tocPlusUrl':'null'})
logger.info(rs)
else:
raise DownlineExcpetion(self)
logger.info("%s初始化成功" %self.account[0])
return 1
def get_member_profile(self):
host=self.rc_hosts[0] if self.rc_hosts else self.login_host
url = 'http://%s/player-tote-backend/s/member/profile' %host
self.session.headers.update({'Host':host,'Referer':'http://%s/web-login/tote/activate' %host})
profile = self.get_res_by_json(url)
if not profile.get("error",None)=='LOGIN_REQUIRED':
self.member_profile={'credit':profile['availableBalance'],'profit':profile['accountProfitLoss'], 'login_token':profile['loginTokenValid'],'user_id':profile["userId"]}
return self.member_profile
@relogin_when_except("重登AA...")
def load_userid(self):
try:
profile=self.get_member_profile()
self.userId = profile["user_id"]
logger.info("登陆AA成功,userid:%s",self.userId)
return profile['login_token']
except:
raise DownlineExcpetion(self)
def check_last_week_win(self):
user_code=self.get_res_by_json("http://%s/partner-server/s/getCurrentUser" %self.rc_hosts[0])['user']['userCode']
monday,sunday=getLastWeekDate()
st,end=monday.strftime("%d-%m-%Y"),sunday.strftime("%d-%m-%Y")
rt=self.get_res_by_json("http://%s/report-server/s/shTaxPayment/publicSHReport?userCode=%s&fromDate=%s&untilDate=%s&viewMode=dollar" %(self.rc_hosts[0],user_code,st,end))
return round(rt['result']['uplineTote'][0]['balance'],2)
def get_race_msg(self,pid):
"获取当前赛事消息"
url="http://%s/horseracing-server/s/getRaceMessages?pCardId=%s&viewMode=dollar" %(self.rc_hosts[0],pid)
rt=self.get_res_content(url)
return rt
def testServerSpeech(self,servers):
"检查各个地址抓数据的速度"
race_list = self.getSummaryRaceList(host=self.rc_hosts[0])
speech_map = {}
race_id = None
for l in race_list:
if l['timeout'] >= 2:
card_id = l['card_list'][0][0]
race_id = race_id if race_id is not None else self.get_race_info( card_id )[0]
for i in range(10):
for s in servers:
start = time.time()
try:
url = 'http://%s/player-tote-backend/s/race/orderlist/wp?raceId=%s&currency=RMB&mode=DOLLAR&h=%s' %(s, race_id, time.time())
self.get_res_content(url)
elapsed = abs((time.time() - start)*1000)
except:
elapsed = 10000
elapsed_list = speech_map.get(s,[])
elapsed_list.append(int(elapsed))
speech_map.update({s:elapsed_list})
break
for k,v in speech_map.items():
speech_map.update({k:(socket.getaddrinfo(k, 80)[0][4][0],sum(v)/len(v))})
return sorted(speech_map.iteritems(), key=lambda x:x[1][1])
@relogin_when_except("重登AA...")
def getSummaryRaceList(self, content=None,host=None):
logger.info("开始从AA网站上获取赛事列表")
try:
url = 'http://%s/player-tote-backend/s/card/list?lang=%s' %(self.rc_hosts[0] if self.rc_hosts else host,self.lang)
if content is None:
if self.is_first_time_scrap:
rt=self.get_res(url)
if rt is None:
raise DownlineExcpetion(self)
self.is_first_time_scrap=0
else:
rt=self.get_res(url,retry=0,timeout=self.timeout)
rt=rt.json() if rt is not None else []
else:
rt=json.loads(content)
race_list = []
for e in rt:
race= {"timeout":"", "country":"", "location":"", "race_date":"","card_list":[],"tote_list":[],"race_type":""}
card_list,_,timeout, country_and_type,_,date,_,_,location,_,_ = e
country_and_type = country_and_type.strip()
country, race_type = re.findall( r'([\w ]+)-?([\w ]*)', country_and_type)[0]
country, race_type = country.strip(), race_type.strip()
race_type = "Horse" if len(race_type)==0 else race_type
race["country"] = COUNTRY_MAP.get(country,country)
race["timeout"] = timeout
race["location"] = re.findall( r'([-\w/.() ]+)', location)[0].strip()
race["card_list"] = card_list
tl = []
for c in card_list:
toteName = "FC" if c[3]=="Q" else TOTE_MAP.get(c[3],c[3])
tl.append("%s_%s" %(TOTE_MAP.get(c[1],c[1]), toteName))
race["tote_list"] = tl
race["race_date"] = time.strftime( "%d-%m-%Y", time.strptime(time.ctime(date/1000)))
race["race_type"] = race_type
race_list.append( race )
#logger.info("已成功从AA上获取赛事条数:%s" % len(race_list))
return race_list
except:
logger.exception(rt)
if rt.content.find("internal error")>0:
logger.info("AA无赛事")
return []
else:
logger.info(rt)
logger.info(rt.content)
raise DownlineExcpetion(self)
def saveSummaryRaceList(self, race_list):
if race_list:
self.cursor.execute( "delete from races where belong='%s'" %self.site )
insertSql = '''insert into races( belong, timeout, country, location, race_date, card_list, tote_list, race_type ) values("%s", %d, "%s", "%s", "%s", "%s", "%s","%s")'''
for r in race_list:
self.cursor.execute( insertSql %(self.site, int(r["timeout"]),r["country"],r["location"],r["race_date"],r["card_list"],r["tote_list"],r["race_type"]))
logger.info("已成功更新保存AA赛事信息")
return 1
else:
logger.info("抓取AA赛事信息失败")
return 0
def process_wp_data(self,key,race_info,chance_volume_floor,fake_tickets):
insertSql = '''insert into wp_eatbet( key, race_num, horse_num, win, place, discount, lwin, lplace, bet_type, location, tote_code,belong )
values( "%s", %d, %d, %d, %d, %s, %s, %s, "%s" , "%s", "%s", "%s")'''
loc,tc,rn=race_info
@logging_timing("完成保存AA彩池赌票吃票数据到数据库")
def save_wp(content):
belist = content.split("#TOTE_WIN_PLACE.EAT#")
bet_data = []
eat_data = []
if len(belist) == 2:
cursor = self.db.connect()
for l in belist[0].split("\r\n"):
if len(l) > 0 and not l.isspace() and not l[0] in COMMENT_CHARS:
bet_data.append( [float(n) for n in l.split()])
for l in belist[1].split("\r\n"):
if len(l) > 0 and not l.isspace() and not l[0] in COMMENT_CHARS:
eat_data.append( [float(n) for n in l.split()])
wpNiceTickets,wNiceTickets,pNiceTickets = getReqBuyNiceTickets( bet_data,"RQB",chance_volume_floor,fake_tickets)
for bd in wpNiceTickets+wNiceTickets+pNiceTickets:
cursor.execute( insertSql %('%s' %str(key),rn,bd[0],bd[1],bd[2],bd[3],bd[4],bd[5],"EAT",loc,tc,self.site))
wpNiceTickets,wNiceTickets,pNiceTickets = getReqBuyNiceTickets( eat_data,"RQE",chance_volume_floor,fake_tickets)
for ed in wpNiceTickets+wNiceTickets+pNiceTickets:
cursor.execute( insertSql %('%s' %str(key),rn,ed[0],ed[1],ed[2],ed[3],ed[4],ed[5],"BET" ,loc,tc,self.site))
return 1
return save_wp
@logging_timing("全部完成从AA抓取独赢位置挂单数据")
@relogin_when_except("重登AA...")
def collect_wp(self,key,race_info,chance_volume_floor,fake_tickets):
content = timimng_func("完成抓取AA上的数据",self.get_res_content,'http://%s/player-tote-backend/s/race/orderlist2/wp?raceId=%s&cardId=%s¤cy=RMB&mode=DOLLAR&version=0&_=%s' %(self.rc_hosts[0], key[1],key[0],random()))
self.process_wp_data(key,race_info,chance_volume_floor,fake_tickets)(content)
return 1
@record_trade("AA交易")
@relogin_when_except("重登AA...")
def buyWP(self,key,race_num,ticket):
"赌或吃独赢/位置"
horse_num, win, place, discount, lwin, lplace, type=ticket
if key is not None:
cardId = key[0]
data = '''{"list":[{"selNum":"%s","win":%s,"place":%s,"price":%s,"lwin":%s,"lplace":%s}],"type":"%s","cardId":%s,"raceNum":"%s","saveOnMatch":%s}''' %( horse_num ,win,place,discount/100.0,lwin/10.0,lplace/10.0,type,cardId ,race_num,"true")
url = 'http://%s/player-tote-backend/s/order/placeOrderWP?%s&_=%s' %(self.trans_hosts[0],urlencode({'data':data}),time.time())
rt = self.get_res_content(url,timeout=5)
if rt.find("login")>0:
logger.info(rt)
raise DownlineExcpetion(self)
if rt.find("errors")>0:
logger.info(rt)
return "REJECTED",0
try:
rs = json.loads(rt)['result'][0]
except KeyError:
logger.exception(rt)
raise
status = rs['status']
volume = max(win, place)
if status == "FULL":
logger.info("%s在AA上%s票, 场次:%s,马:%s, 独赢:%s, 位置:%s, 折扣:%s, 极限:%s/%s" %(self.account[0],LANG_MAP[type], race_num, horse_num, win, place, discount, lwin, lplace))
elif status == "PARTIAL":
logger.info( rs )
etc=rs.get('etc',None)
if etc is not None:
exec(etc.replace('null','0'))
volume = matched_amt
logger.info("%s在AA%s票只能部分成交,票数:%s" %(self.account[0],LANG_MAP[type],volume))
else:
volume = int(volume/2)
logger.info("%S在AA%s票只能部分成交,估计票数:%s" %(self.account[0],LANG_MAP[type],volume))
elif status== "REJECTED":
volume=0
logger.info("%s在AA%s票失败:%s" %(self.account[0],LANG_MAP[type],rt))
if rt.find("INVALID_USER_STATUS") > 0:
logger.info("用户状态无效:%s" %rt)
raise UnknownTradeException()
else:
logger.info("未知错误:%s" %rt)
raise UnknownTradeException()
return status,volume
else:
return "REJECTED",0
def peddingWP(self, key, bet_type,race_num, pedding_seq):
result=[]
try:
if key is not None:
for psub_seq in [ pedding_seq[i:i+25] for i in range(0,len(pedding_seq),25)]:
ps=[]
for p in psub_seq:
hn,w,pl,dis,lw,lp=p
ps.append('{"selNum":"%s","win":%s,"place":%s,"price":%s,"lwin":%s,"lplace":%s,"row":2}' %(hn,w,pl,dis/100.0,lw/10.0,lp/10.0))
cardId = key[0]
data = '''{"list":[%s],"type":"%s","cardId":%s,"raceNum":%s,"isWin":false,"isPlace":false,"isAll":false}''' %(",".join(ps) ,bet_type,cardId ,race_num)
url = 'http://%s/player-tote-backend/s/order/placeOrderWP?%s&_=%s' %(self.trans_hosts[0],urlencode({'data':data}),random())
res=self.get_res_by_json(url,timeout=5)
sub_res=res['result']
for r in sub_res:
exec(r.get("etc")).replace('null', '0')
result.append((r['status'],order_id))
except NameError:
logger.info(sub_res)
except KeyError:
logger.info(res)
return result
def cancel_pedding_wp(self,order_id_list):
url = 'http://%s/player-tote-backend/s/order/cancelOrder' %self.trans_hosts[0]
id_seq=",".join([str(e) for e in order_id_list])
logger.info("AA撤单id:%s" %id_seq)
rt = self.get_res(url,{'idSequence':id_seq}).text
if rt.find("OK") >0:
return 1
elif rt.find("ORDER_NOT_EXIST") >0:
logger.info("order_id:%s,%s" %(id_seq,rt))
return 0
else:
logger.info(rt)
return -1
def get_tote_price(self,key,race_num):
"根据cardId获取最近的一场比赛的赔率"
url = "http://%s/player-tote-backend/s/race/getTotePrices?raceId=%s" %(self.rc_hosts[0],key[1])
rt=self.get_res_content(url)
try:
rt=json.loads(rt)["totes"]
except ValueError:
logger.info(rt)
return {}
else:
tote_prices={int(k):(0 if float(v['winPrice'])==199.8 else float(v['winPrice']),0 if float(v['placePriceMax'])==199.8 else float(v['placePriceMax']))for k,v in rt.items() if not v['scratched']}
return tote_prices
def get_race_info(self, cardId, race_num=None):
"根据cardId获取最近将要举行的一场比赛的raceId"
try:
url='http://%s/player-tote-backend/s/combo/main?cardId=%s' %(self.rc_hosts[0], cardId)
rt=self.get_res_content(url)
rt=rt.split("_preload_")
race_list_by_card = json.loads(rt[1][1:-2].replace("'","\""))
race_id,race_num=-1,-1
for r in race_list_by_card["/race/listByCard"]["raceList"]:
if r['status']=='OPEN':
race_id,race_num=r['id'],r['number']
break
pid=get_attr_byres(rt[2],'"pid":',",")
return race_id,race_num,pid
except DownlineExcpetion:
raise DownlineExcpetion(self)
except:
logger.info(rt)
logger.exception( "carid:%s,race_num:%s" %(cardId,race_num))
raise DownlineExcpetion(self)
def getAllRaceIdByCard(self,cardId):
"获取一个cardId对应的所有计划举行赛事的raceId列表"
url = "http://%s/player-tote-backend/s/card/listScratchContenderByCard?cardId=%s" %(self.rc_hosts[0], cardId)
raceId, race_num, pid = self.get_race_info(cardId)
raceList = json.loads(self.get_res_content(url))["raceList"]
rl = []
for r in raceList:
if r["raceNumber"] >= race_num:
rl.append( r )
logger.debug("cardId为%s对应的所有计划举行赛事的raceId列表为%s" %(cardId, raceList))
return rl
def getWPTrans(self,key,race_num):
"查询一场比赛的交易情况"
cardId,raceId,pid = eval(key) if isinstance(key, basestring) else key
rt = self.get_res_by_json("http://%s/report-server/s/trans/mine?raceId=%s&cardId=%s&physicalRaceId=%s&quinellaOnly=false" %(self.rc_hosts[0], raceId, cardId, pid))
matchlist = rt["groups"]["matchedList"]
match= []
for e in matchlist:
if "marketAbbr" in e and e["market"] == "TOTE_WIN_PLACE":
bet_type,horse,win,place,discount,lwin,lplace = e["betType"],e["selectionNumber"],e["volume"] if e["winLimit"] else 0, e["volume"] if e["placeLimit"] else 0,e["discount"],e["winLimit"],e["placeLimit"]
match.append((horse,bet_type.upper(),win,place,discount,lwin,lplace))
unmatchlist = rt["groups"]["unmatchedList"]
unmatch=[]
for e in unmatchlist:
if "marketAbbr" in e and e["market"] == "TOTE_WIN_PLACE":
order_id,bet_type,horse,win,place,discount,lwin,lplace = e["id"],e["betType"],e["selectionNumber"],e["volume"] if e["winLimit"] else 0, e["volume"] if e["placeLimit"] else 0,e["discount"],e["winLimit"],e["placeLimit"]
unmatch.append((horse,order_id,bet_type.upper(),win,place,discount,lwin,lplace))
return match,unmatch
def getWPSummaryTransHistByDate(self, date):
"根据日期获取该天的交易总账"
rt=json.loads(self.get_res_content("http://%s/player-tote-backend/s/transHist/main?userId=%s" %(self.rc_hosts[0], self.userId)))["past"]["TOTE_WINPLACE"]
trans=[]
for th in rt:
logger.info(th)
d=time.strftime("%d-%m-%Y", time.strptime(time.ctime(th['date']/1000)))
if d == date:
country, race_type, location = re.findall(r'(?:([\w]+) - )?([\w ]+), ([\w ]+)',th["cardName"])[0]
cty = race_type if country=="" else country
rt = "Horse" if country=="" else race_type
trans.append({"country":COUNTRY_MAP.get(cty,cty),"location":location.strip(), "race_type":rt, "toteName":"AU" if th["tote_code"]=="TB" else th["tote_code"], "volume":th["volume"], "tax":th["tax"], "profit":th["profit"],"key":(th["cardId"],d), "belong":"AA"})
return trans
def getWPTransHistByKey(self, key):
"根据CardId查询每场比赛的明细账目"
detail=json.loads(self.get_res_content("http://%s/player-tote-backend/s/transHist/detail?userId=%s&cardId=%s&marketTypeGroup=TOTE_WINPLACE" %(self.rc_hosts[0], self.userId, key)))
transDetail=[{"race_num" :e["raceNumber"],"horse_num":int(e["selectionNumber"]),"bet_type":e["betType"], "volume":e["volume"],"discount":e["discount"],"lwin":e["winLimit"],"lplace":e["placeLimit"],"profit":e["balance"],"position":int(e["position"]),"winDividend":e["winDividend"], "placeDividend":e["placeDividend"] } for e in detail["transactionDetails"]]
return transDetail
if __name__ == '__main__':
pass
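# Illustrative usage sketch, not part of the original module. The account is
# consumed above as a (username, password, pin) tuple; the host below is a
# placeholder, not a real endpoint, and real calls need valid credentials.
#
#   res = AA_Resource(("user", "password", "1234"),
#                     host="aa.example.com", login_host="aa.example.com")
#   res.rc_hosts = ["aa.example.com"]
#   res.trans_hosts = ["aa.example.com"]
#   status, msg = res.do_login()          # returns (1, "login successful") on success
#   profile = res.get_member_profile()    # {'credit': ..., 'profit': ..., ...}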
|
PypiClean
|
/django-je-0.0.1.tar.gz/django-je-0.0.1/django/db/backends/oracle/utils.py
|
import datetime
from .base import Database
class InsertVar:
"""
A late-binding cursor variable that can be passed to Cursor.execute
as a parameter, in order to receive the id of the row created by an
insert statement.
"""
types = {
"AutoField": int,
"BigAutoField": int,
"SmallAutoField": int,
"IntegerField": int,
"BigIntegerField": int,
"SmallIntegerField": int,
"PositiveBigIntegerField": int,
"PositiveSmallIntegerField": int,
"PositiveIntegerField": int,
"FloatField": Database.NATIVE_FLOAT,
"DateTimeField": Database.TIMESTAMP,
"DateField": Database.Date,
"DecimalField": Database.NUMBER,
}
def __init__(self, field):
internal_type = getattr(field, "target_field", field).get_internal_type()
self.db_type = self.types.get(internal_type, str)
self.bound_param = None
def bind_parameter(self, cursor):
self.bound_param = cursor.cursor.var(self.db_type)
return self.bound_param
def get_value(self):
return self.bound_param.getvalue()
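# Illustrative sketch, not part of the original module. Django's Oracle backend
# passes an InsertVar as a parameter of an "INSERT ... RETURNING ... INTO" query;
# the backend's cursor wrapper calls bind_parameter() to allocate a cx_Oracle
# output variable, and get_value() reads the generated key afterwards. Roughly
# (model, field and SQL names here are hypothetical):
#
#   field = MyModel._meta.get_field("id")
#   var = InsertVar(field)
#   bound = var.bind_parameter(cursor)   # cursor: the backend's cursor wrapper
#   cursor.execute(insert_sql_with_returning, params + [bound])
#   new_pk = var.get_value()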
class Oracle_datetime(datetime.datetime):
"""
A datetime object, with an additional class attribute
to tell cx_Oracle to save the microseconds too.
"""
input_size = Database.TIMESTAMP
@classmethod
def from_datetime(cls, dt):
return Oracle_datetime(
dt.year,
dt.month,
dt.day,
dt.hour,
dt.minute,
dt.second,
dt.microsecond,
)
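# Illustrative sketch, not part of the original module: wrapping a plain datetime
# so cx_Oracle binds it with a TIMESTAMP input size and keeps the microseconds.
#
#   value = datetime.datetime(2023, 1, 2, 3, 4, 5, 678901)
#   odt = Oracle_datetime.from_datetime(value)
#   assert odt.microsecond == 678901 and odt.input_size == Database.TIMESTAMP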
class BulkInsertMapper:
BLOB = "TO_BLOB(%s)"
CLOB = "TO_CLOB(%s)"
DATE = "TO_DATE(%s)"
INTERVAL = "CAST(%s as INTERVAL DAY(9) TO SECOND(6))"
NUMBER = "TO_NUMBER(%s)"
TIMESTAMP = "TO_TIMESTAMP(%s)"
types = {
"AutoField": NUMBER,
"BigAutoField": NUMBER,
"BigIntegerField": NUMBER,
"BinaryField": BLOB,
"BooleanField": NUMBER,
"DateField": DATE,
"DateTimeField": TIMESTAMP,
"DecimalField": NUMBER,
"DurationField": INTERVAL,
"FloatField": NUMBER,
"IntegerField": NUMBER,
"PositiveBigIntegerField": NUMBER,
"PositiveIntegerField": NUMBER,
"PositiveSmallIntegerField": NUMBER,
"SmallAutoField": NUMBER,
"SmallIntegerField": NUMBER,
"TextField": CLOB,
"TimeField": TIMESTAMP,
}
def dsn(settings_dict):
if settings_dict["PORT"]:
host = settings_dict["HOST"].strip() or "localhost"
return Database.makedsn(host, int(settings_dict["PORT"]), settings_dict["NAME"])
return settings_dict["NAME"]
|
PypiClean
|
/xs_transformers-1.0.7-py3-none-any.whl/xs_transformers/models/layoutlm/__init__.py
|
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...utils import (
OptionalDependencyNotAvailable,
_LazyModule,
is_tf_available,
is_tokenizers_available,
is_torch_available,
)
_import_structure = {
"configuration_layoutlm": [
"LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP",
"LayoutLMConfig",
"LayoutLMOnnxConfig",
],
"tokenization_layoutlm": ["LayoutLMTokenizer"],
}
try:
if not is_tokenizers_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["tokenization_layoutlm_fast"] = ["LayoutLMTokenizerFast"]
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_layoutlm"] = [
"LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST",
"LayoutLMForMaskedLM",
"LayoutLMForSequenceClassification",
"LayoutLMForTokenClassification",
"LayoutLMForQuestionAnswering",
"LayoutLMModel",
"LayoutLMPreTrainedModel",
]
try:
if not is_tf_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
_import_structure["modeling_tf_layoutlm"] = [
"TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFLayoutLMForMaskedLM",
"TFLayoutLMForSequenceClassification",
"TFLayoutLMForTokenClassification",
"TFLayoutLMForQuestionAnswering",
"TFLayoutLMMainLayer",
"TFLayoutLMModel",
"TFLayoutLMPreTrainedModel",
]
if TYPE_CHECKING:
from .configuration_layoutlm import (
LAYOUTLM_PRETRAINED_CONFIG_ARCHIVE_MAP,
LayoutLMConfig,
LayoutLMOnnxConfig,
)
from .tokenization_layoutlm import LayoutLMTokenizer
try:
if not is_tokenizers_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .tokenization_layoutlm_fast import LayoutLMTokenizerFast
try:
if not is_torch_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .modeling_layoutlm import (
LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST,
LayoutLMForMaskedLM,
LayoutLMForQuestionAnswering,
LayoutLMForSequenceClassification,
LayoutLMForTokenClassification,
LayoutLMModel,
LayoutLMPreTrainedModel,
)
try:
if not is_tf_available():
raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
pass
else:
from .modeling_tf_layoutlm import (
TF_LAYOUTLM_PRETRAINED_MODEL_ARCHIVE_LIST,
TFLayoutLMForMaskedLM,
TFLayoutLMForQuestionAnswering,
TFLayoutLMForSequenceClassification,
TFLayoutLMForTokenClassification,
TFLayoutLMMainLayer,
TFLayoutLMModel,
TFLayoutLMPreTrainedModel,
)
else:
import sys
sys.modules[__name__] = _LazyModule(
__name__, globals()["__file__"], _import_structure, module_spec=__spec__
)
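# Illustrative note, not part of the original module. Because the module object is
# replaced with a _LazyModule at import time, the submodules listed in
# _import_structure are only imported when an attribute is first accessed, e.g.:
#
#   from xs_transformers.models import layoutlm
#   config = layoutlm.LayoutLMConfig()   # triggers the real import of configuration_layoutlm
#
# Whether the torch/TF model classes are importable depends on is_torch_available()
# and is_tf_available() at runtime.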
|
PypiClean
|
/polyaxon-schemas-0.6.0.tar.gz/polyaxon-schemas-0.6.0/polyaxon_schemas/ml/layers/noise.py
|
from __future__ import absolute_import, division, print_function
from marshmallow import fields, validate
from polyaxon_schemas.ml.layers.base import BaseLayerConfig, BaseLayerSchema
class GaussianNoiseSchema(BaseLayerSchema):
stddev = fields.Float()
@staticmethod
def schema_config():
return GaussianNoiseConfig
class GaussianNoiseConfig(BaseLayerConfig):
"""Apply additive zero-centered Gaussian noise.
This is useful to mitigate overfitting
(you could see it as a form of random data augmentation).
Gaussian Noise (GS) is a natural choice as corruption process
for real valued inputs.
As it is a regularization layer, it is only active at training time.
Args:
stddev: float, standard deviation of the noise distribution.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as input.
Polyaxonfile usage:
```yaml
GaussianNoise:
stddev: 0.5
```
"""
IDENTIFIER = 'GaussianNoise'
SCHEMA = GaussianNoiseSchema
def __init__(self, stddev, **kwargs):
super(GaussianNoiseConfig, self).__init__(**kwargs)
self.stddev = stddev
class GaussianDropoutSchema(BaseLayerSchema):
rate = fields.Float(validate=validate.Range(0, 1))
@staticmethod
def schema_config():
return GaussianDropoutConfig
class GaussianDropoutConfig(BaseLayerConfig):
"""Apply multiplicative 1-centered Gaussian noise.
As it is a regularization layer, it is only active at training time.
Args:
rate: float, drop probability (as with `Dropout`).
The multiplicative noise will have
standard deviation `sqrt(rate / (1 - rate))`.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as input.
References:
- [Dropout: A Simple Way to Prevent Neural Networks from Overfitting
Srivastava, Hinton, et al.
2014](http://www.cs.toronto.edu/~rsalakhu/papers/srivastava14a.pdf)
Polyaxonfile usage:
```yaml
GaussianDropout:
rate: 0.7
```
"""
IDENTIFIER = 'GaussianDropout'
SCHEMA = GaussianDropoutSchema
def __init__(self, rate, **kwargs):
super(GaussianDropoutConfig, self).__init__(**kwargs)
self.rate = rate
class AlphaDropoutSchema(BaseLayerSchema):
rate = fields.Float(validate=validate.Range(0, 1))
noise_shape = fields.List(fields.Int(), default=None, missing=None)
seed = fields.Int(default=None, missing=None)
@staticmethod
def schema_config():
return AlphaDropoutConfig
class AlphaDropoutConfig(BaseLayerConfig):
"""Applies Alpha Dropout to the input.
Alpha Dropout is a `Dropout` that keeps mean and variance of inputs
to their original values, in order to ensure the self-normalizing property
even after this dropout.
Alpha Dropout fits well to Scaled Exponential Linear Units
by randomly setting activations to the negative saturation value.
Args:
rate: float, drop probability (as with `Dropout`).
The multiplicative noise will have
standard deviation `sqrt(rate / (1 - rate))`.
seed: A Python integer to use as random seed.
Input shape:
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
Output shape:
Same shape as input.
References:
- [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515)
Polyaxonfile usage:
```yaml
AlphaDropout:
rate: 0.7
```
"""
IDENTIFIER = 'AlphaDropout'
SCHEMA = AlphaDropoutSchema
def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
super(AlphaDropoutConfig, self).__init__(**kwargs)
self.rate = rate
self.noise_shape = noise_shape
self.seed = seed
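# Illustrative usage sketch, not part of the original module: constructing the
# noise-layer configs defined above directly in Python; the values mirror the
# Polyaxonfile examples in the docstrings.
if __name__ == "__main__":
    noise = GaussianNoiseConfig(stddev=0.5)
    dropout = GaussianDropoutConfig(rate=0.7)
    alpha = AlphaDropoutConfig(rate=0.7, seed=42)
    print(noise.IDENTIFIER, noise.stddev)
    print(dropout.IDENTIFIER, dropout.rate)
    print(alpha.IDENTIFIER, alpha.rate, alpha.seed)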
|
PypiClean
|