content (string, 7-928k chars) | avg_line_length (float64, 3.5-33.8k) | max_line_length (int64, 6-139k) | alphanum_fraction (float64, 0.08-0.96) | licenses (list) | repository_name (string, 7-104 chars) | path (string, 4-230 chars) | size (int64, 7-928k bytes) | lang (1 class) |
---|---|---|---|---|---|---|---|---|
import logging
logger = logging.getLogger('spacel.provision.app.alarm.endpoint.factory')
class AlarmEndpointFactory(object):
def __init__(self, factories):
self._factories = factories
def add_endpoints(self, template, endpoints):
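        # For each named endpoint, look up a factory by its "type", let it add
        # resources to the template, and collect the resource name and actions
        # keyed by endpoint name.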
endpoint_resources = {}
logger.debug('Injecting %d endpoints.', len(endpoints))
for name, params in endpoints.items():
factory = self._factory_for_type(params, name)
if not factory:
continue
actions = factory.add_endpoints(template, name, params)
if actions:
endpoint_resources[name] = {
'name': factory.resource_name(name),
'actions': actions
}
else:
logger.debug('Endpoint %s was invalid.', name)
if endpoint_resources:
logger.debug('Built endpoints: %s', endpoint_resources)
return endpoint_resources
def _factory_for_type(self, params, name):
endpoint_type = params.get('type')
if not endpoint_type:
logger.warning('Endpoint %s is missing "type".', name)
return None
factory = self._factories.get(endpoint_type)
if not factory:
logger.warning('Endpoint %s has invalid "type". Valid types: %s',
name, sorted(self._factories.keys()))
return None
return factory
| 33.767442 | 77 | 0.587466 | ["MIT"] | mycloudandme/spacel-provision | src/spacel/provision/app/alarm/endpoint/factory.py | 1,452 | Python |
# encoding: utf-8
"""
The *pathspec* package provides pattern matching for file paths. So far
this only includes Git's wildmatch pattern matching (the style used for
".gitignore" files).
The following classes are imported and made available from the root of
the `pathspec` package:
- :class:`pathspec.pathspec.PathSpec`
- :class:`pathspec.pattern.Pattern`
- :class:`pathspec.pattern.RegexPattern`
- :class:`pathspec.util.RecursionError`
The following functions are also imported:
- :func:`pathspec.util.iter_tree`
- :func:`pathspec.util.lookup_pattern`
- :func:`pathspec.util.match_files`
"""
from __future__ import unicode_literals
from .pathspec import PathSpec
from .pattern import Pattern, RegexPattern
from .util import iter_tree, lookup_pattern, match_files, RecursionError
from ._meta import (
__author__,
__copyright__,
__credits__,
__license__,
__version__,
)
# Load pattern implementations.
from . import patterns
# Expose `GitIgnorePattern` class in the root module for backward
# compatibility with v0.4.
from .patterns.gitwildmatch import GitIgnorePattern
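# Example usage (sketch): spec = PathSpec.from_lines(GitIgnorePattern, gitignore_lines)
# builds a spec from ".gitignore"-style lines, and spec.match_files(paths) filters paths.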
| 24.659091 | 72 | 0.784332 | ["MIT"] | AlexxyQQ/From-Moon-to-Earth | venv/Lib/site-packages/pathspec/__init__.py | 1,085 | Python |
import pytest
from django.core import mail
from app.mail.owl import TemplOwl # type: ignore
pytestmark = [pytest.mark.django_db]
@pytest.fixture(autouse=True)
def _enable_email(settings):
settings.EMAIL_ENABLED = True
@pytest.fixture
def owl():
return TemplOwl(
to='[email protected]',
template_id=100500,
)
def test_sending(owl):
owl.send()
assert len(mail.outbox) == 1
@pytest.mark.parametrize('switch', [
lambda settings: setattr(settings, 'EMAIL_ENABLED', False),
])
def test_kill_switch(owl, switch, settings):
switch(settings)
owl.send()
assert len(mail.outbox) == 0
def test_attaching(owl):
owl.attach(filename='testing_file_name_100500.txt', content=b'just testing')
assert len(owl.msg.attachments) == 1
assert 'testing_file_name_100500.txt' in owl.msg.attachments[0]
| 19.295455 | 80 | 0.699647 | ["MIT"] | Abdujabbar/education-backend | src/app/tests/owl/tests_owl_functional.py | 849 | Python |
#!/usr/bin/python3
# *****************************************************************************
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# ******************************************************************************
import argparse
import os
import sys
from datalab.notebook_lib import *
from fabric import *
parser = argparse.ArgumentParser()
parser.add_argument('--hostname', type=str, default='')
parser.add_argument('--keyfile', type=str, default='')
parser.add_argument('--os_user', type=str, default='')
parser.add_argument('--application', type=str, default='')
args = parser.parse_args()
def general_clean():
try:
conn.sudo('systemctl stop ungit')
conn.sudo('systemctl stop inactive.timer')
conn.sudo('rm -f /etc/systemd/system/inactive.service')
conn.sudo('rm -f /etc/systemd/system/inactive.timer')
conn.sudo('rm -rf /opt/inactivity')
conn.sudo('npm -g uninstall ungit')
conn.sudo('rm -f /etc/systemd/system/ungit.service')
conn.sudo('systemctl daemon-reload')
remove_os_pkg(['nodejs', 'npm'])
conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
except Exception as err:
print('Error: {0}'.format(err))
sys.exit(1)
def clean_jupyter():
try:
conn.sudo('systemctl stop jupyter-notebook')
conn.sudo('pip3 uninstall -y notebook jupyter')
conn.sudo('rm -rf /usr/local/share/jupyter/')
conn.sudo('rm -rf /home/{}/.jupyter/'.format(args.os_user))
conn.sudo('rm -rf /home/{}/.ipython/'.format(args.os_user))
conn.sudo('rm -rf /home/{}/.ipynb_checkpoints/'.format(args.os_user))
conn.sudo('rm -rf /home/{}/.local/share/jupyter/'.format(args.os_user))
conn.sudo('rm -f /etc/systemd/system/jupyter-notebook.service')
conn.sudo('systemctl daemon-reload')
except Exception as err:
print('Error: {0}'.format(err))
sys.exit(1)
def clean_jupyterlab():
try:
conn.sudo('systemctl stop jupyterlab-notebook')
conn.sudo('pip3 uninstall -y jupyterlab')
#conn.sudo('rm -rf /usr/local/share/jupyter/')
conn.sudo('rm -rf /home/{}/.jupyter/'.format(args.os_user))
conn.sudo('rm -rf /home/{}/.ipython/'.format(args.os_user))
conn.sudo('rm -rf /home/{}/.ipynb_checkpoints/'.format(args.os_user))
conn.sudo('rm -rf /home/{}/.local/share/jupyter/'.format(args.os_user))
conn.sudo('rm -f /etc/systemd/system/jupyterlab-notebook.service')
conn.sudo('systemctl daemon-reload')
except Exception as err:
print('Error: {0}'.format(err))
sys.exit(1)
def clean_zeppelin():
try:
conn.sudo('systemctl stop zeppelin-notebook')
conn.sudo('rm -rf /opt/zeppelin* /var/log/zeppelin /var/run/zeppelin')
if os.environ['notebook_multiple_clusters'] == 'true':
conn.sudo('systemctl stop livy-server')
conn.sudo('rm -rf /opt/livy* /var/run/livy')
conn.sudo('rm -f /etc/systemd/system/livy-server.service')
conn.sudo('rm -f /etc/systemd/system/zeppelin-notebook.service')
conn.sudo('systemctl daemon-reload')
except Exception as err:
print('Error: {0}'.format(err))
sys.exit(1)
def clean_rstudio():
try:
remove_os_pkg(['rstudio-server'])
conn.sudo('rm -f /home/{}/.Rprofile'.format(args.os_user))
conn.sudo('rm -f /home/{}/.Renviron'.format(args.os_user))
except Exception as err:
print('Error:', str(err))
sys.exit(1)
def clean_tensor():
try:
clean_jupyter()
conn.sudo('systemctl stop tensorboard')
conn.sudo('systemctl disable tensorboard')
conn.sudo('systemctl daemon-reload')
except Exception as err:
print('Error: {0}'.format(err))
sys.exit(1)
def clean_tensor_rstudio():
try:
clean_rstudio()
conn.sudo('systemctl stop tensorboard')
conn.sudo('systemctl disable tensorboard')
conn.sudo('systemctl daemon-reload')
except Exception as err:
print('Error: {0}'.format(err))
sys.exit(1)
def clean_tensor_jupyterlab():
try:
clean_jupyterlab()
conn.sudo('systemctl stop tensorboard')
conn.sudo('systemctl disable tensorboard')
conn.sudo('systemctl daemon-reload')
except Exception as err:
print('Error: {0}'.format(err))
sys.exit(1)
def clean_deeplearning():
try:
conn.sudo('systemctl stop ungit')
conn.sudo('systemctl stop inactive.timer')
conn.sudo('rm -f /etc/systemd/system/inactive.service')
conn.sudo('rm -f /etc/systemd/system/inactive.timer')
conn.sudo('rm -rf /opt/inactivity')
conn.sudo('npm -g uninstall ungit')
conn.sudo('rm -f /etc/systemd/system/ungit.service')
conn.sudo('systemctl daemon-reload')
remove_os_pkg(['nodejs', 'npm'])
conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
# conn.sudo('systemctl stop tensorboard')
# conn.sudo('systemctl disable tensorboard')
# conn.sudo('systemctl daemon-reload')
clean_jupyter()
except Exception as err:
print('Error: {0}'.format(err))
sys.exit(1)
if __name__ == "__main__":
print('Configure connections')
global conn
conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
if os.environ['conf_cloud_provider'] == 'azure':
from datalab.actions_lib import ensure_right_mount_paths
ensure_right_mount_paths()
de_master_name = '{}-{}-{}-de-{}-m'.format(
os.environ['conf_service_base_name'],
os.environ['project_name'],
os.environ['endpoint_name'],
os.environ['computational_name'])
de_ami_id = AzureMeta().get_instance_image(os.environ['azure_resource_group_name'],
de_master_name)
default_ami_id = 'default'
else:
de_master_name = '{}-{}-{}-de-{}-m'.format(
os.environ['conf_service_base_name'],
os.environ['project_name'],
os.environ['endpoint_name'],
os.environ['computational_name'])
de_ami_id = get_ami_id_by_instance_name(de_master_name)
default_ami_id = get_ami_id(
os.environ['aws_{}_image_name'.format(os.environ['conf_os_family'])])
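    # Clean notebook software only when this instance was built from a data engine
    # image; if its image matches the default AMI there is nothing to clean.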
if de_ami_id != default_ami_id:
if args.application in os.environ['dataengine_image_notebooks'].split(','):
if args.application == 'deeplearning':
clean_deeplearning()
else:
general_clean()
if args.application == 'jupyter':
clean_jupyter()
elif args.application == 'zeppelin':
clean_zeppelin()
elif args.application == 'rstudio':
clean_rstudio()
elif args.application == 'tensor':
clean_tensor()
elif args.application == 'tensor-rstudio':
clean_tensor_rstudio()
elif args.application == 'tensor-jupyterlab':
clean_tensor_jupyterlab()
else:
print('Found default ami, do not make clean')
#conn.close()
    sys.exit(0)
| 38.657143 | 91 | 0.612343 | ["Apache-2.0"] | pjfanning/incubator-datalab | infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py | 8,118 | Python |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 27 17:18:43 2020
@author: admangli
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
dataset = pd.read_csv('Ads_CTR_Optimisation.csv').values
#%%
slot_machines = 10
#%% Random ad selection reward
import random
random_reward = 0
for i in range(len(dataset)):
random_reward += dataset[i, random.randint(0, slot_machines - 1)]
#%%
number_of_ad_selections = [0]*slot_machines
reward_sums = [0]*slot_machines
ad_selection_sequence = []
UCB_range = np.zeros((slot_machines, 2))  # To get an idea of the underlying distribution
# Generate initial seed, selecting each machine at least once randomly
for round in range(0, slot_machines):
target_ad = random.randint(0, slot_machines - 1)
while (number_of_ad_selections[target_ad] == 1):
target_ad = random.randint(0, slot_machines - 1)
number_of_ad_selections[target_ad] += 1
reward_sums[target_ad] += dataset[round][target_ad]
ad_selection_sequence.append(target_ad)
for round in range(slot_machines, len(dataset)):
# Calculate Ri and Delta for each ad for the current round
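    # Ri is the average reward of ad i so far and Deltai its confidence half-width,
    # sqrt(3/2 * ln(round) / Ni); their sum is the UCB1 upper confidence bound.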
Ri = [0]*slot_machines
Deltai = [0]*slot_machines
max_UCB = 0
target_ad = -1
for ad in range(0, slot_machines):
Ri[ad] = reward_sums[ad] / number_of_ad_selections[ad]
Deltai[ad] = math.sqrt(1.5 * math.log(round + 1)/number_of_ad_selections[ad])
UCB_range[ad, 0] = Ri[ad] + Deltai[ad]
UCB_range[ad, 1] = Ri[ad] - Deltai[ad]
if UCB_range[ad, 0] > max_UCB: # Pick the ad with maximum UCB = Ri + Delta for current round
max_UCB = UCB_range[ad, 0]
target_ad = ad
# Increment selected ad's reward and number of selections
if target_ad != -1:
number_of_ad_selections[target_ad] += 1
reward_sums[target_ad] += dataset[round][target_ad]
ad_selection_sequence.append(target_ad)
#%% Visualize results
# Plot a histogram showing how many times each ad was selected
plt.hist(ad_selection_sequence)
plt.xlabel('Ad Number')
plt.ylabel('Number of selections')
plt.title('Ad selection comparison')
plt.show()
| 31 | 104 | 0.691504 | ["MIT"] | adityamanglik/Algorithm-Implementations | Machine Learning/Sklearn Implementations/Reinforcement Learning/Upper_Confidence_Bound.py | 2,201 | Python |
_base_ = [
'../_base_/models/icnet_r50-d8.py', '../_base_/datasets/cityscapes.py',
'../_base_/default_runtime.py', '../_base_/schedules/schedule_160k.py'
]
model = dict(
backbone=dict(
resnet_cfg=dict(
init_cfg=dict(
type='Pretrained', checkpoint='open-mmlab://resnet50_v1c'))))
| 29.818182 | 77 | 0.615854 | ["Apache-2.0"] | Junjun2016/ICNet | configs/icnet/icnet_r50-d8-pretrained_512x1024_160k_cityscapes.py | 328 | Python |
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from .base import Loss
class AdaCos(Loss):
"""PyTorch implementation of AdaCos. See Ref[1] for paper
This implementation is different from the most open-source implementations in following ways:
1) expects raw logits of size (bs x num_classes) not (bs, embedding_size)
2) despite AdaCos being dynamic, still add an optional margin parameter
3) calculate running average stats of B and θ, not batch-wise stats as in original paper
4) normalize input logits, not embeddings and weights
Args:
margin (float): margin in radians
momentum (float): momentum for running average of B and θ
Input:
y_pred (torch.Tensor): shape BS x N_classes
y_true (torch.Tensor): one-hot encoded. shape BS x N_classes
Reference:
[1] Adaptively Scaling Cosine Logits for Effectively Learning Deep Face Representations
"""
def __init__(self, embedding_size, num_classes, final_criterion, margin=0, momentum=0.95):
super(AdaCos, self).__init__()
self.final_criterion = final_criterion
self.margin = margin
self.momentum = momentum
self.prev_s = 10
self.running_B = 1000 # default value is chosen so that initial S is ~10
self.running_theta = math.pi / 4
self.eps = 1e-7
self.register_parameter("weight", torch.nn.Parameter(torch.zeros(num_classes, embedding_size)))
nn.init.xavier_uniform_(self.weight)
self.idx = 0
def forward(self, embedding, y_true):
cos_theta = F.linear(F.normalize(embedding), F.normalize(self.weight)).clamp(-1 + self.eps, 1 - self.eps)
# cos_theta = torch.cos(torch.acos(cos_theta + self.margin))
if y_true.dim() != 1:
y_true_one_hot = y_true.float()
else:
y_true_one_hot = torch.zeros_like(cos_theta)
y_true_one_hot.scatter_(1, y_true.unsqueeze(1), 1.0)
with torch.no_grad():
B_batch = cos_theta[y_true_one_hot.eq(0)].mul(self.prev_s).exp().sum().div(embedding.size(0))
self.running_B = self.running_B * self.momentum + B_batch * (1 - self.momentum)
theta = torch.acos(cos_theta.clamp(-1 + self.eps, 1 - self.eps))
# originally authors use median, but I use mean
theta_batch = theta[y_true_one_hot.ne(0)].mean().clamp_max(math.pi / 4)
self.running_theta = self.running_theta * self.momentum + theta_batch * (1 - self.momentum)
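            # Adaptive scale as in AdaCos: s = log(B) / cos(theta), computed here
            # from the running averages above rather than per-batch statistics.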
self.prev_s = self.running_B.log() / torch.cos(self.running_theta)
self.idx += 1
if self.idx % 1000 == 0:
print(
f"\nRunning B: {self.running_B:.2f}. Running theta: {self.running_theta:.2f}. Running S: {self.prev_s:.2f}"
)
return self.final_criterion(cos_theta * self.prev_s, y_true_one_hot)
| 41.014085 | 123 | 0.655563 | ["MIT"] | YevheniiSemendiak/pytorch-tools | pytorch_tools/losses/angular.py | 2,914 | Python |
"""
IGN Instituto Geográfico Nacional Sismología Feed.
Fetches GeoRSS feed from IGN Instituto Geográfico Nacional Sismología.
"""
from datetime import datetime
from typing import Optional
import dateparser as dateparser
from georss_client import FeedEntry, GeoRssFeed
from georss_client.consts import CUSTOM_ATTRIBUTE
from georss_client.feed_manager import FeedManagerBase
ATTRIBUTION = "Instituto Geográfico Nacional"
IMAGE_URL_PATTERN = (
"http://www.ign.es/web/resources/sismologia/www/"
"dir_images_terremotos/detalle/{}.gif"
)
REGEXP_ATTR_MAGNITUDE = r"magnitud (?P<{}>[^ ]+) ".format(CUSTOM_ATTRIBUTE)
REGEXP_ATTR_REGION = r"magnitud [^ ]+ en (?P<{}>[A-ZÁÉÓÜÑ0-9 \-\.]+) en".format(
CUSTOM_ATTRIBUTE
)
REGEXP_ATTR_PUBLISHED_DATE = r"-Info.terremoto: (?P<{}>.+)$".format(CUSTOM_ATTRIBUTE)
REGEXP_ATTR_SHORT_ID = (
r"http:\/\/www\.ign\.es\/web\/ign\/portal\/"
r"sis-catalogo-terremotos\/-\/catalogo-terremotos\/"
r"detailTerremoto\?evid=(?P<{}>\w+)$".format(CUSTOM_ATTRIBUTE)
)
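# Each pattern above captures its value into a named group (CUSTOM_ATTRIBUTE) so that
# the FeedEntry._search_in_* helpers can extract magnitude, region, date and id.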
URL = "http://www.ign.es/ign/RssTools/sismologia.xml"
class IgnSismologiaFeedManager(FeedManagerBase):
"""Feed Manager for IGN Sismología feed."""
def __init__(
self,
generate_callback,
update_callback,
remove_callback,
coordinates,
filter_radius=None,
filter_minimum_magnitude=None,
):
"""Initialize the IGN Sismología Feed Manager."""
feed = IgnSismologiaFeed(
coordinates,
filter_radius=filter_radius,
filter_minimum_magnitude=filter_minimum_magnitude,
)
super().__init__(feed, generate_callback, update_callback, remove_callback)
class IgnSismologiaFeed(GeoRssFeed):
"""IGN Sismología feed."""
def __init__(
self, home_coordinates, filter_radius=None, filter_minimum_magnitude=None
):
"""Initialise this service."""
super().__init__(home_coordinates, URL, filter_radius=filter_radius)
self._filter_minimum_magnitude = filter_minimum_magnitude
def __repr__(self):
"""Return string representation of this feed."""
return "<{}(home={}, url={}, radius={}, magnitude={})>".format(
self.__class__.__name__,
self._home_coordinates,
self._url,
self._filter_radius,
self._filter_minimum_magnitude,
)
def _new_entry(self, home_coordinates, rss_entry, global_data):
"""Generate a new entry."""
return IgnSismologiaFeedEntry(home_coordinates, rss_entry)
def _filter_entries(self, entries):
"""Filter the provided entries."""
entries = super()._filter_entries(entries)
if self._filter_minimum_magnitude:
# Return only entries that have an actual magnitude value, and
# the value is equal or above the defined threshold.
return list(
filter(
lambda entry: entry.magnitude
and entry.magnitude >= self._filter_minimum_magnitude,
entries,
)
)
return entries
class IgnSismologiaFeedEntry(FeedEntry):
"""IGN Sismología feed entry."""
def __init__(self, home_coordinates, rss_entry):
"""Initialise this service."""
super().__init__(home_coordinates, rss_entry)
@property
def attribution(self) -> str:
"""Return the attribution of this entry."""
return ATTRIBUTION
@property
def published(self) -> Optional[datetime]:
"""Return the published date of this entry."""
published_date = self._search_in_title(REGEXP_ATTR_PUBLISHED_DATE)
if published_date:
published_date = dateparser.parse(published_date)
return published_date
@property
def magnitude(self) -> Optional[float]:
"""Return the magnitude of this entry."""
magnitude = self._search_in_description(REGEXP_ATTR_MAGNITUDE)
if magnitude:
magnitude = float(magnitude)
return magnitude
@property
def region(self) -> Optional[float]:
"""Return the region of this entry."""
return self._search_in_description(REGEXP_ATTR_REGION)
def _short_id(self) -> Optional[str]:
"""Return the short id of this entry."""
return self._search_in_external_id(REGEXP_ATTR_SHORT_ID)
@property
def image_url(self) -> Optional[str]:
"""Return the image url of this entry."""
short_id = self._short_id()
if short_id:
return IMAGE_URL_PATTERN.format(short_id)
return None
| 33.035714 | 85 | 0.65773 | ["Apache-2.0"] | exxamalte/python-georss-ign-sismologia-client | georss_ign_sismologia_client/__init__.py | 4,639 | Python |
lista = [1,2,3,4,5]
for i in range(0,5):
print(lista[i])
| 16.25 | 20 | 0.523077 | ["MIT"] | jwellington58/Wellingtonlp220172vacation | Wellington_python/exemplo_listas.py | 65 | Python |
''' convenience functions for ANOVA type analysis with OLS
Note: statistical results of ANOVA are not checked, OLS is
checked but not whether the reported results are the ones used
in ANOVA
includes form2design for creating dummy variables
TODO:
* ...
*
'''
import numpy as np
#from scipy import stats
import statsmodels.api as sm
def data2dummy(x, returnall=False):
'''convert array of categories to dummy variables
by default drops dummy variable for last category
uses ravel, 1d only'''
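    # e.g. groups [1, 2, 3] give three 0/1 indicator columns; the last column is
    # dropped unless returnall=True (dropping one level avoids singularity when a
    # constant is included).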
x = x.ravel()
groups = np.unique(x)
if returnall:
return (x[:, None] == groups).astype(int)
else:
return (x[:, None] == groups).astype(int)[:,:-1]
def data2proddummy(x):
'''creates product dummy variables from 2 columns of 2d array
drops last dummy variable, but not from each category
singular with simple dummy variable but not with constant
quickly written, no safeguards
'''
#brute force, assumes x is 2d
#replace with encoding if possible
groups = np.unique(map(tuple, x.tolist()))
#includes singularity with additive factors
return (x==groups[:,None,:]).all(-1).T.astype(int)[:,:-1]
def data2groupcont(x1,x2):
'''create dummy continuous variable
Parameters
----------
x1 : 1d array
label or group array
x2 : 1d array (float)
continuous variable
Notes
-----
useful for group specific slope coefficients in regression
'''
if x2.ndim == 1:
x2 = x2[:,None]
dummy = data2dummy(x1, returnall=True)
return dummy * x2
# Result strings
#the second leaves the constant in, not with NIST regression
#but something fishy with res.ess negative in examples ?
#not checked if these are all the right ones
anova_str0 = '''
ANOVA statistics (model sum of squares excludes constant)
Source DF Sum Squares Mean Square F Value Pr > F
Model %(df_model)i %(ess)f %(mse_model)f %(fvalue)f %(f_pvalue)f
Error %(df_resid)i %(ssr)f %(mse_resid)f
CTotal %(nobs)i %(uncentered_tss)f %(mse_total)f
R squared %(rsquared)f
'''
anova_str = '''
ANOVA statistics (model sum of squares includes constant)
Source DF Sum Squares Mean Square F Value Pr > F
Model %(df_model)i %(ssmwithmean)f %(mse_model)f %(fvalue)f %(f_pvalue)f
Error %(df_resid)i %(ssr)f %(mse_resid)f
CTotal %(nobs)i %(uncentered_tss)f %(mse_total)f
R squared %(rsquared)f
'''
def anovadict(res):
'''update regression results dictionary with ANOVA specific statistics
not checked for completeness
'''
ad = {}
ad.update(res.__dict__) #dict doesn't work with cached attributes
anova_attr = ['df_model', 'df_resid', 'ess', 'ssr','uncentered_tss',
'mse_model', 'mse_resid', 'mse_total', 'fvalue', 'f_pvalue',
'rsquared']
for key in anova_attr:
ad[key] = getattr(res, key)
ad['nobs'] = res.model.nobs
ad['ssmwithmean'] = res.uncentered_tss - res.ssr
return ad
def form2design(ss, data):
'''convert string formula to data dictionary
ss : string
* I : add constant
* varname : for simple varnames data is used as is
* F:varname : create dummy variables for factor varname
* P:varname1*varname2 : create product dummy variables for
varnames
* G:varname1*varname2 : create product between factor and
continuous variable
data : dict or structured array
data set, access of variables by name as in dictionaries
Returns
-------
vars : dictionary
dictionary of variables with converted dummy variables
names : list
list of names, product (P:) and grouped continuous
variables (G:) have name by joining individual names
sorted according to input
Examples
--------
>>> xx, n = form2design('I a F:b P:c*d G:c*f', testdata)
>>> xx.keys()
['a', 'b', 'const', 'cf', 'cd']
>>> n
['const', 'a', 'b', 'cd', 'cf']
Notes
-----
with sorted dict, separate name list wouldn't be necessary
'''
vars = {}
names = []
for item in ss.split():
if item == 'I':
vars['const'] = np.ones(data.shape[0])
names.append('const')
elif not ':' in item:
vars[item] = data[item]
names.append(item)
elif item[:2] == 'F:':
v = item.split(':')[1]
vars[v] = data2dummy(data[v])
names.append(v)
elif item[:2] == 'P:':
v = item.split(':')[1].split('*')
vars[''.join(v)] = data2proddummy(np.c_[data[v[0]],data[v[1]]])
names.append(''.join(v))
elif item[:2] == 'G:':
v = item.split(':')[1].split('*')
vars[''.join(v)] = data2groupcont(data[v[0]], data[v[1]])
names.append(''.join(v))
else:
raise ValueError('unknown expression in formula')
return vars, names
def dropname(ss, li):
'''drop names from a list of strings,
    names to drop are in a space-delimited list
does not change original list
'''
newli = li[:]
for item in ss.split():
newli.remove(item)
return newli
if __name__ == '__main__':
# Test Example with created data
# ------------------------------
nobs = 1000
testdataint = np.random.randint(3, size=(nobs,4)).view([('a',int),('b',int),('c',int),('d',int)])
testdatacont = np.random.normal( size=(nobs,2)).view([('e',float), ('f',float)])
import numpy.lib.recfunctions
dt2 = numpy.lib.recfunctions.zip_descr((testdataint, testdatacont),flatten=True)
# concatenate structured arrays
testdata = np.empty((nobs,1), dt2)
for name in testdataint.dtype.names:
testdata[name] = testdataint[name]
for name in testdatacont.dtype.names:
testdata[name] = testdatacont[name]
#print form2design('a',testdata)
if 0: # print only when nobs is small, e.g. nobs=10
xx, n = form2design('F:a',testdata)
print xx
print form2design('P:a*b',testdata)
print data2proddummy((np.c_[testdata['a'],testdata['b']]))
xx, names = form2design('a F:b P:c*d',testdata)
#xx, names = form2design('I a F:b F:c F:d P:c*d',testdata)
xx, names = form2design('I a F:b P:c*d', testdata)
xx, names = form2design('I a F:b P:c*d G:a*e f', testdata)
X = np.column_stack([xx[nn] for nn in names])
# simple test version: all coefficients equal to one
y = X.sum(1) + 0.01*np.random.normal(size=(nobs))
rest1 = sm.OLS(y,X).fit() #results
print rest1.params
print anova_str % anovadict(rest1)
X = np.column_stack([xx[nn] for nn in dropname('ae f', names)])
# simple test version: all coefficients equal to one
y = X.sum(1) + 0.01*np.random.normal(size=(nobs))
rest1 = sm.OLS(y,X).fit()
print rest1.params
print anova_str % anovadict(rest1)
# Example: from Bruce
# -------------------
#get data and clean it
#^^^^^^^^^^^^^^^^^^^^^
# requires file 'dftest3.data' posted by Bruce
# read data set and drop rows with missing data
dt_b = np.dtype([('breed', int), ('sex', int), ('litter', int),
('pen', int), ('pig', int), ('age', float),
('bage', float), ('y', float)])
dta = np.genfromtxt('dftest3.data', dt_b,missing='.', usemask=True)
print 'missing', [dta.mask[k].sum() for k in dta.dtype.names]
m = dta.mask.view(bool)
droprows = m.reshape(-1,len(dta.dtype.names)).any(1)
# get complete data as plain structured array
# maybe doesn't work with masked arrays
dta_use_b1 = dta[~droprows,:].data
print dta_use_b1.shape
print dta_use_b1.dtype
#Example b1: variables from Bruce's glm
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# prepare data and dummy variables
xx_b1, names_b1 = form2design('I F:sex age', dta_use_b1)
# create design matrix
X_b1 = np.column_stack([xx_b1[nn] for nn in dropname('', names_b1)])
y_b1 = dta_use_b1['y']
# estimate using OLS
rest_b1 = sm.OLS(y_b1, X_b1).fit()
# print results
print rest_b1.params
print anova_str % anovadict(rest_b1)
#compare with original version only in original version
#print anova_str % anovadict(res_b0)
# Example: use all variables except pig identifier
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
allexog = ' '.join(dta.dtype.names[:-1])
#'breed sex litter pen pig age bage'
xx_b1a, names_b1a = form2design('I F:breed F:sex F:litter F:pen age bage', dta_use_b1)
X_b1a = np.column_stack([xx_b1a[nn] for nn in dropname('', names_b1a)])
y_b1a = dta_use_b1['y']
rest_b1a = sm.OLS(y_b1a, X_b1a).fit()
print rest_b1a.params
print anova_str % anovadict(rest_b1a)
for dropn in names_b1a:
print '\nResults dropping', dropn
X_b1a_ = np.column_stack([xx_b1a[nn] for nn in dropname(dropn, names_b1a)])
y_b1a_ = dta_use_b1['y']
rest_b1a_ = sm.OLS(y_b1a_, X_b1a_).fit()
#print rest_b1a_.params
print anova_str % anovadict(rest_b1a_)
| 31.774306 | 101 | 0.605289 | ["BSD-3-Clause"] | AnaMP/statsmodels | statsmodels/sandbox/regression/try_ols_anova.py | 9,151 | Python |
from lxml import objectify
from kloppy.domain import Period, PitchDimensions, Dimension
from kloppy.infra.utils import Readable
from .models import *
def noop(x):
return x
def _load_provider_parameters(parent_elm, value_mapper=None) -> Dict:
if parent_elm is None:
return {}
if not value_mapper:
value_mapper = noop
return {
str(param.find('Name')): value_mapper(param.find('Value'))
for param in parent_elm.iterchildren(tag='ProviderParameter')
if param.find('Value') != ''
}
def _load_periods(global_config_elm, frame_rate: int) -> List[Period]:
provider_params = _load_provider_parameters(
global_config_elm.find('ProviderGlobalParameters'),
value_mapper=int
)
period_names = ['first_half', 'second_half', 'first_extra_half', 'second_extra_half']
periods = []
for idx, period_name in enumerate(period_names):
start_key = f"{period_name}_start"
end_key = f"{period_name}_end"
if start_key in provider_params:
periods.append(
Period(
id=idx + 1,
start_timestamp=float(provider_params[start_key]) / frame_rate,
end_timestamp=float(provider_params[end_key]) / frame_rate
)
)
else:
# done
break
return periods
def _load_players(players_elm, team_map: Dict[str, Team]) -> List[Player]:
return [
Player(
team=team_map[player_elm.attrib['teamId']],
jersey_no=str(player_elm.find('ShirtNumber')),
player_id=player_elm.attrib['id'],
name=str(player_elm.find('Name')),
attributes=_load_provider_parameters(players_elm.find('ProviderPlayerParameters'))
)
for player_elm in players_elm.iterchildren(tag='Player')
]
def _load_data_format_specifications(data_format_specifications_elm) -> List[DataFormatSpecification]:
return [
DataFormatSpecification.from_xml_element(data_format_specification_elm)
for data_format_specification_elm
in data_format_specifications_elm.iterchildren(tag='DataFormatSpecification')
]
def _load_sensors(sensors_elm) -> List[Sensor]:
return [
Sensor.from_xml_element(sensor_elm)
for sensor_elm
in sensors_elm.iterchildren(tag="Sensor")
]
def _load_pitch_dimensions(meta_data_elm, sensors: List[Sensor]) -> Union[None, PitchDimensions]:
normalized = False
for sensor in sensors:
if sensor.sensor_id == 'position':
if sensor.channels[0].unit == 'normalized':
normalized = True
break
field_size_path = objectify.ObjectPath("Metadata.Sessions.Session[0]")
field_size_elm = field_size_path.find(meta_data_elm).find('FieldSize')
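    # With normalized position data the pitch spans 0..1 on both axes, so the
    # per-metre scale is simply 1 / field size reported in the metadata.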
if field_size_elm is not None and normalized:
return PitchDimensions(
x_dim=Dimension(0, 1),
y_dim=Dimension(0, 1),
x_per_meter=1 / int(field_size_elm.find('Width')),
y_per_meter=1 / int(field_size_elm.find('Height'))
)
else:
return None
def load_meta_data(meta_data_file: Readable) -> EPTSMetaData:
root = objectify.fromstring(meta_data_file.read())
meta_data = root.find('Metadata')
score_path = objectify.ObjectPath("Metadata.Sessions.Session[0].MatchParameters.Score")
score_elm = score_path.find(meta_data)
_team_map = {
score_elm.attrib['idLocalTeam']: Team.HOME,
score_elm.attrib['idVisitingTeam']: Team.AWAY
}
players = _load_players(meta_data.find('Players'), _team_map)
data_format_specifications = _load_data_format_specifications(root.find('DataFormatSpecifications'))
device_path = objectify.ObjectPath("Metadata.Devices.Device[0].Sensors")
sensors = _load_sensors(device_path.find(meta_data))
_channel_map = {
channel.channel_id: channel
for sensor in sensors
for channel in sensor.channels
}
_player_map = {
player.player_id: player for player in players
}
player_channels = [
PlayerChannel(
player_channel_id=player_channel_elm.attrib['id'],
player=_player_map[player_channel_elm.attrib['playerId']],
channel=_channel_map[player_channel_elm.attrib['channelId']]
)
for player_channel_elm in meta_data.find('PlayerChannels').iterchildren(tag='PlayerChannel')
]
team_name_map = {
_team_map[team_elm.attrib['id']]: str(team_elm.find('Name'))
for team_elm
in meta_data.find('Teams').iterchildren(tag='Team')
}
frame_rate = int(meta_data.find('GlobalConfig').find('FrameRate'))
periods = _load_periods(meta_data.find('GlobalConfig'), frame_rate)
pitch_dimensions = _load_pitch_dimensions(meta_data, sensors)
return EPTSMetaData(
home_team_name=team_name_map[Team.HOME],
away_team_name=team_name_map[Team.AWAY],
players=players,
periods=periods,
pitch_dimensions=pitch_dimensions,
data_format_specifications=data_format_specifications,
player_channels=player_channels,
frame_rate=frame_rate,
sensors=sensors
    )
| 32.140244 | 104 | 0.667046 | ["BSD-3-Clause"] | dmallory42/kloppy | kloppy/infra/serializers/tracking/epts/meta_data.py | 5,271 | Python |
# Copyright 2020 Konstruktor, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
from tethys.apps.base import AppBase
@click.group()
def cli():
"""
The tethys CLI for managing your environment.
"""
@cli.group(name="apps")
def apps_entry():
"""
Tethys apps manager
"""
for app_cls in AppBase.get_apps():
add_click_entry = getattr(app_cls, "add_click_entry", None)
if add_click_entry:
add_click_entry(apps_entry)
| 26.864865 | 74 | 0.72334 | ["ECL-2.0", "Apache-2.0"] | tethys-platform/tethys | tethys/bin/cli.py | 994 | Python |
"""
This code was taken from https://github.com/ActiveState/appdirs and modified
to suite our purposes.
"""
import os
import sys
from pip._vendor import six
def user_cache_dir(appname):
r"""
Return full path to the user-specific cache dir for this application.
"appname" is the name of application.
Typical user cache directories are:
Mac OS X: ~/Library/Caches/<AppName>
Unix: ~/.cache/<AppName> (XDG default)
Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache
On Windows the only suggestion in the MSDN docs is that local settings go
in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
non-roaming app data dir (the default returned by `user_data_dir`). Apps
typically put cache data somewhere *under* the given dir here. Some
examples:
...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
...\Acme\SuperApp\Cache\1.0
OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
"""
if sys.platform == "win32":
# Get the base path
path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
# Add our app name and Cache directory to it
path = os.path.join(path, appname, "Cache")
elif sys.platform == "darwin":
# Get the base path
path = os.path.expanduser("~/Library/Caches")
# Add our app name to it
path = os.path.join(path, appname)
else:
# Get the base path
path = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
# Add our app name to it
path = os.path.join(path, appname)
return path
# -- Windows support functions --
def _get_win_folder_from_registry(csidl_name):
"""
This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
import _winreg
shell_folder_name = {
"CSIDL_APPDATA": "AppData",
"CSIDL_COMMON_APPDATA": "Common AppData",
"CSIDL_LOCAL_APPDATA": "Local AppData",
}[csidl_name]
key = _winreg.OpenKey(
_winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
)
directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
return directory
def _get_win_folder_with_pywin32(csidl_name):
from win32com.shell import shellcon, shell
directory = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
# Try to make this a unicode path because SHGetFolderPath does
# not return unicode strings when there is unicode data in the
# path.
try:
directory = six.text_type(directory)
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in directory:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
try:
import win32api
directory = win32api.GetShortPathName(directory)
except ImportError:
pass
except UnicodeError:
pass
return directory
def _get_win_folder_with_ctypes(csidl_name):
csidl_const = {
"CSIDL_APPDATA": 26,
"CSIDL_COMMON_APPDATA": 35,
"CSIDL_LOCAL_APPDATA": 28,
}[csidl_name]
buf = ctypes.create_unicode_buffer(1024)
ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in buf:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf2 = ctypes.create_unicode_buffer(1024)
if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
buf = buf2
return buf.value
if sys.platform == "win32":
try:
import win32com.shell # noqa
_get_win_folder = _get_win_folder_with_pywin32
except ImportError:
try:
import ctypes
_get_win_folder = _get_win_folder_with_ctypes
except ImportError:
_get_win_folder = _get_win_folder_from_registry
| 31.136691 | 78 | 0.647874 | ["MIT"] | chadrik/pip | pip/appdirs.py | 4,328 | Python |
import os
AWS_ACCESS_KEY_ID_PERF = os.environ['AWS_ACCESS_KEY_ID_PERF']
AWS_SECRET_ACCESS_KEY_PERF = os.environ['AWS_SECRET_ACCESS_KEY_PERF']
AWS_ACCESS_KEY_ID_DELETE_PERF = os.environ['AWS_ACCESS_KEY_ID_DELETE_PERF']
AWS_SECRET_ACCESS_KEY_DELETE_PERF = os.environ['AWS_SECRET_ACCESS_KEY_DELETE_PERF']
BUCKET_PERF = os.environ['BUCKET_PERF']
AWS_ACCESS_KEY_ID_PSAP = os.environ['AWS_ACCESS_KEY_ID_PSAP']
AWS_SECRET_ACCESS_KEY_PSAP = os.environ['AWS_SECRET_ACCESS_KEY_PSAP']
BUCKET_PSAP = os.environ['BUCKET_PSAP']
AWS_ACCESS_KEY_ID_RH_PERF = os.environ['AWS_ACCESS_KEY_ID_RH_PERF']
AWS_SECRET_ACCESS_KEY_RH_PERF = os.environ['AWS_SECRET_ACCESS_KEY_RH_PERF']
BUCKET_RH_PERF = os.environ['BUCKET_RH_PERF']
GITHUB_TOKEN = os.environ['GITHUB_TOKEN']
LOGS = os.environ.get('LOGS', 'logs')
mandatory_tags = {'Budget': 'PERF-DEPT'}
print('Run all policies pre active region')
regions = ['us-east-1', 'us-east-2', 'us-west-1', 'us-west-2', 'eu-central-1', 'ap-south-1', 'eu-north-1', 'ap-northeast-1', 'ap-southeast-1', 'ap-southeast-2', 'eu-west-3', 'sa-east-1']
for region in regions:
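    # Run the cloud-governance container once per region with the tag_resources
    # policy in "update" mode so the mandatory Budget tag is applied.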
os.system(f"""sudo podman run --rm --name cloud-governance-tagging -e account='perf' -e policy=tag_resources -e AWS_ACCESS_KEY_ID={AWS_ACCESS_KEY_ID_DELETE_PERF} -e AWS_SECRET_ACCESS_KEY={AWS_SECRET_ACCESS_KEY_DELETE_PERF} -e AWS_DEFAULT_REGION={region} -e tag_operation=update -e mandatory_tags="{mandatory_tags}" -e log_level=INFO -v /etc/localtime:/etc/localtime quay.io/ebattat/cloud-governance:latest""")
| 59.92 | 413 | 0.789052 | ["Apache-2.0"] | athiruma/cloud-governance | jenkins/tagging/tagging.py | 1,498 | Python |
from movies.settings import *
| 15 | 29 | 0.8 | ["MIT"] | Wonqu/rz_movies | app/movies/settings/settings_dev.py | 30 | Python |
# https://github.com/theeko74/pdfc
# modified by brio50 on 2022/01/23, working with gs version 9.54.0
"""
Simple python wrapper script to use ghoscript function to compress PDF files.
Compression levels:
0: default
1: prepress
2: printer
3: ebook
4: screen
Dependency: Ghostscript.
On MacOSX install via command line `brew install ghostscript`.
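Example:
    python gs_compress.py input.pdf -o output.pdf -c 3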
"""
import argparse
import subprocess
import os.path
import sys
import shutil
def compress(input_file_path, output_file_path, level=0, method=1):
"""Function to compress PDF via Ghostscript command line interface"""
quality = {
0: '/default',
1: '/prepress',
2: '/printer',
3: '/ebook',
4: '/screen'
}
# Check if valid path
if not os.path.isfile(input_file_path):
print(f"Error: invalid path for input file: {input_file_path}")
sys.exit(1)
# Check if file is a PDF by extension
if input_file_path.split('.')[-1].lower() != 'pdf': # not sure this is the most robust solution
print(f"Error: input file is not a PDF: {input_file_path}")
sys.exit(1)
gs = get_ghostscript_path()
file_name = input_file_path.split('/')[-1] # everything after last '/'
print("Compressing PDF \"{}\"...".format(file_name))
if method == 1:
# https://gist.github.com/lkraider/f0888da30bc352f9d167dfa4f4fc8213
cmd = [gs, '-sDEVICE=pdfwrite',
'-dNumRenderingThreads=2',
'-dPDFSETTINGS={}'.format(quality[level]),
'-dCompatibilityLevel=1.5',
'-dNOPAUSE', '-dQUIET', '-dBATCH', '-dSAFER',
# font settings
'-dSubsetFonts=true',
'-dCompressFonts=true',
'-dEmbedAllFonts=true',
               # color format
'-sProcessColorModel=DeviceRGB',
'-sColorConversionStrategy=RGB',
'-sColorConversionStrategyForImages=RGB',
'-dConvertCMYKImagesToRGB=true',
# image resample
'-dDetectDuplicateImages=true',
'-dColorImageDownsampleType=/Bicubic',
'-dColorImageResolution=300',
'-dGrayImageDownsampleType=/Bicubic',
'-dGrayImageResolution=300',
'-dMonoImageDownsampleType=/Subsample',
'-dMonoImageResolution=300',
'-dDownsampleColorImages=true',
# preset overrides
'-dDoThumbnails=false',
'-dCreateJobTicket=false',
'-dPreserveEPSInfo=false',
'-dPreserveOPIComments=false',
'-dPreserveOverprintSettings=false',
'-dUCRandBGInfo=/Remove',
'-sOutputFile={}'.format(output_file_path),
input_file_path]
elif method == 2:
cmd = [gs, '-sDEVICE=pdfwrite',
'-dNumRenderingThreads=2',
'-dPDFSETTINGS={}'.format(quality[level]),
'-dCompatibilityLevel=1.4',
'-dNOPAUSE', '-dQUIET', '-dBATCH', '-dSAFER',
'-dDetectDuplicateImages=true',
'-sOutputFile={}'.format(output_file_path),
input_file_path]
try:
# execute
subprocess.call(cmd, stderr=sys.stdout)
except:
# print ghostscript command for debug
print(" ".join(cmd))
if not os.path.exists(output_file_path):
raise Exception(f"Ghostscript failed to create {output_file_path}, time to debug...\n",
" ".join(cmd))
initial_size = round(os.path.getsize(input_file_path) / (1024 * 1024), 2)
final_size = round(os.path.getsize(output_file_path) / (1024 * 1024), 2)
ratio = round(100 - ((final_size / initial_size) * 100), 1)
print(f"Initial file size is {initial_size}MB",
f"; Final file size is {final_size}MB",
f"; Compression Ratio = {ratio}%\n")
if final_size > initial_size and method == 1:
print('-' * 100)
print('Compression Failed\nTrying another ghostscript compression method...')
print('-' * 100)
info = compress(input_file_path, output_file_path, 4, 2)
initial_size = info[0]
final_size = info[1]
ratio = info[2]
return [initial_size, final_size, ratio]
def get_ghostscript_path():
gs_names = ['gs', 'gswin32', 'gswin64']
for name in gs_names:
if shutil.which(name):
return shutil.which(name)
raise FileNotFoundError(f'No GhostScript executable was found on path ({"/".join(gs_names)})')
def main():
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument('input', help='Relative or absolute path of the input PDF file')
parser.add_argument('-o', '--out', help='Relative or absolute path of the output PDF file')
parser.add_argument('-c', '--compress', type=int, help='Compression level from 0 to 4')
parser.add_argument('-b', '--backup', action='store_true', help="Backup the old PDF file")
parser.add_argument('--open', action='store_true', default=False,
help='Open PDF after compression')
args = parser.parse_args()
# In case no compression level is specified, default is 2 '/ printer'
if not args.compress:
args.compress = 2
# In case no output file is specified, store in temp file
if not args.out:
args.out = 'temp.pdf'
# Run
    compress(args.input, args.out, level=args.compress)
# In case no output file is specified, erase original file
if args.out == 'temp.pdf':
if args.backup:
shutil.copyfile(args.input, args.input.replace(".pdf", "_BACKUP.pdf"))
shutil.copyfile(args.out, args.input)
os.remove(args.out)
# In case we want to open the file after compression
if args.open:
if args.out == 'temp.pdf' and args.backup:
subprocess.call(['open', args.input])
else:
subprocess.call(['open', args.out])
if __name__ == '__main__':
main()
| 35.598837 | 100 | 0.600033 | ["MIT"] | brio50/groups-io | gs_compress.py | 6,123 | Python |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.api import httpbody_pb2 # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.recommendationengine_v1beta1.services.user_event_service import pagers
from google.cloud.recommendationengine_v1beta1.types import import_
from google.cloud.recommendationengine_v1beta1.types import user_event
from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
from google.cloud.recommendationengine_v1beta1.types import user_event_service
from google.protobuf import any_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import UserEventServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import UserEventServiceGrpcTransport
from .transports.grpc_asyncio import UserEventServiceGrpcAsyncIOTransport
class UserEventServiceClientMeta(type):
"""Metaclass for the UserEventService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = OrderedDict() # type: Dict[str, Type[UserEventServiceTransport]]
_transport_registry["grpc"] = UserEventServiceGrpcTransport
_transport_registry["grpc_asyncio"] = UserEventServiceGrpcAsyncIOTransport
def get_transport_class(cls,
label: str = None,
) -> Type[UserEventServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class UserEventServiceClient(metaclass=UserEventServiceClientMeta):
"""Service for ingesting end user actions on the customer
website.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "recommendationengine.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
UserEventServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
UserEventServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> UserEventServiceTransport:
"""Returns the transport used by the client instance.
Returns:
UserEventServiceTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def event_store_path(project: str,location: str,catalog: str,event_store: str,) -> str:
"""Returns a fully-qualified event_store string."""
return "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}".format(project=project, location=location, catalog=catalog, event_store=event_store, )
@staticmethod
def parse_event_store_path(path: str) -> Dict[str,str]:
"""Parses a event_store path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/catalogs/(?P<catalog>.+?)/eventStores/(?P<event_store>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str, ) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str,str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str, ) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder, )
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str,str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str, ) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization, )
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str,str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str, ) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project, )
@staticmethod
def parse_common_project_path(path: str) -> Dict[str,str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str, ) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(project=project, location=location, )
@staticmethod
def parse_common_location_path(path: str) -> Dict[str,str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
def __init__(self, *,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, UserEventServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the user event service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, UserEventServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")))
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
if is_mtls:
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, UserEventServiceTransport):
# transport is a UserEventServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError("When providing a transport instance, "
"provide its credentials directly.")
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
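    # A minimal usage sketch for constructing this client with a custom endpoint
    # via ``client_options``. The endpoint string below is only illustrative and
    # is not defined in this module; omit ``client_options`` to use the defaults.
    #
    #   from google.api_core.client_options import ClientOptions
    #   from google.cloud.recommendationengine_v1beta1 import UserEventServiceClient
    #
    #   options = ClientOptions(api_endpoint="recommendationengine.googleapis.com")
    #   client = UserEventServiceClient(client_options=options)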
def write_user_event(self,
request: Union[user_event_service.WriteUserEventRequest, dict] = None,
*,
parent: str = None,
user_event: gcr_user_event.UserEvent = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcr_user_event.UserEvent:
r"""Writes a single user event.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.WriteUserEventRequest, dict]):
The request object. Request message for WriteUserEvent
method.
parent (str):
Required. The parent eventStore resource name, such as
``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
user_event (google.cloud.recommendationengine_v1beta1.types.UserEvent):
Required. User event to write.
This corresponds to the ``user_event`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.recommendationengine_v1beta1.types.UserEvent:
UserEvent captures all metadata
information recommendation engine needs
to know about how end users interact
with customers' website.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, user_event])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a user_event_service.WriteUserEventRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, user_event_service.WriteUserEventRequest):
request = user_event_service.WriteUserEventRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if user_event is not None:
request.user_event = user_event
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.write_user_event]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
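    # A minimal usage sketch for ``write_user_event``. The parent path and the
    # event payload are placeholders, not values defined in this module:
    #
    #   from google.cloud.recommendationengine_v1beta1 import UserEvent, UserInfo
    #
    #   event = UserEvent(
    #       event_type="detail-page-view",
    #       user_info=UserInfo(visitor_id="visitor-123"),
    #   )
    #   written = client.write_user_event(
    #       parent="projects/1234/locations/global/catalogs/default_catalog"
    #              "/eventStores/default_event_store",
    #       user_event=event,
    #   )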
def collect_user_event(self,
request: Union[user_event_service.CollectUserEventRequest, dict] = None,
*,
parent: str = None,
user_event: str = None,
uri: str = None,
ets: int = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> httpbody_pb2.HttpBody:
r"""Writes a single user event from the browser. This
        uses a GET request due to browser restrictions on
POST-ing to a 3rd party domain.
This method is used only by the Recommendations AI
JavaScript pixel. Users should not call this method
directly.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.CollectUserEventRequest, dict]):
The request object. Request message for CollectUserEvent
method.
parent (str):
Required. The parent eventStore name, such as
``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
user_event (str):
Required. URL encoded UserEvent
proto.
This corresponds to the ``user_event`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
uri (str):
                Optional. The URL including CGI
                parameters but excluding the hash
                fragment. The URL must be truncated to
                1.5K bytes to conservatively stay under
                the 2K-byte limit. This is often more useful
than the referer url, because many
browsers only send the domain for 3rd
party requests.
This corresponds to the ``uri`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
ets (int):
Optional. The event timestamp in
milliseconds. This prevents browser
caching of otherwise identical get
requests. The name is abbreviated to
reduce the payload bytes.
This corresponds to the ``ets`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api.httpbody_pb2.HttpBody:
Message that represents an arbitrary HTTP body. It should only be used for
payload formats that can't be represented as JSON,
such as raw binary or an HTML page.
This message can be used both in streaming and
non-streaming API methods in the request as well as
the response.
It can be used as a top-level request field, which is
convenient if one wants to extract parameters from
either the URL or HTTP template into the request
fields and also want access to the raw HTTP body.
Example:
message GetResourceRequest {
// A unique request id. string request_id = 1;
// The raw HTTP body is bound to this field.
google.api.HttpBody http_body = 2;
}
service ResourceService {
rpc GetResource(GetResourceRequest)
returns (google.api.HttpBody);
rpc UpdateResource(google.api.HttpBody)
returns (google.protobuf.Empty);
}
Example with streaming methods:
service CaldavService {
rpc GetCalendar(stream google.api.HttpBody)
returns (stream google.api.HttpBody);
rpc UpdateCalendar(stream google.api.HttpBody)
returns (stream google.api.HttpBody);
}
Use of this type only changes how the request and
response bodies are handled, all other features will
continue to work unchanged.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, user_event, uri, ets])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a user_event_service.CollectUserEventRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, user_event_service.CollectUserEventRequest):
request = user_event_service.CollectUserEventRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if user_event is not None:
request.user_event = user_event
if uri is not None:
request.uri = uri
if ets is not None:
request.ets = ets
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.collect_user_event]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
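    # A minimal usage sketch for ``collect_user_event``. All values are
    # placeholders; ``user_event`` must be a URL-encoded UserEvent proto and
    # ``ets`` is a millisecond timestamp used only to defeat browser caching:
    #
    #   body = client.collect_user_event(
    #       parent="projects/1234/locations/global/catalogs/default_catalog"
    #              "/eventStores/default_event_store",
    #       user_event="<url-encoded UserEvent>",
    #       uri="https://example.com/product/42",
    #       ets=1650000000000,
    #   )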
def list_user_events(self,
request: Union[user_event_service.ListUserEventsRequest, dict] = None,
*,
parent: str = None,
filter: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListUserEventsPager:
r"""Gets a list of user events within a time range, with
potential filtering.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.ListUserEventsRequest, dict]):
The request object. Request message for ListUserEvents
method.
parent (str):
Required. The parent eventStore resource name, such as
``projects/*/locations/*/catalogs/default_catalog/eventStores/default_event_store``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
filter (str):
Optional. Filtering expression to specify restrictions
over returned events. This is a sequence of terms, where
each term applies some kind of a restriction to the
returned user events. Use this expression to restrict
results to a specific time range, or filter events by
                eventType, e.g.: eventTime > "2012-04-23T18:25:43.511Z"
eventsMissingCatalogItems
eventTime<"2012-04-23T18:25:43.511Z" eventType=search
We expect only 3 types of fields:
::
* eventTime: this can be specified a maximum of 2 times, once with a
less than operator and once with a greater than operator. The
eventTime restrict should result in one contiguous valid eventTime
range.
* eventType: only 1 eventType restriction can be specified.
                   * eventsMissingCatalogItems: specifying this will restrict results
to events for which catalog items were not found in the catalog. The
default behavior is to return only those events for which catalog
items were found.
Some examples of valid filters expressions:
- Example 1: eventTime > "2012-04-23T18:25:43.511Z"
eventTime < "2012-04-23T18:30:43.511Z"
- Example 2: eventTime > "2012-04-23T18:25:43.511Z"
eventType = detail-page-view
- Example 3: eventsMissingCatalogItems eventType =
search eventTime < "2018-04-23T18:30:43.511Z"
- Example 4: eventTime > "2012-04-23T18:25:43.511Z"
- Example 5: eventType = search
- Example 6: eventsMissingCatalogItems
This corresponds to the ``filter`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.recommendationengine_v1beta1.services.user_event_service.pagers.ListUserEventsPager:
Response message for ListUserEvents
method.
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, filter])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a user_event_service.ListUserEventsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, user_event_service.ListUserEventsRequest):
request = user_event_service.ListUserEventsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if filter is not None:
request.filter = filter
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_user_events]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListUserEventsPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
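    # A minimal usage sketch for ``list_user_events``. The parent path is a
    # placeholder; the filter combines an eventTime bound with an eventType
    # restriction, following the grammar described in the docstring above:
    #
    #   pager = client.list_user_events(
    #       parent="projects/1234/locations/global/catalogs/default_catalog"
    #              "/eventStores/default_event_store",
    #       filter='eventTime > "2012-04-23T18:25:43.511Z" eventType = detail-page-view',
    #   )
    #   for event in pager:  # the pager resolves additional pages automatically
    #       print(event.event_type)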
def purge_user_events(self,
request: Union[user_event_service.PurgeUserEventsRequest, dict] = None,
*,
parent: str = None,
filter: str = None,
force: bool = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Deletes permanently all user events specified by the
filter provided. Depending on the number of events
specified by the filter, this operation could take hours
or days to complete. To test a filter, use the list
command first.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsRequest, dict]):
The request object. Request message for PurgeUserEvents
method.
parent (str):
Required. The resource name of the event_store under
which the events are created. The format is
``projects/${projectId}/locations/global/catalogs/${catalogId}/eventStores/${eventStoreId}``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
filter (str):
Required. The filter string to specify the events to be
deleted. Empty string filter is not allowed. This filter
can also be used with ListUserEvents API to list events
that will be deleted. The eligible fields for filtering
are:
- eventType - UserEvent.eventType field of type string.
- eventTime - in ISO 8601 "zulu" format.
- visitorId - field of type string. Specifying this
will delete all events associated with a visitor.
- userId - field of type string. Specifying this will
delete all events associated with a user. Example 1:
Deleting all events in a time range.
``eventTime > "2012-04-23T18:25:43.511Z" eventTime < "2012-04-23T18:30:43.511Z"``
Example 2: Deleting specific eventType in time range.
``eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"``
Example 3: Deleting all events for a specific visitor
``visitorId = visitor1024`` The filtering fields are
assumed to have an implicit AND.
This corresponds to the ``filter`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
force (bool):
Optional. The default value is false.
Override this flag to true to actually
perform the purge. If the field is not
set to true, a sampling of events to be
deleted will be returned.
This corresponds to the ``force`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is
successfully done, then this message is returned by
the google.longrunning.Operations.response field.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, filter, force])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a user_event_service.PurgeUserEventsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, user_event_service.PurgeUserEventsRequest):
request = user_event_service.PurgeUserEventsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if filter is not None:
request.filter = filter
if force is not None:
request.force = force
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.purge_user_events]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
user_event_service.PurgeUserEventsResponse,
metadata_type=user_event_service.PurgeUserEventsMetadata,
)
# Done; return the response.
return response
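    # A minimal usage sketch for ``purge_user_events``. The parent path is a
    # placeholder. With ``force=False`` the service only returns a sample of the
    # events that would be deleted; pass ``force=True`` to actually purge, then
    # block on the long-running operation:
    #
    #   operation = client.purge_user_events(
    #       parent="projects/1234/locations/global/catalogs/default_catalog"
    #              "/eventStores/default_event_store",
    #       filter='visitorId = visitor1024',
    #       force=False,
    #   )
    #   purge_response = operation.result()  # waits for completion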
def import_user_events(self,
request: Union[import_.ImportUserEventsRequest, dict] = None,
*,
parent: str = None,
request_id: str = None,
input_config: import_.InputConfig = None,
errors_config: import_.ImportErrorsConfig = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Bulk import of User events. Request processing might
be synchronous. Events that already exist are skipped.
Use this method for backfilling historical user events.
Operation.response is of type ImportResponse. Note that
it is possible for a subset of the items to be
successfully inserted. Operation.metadata is of type
ImportMetadata.
Args:
request (Union[google.cloud.recommendationengine_v1beta1.types.ImportUserEventsRequest, dict]):
The request object. Request message for the
ImportUserEvents request.
parent (str):
Required.
``projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store``
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
request_id (str):
Optional. Unique identifier provided by client, within
the ancestor dataset scope. Ensures idempotency for
expensive long running operations. Server-generated if
unspecified. Up to 128 characters long. This is returned
as google.longrunning.Operation.name in the response.
Note that this field must not be set if the desired
input config is catalog_inline_source.
This corresponds to the ``request_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
input_config (google.cloud.recommendationengine_v1beta1.types.InputConfig):
Required. The desired input location
of the data.
This corresponds to the ``input_config`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
errors_config (google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig):
Optional. The desired location of
errors incurred during the Import.
This corresponds to the ``errors_config`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.ImportUserEventsResponse` Response of the ImportUserEventsRequest. If the long running
operation was successful, then this message is
                returned by the
                google.longrunning.Operations.response field.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, request_id, input_config, errors_config])
if request is not None and has_flattened_params:
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a import_.ImportUserEventsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, import_.ImportUserEventsRequest):
request = import_.ImportUserEventsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if request_id is not None:
request.request_id = request_id
if input_config is not None:
request.input_config = input_config
if errors_config is not None:
request.errors_config = errors_config
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.import_user_events]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
import_.ImportUserEventsResponse,
metadata_type=import_.ImportMetadata,
)
# Done; return the response.
return response
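    # A minimal usage sketch for ``import_user_events``. The parent path and the
    # Cloud Storage URIs are placeholders; the exact InputConfig fields shown are
    # assumptions based on the request types imported above:
    #
    #   from google.cloud.recommendationengine_v1beta1 import (
    #       InputConfig, GcsSource, ImportErrorsConfig)
    #
    #   operation = client.import_user_events(
    #       parent="projects/1234/locations/global/catalogs/default_catalog"
    #              "/eventStores/default_event_store",
    #       input_config=InputConfig(
    #           gcs_source=GcsSource(input_uris=["gs://my-bucket/events/*.json"])),
    #       errors_config=ImportErrorsConfig(gcs_prefix="gs://my-bucket/errors/"),
    #   )
    #   import_response = operation.result()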
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-recommendations-ai",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
"UserEventServiceClient",
)
| 44.469891 | 211 | 0.612524 | [
"Apache-2.0"
] | googleapis/googleapis-gen | google/cloud/recommendationengine/v1beta1/recommendationengine-v1beta1-py/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py | 45,048 | Python |
# -*- coding: utf-8 -*-
import os
import re
from math import radians, degrees
import numpy as np
import pandas as pd
import cv2
from pdftabextract import imgproc
from pdftabextract.geom import pt
from pdftabextract.common import read_xml, parse_pages, save_page_grids
from pdftabextract.textboxes import rotate_textboxes, sorted_by_attr
from pdftabextract.clustering import (find_clusters_1d_break_dist,
calc_cluster_centers_1d,
zip_clusters_and_values)
from pdftabextract.splitpages import split_page_texts, create_split_pages_dict_structure
from pdftabextract.extract import make_grid_from_positions, fit_texts_into_grid, datatable_to_dataframe
#%% Some constants
#DATAPATH = 'data/'
#DATAPATH = 'ip/'
#OUTPUTPATH = 'generated_output/'
#OUTPUTPATH = 'op/'
#INPUT_XML = 'output.xml'
DATAPATH = 'data/'
OUTPUTPATH = 'generated_output/'
INPUT_XML = 'schoollist_1.pdf.xml'
MIN_ROW_HEIGHT = 260 # minimum height of a row in pixels, measured in the scanned pages
MIN_COL_WIDTH = 194 # very important. the minimum width of a column in pixels, measured in the scanned pages
#%% Some helper functions
def save_image_w_lines(iproc_obj, imgfilebasename, orig_img_as_background, file_suffix_prefix=''):
file_suffix = 'lines-orig' if orig_img_as_background else 'lines'
img_lines = iproc_obj.draw_lines(orig_img_as_background=orig_img_as_background)
img_lines_file = os.path.join(OUTPUTPATH, '%s-%s.png' % (imgfilebasename, file_suffix_prefix + file_suffix))
print("> saving image with detected lines to '%s'" % img_lines_file)
cv2.imwrite(img_lines_file, img_lines)
#%% Read the XML
# Load the XML that was generated with pdftohtml
xmltree, xmlroot = read_xml(os.path.join(DATAPATH, INPUT_XML))
# parse it and generate a dict of pages
pages = parse_pages(xmlroot, require_image=True)
#%% Split the scanned double pages so that we can later process the lists page-by-page
split_texts_and_images = [] # list of tuples with (double page, split text boxes, split images)
for p_num, p in pages.items():
# get the image file of the scanned page
imgfilebasename = p['image'][:p['image'].rindex('.')]
imgfile = os.path.join(DATAPATH, p['image'])
print("page %d: detecting lines in image file '%s'..." % (p_num, imgfile))
# create an image processing object with the scanned page
iproc_obj = imgproc.ImageProc(imgfile)
# calculate the scaling of the image file in relation to the text boxes coordinate system dimensions
page_scaling_x = iproc_obj.img_w / p['width']
page_scaling_y = iproc_obj.img_h / p['height']
image_scaling = (page_scaling_x, # scaling in X-direction
page_scaling_y) # scaling in Y-direction
# detect the lines in the double pages
lines_hough = iproc_obj.detect_lines(canny_low_thresh=50, canny_high_thresh=150, canny_kernel_size=3,
hough_rho_res=1,
hough_theta_res=np.pi/500,
hough_votes_thresh=350)
print("> found %d lines" % len(lines_hough))
save_image_w_lines(iproc_obj, imgfilebasename, True, 'bothpages-')
# find the vertical line that separates both sides
sep_line_img_x = iproc_obj.find_pages_separator_line(dist_thresh=MIN_COL_WIDTH/2)
sep_line_page_x = sep_line_img_x / page_scaling_x
print("> found pages separator line at %f (image space position) / %f (page space position)"
% (sep_line_img_x, sep_line_page_x))
# split the scanned double page at the separator line
split_images = iproc_obj.split_image(sep_line_img_x)
# split the textboxes at the separator line
split_texts = split_page_texts(p, sep_line_page_x)
split_texts_and_images.append((p, split_texts, split_images))
# generate a new XML and "pages" dict structure from the split pages
split_pages_xmlfile = os.path.join(OUTPUTPATH, INPUT_XML[:INPUT_XML.rindex('.')] + '.split.xml')
print("> saving split pages XML to '%s'" % split_pages_xmlfile)
split_tree, split_root, split_pages = create_split_pages_dict_structure(split_texts_and_images,
save_to_output_path=split_pages_xmlfile)
# we don't need the original double pages any more, we'll work with 'split_pages'
del pages
#%% Detect clusters of horizontal lines using the image processing module and rotate back or deskew pages
hori_lines_clusters = {}
pages_image_scaling = {} # scaling of the scanned page image in relation to the OCR page dimensions for each page
for p_num, p in split_pages.items():
# get the image file of the scanned page
imgfilebasename = p['image'][:p['image'].rindex('.')]
imgfile = os.path.join(OUTPUTPATH, p['image'])
print("page %d: detecting lines in image file '%s'..." % (p_num, imgfile))
# create an image processing object with the scanned page
iproc_obj = imgproc.ImageProc(imgfile)
# calculate the scaling of the image file in relation to the text boxes coordinate system dimensions
page_scaling_x = iproc_obj.img_w / p['width']
page_scaling_y = iproc_obj.img_h / p['height']
pages_image_scaling[p_num] = (page_scaling_x, # scaling in X-direction
page_scaling_y) # scaling in Y-direction
# detect the lines
lines_hough = iproc_obj.detect_lines(canny_low_thresh=50, canny_high_thresh=150, canny_kernel_size=3,
hough_rho_res=1,
hough_theta_res=np.pi/500,
hough_votes_thresh=round(0.2 * iproc_obj.img_w))
print("> found %d lines" % len(lines_hough))
save_image_w_lines(iproc_obj, imgfilebasename, True)
save_image_w_lines(iproc_obj, imgfilebasename, False)
# find rotation or skew
# the parameters are:
# 1. the minimum threshold in radians for a rotation to be counted as such
# 2. the maximum threshold for the difference between horizontal and vertical line rotation (to detect skew)
# 3. an optional threshold to filter out "stray" lines whose angle is too far apart from the median angle of
# all other lines that go in the same direction (no effect here)
rot_or_skew_type, rot_or_skew_radians = iproc_obj.find_rotation_or_skew(radians(0.5), # uses "lines_hough"
radians(1),
omit_on_rot_thresh=radians(0.5))
# rotate back text boxes
# since often no vertical lines can be detected and hence it cannot be determined if the page is rotated or skewed,
# we assume that it's always rotated
if rot_or_skew_type is not None:
print("> rotating back by %f°" % -degrees(rot_or_skew_radians))
rotate_textboxes(p, -rot_or_skew_radians, pt(0, 0))
# rotate back detected lines
lines_hough = iproc_obj.apply_found_rotation_or_skew(rot_or_skew_type, -rot_or_skew_radians)
save_image_w_lines(iproc_obj, imgfilebasename + '-repaired', True)
save_image_w_lines(iproc_obj, imgfilebasename + '-repaired', False)
# cluster the detected *horizontal* lines using find_clusters_1d_break_dist as simple clustering function
# (break on distance MIN_ROW_HEIGHT/2)
# additionally, remove all cluster sections that are considered empty
# a cluster is considered empty when the number of text boxes in it is below 10% of the median number of text boxes
# per cluster section
hori_clusters = iproc_obj.find_clusters(imgproc.DIRECTION_HORIZONTAL, find_clusters_1d_break_dist,
remove_empty_cluster_sections_use_texts=p['texts'], # use this page's textboxes
remove_empty_cluster_sections_n_texts_ratio=0.1, # 10% rule
remove_empty_cluster_sections_scaling=page_scaling_y, # the positions are in "scanned image space" -> we scale them to "text box space"
dist_thresh=MIN_ROW_HEIGHT/2)
print("> found %d clusters" % len(hori_clusters))
if len(hori_clusters) > 0:
# draw the clusters
img_w_clusters = iproc_obj.draw_line_clusters(imgproc.DIRECTION_HORIZONTAL, hori_clusters)
save_img_file = os.path.join(OUTPUTPATH, '%s-hori-clusters.png' % imgfilebasename)
print("> saving image with detected horizontal clusters to '%s'" % save_img_file)
cv2.imwrite(save_img_file, img_w_clusters)
hori_lines_clusters[p_num] = hori_clusters
else:
print("> no horizontal line clusters found")
# save split and repaired XML (i.e. XML with deskewed textbox positions)
output_files_basename = INPUT_XML[:INPUT_XML.rindex('.')]
repaired_xmlfile = os.path.join(OUTPUTPATH, output_files_basename + '.split.repaired.xml')
print("saving split and repaired XML file to '%s'..." % repaired_xmlfile)
split_tree.write(repaired_xmlfile)
#%% Determine the rows and columns of the tables
pttrn_schoolnum = re.compile(r'^\d{6}$') # a valid school number indicates a table row
page_grids = {}
print("detecting rows and columns...")
for p_num, p in split_pages.items():
scaling_x, scaling_y = pages_image_scaling[p_num]
# try to find out the table rows in this page using the horizontal lines that were detected before
hori_lines = list(np.array(calc_cluster_centers_1d(hori_lines_clusters[p_num])) / scaling_y)
hori_lines.append(p['height']) # last line: page bottom
prev_line_y = 0
row_texts = []
row_positions = []
in_table = False # is True when the current segment is a real table row (not a table header or surrounding text)
for line_y in hori_lines:
# get all texts in this row
segment_texts = [t for t in p['texts'] if prev_line_y < t['bottom'] <= line_y]
if not segment_texts: continue # skip empty rows
# try to find the start and the end of the table
for t in segment_texts:
t_val = t['value'].strip()
if pttrn_schoolnum.search(t_val): # if this matches, we found the start of the table
if not in_table:
in_table = True
row_positions.append(prev_line_y)
break
else:
if in_table: # we found the end of the table
in_table = False
if in_table: # this is a table row, so add the texts and row positions to the respective lists
row_texts.append(segment_texts)
row_positions.append(line_y)
prev_line_y = line_y
# try to find out the table columns in this page using the distribution of x-coordinates of the left position of
# each text box in all rows
text_xs = []
for texts in row_texts:
text_xs.extend([t['left'] for t in texts])
text_xs = np.array(text_xs)
# make clusters of x positions
text_xs_clusters = find_clusters_1d_break_dist(text_xs, dist_thresh=MIN_COL_WIDTH/2/scaling_x)
text_xs_clusters_w_values = zip_clusters_and_values(text_xs_clusters, text_xs)
col_positions = calc_cluster_centers_1d(text_xs_clusters_w_values)
# remove falsely identified columns (i.e. merge columns with only a few text boxes)
filtered_col_positions = []
n_rows = len(row_positions)
n_cols = len(col_positions)
if n_cols > 1 and n_rows > 1:
top_y = row_positions[0]
bottom_y = row_positions[-1]
# append the rightmost text's right border as the last column border
rightmost_pos = sorted_by_attr(p['texts'], 'right')[-1]['right']
col_positions.append(rightmost_pos)
# merge columns with few text boxes
texts_in_table = [t for t in p['texts'] if top_y < t['top'] + t['height']/2 <= bottom_y]
prev_col_x = col_positions[0]
for col_x in col_positions[1:]:
col_texts = [t for t in texts_in_table if prev_col_x < t['left'] + t['width']/2 <= col_x]
if len(col_texts) >= n_rows: # there should be at least one text box per row
filtered_col_positions.append(prev_col_x)
last_col_x = col_x
prev_col_x = col_x
# manually add border for the last column because it has very few or no text boxes
filtered_col_positions.append(filtered_col_positions[-1] + (rightmost_pos - filtered_col_positions[-1]) / 2)
filtered_col_positions.append(rightmost_pos)
# create the grid
if filtered_col_positions:
grid = make_grid_from_positions(filtered_col_positions, row_positions)
n_rows = len(grid)
n_cols = len(grid[0])
print("> page %d: grid with %d rows, %d columns" % (p_num, n_rows, n_cols))
page_grids[p_num] = grid
else: # this happens for the first page as there's no table on that
print("> page %d: no table found" % p_num)
# save the page grids
# After you created the page grids, you should then check that they're correct using pdf2xml-viewer's
# loadGridFile() function
page_grids_file = os.path.join(OUTPUTPATH, output_files_basename + '.pagegrids.json')
print("saving page grids JSON file to '%s'" % page_grids_file)
save_page_grids(page_grids, page_grids_file)
#%% Create data frames (requires pandas library)
# For sake of simplicity, we will just fit the text boxes into the grid, merge the texts in their cells (splitting text
# boxes to separate lines if necessary) and output the result. Normally, you would do some more parsing here, e.g.
# extracting the address components from the second column.
full_df = pd.DataFrame()
print("fitting text boxes into page grids and generating final output...")
for p_num, p in split_pages.items():
if p_num not in page_grids: continue # happens when no table was detected
print("> page %d" % p_num)
datatable, unmatched_texts = fit_texts_into_grid(p['texts'], page_grids[p_num], return_unmatched_texts=True)
df = datatable_to_dataframe(datatable, split_texts_in_lines=True)
df['from_page'] = p_num
full_df = full_df.append(df, ignore_index=True)
print("extracted %d rows from %d pages" % (len(full_df), len(split_pages)))
csv_output_file = os.path.join(OUTPUTPATH, output_files_basename + '.csv')
print("saving extracted data to '%s'" % csv_output_file)
full_df.to_csv(csv_output_file, index=False)
excel_output_file = os.path.join(OUTPUTPATH, output_files_basename + '.xlsx')
print("saving extracted data to '%s'" % excel_output_file)
full_df.to_excel(excel_output_file, index=False)
| 46.610592 | 180 | 0.679054 | [
"Apache-2.0"
] | SagarRoy1996/TabularDataExtraction | examples/eg1/eg1.py | 14,963 | Python |
# coding=utf-8
import sys
import argparse
import os
from tensorflow.python.platform import gfile
import numpy as np
import tensorflow as tf
from tensorflow.python.layers.core import Dense
from utils.data_manager import load_data, load_data_one
from collections import defaultdict
from argparse import ArgumentParser
from decode_helper import decode_one
import sys
reload(sys)
sys.setdefaultencoding('utf8')
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
from tf_helper import train, evaluate, decode_data, decode_data_recover
from model1 import construct_graph
def init_args():
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument(
'--data_path',
default=os.path.dirname(os.path.abspath(__file__)) + '/data',
type=str,
help='Data path.')
arg_parser.add_argument(
'--load_data', default=False, type=bool, help='Load data.')
arg_parser.add_argument(
'--data',
choices=['wikisql', 'spider', 'overnight', 'overnight_set'],
default='wikisql',
help='data to train & test')
#arg_parser.add_argument('--tran_data', choices=['wikisql', 'spider', 'overnight'], default='overnight', help='data to transfer')
arg_parser.add_argument(
'--subset', choices=['all'], default='all', help='Subset of data.')
arg_parser.add_argument(
'--maxlen', default=60, type=int, help='Data record max length.')
arg_parser.add_argument(
'--annotation_path',
default=os.path.dirname(os.path.abspath(__file__)) +
'/data/DATA/wiki/',
type=str,
help='Data annotation path.')
arg_parser.add_argument(
'--mode',
choices=['train', 'infer', 'transfer','txt'],
default='infer',
help='Run mode')
#### Model configuration ####
arg_parser.add_argument(
'--cell',
choices=['gru'],
default='gru',
help='Type of cell used, currently only standard GRU cell is supported'
)
arg_parser.add_argument(
'--output_vocab_size',
default=20637,
#default=20452,
type=int,
help='Output vocabulary size.')
# Embedding sizes
arg_parser.add_argument(
'--embedding_dim',
default=300,
type=int,
help='Size of word embeddings')
#Hidden sizes
arg_parser.add_argument(
'--dim', default=400, type=int, help='Size of GRU hidden states')
arg_parser.add_argument(
'--hidden_size',
default=256,
type=int,
help='Size of LSTM hidden states')
arg_parser.add_argument(
'--no_copy',
default=False,
action='store_true',
help='Do not use copy mechanism')
#### Training ####
arg_parser.add_argument(
'--vocab', type=str, help='Path of the serialized vocabulary')
arg_parser.add_argument(
'--glove_embed_path',
default=None,
type=str,
help='Path to pretrained Glove mebedding')
arg_parser.add_argument(
'--batch_size', default=128, type=int, help='Batch size')
arg_parser.add_argument(
'--in_drop', default=0., type=float, help='In dropout rate')
arg_parser.add_argument(
'--out_drop', default=0., type=float, help='Out dropout rate')
# training details
arg_parser.add_argument(
'--valid_epoch_interval',
default=1,
type=int,
help='Perform validation every x epoch')
arg_parser.add_argument(
'--clip_grad', default=5., type=float, help='Clip gradients')
arg_parser.add_argument(
'--total_epochs', default=40, type=int, help='# of training epoches')
arg_parser.add_argument(
'--epochs', default=1, type=int, help='Record per x epoches')
arg_parser.add_argument(
'--lr', default=0.0001, type=float, help='Learning rate')
arg_parser.add_argument(
'--lr_decay',
default=0.5,
type=float,
help='decay learning rate if the validation performance drops')
#### decoding/validation/testing ####
arg_parser.add_argument(
'--load_model', default=False, type=bool, help='Whether to load model')
arg_parser.add_argument(
'--beam_width', default=5, type=int, help='Beam size for beam search')
arg_parser.add_argument(
'--decode_max_time_step',
default=100,
type=int,
help='Maximum number of time steps used '
'in decoding and sampling')
args = arg_parser.parse_args()
return args
def model(args, train_env, infer_env):
tf.reset_default_graph()
train_graph = tf.Graph()
infer_graph = tf.Graph()
with train_graph.as_default():
train_env.x = tf.placeholder(
tf.int32, shape=[None, args.maxlen], name='x')
train_env.y = tf.placeholder(tf.int32, (None, args.maxlen), name='y')
train_env.training = tf.placeholder_with_default(
False, (), name='train_mode')
train_env.train_op, train_env.loss, train_env.acc, sample_ids, logits = construct_graph(
"train", train_env, args)
train_env.saver = tf.train.Saver()
#[print(n.name) for n in tf.get_default_graph().as_graph_def().node if 'xxxxx' in n.name]
with infer_graph.as_default():
infer_env.x = tf.placeholder(
tf.int32, shape=[None, args.maxlen], name='x')
infer_env.y = tf.placeholder(tf.int32, (None, args.maxlen), name='y')
infer_env.training = tf.placeholder_with_default(
False, (), name='train_mode')
_, infer_env.loss, infer_env.acc, infer_env.pred_ids, _ = construct_graph(
"infer", infer_env, args)
infer_env.infer_saver = tf.train.Saver()
return train_graph, infer_graph
def inferrence(args):
args.load_model = True
class Dummy:
pass
train_env = Dummy()
infer_env = Dummy()
_, infer_graph = model(args, train_env, infer_env)
args.data = 'wikisql'
args.load_data = True
X_train, y_train = load_data(maxlen=args.maxlen,load=args.load_data, s='train')
X_test, y_test = load_data(maxlen=args.maxlen,load=args.load_data, s='test')
X_dev, y_dev = load_data(maxlen=args.maxlen,load=args.load_data, s='dev')
#X_train, y_train, X_test, y_test, X_dev, y_dev = load_data(args)
model2load = 'model/{}'.format(args.subset)
sess = tf.InteractiveSession(graph=infer_graph)
infer_env.infer_saver.restore(sess, model2load)
print('===========dev set============')
decode_data(sess, infer_env, X_dev, y_dev)
em = decode_data_recover(sess, infer_env, X_dev, y_dev, 'dev')
print('==========test set===========')
decode_data(sess, infer_env, X_test, y_test)
test_em = decode_data_recover(sess, infer_env, X_test, y_test,
'test')
return
def infer_one(args):
args.load_model = True
class Dummy:
pass
train_env = Dummy()
infer_env = Dummy()
_, infer_graph = model(args, train_env, infer_env)
args.data = 'wikisql'
args.load_data = True
model2load = 'model/{}'.format(args.subset)
sess = tf.InteractiveSession(graph=infer_graph)
infer_env.infer_saver.restore(sess, model2load)
print('===========decode============')
X_one = load_data_one(args.maxlen, 'qs.txt')
decode_one(sess, infer_env, X_one)
return
def train_model(args):
class Dummy:
pass
train_env = Dummy()
infer_env = Dummy()
train_graph, infer_graph = model(args, train_env, infer_env)
args.data = 'wikisql'
args.load_data = True
args.load_model = False
X_train, y_train = load_data(maxlen=args.maxlen,load=args.load_data, s='train')
X_test, y_test = load_data(maxlen=args.maxlen,load=args.load_data, s='test')
X_dev, y_dev = load_data(maxlen=args.maxlen,load=args.load_data, s='dev')
#X_train, y_train, X_test, y_test, X_dev, y_dev = load_data(args)
model2load = 'model/{}'.format(args.subset)
max_em, global_test_em, best_base = -1, -1, -1
acc = 0
sess1 = tf.InteractiveSession(graph=train_graph)
sess1.run(tf.global_variables_initializer())
sess1.run(tf.local_variables_initializer())
sess2 = tf.InteractiveSession(graph=infer_graph)
sess2.run(tf.global_variables_initializer())
sess2.run(tf.global_variables_initializer())
    for base in range(args.total_epochs // args.epochs):
print('\nIteration: %d (%d epochs)' % (base, args.epochs))
model2load = train(
sess1,
train_env,
X_train,
y_train,
epochs=args.epochs,
load=args.load_model,
name=args.subset,
batch_size=args.batch_size,
base=base,
model2Bload=model2load)
args.load_model = True
infer_env.infer_saver.restore(sess2, model2load)
print('===========dev set============')
dev_em = decode_data(sess2, infer_env, X_dev, y_dev)
dev_em = decode_data_recover(sess2, infer_env, X_dev, y_dev,
'dev')
print('==========test set===========')
test_em = decode_data(sess2, infer_env, X_test, y_test)
test_em = decode_data_recover(sess2, infer_env, X_test, y_test,
'test')
if dev_em > max_em:
max_em = dev_em
global_test_em = test_em
best_base = base
print('\n Saving model for best testing')
train_env.saver.save(sess1, 'best_model/{0}-{1}-{2:.2f}'.format(args.subset, base, max_em))
print('Max EM acc: %.4f during %d iteration.' % (max_em, best_base))
print('test EM acc: %.4f ' % global_test_em)
return
def transfer(args):
load_model = args.load_model if args.mode == 'train' else True
class Dummy:
pass
train_env = Dummy()
infer_env = Dummy()
_, infer_graph = model(args, train_env, infer_env)
args.data = 'overnight'
args.load_data = True
#X_tran, y_tran = load_data(args)
X_tran, y_tran = load_data(maxlen=args.maxlen,load=args.load_data, s='overnight')
args.data = 'overnight_set'
#tran_sets = load_data(args)
tran_sets = load_data(maxlen=args.maxlen,load=args.load_data, s='overnight_set')
model2load = 'model/{}'.format(args.subset)
sess = tf.InteractiveSession(graph=infer_graph)
infer_env.infer_saver.restore(sess, model2load)
print('========subset transfer set========')
subsets = ['basketball', 'calendar', 'housing', 'recipes', 'restaurants']
for subset, (X_tran_subset, y_tran_subset) in zip(subsets, tran_sets):
print('---------' + subset + '---------')
tran_em = decode_data(
sess,
infer_env,
X_tran_subset,
y_tran_subset,
filename=str(subset + '.txt'))
print('===========transfer set============')
tran_em = decode_data(sess, infer_env, X_tran, y_tran)
return
if __name__ == '__main__':
args = init_args()
print(args)
if args.mode == 'train':
print('\nTrain model.')
train_model(args)
elif args.mode == 'infer':
print('\nInference.')
inferrence(args)
elif args.mode == 'txt':
print('\nInference from txt.')
infer_one(args)
elif args.mode == 'transfer':
print('\nTransfer.')
transfer(args)
| 33.683432 | 133 | 0.623188 | [
"MIT"
] | VV123/NLIDB_gradient | main.py | 11,385 | Python |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils import timezone
class Migration(migrations.Migration):
dependencies = [
('nationalparks', '0009_auto_20150831_1721'),
]
operations = [
migrations.AddField(
model_name='federalsite',
name='active_participant',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='federalsite',
name='update_timestamp',
field=models.DateTimeField(auto_now=True, default=timezone.now()),
preserve_default=False,
),
migrations.AddField(
model_name='federalsite',
name='version',
field=models.PositiveIntegerField(default=0),
preserve_default=False,
),
]
| 27.75 | 78 | 0.606982 | [
"CC0-1.0"
] | 18F/ekip-api | ekip/nationalparks/migrations/0010_auto_20150902_1902.py | 888 | Python |
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fake_switches.command_processing.command_processor import CommandProcessor
class BaseCommandProcessor(CommandProcessor):
def __init__(self, switch_configuration, terminal_controller, logger, piping_processor):
"""
:type switch_configuration: fake_switches.switch_configuration.SwitchConfiguration
:type terminal_controller: fake_switches.terminal.TerminalController
:type logger: logging.Logger
:type piping_processor: fake_switches.command_processing.piping_processor_base.PipingProcessorBase
"""
self.switch_configuration = switch_configuration
self.terminal_controller = terminal_controller
self.logger = logger
self.piping_processor = piping_processor
self.sub_processor = None
self.continuing_to = None
self.is_done = False
self.replace_input = False
self.awaiting_keystroke = False
def process_command(self, line):
if " | " in line:
line, piping_command = line.split(" | ", 1)
piping_started = self.activate_piping(piping_command)
if not piping_started:
return False
processed = False
if self.sub_processor:
processed = self.delegate_to_sub_processor(line)
if not processed:
if self.continuing_to:
processed = self.continue_command(line)
else:
processed = self.parse_and_execute_command(line)
if not self.continuing_to and not self.awaiting_keystroke and not self.is_done and processed and not self.sub_processor:
self.finish_piping()
self.show_prompt()
return processed
def parse_and_execute_command(self, line):
if line.strip():
func, args = self.get_command_func(line)
if not func:
self.logger.debug("%s can't process : %s, falling back to parent" % (self.__class__.__name__, line))
return False
else:
func(*args)
return True
def continue_command(self, line):
func = self.continuing_to
self.continue_to(None)
func(line)
return True
def delegate_to_sub_processor(self, line):
processed = self.sub_processor.process_command(line)
if self.sub_processor.is_done:
self.sub_processor = None
self.show_prompt()
return processed
def move_to(self, new_process_class, *args):
self.sub_processor = new_process_class(self.switch_configuration, self.terminal_controller, self.logger, self.piping_processor, *args)
self.sub_processor.show_prompt()
def continue_to(self, continuing_action):
self.continuing_to = continuing_action
def get_continue_command_func(self, cmd):
return getattr(self, 'continue_' + cmd, None)
def write(self, data):
filtered = self.pipe(data)
if filtered is not False:
self.terminal_controller.write(filtered)
def write_line(self, data):
self.write(data + "\n")
def show_prompt(self):
if self.sub_processor is not None:
self.sub_processor.show_prompt()
else:
self.write(self.get_prompt())
def get_prompt(self):
pass
def activate_piping(self, piping_command):
return self.piping_processor.start_listening(piping_command)
def pipe(self, data):
if self.piping_processor.is_listening():
return self.piping_processor.pipe(data)
else:
return data
def finish_piping(self):
if self.piping_processor.is_listening():
self.piping_processor.stop_listening()
def on_keystroke(self, callback, *args):
def on_keystroke_handler(key):
self.awaiting_keystroke = False
self.terminal_controller.remove_any_key_handler()
callback(*(args + (key,)))
self.terminal_controller.add_any_key_handler(on_keystroke_handler)
self.awaiting_keystroke = True
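# A minimal sketch of how a concrete processor might build on this base class.
# The class name, prompt string, and command method below are invented for
# illustration and are not part of fake_switches itself:
#
#   class ExampleCommandProcessor(BaseCommandProcessor):
#       def get_prompt(self):
#           return "my-switch>"
#
#       def do_exit(self, *args):
#           self.is_done = True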
| 35.037594 | 142 | 0.666953 | [
"Apache-2.0"
] | idjaw/fake-switches | fake_switches/command_processing/base_command_processor.py | 4,660 | Python |
from __future__ import division
import sys
class Inf(float):
__name__ = __name__
__file__ = __file__
@staticmethod
def div(p, q):
"""
``p / q`` returning the correct infinity instead of
raising ZeroDivisionError.
"""
from math import copysign
if q != 0.0:
# Normal case, no infinities.
return p / q
elif p == 0.0:
return p / q # Doesn't return, raises an Exception.
elif copysign(1, q) > 0:
# q is +0.0, return inf with same sign as p.
return copysign(inf, p)
else:
# q is -0.0, return inf with flipped sign.
return copysign(inf, -p)
sys.modules[__name__] = inf = Inf("+inf")
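# Usage sketch: because the module object is replaced above, importing ``inf``
# yields the Inf instance itself, usable both as a float and as a namespace for
# ``div`` (assuming the module is importable under the name ``inf``):
#
#   import inf
#
#   inf > 1e308          # True: behaves like float('+inf')
#   inf.div(1.0, 0.0)    # +inf
#   inf.div(1.0, -0.0)   # -inf (the sign of the zero is respected)
#   inf.div(0.0, 0.0)    # raises ZeroDivisionError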
| 22.939394 | 64 | 0.535007 | [
"MIT"
] | sensiblecodeio/inf | inf.py | 757 | Python |
import os
import numpy as np
from shapely.geometry import box, Polygon
import geopandas as gpd
from ..utils.core import _check_gdf_load, _check_crs
from ..utils.tile import save_empty_geojson
from ..utils.geo import gdf_get_projection_unit, split_multi_geometries
from ..utils.geo import reproject_geometry
from tqdm import tqdm
class VectorTiler(object):
"""An object to tile geospatial vector data into smaller pieces.
Arguments
---------
Attributes
----------
"""
def __init__(self, dest_dir=None, dest_crs=None, output_format='GeoJSON',
verbose=False, super_verbose=False):
if verbose or super_verbose:
print('Preparing the tiler...')
self.dest_dir = dest_dir
if not os.path.isdir(self.dest_dir):
os.makedirs(self.dest_dir)
if dest_crs is not None:
self.dest_crs = _check_crs(dest_crs)
self.output_format = output_format
self.verbose = verbose
self.super_verbose = super_verbose
self.tile_paths = [] # retains the paths of the last call to .tile()
if self.verbose or self.super_verbose:
print('Initialization done.')
def tile(self, src, tile_bounds, tile_bounds_crs=None, geom_type='Polygon',
split_multi_geoms=True, min_partial_perc=0.0,
dest_fname_base='geoms', obj_id_col=None,
output_ext='.geojson'):
"""Tile `src` into vector data tiles bounded by `tile_bounds`.
Arguments
---------
src : `str` or :class:`geopandas.GeoDataFrame`
The source vector data to tile. Must either be a path to a GeoJSON
or a :class:`geopandas.GeoDataFrame`.
tile_bounds : list
A :class:`list` made up of ``[left, top, right, bottom] `` sublists
(this can be extracted from
:class:`solaris.tile.raster_tile.RasterTiler` after tiling imagery)
tile_bounds_crs : int, optional
The EPSG code or rasterio.crs.CRS object for the CRS that the tile
bounds are in. RasterTiler.tile returns the CRS of the raster tiles
and can be used here. If not provided, it's assumed that the CRS is the
same as in `src`. This argument must be provided if the bound
coordinates and `src` are not in the same CRS, otherwise tiling will
not occur correctly.
geom_type : str, optional (default: "Polygon")
The type of geometries contained within `src`. Defaults to
``"Polygon"``, can also be ``"LineString"``.
split_multi_geoms : bool, optional (default: True)
Should multi-polygons or multi-linestrings generated by clipping
a geometry into discontinuous pieces be separated? Defaults to yes
(``True``).
min_partial_perc : float, optional (default: 0.0)
The minimum percentage of a :class:`shapely.geometry.Polygon` 's
area or :class:`shapely.geometry.LineString` 's length that must
be retained within a tile's bounds to be included in the output.
Defaults to ``0.0``, meaning that the contained portion of a
clipped geometry will be included, no matter how small.
dest_fname_base : str, optional (default: 'geoms')
The base filename to use when creating outputs. The lower left
corner coordinates of the tile's bounding box will be appended
when saving.
obj_id_col : str, optional (default: None)
If ``split_multi_geoms=True``, the name of a column that specifies
a unique identifier for each geometry (e.g. the ``"BuildingId"``
column in many SpaceNet datasets.) See
:func:`solaris.utils.geo.split_multi_geometries` for more.
output_ext : str, optional, (default: geojson)
Extension of output files, can be 'geojson' or 'json'.
"""
tile_gen = self.tile_generator(src, tile_bounds, tile_bounds_crs,
geom_type, split_multi_geoms,
min_partial_perc,
obj_id_col=obj_id_col)
self.tile_paths = []
for tile_gdf, tb in tqdm(tile_gen):
if self.proj_unit not in ['meter', 'metre']:
dest_path = os.path.join(
self.dest_dir, '{}_{}_{}{}'.format(dest_fname_base,
np.round(tb[0], 3),
np.round(tb[3], 3),
output_ext))
else:
dest_path = os.path.join(
self.dest_dir, '{}_{}_{}{}'.format(dest_fname_base,
int(tb[0]),
int(tb[3]),
output_ext))
self.tile_paths.append(dest_path)
if len(tile_gdf) > 0:
tile_gdf.to_file(dest_path, driver='GeoJSON')
else:
save_empty_geojson(dest_path, self.dest_crs)
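    # A minimal usage sketch for ``tile``. The paths, EPSG code, and the single
    # [left, top, right, bottom] bounds entry are placeholders; in practice
    # ``tile_bounds`` usually comes from ``RasterTiler.tile()`` so the vector
    # tiles line up with the imagery tiles:
    #
    #   tiler = VectorTiler(dest_dir="vector_tiles/", verbose=True)
    #   tiler.tile(src="labels.geojson",
    #              tile_bounds=[[733600, 3724734, 734000, 3725134]],
    #              tile_bounds_crs=32611,
    #              dest_fname_base="geoms")
    #   print(tiler.tile_paths)  # paths written by this call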
def tile_generator(self, src, tile_bounds, tile_bounds_crs=None,
geom_type='Polygon', split_multi_geoms=True,
min_partial_perc=0.0, obj_id_col=None):
"""Generate `src` vector data tiles bounded by `tile_bounds`.
Arguments
---------
src : `str` or :class:`geopandas.GeoDataFrame`
The source vector data to tile. Must either be a path to a GeoJSON
or a :class:`geopandas.GeoDataFrame`.
tile_bounds : list
A :class:`list` made up of ``[left, top, right, bottom] `` sublists
(this can be extracted from
:class:`solaris.tile.raster_tile.RasterTiler` after tiling imagery)
tile_bounds_crs : int, optional
The EPSG code for the CRS that the tile bounds are in. If not
provided, it's assumed that the CRS is the same as in `src`. This
argument must be provided if the bound coordinates and `src` are
not in the same CRS, otherwise tiling will not occur correctly.
geom_type : str, optional (default: "Polygon")
The type of geometries contained within `src`. Defaults to
``"Polygon"``, can also be ``"LineString"``.
split_multi_geoms : bool, optional (default: True)
Should multi-polygons or multi-linestrings generated by clipping
a geometry into discontinuous pieces be separated? Defaults to yes
(``True``).
min_partial_perc : float, optional (default: 0.0)
The minimum percentage of a :class:`shapely.geometry.Polygon` 's
area or :class:`shapely.geometry.LineString` 's length that must
be retained within a tile's bounds to be included in the output.
Defaults to ``0.0``, meaning that the contained portion of a
clipped geometry will be included, no matter how small.
obj_id_col : str, optional (default: None)
If ``split_multi_geoms=True``, the name of a column that specifies
a unique identifier for each geometry (e.g. the ``"BuildingId"``
column in many SpaceNet datasets.) See
:func:`solaris.utils.geo.split_multi_geometries` for more.
Yields
------
tile_gdf : :class:`geopandas.GeoDataFrame`
A tile geodataframe.
tb : list
A list with ``[left, top, right, bottom] `` coordinates for the
boundaries contained by `tile_gdf`.
"""
self.src = _check_gdf_load(src)
if self.verbose:
print("Num tiles:", len(tile_bounds))
self.src_crs = _check_crs(self.src.crs)
# check if the tile bounds and vector are in the same crs
if tile_bounds_crs is not None:
tile_bounds_crs = _check_crs(tile_bounds_crs)
else:
tile_bounds_crs = self.src_crs
if self.src_crs != tile_bounds_crs:
reproject_bounds = True # used to transform tb for clip_gdf()
else:
reproject_bounds = False
self.proj_unit = self.src_crs.linear_units
if getattr(self, 'dest_crs', None) is None:
self.dest_crs = self.src_crs
for i, tb in enumerate(tile_bounds):
if self.super_verbose:
print("\n", i, "/", len(tile_bounds))
if reproject_bounds:
tile_gdf = clip_gdf(self.src,
reproject_geometry(box(*tb),
tile_bounds_crs,
self.src_crs),
min_partial_perc,
geom_type, verbose=self.super_verbose)
else:
tile_gdf = clip_gdf(self.src, tb, min_partial_perc, geom_type,
verbose=self.super_verbose)
if self.src_crs != self.dest_crs:
tile_gdf = tile_gdf.to_crs(crs=self.dest_crs.to_wkt())
if split_multi_geoms:
split_multi_geometries(tile_gdf, obj_id_col=obj_id_col)
yield tile_gdf, tb
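# A hedged usage sketch for tile_generator(); the enclosing class is assumed
# to be this module's VectorTiler, and the constructor keyword, file name and
# bounds below are illustrative only:
#
#   import solaris as sol
#   vec_tiler = sol.tile.vector_tile.VectorTiler(dest_dir='vector_tiles/')
#   bounds = [[-84.45, 33.79, -84.44, 33.78]]  # [left, top, right, bottom]
#   for tile_gdf, tb in vec_tiler.tile_generator('labels.geojson', bounds):
#       print(tb, len(tile_gdf))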
def search_gdf_polygon(gdf, tile_polygon):
"""Find polygons in a GeoDataFrame that overlap with `tile_polygon` .
Arguments
---------
gdf : :py:class:`geopandas.GeoDataFrame`
A :py:class:`geopandas.GeoDataFrame` of polygons to search.
tile_polygon : :py:class:`shapely.geometry.Polygon`
A :py:class:`shapely.geometry.Polygon` denoting a tile's bounds.
Returns
-------
precise_matches : :py:class:`geopandas.GeoDataFrame`
The subset of `gdf` that overlaps with `tile_polygon` . If
there are no overlaps, this will return an empty
:py:class:`geopandas.GeoDataFrame`.
"""
sindex = gdf.sindex
possible_matches_index = list(sindex.intersection(tile_polygon.bounds))
possible_matches = gdf.iloc[possible_matches_index]
precise_matches = possible_matches[
possible_matches.intersects(tile_polygon)
]
if precise_matches.empty:
precise_matches = gpd.GeoDataFrame(geometry=[])
return precise_matches
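# Minimal sketch of calling search_gdf_polygon() directly; the file name and
# tile bounds below are placeholders:
#
#   import geopandas as gpd
#   from shapely.geometry import box
#   gdf = gpd.read_file('labels.geojson')
#   tile_poly = box(733600, 3724734, 733700, 3724834)
#   candidates = search_gdf_polygon(gdf, tile_poly)  # overlapping subset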
def clip_gdf(gdf, tile_bounds, min_partial_perc=0.0, geom_type="Polygon",
use_sindex=True, verbose=False):
"""Clip GDF to a provided polygon.
    Clips objects within `gdf` to the region defined by
    `tile_bounds`. Also adds several columns to the output::
`origarea`
The original area of the polygons (only used if `geom_type` ==
``"Polygon"``).
`origlen`
The original length of the objects (only used if `geom_type` ==
``"LineString"``).
`partialDec`
The fraction of the object that remains after clipping
(fraction of area for Polygons, fraction of length for
LineStrings.) Can filter based on this by using `min_partial_perc`.
`truncated`
Boolean indicator of whether or not an object was clipped.
Arguments
---------
gdf : :py:class:`geopandas.GeoDataFrame`
A :py:class:`geopandas.GeoDataFrame` of polygons to clip.
tile_bounds : `list` or :class:`shapely.geometry.Polygon`
The geometry to clip objects in `gdf` to. This can either be a
``[left, top, right, bottom] `` bounds list or a
:class:`shapely.geometry.Polygon` object defining the area to keep.
min_partial_perc : float, optional
The minimum fraction of an object in `gdf` that must be
preserved. Defaults to 0.0 (include any object if any part remains
following clipping).
geom_type : str, optional
Type of objects in `gdf`. Can be one of
``["Polygon", "LineString"]`` . Defaults to ``"Polygon"`` .
use_sindex : bool, optional
        Whether to use the `gdf` spatial index for searching. Improves efficiency
but requires `libspatialindex <http://libspatialindex.github.io/>`__ .
verbose : bool, optional
Switch to print relevant values.
Returns
-------
cut_gdf : :py:class:`geopandas.GeoDataFrame`
        `gdf` with all contained objects clipped to `tile_bounds`.
See notes above for details on additional clipping columns added.
"""
    if isinstance(tile_bounds, (tuple, list)):
        tb = box(*tile_bounds)
    elif isinstance(tile_bounds, Polygon):
        tb = tile_bounds
    else:
        raise TypeError('tile_bounds must be a [left, top, right, bottom] '
                        'sequence or a shapely Polygon, got '
                        '{}'.format(type(tile_bounds)))
if use_sindex and (geom_type == "Polygon"):
gdf = search_gdf_polygon(gdf, tb)
    if 'origarea' not in gdf.columns:
        if geom_type == "LineString":
            # area is not meaningful for linestrings
            gdf['origarea'] = 0
        else:
            gdf['origarea'] = gdf.area
    if 'origlen' not in gdf.columns:
        if geom_type == "LineString":
            gdf['origlen'] = gdf.length
        else:
            # length is not tracked for polygons
            gdf['origlen'] = 0
# TODO must implement different case for lines and for spatialIndex
# (Assume RTree is already performed)
cut_gdf = gdf.copy()
cut_gdf.geometry = gdf.intersection(tb)
if geom_type == 'Polygon':
cut_gdf['partialDec'] = cut_gdf.area / cut_gdf['origarea']
cut_gdf = cut_gdf.loc[cut_gdf['partialDec'] > min_partial_perc, :]
cut_gdf['truncated'] = (cut_gdf['partialDec'] != 1.0).astype(int)
else:
# assume linestrings
# remove null
cut_gdf = cut_gdf[cut_gdf['geometry'].notnull()]
cut_gdf['partialDec'] = 1
cut_gdf['truncated'] = 0
# cut_gdf = cut_gdf[cut_gdf.geom_type != "GeometryCollection"]
if len(cut_gdf) > 0 and verbose:
print("clip_gdf() - gdf.iloc[0]:", gdf.iloc[0])
print("clip_gdf() - tb:", tb)
print("clip_gdf() - gdf_cut:", cut_gdf)
# TODO: IMPLEMENT TRUNCATION MEASUREMENT FOR LINESTRINGS
return cut_gdf
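# Hedged example of using clip_gdf() on its own; the path and bounds are
# invented for illustration:
#
#   import geopandas as gpd
#   gdf = gpd.read_file('labels.geojson')
#   clipped = clip_gdf(gdf, [733600, 3724834, 733700, 3724734],
#                      min_partial_perc=0.25, geom_type='Polygon')
#   # `clipped` gains origarea/origlen/partialDec/truncated columns and only
#   # keeps geometries retaining >= 25% of their area inside the tile bounds.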
| 44.080745 | 83 | 0.594688 | [
"Apache-2.0"
] | CosmiQ/SpaceNet_SAR_Buildings_Solutions | 3-SatShipAI/solaris/tile/vector_tile.py | 14,194 | Python |
import os
import argparse
import gym
from gym import envs
import numpy as np
from skimage import transform
from stable_baselines.common.atari_wrappers import WarpFrame
from stable_baselines.common.vec_env import VecVideoRecorder, VecFrameStack, VecNormalize
from .utils import ALGOS, create_test_env, get_saved_hyperparams, get_latest_run_id, find_saved_model
#-----------------------------------------
import toy_simulator
#import dVRL_simulator
from skimage import transform
from gym.spaces import Box
import cv2
#-----------------------------------------
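# Example invocation (paths, environment ID and experiment id below are
# hypothetical, shown only to illustrate the CLI flags parsed under __main__):
#   python -m utils.record_video --algo ppo2 --env MyToyEnv-v0 \
#       -f trained_agents -o logs/videos/ -n 500 --exp-id 0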
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--env', help='environment ID', type=str, default='CartPole-v1')
parser.add_argument('-f', '--folder', help='Log folder', type=str, default='trained_agents')
parser.add_argument('-o', '--output-folder', help='Output folder', type=str, default='logs/videos/')
parser.add_argument('--algo', help='RL Algorithm', default='ppo2',
type=str, required=False, choices=list(ALGOS.keys()))
parser.add_argument('-n', '--n-timesteps', help='number of timesteps', default=1000,
type=int)
parser.add_argument('--n-envs', help='number of environments', default=1,
type=int)
parser.add_argument('--deterministic', action='store_true', default=False,
help='Use deterministic actions')
parser.add_argument('--seed', help='Random generator seed', type=int, default=0)
parser.add_argument('--no-render', action='store_true', default=False,
help='Do not render the environment (useful for tests)')
parser.add_argument('--exp-id', help='Experiment ID (default: -1, no exp folder, 0: latest)', default=-1,
type=int)
args = parser.parse_args()
env_id = args.env
algo = args.algo
folder = args.folder
video_folder = args.output_folder
seed = args.seed
deterministic = args.deterministic
video_length = args.n_timesteps
n_envs = args.n_envs
if args.exp_id == 0:
args.exp_id = get_latest_run_id(os.path.join(folder, algo), env_id)
print('Loading latest experiment, id={}'.format(args.exp_id))
# Sanity checks
if args.exp_id > 0:
log_path = os.path.join(folder, algo, '{}_{}'.format(env_id, args.exp_id))
else:
log_path = os.path.join(folder, algo)
model_path = find_saved_model(algo, log_path, env_id)
stats_path = os.path.join(log_path, env_id)
hyperparams, stats_path = get_saved_hyperparams(stats_path)
is_atari = 'NoFrameskip' in env_id
env = create_test_env(env_id, n_envs=n_envs, is_atari=is_atari,
stats_path=stats_path, seed=seed, log_dir=None,
should_render=not args.no_render, hyperparams=hyperparams)
#env = RGBobs(env)
model = ALGOS[algo].load(model_path)
obs = env.reset()
#obs = cv2.cvtColor(obs, cv2.COLOR_RGB2GRAY) #ADDED 2
#obs = cv2.resize(obs, (84,84), interpolation=cv2.INTER_AREA) #ADDED 2
#obs_dummy = env.reset() #ADDED 1
#obs = transform.resize(obs_dummy, (84,84)) #ADDED 1
#env.observation_space = Box(low=0, high=255, shape=obs.shape, dtype=np.uint8) #ADDED 1
#obs = obs[:,:, None]*255 #ADDED 1
# Note: apparently it renders by default
env = VecVideoRecorder(env, video_folder,
record_video_trigger=lambda x: x == 0, video_length=video_length,
name_prefix="{}-{}".format(algo, env_id))
env.reset()
for _ in range(video_length + 1):
# action = [env.action_space.sample()]
action, _ = model.predict(obs, deterministic=deterministic)
if isinstance(env.action_space, gym.spaces.Box):
action = np.clip(action, env.action_space.low, env.action_space.high)
obs, _, _, _ = env.step(action)
# Workaround for https://github.com/openai/gym/issues/893
if n_envs == 1 and 'Bullet' not in env_id and not is_atari:
env = env.venv
# DummyVecEnv
while isinstance(env, VecNormalize) or isinstance(env, VecFrameStack):
env = env.venv
env.envs[0].env.close()
else:
# SubprocVecEnv
env.close()
| 36.803419 | 109 | 0.640734 | [
"MIT"
] | SilviaZirino/rl-baselines-zoo | utils/record_video.py | 4,306 | Python |
from __future__ import absolute_import
from __future__ import unicode_literals
#
# Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
#
from builtins import next
from builtins import chr
from builtins import str
from builtins import range
from builtins import object
import copy
import os
import gevent
from pprint import pformat
import six
from vnc_api import vnc_api
from .exceptions import NoIdError, VncError
from pysandesh.gen_py.sandesh.ttypes import SandeshLevel
from cfgm_common import jsonutils as json
from . import utils
import datetime
from operator import itemgetter
from collections import OrderedDict
from cfgm_common.datastore.drivers.cassandra_thrift import CassandraDriverThrift
from cfgm_common.datastore.drivers.cassandra_cql import CassandraDriverCQL
from cfgm_common.datastore import api as datastore_api
JSON_NONE = json.dumps(None)
class VncCassandraClient(object):
@staticmethod
def _is_metadata(column_name):
return column_name[:5] == 'META:'
@staticmethod
def _is_parent(column_name):
return column_name[:7] == 'parent:'
@staticmethod
def _is_prop(column_name):
return column_name[:5] == 'prop:'
@staticmethod
def _is_prop_list(column_name):
return column_name[:6] == 'propl:'
@staticmethod
def _is_prop_map(column_name):
return column_name[:6] == 'propm:'
@staticmethod
def _is_ref(column_name):
return column_name[:4] == 'ref:'
@staticmethod
def _is_backref(column_name):
return column_name[:8] == 'backref:'
@staticmethod
def _is_children(column_name):
return column_name[:9] == 'children:'
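    # The _is_* helpers above classify the column-name prefixes used to lay a
    # whole object out in a single Cassandra row. A hypothetical (purely
    # illustrative) row for a virtual-network object could look roughly like:
    #
    #   fq_name                               -> '["default-domain", "p1", "vn1"]'
    #   type                                  -> '"virtual_network"'
    #   parent:project:<project-uuid>         -> 'null'
    #   prop:id_perms                         -> '{"enable": true, ...}'
    #   propl:<list-prop>:<position>          -> '<json list element>'
    #   propm:<map-prop>:<map-key>            -> '<json map element>'
    #   ref:network_ipam:<ipam-uuid>          -> '{"attr": {...}, "is_weakref": false}'
    #   backref:virtual_machine_interface:<vmi-uuid> -> '{"attr": ...}'
    #   children:routing_instance:<ri-uuid>   -> 'null'
    #   META:latest_col_ts                    -> 'null'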
def add(self, cf_name, key, value):
try:
self._cassandra_driver.insert(key, value, cf_name=cf_name)
return True
except Exception as e:
self._logger("VNCCassandra, unable to add {}={}, error: {}".format(
key, value, e), level=SandeshLevel.SYS_WARN)
return False
def delete(self, cf_name, key, columns=None):
try:
self._cassandra_driver.remove(
key, columns, cf_name=cf_name)
return True
except Exception as e:
self._logger("VNCCassandra, unable to del {}={}, error: {}".format(
key, columns, e), level=SandeshLevel.SYS_WARN)
return False
def _get_resource_class(self, obj_type):
if hasattr(self, '_db_client_mgr'):
return self._db_client_mgr.get_resource_class(obj_type)
cls_name = '%s' % (utils.CamelCase(obj_type))
return getattr(vnc_api, cls_name)
# end _get_resource_class
@classmethod
def get_db_info(cls):
db_info = [(datastore_api.UUID_KEYSPACE_NAME, [datastore_api.OBJ_UUID_CF_NAME,
datastore_api.OBJ_FQ_NAME_CF_NAME,
datastore_api.OBJ_SHARED_CF_NAME])]
return db_info
# end get_db_info
def __init__(self, server_list, cassandra_driver, **options):
if cassandra_driver == 'cql':
driverClass = CassandraDriverCQL
elif cassandra_driver == 'thrift':
driverClass = CassandraDriverThrift
# TODO(sahid): To satisfy test-framework which has its
# specific py3 support for thrift we can have the above
# condition, when that will be fixed we could uncomment
# the code.
#if six.PY3:
# raise VncError(
# "selected driver `{}` not supported for Python 3.".format(
# cassandra_driver))
else:
raise VncError(
"datastore driver not selected, see `cassandra_driver`.")
self._cassandra_driver = driverClass(server_list, **options)
self._logger = self._cassandra_driver.options.logger
self._logger('VNCCassandra started with driver {}'.format(driverClass),
level=SandeshLevel.SYS_INFO)
self._cache_uuid_to_fq_name = {}
self._obj_cache_mgr = ObjectCacheManager(
self._cassandra_driver.options.logger,
self,
max_entries=self._cassandra_driver.options.obj_cache_entries,
obj_cache_exclude_types=self._cassandra_driver.options.obj_cache_exclude_types,
debug_obj_cache_types=self._cassandra_driver.options.debug_obj_cache_types,
)
self._obj_cache_exclude_types = self._cassandra_driver.options.obj_cache_exclude_types or []
# these functions make calls to pycassa xget() and get_range()
# generator functions which can't be wrapped around handle_exceptions()
# at the time of cassandra init, hence need to wrap these functions that
# uses it to catch cassandra connection failures.
self.object_update = self._cassandra_driver._handle_exceptions(
self.object_update)
self.object_list = self._cassandra_driver._handle_exceptions(
self.object_list)
self.object_read = self._cassandra_driver._handle_exceptions(
self.object_read)
self.object_raw_read = self._cassandra_driver._handle_exceptions(
self.object_raw_read)
self.object_delete = self._cassandra_driver._handle_exceptions(
self.object_delete)
self.prop_collection_read = self._cassandra_driver._handle_exceptions(
self.prop_collection_read)
self.uuid_to_fq_name = self._cassandra_driver._handle_exceptions(
self.uuid_to_fq_name)
self.uuid_to_obj_type = self._cassandra_driver._handle_exceptions(
self.uuid_to_obj_type)
self.fq_name_to_uuid = self._cassandra_driver._handle_exceptions(
self.fq_name_to_uuid)
self.get_shared = self._cassandra_driver._handle_exceptions(
self.get_shared)
self.walk = self._cassandra_driver._handle_exceptions(self.walk)
if self._cassandra_driver.options.walk:
self.walk()
# end __init__
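    # Minimal construction sketch; the server address is invented and any
    # extra keyword options are passed straight through to the selected
    # driver class, so option names beyond these two are assumptions:
    #
    #   db = VncCassandraClient(['10.0.0.1:9042'], cassandra_driver='cql')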
def _create_prop(self, bch, obj_uuid, prop_name, prop_val):
self._cassandra_driver.insert(
obj_uuid,
{'prop:%s' % (prop_name): json.dumps(prop_val)},
batch=bch)
# end _create_prop
def _update_prop(self, bch, obj_uuid, prop_name, new_props):
if new_props[prop_name] is None:
self._cassandra_driver.remove(obj_uuid,
columns=['prop:' + prop_name],
batch=bch)
else:
self._cassandra_driver.insert(
obj_uuid,
{'prop:' + prop_name: json.dumps(new_props[prop_name])},
batch=bch)
# prop has been accounted for, remove so only new ones remain
del new_props[prop_name]
# end _update_prop
def _add_to_prop_list(self, bch, obj_uuid, prop_name,
prop_elem_value, prop_elem_position):
self._cassandra_driver.insert(obj_uuid,
{'propl:%s:%s' % (prop_name, prop_elem_position):
json.dumps(prop_elem_value)},
batch=bch)
# end _add_to_prop_list
def _delete_from_prop_list(self, bch, obj_uuid, prop_name,
prop_elem_position):
self._cassandra_driver.remove(
obj_uuid,
columns=['propl:%s:%s' % (prop_name, prop_elem_position)],
batch=bch)
# end _delete_from_prop_list
def _set_in_prop_map(self, bch, obj_uuid, prop_name,
prop_elem_value, prop_elem_position):
self._cassandra_driver.insert(obj_uuid,
{'propm:%s:%s' % (prop_name, prop_elem_position):
json.dumps(prop_elem_value)},
batch=bch)
# end _set_in_prop_map
def _delete_from_prop_map(self, bch, obj_uuid, prop_name,
prop_elem_position):
self._cassandra_driver.remove(
obj_uuid,
columns=['propm:%s:%s' % (prop_name, prop_elem_position)],
batch=bch)
# end _delete_from_prop_map
def _create_child(self, bch, parent_type, parent_uuid,
child_type, child_uuid):
child_col = {'children:%s:%s' %
(child_type, child_uuid): JSON_NONE}
self._cassandra_driver.insert(parent_uuid, child_col, batch=bch)
parent_col = {'parent:%s:%s' %
(parent_type, parent_uuid): JSON_NONE}
self._cassandra_driver.insert(child_uuid, parent_col, batch=bch)
# update latest_col_ts on parent object
if parent_type not in self._obj_cache_exclude_types:
self.update_latest_col_ts(bch, parent_uuid)
# end _create_child
def _delete_child(self, bch, parent_type, parent_uuid,
child_type, child_uuid):
self._cassandra_driver.remove(
parent_uuid,
columns=['children:%s:%s' % (child_type, child_uuid)],
batch=bch)
# update latest_col_ts on parent object
if parent_type not in self._obj_cache_exclude_types:
self.update_latest_col_ts(bch, parent_uuid)
# end _delete_child
def _create_ref(self, bch, obj_type, obj_uuid, ref_obj_type, ref_uuid,
ref_data):
j_ref_data = json.dumps(ref_data)
symmetric_ref_updates = []
self._cassandra_driver.insert(
obj_uuid, {'ref:%s:%s' %
(ref_obj_type, ref_uuid): j_ref_data},
batch=bch)
if obj_type == ref_obj_type:
self._cassandra_driver.insert(
ref_uuid, {'ref:%s:%s' %
(obj_type, obj_uuid): j_ref_data},
batch=bch)
self.update_last_modified(bch, obj_type, ref_uuid)
symmetric_ref_updates = [ref_uuid]
else:
self._cassandra_driver.insert(
ref_uuid, {'backref:%s:%s' %
(obj_type, obj_uuid): j_ref_data},
batch=bch)
# update latest_col_ts on referred object
if ref_obj_type not in self._obj_cache_exclude_types:
if ref_obj_type == obj_type:
# evict other side of ref since it is stale from
# GET /<old-ref-uuid> pov.
self._obj_cache_mgr.evict(obj_type, [ref_uuid])
else:
self.update_latest_col_ts(bch, ref_uuid)
return symmetric_ref_updates
# end _create_ref
def _update_ref(self, bch, obj_type, obj_uuid, ref_obj_type, old_ref_uuid,
new_ref_infos):
if ref_obj_type not in new_ref_infos:
# update body didn't touch this type, nop
return []
symmetric_ref_updates = []
if old_ref_uuid not in new_ref_infos[ref_obj_type]:
# remove old ref
self._cassandra_driver.remove(
obj_uuid,
columns=['ref:%s:%s' % (ref_obj_type, old_ref_uuid)],
batch=bch)
if obj_type == ref_obj_type:
self._cassandra_driver.remove(
old_ref_uuid,
columns=['ref:%s:%s' % (obj_type, obj_uuid)],
batch=bch)
try:
self.update_last_modified(bch, obj_type, old_ref_uuid)
symmetric_ref_updates = [old_ref_uuid]
except NoIdError as e:
# old_ref_uuid might have been deleted
# if cache has the link, it will be evicted
# if cache doesn't have, keyerror is caught and continued
pass
else:
self._cassandra_driver.remove(
old_ref_uuid,
columns=['backref:%s:%s' % (obj_type, obj_uuid)],
batch=bch)
else:
# retain old ref with new ref attr
new_ref_data = new_ref_infos[ref_obj_type][old_ref_uuid]
j_new_ref_data = json.dumps(new_ref_data)
self._cassandra_driver.insert(
obj_uuid,
{'ref:%s:%s' % (ref_obj_type, old_ref_uuid):
j_new_ref_data},
batch=bch)
if obj_type == ref_obj_type:
self._cassandra_driver.insert(
old_ref_uuid,
{'ref:%s:%s' % (obj_type, obj_uuid):
j_new_ref_data},
batch=bch)
self.update_last_modified(bch, obj_type, old_ref_uuid)
symmetric_ref_updates = [old_ref_uuid]
else:
self._cassandra_driver.insert(
old_ref_uuid,
{'backref:%s:%s' % (obj_type, obj_uuid):
j_new_ref_data},
batch=bch)
# uuid has been accounted for, remove so only new ones remain
del new_ref_infos[ref_obj_type][old_ref_uuid]
# update latest_col_ts on referred object
if ref_obj_type not in self._obj_cache_exclude_types:
if ref_obj_type == obj_type:
# evict other side of ref since it is stale from
# GET /<old-ref-uuid> pov.
self._obj_cache_mgr.evict(obj_type, [old_ref_uuid])
else:
self.update_latest_col_ts(bch, old_ref_uuid)
return symmetric_ref_updates
# end _update_ref
def _delete_ref(self, bch, obj_type, obj_uuid, ref_obj_type, ref_uuid):
send = False
symmetric_ref_updates = []
if bch is None:
send = True
bch = self._cassandra_driver.get_cf_batch(datastore_api.OBJ_UUID_CF_NAME)
self._cassandra_driver.remove(
obj_uuid,
columns=['ref:%s:%s' % (ref_obj_type, ref_uuid)],
batch=bch)
if obj_type == ref_obj_type:
self._cassandra_driver.remove(ref_uuid, columns=[
'ref:%s:%s' % (obj_type, obj_uuid)],
batch=bch)
try:
self.update_last_modified(bch, obj_type, ref_uuid)
symmetric_ref_updates = [ref_uuid]
except NoIdError as e:
# ref_uuid might have been deleted
# if cache has the link, it will be evicted
# if cache doesn't have, keyerror is caught and continued
pass
else:
self._cassandra_driver.remove(
ref_uuid,
columns=['backref:%s:%s' % (obj_type, obj_uuid)],
batch=bch)
# update latest_col_ts on referred object
if ref_obj_type not in self._obj_cache_exclude_types:
if ref_obj_type == obj_type:
# evict other side of ref since it is stale from
# GET /<old-ref-uuid> pov.
self._obj_cache_mgr.evict(obj_type, [ref_uuid])
else:
self.update_latest_col_ts(bch, ref_uuid)
if send:
bch.send()
return symmetric_ref_updates
# end _delete_ref
def _get_xsd_class(self, xsd_type):
return getattr(vnc_api, xsd_type)
# end _get_xsd_class
def object_create(self, obj_type, obj_id, obj_dict,
uuid_batch=None, fqname_batch=None):
obj_class = self._get_resource_class(obj_type)
if uuid_batch:
bch = uuid_batch
else:
# Gather column values for obj and updates to backrefs
# in a batch and write it at the end
bch = self._cassandra_driver.get_cf_batch(datastore_api.OBJ_UUID_CF_NAME)
obj_cols = {}
obj_cols['fq_name'] = json.dumps(obj_dict['fq_name'])
obj_cols['type'] = json.dumps(obj_type)
if obj_type not in self._obj_cache_exclude_types:
obj_cols['META:latest_col_ts'] = JSON_NONE
if 'parent_type' in obj_dict:
# non config-root child
parent_type = obj_dict['parent_type']
if parent_type not in obj_class.parent_types:
msg = ("Invalid parent type: %s not in %s" %
(parent_type, obj_class.parent_types))
return False, (400, msg)
parent_object_type = self._get_resource_class(
parent_type).object_type
parent_fq_name = obj_dict['fq_name'][:-1]
obj_cols['parent_type'] = json.dumps(parent_type)
parent_uuid = self.fq_name_to_uuid(parent_object_type,
parent_fq_name)
self._create_child(bch, parent_object_type, parent_uuid, obj_type,
obj_id)
# Properties
for prop_field in obj_class.prop_fields:
field = obj_dict.get(prop_field)
# Specifically checking for None
if field is None:
continue
if prop_field == 'id_perms':
field['created'] = datetime.datetime.utcnow().isoformat()
field['last_modified'] = field['created']
if prop_field in obj_class.prop_list_fields:
# store list elements in list order
# iterate on wrapped element or directly or prop field
if obj_class.prop_list_field_has_wrappers[prop_field]:
wrapper_field_keys = list(field.keys())
if wrapper_field_keys:
wrapper_field = wrapper_field_keys[0]
list_coll = field[wrapper_field]
else:
list_coll = []
else:
list_coll = field
for i in range(len(list_coll)):
self._add_to_prop_list(
bch, obj_id, prop_field, list_coll[i], str(i))
elif prop_field in obj_class.prop_map_fields:
# iterate on wrapped element or directly or prop field
if obj_class.prop_map_field_has_wrappers[prop_field]:
wrapper_field_keys = list(field.keys())
if wrapper_field_keys:
wrapper_field = wrapper_field_keys[0]
map_coll = field[wrapper_field]
else:
map_coll = []
else:
map_coll = field
map_key_name = obj_class.prop_map_field_key_names[prop_field]
for map_elem in map_coll:
map_key = map_elem[map_key_name]
self._set_in_prop_map(
bch, obj_id, prop_field, map_elem, map_key)
else:
self._create_prop(bch, obj_id, prop_field, field)
# References
# e.g. ref_field = 'network_ipam_refs'
# ref_res_type = 'network-ipam'
# ref_link_type = 'VnSubnetsType'
# is_weakref = False
symmetric_ref_updates = []
for ref_field in obj_class.ref_fields:
ref_fld_types_list = list(obj_class.ref_field_types[ref_field])
ref_res_type = ref_fld_types_list[0]
ref_link_type = ref_fld_types_list[1]
ref_obj_type = self._get_resource_class(ref_res_type).object_type
refs = obj_dict.get(ref_field, [])
for ref in refs:
ref_uuid = self.fq_name_to_uuid(ref_obj_type, ref['to'])
ref_attr = ref.get('attr')
ref_data = {'attr': ref_attr, 'is_weakref': False}
ret = self._create_ref(bch, obj_type, obj_id, ref_obj_type, ref_uuid,
ref_data)
symmetric_ref_updates.extend(ret)
self._cassandra_driver.insert(obj_id, obj_cols, batch=bch)
if not uuid_batch:
bch.send()
# Update fqname table
fq_name_str = ':'.join(obj_dict['fq_name'])
fq_name_cols = {utils.encode_string(fq_name_str) + ':' + obj_id:
JSON_NONE}
if fqname_batch:
fqname_batch.insert(obj_type, fq_name_cols)
else:
self._cassandra_driver.insert(
cf_name=datastore_api.OBJ_FQ_NAME_CF_NAME,
key=obj_type,
columns=fq_name_cols)
return (True, symmetric_ref_updates)
# end object_create
def object_raw_read(self, obj_type, obj_uuids, prop_names):
obj_class = self._get_resource_class(obj_type)
hit_obj_dicts, miss_uuids = self._obj_cache_mgr.read(
obj_class, obj_uuids, prop_names, False)
miss_obj_rows = self._cassandra_driver.multiget(
datastore_api.OBJ_UUID_CF_NAME, miss_uuids,
['prop:' + x for x in prop_names])
miss_obj_dicts = []
for obj_uuid, columns in list(miss_obj_rows.items()):
miss_obj_dict = {'uuid': obj_uuid}
for prop_name in columns:
# strip 'prop:' before sending result back
miss_obj_dict[prop_name[5:]] = columns[prop_name]
miss_obj_dicts.append(miss_obj_dict)
return hit_obj_dicts + miss_obj_dicts
def object_read(self, obj_type, obj_uuids, field_names=None,
ret_readonly=False):
if not obj_uuids:
return (True, [])
# if field_names=None, all fields will be read/returned
req_fields = field_names
obj_class = self._get_resource_class(obj_type)
ref_fields = obj_class.ref_fields
backref_fields = obj_class.backref_fields
children_fields = obj_class.children_fields
list_fields = obj_class.prop_list_fields
map_fields = obj_class.prop_map_fields
prop_fields = obj_class.prop_fields - (list_fields | map_fields)
if ((ret_readonly is False) or
(obj_type in self._obj_cache_exclude_types)):
ignore_cache = True
else:
ignore_cache = False
# optimize for common case of reading non-backref, non-children fields
# ignoring columns starting from 'b' and 'c' - significant performance
# impact in scaled setting. e.g. read of project
# For caching (when ret values will be used for readonly
# e.g. object read/list context):
# 1. pick the hits, and for the misses..
# 2. read from db, cache, filter with fields
# else read from db with specified field filters
if (field_names is None or
set(field_names) & (backref_fields | children_fields)):
            # at least one backref/children field is needed
include_backrefs_children = True
if ignore_cache:
hit_obj_dicts = []
miss_uuids = obj_uuids
else:
hit_obj_dicts, miss_uuids = self._obj_cache_mgr.read(
obj_class,
obj_uuids,
field_names,
include_backrefs_children,
)
miss_obj_rows = self._cassandra_driver.multiget(
datastore_api.OBJ_UUID_CF_NAME, miss_uuids,
timestamp=True)
else:
# ignore reading backref + children columns
include_backrefs_children = False
if ignore_cache:
hit_obj_dicts = []
miss_uuids = obj_uuids
else:
hit_obj_dicts, miss_uuids = self._obj_cache_mgr.read(
obj_class,
obj_uuids,
field_names,
include_backrefs_children,
)
miss_obj_rows = self._cassandra_driver.multiget(
datastore_api.OBJ_UUID_CF_NAME,
miss_uuids,
start='d',
timestamp=True)
if (ignore_cache or
self._obj_cache_mgr.max_entries < len(miss_uuids)):
# caller may modify returned value, or
# cannot fit in cache,
# just render with filter and don't cache
rendered_objs = self._render_obj_from_db(
obj_class, miss_obj_rows, req_fields,
include_backrefs_children)
obj_dicts = hit_obj_dicts + \
[v['obj_dict'] for k,v in list(rendered_objs.items())]
else:
# can fit and caller won't modify returned value,
# so render without filter, cache and return
# cached value
rendered_objs_to_cache = self._render_obj_from_db(
obj_class, miss_obj_rows, None,
include_backrefs_children)
field_filtered_objs = self._obj_cache_mgr.set(
obj_type,
rendered_objs_to_cache,
req_fields,
include_backrefs_children,
)
obj_dicts = hit_obj_dicts + field_filtered_objs
if not obj_dicts:
if len(obj_uuids) == 1:
raise NoIdError(obj_uuids[0])
else:
return (True, [])
return (True, obj_dicts)
# end object_read
def object_count_children(self, obj_type, obj_uuid, child_type):
if child_type is None:
return (False, '')
obj_class = self._get_resource_class(obj_type)
if child_type not in obj_class.children_fields:
return (False,
'%s is not a child type of %s' % (child_type, obj_type))
col_start = 'children:' + child_type[:-1] + ':'
col_finish = 'children:' + child_type[:-1] + ';'
num_children = self._cassandra_driver.get_count(
datastore_api.OBJ_UUID_CF_NAME,
obj_uuid,
start=col_start,
finish=col_finish)
return (True, num_children)
# end object_count_children
def update_last_modified(self, bch, obj_type, obj_uuid, id_perms=None):
if id_perms is None:
id_perms = self._cassandra_driver.get_one_col(
datastore_api.OBJ_UUID_CF_NAME,
obj_uuid,
'prop:id_perms')
id_perms['last_modified'] = datetime.datetime.utcnow().isoformat()
self._update_prop(bch, obj_uuid, 'id_perms', {'id_perms': id_perms})
if obj_type not in self._obj_cache_exclude_types:
self.update_latest_col_ts(bch, obj_uuid)
# end update_last_modified
def update_latest_col_ts(self, bch, obj_uuid):
try:
self._cassandra_driver.get_one_col(datastore_api.OBJ_UUID_CF_NAME,
obj_uuid,
'type')
except NoIdError:
return
self._cassandra_driver.insert(obj_uuid,
{'META:latest_col_ts':
JSON_NONE},
batch=bch)
# end update_latest_col_ts
def object_update(self, obj_type, obj_uuid, new_obj_dict, uuid_batch=None):
obj_class = self._get_resource_class(obj_type)
# Grab ref-uuids and properties in new version
new_ref_infos = {}
symmetric_ref_updates = []
# Properties
new_props = {}
for prop_field in obj_class.prop_fields:
if prop_field in new_obj_dict:
new_props[prop_field] = new_obj_dict[prop_field]
# References
# e.g. ref_field = 'network_ipam_refs'
# ref_type = 'network-ipam'
# ref_link_type = 'VnSubnetsType'
# is_weakref = False
for ref_field in obj_class.ref_fields:
ref_fld_types_list = list(obj_class.ref_field_types[ref_field])
ref_res_type = ref_fld_types_list[0]
ref_link_type = ref_fld_types_list[1]
is_weakref = ref_fld_types_list[2]
ref_obj_type = self._get_resource_class(ref_res_type).object_type
if ref_field in new_obj_dict:
new_refs = new_obj_dict[ref_field]
new_ref_infos[ref_obj_type] = {}
for new_ref in new_refs or []:
try:
new_ref_uuid = new_ref['uuid']
except KeyError:
new_ref_uuid = self.fq_name_to_uuid(ref_obj_type,
new_ref['to'])
new_ref_attr = new_ref.get('attr')
new_ref_data = {'attr': new_ref_attr,
'is_weakref': is_weakref}
new_ref_infos[ref_obj_type][new_ref_uuid] = new_ref_data
# Gather column values for obj and updates to backrefs
# in a batch and write it at the end
if uuid_batch:
bch = uuid_batch
else:
bch = self._cassandra_driver.get_cf_batch(
datastore_api.OBJ_UUID_CF_NAME)
for col_name, col_value in self._cassandra_driver.xget(
datastore_api.OBJ_UUID_CF_NAME, obj_uuid):
if self._is_prop(col_name):
(_, prop_name) = col_name.split(':')
if prop_name == 'id_perms':
# id-perms always has to be updated for last-mod timestamp
                    # get it from request dict (or from db if not in request dict)
new_id_perms = new_obj_dict.get(
prop_name, json.loads(col_value))
self.update_last_modified(
bch, obj_type, obj_uuid, new_id_perms)
elif prop_name in new_obj_dict:
self._update_prop(
bch, obj_uuid, prop_name, new_props)
if self._is_prop_list(col_name):
(_, prop_name, prop_elem_position) = col_name.split(':', 2)
if prop_name in new_props:
# delete all old values of prop list
self._delete_from_prop_list(
bch, obj_uuid, prop_name, prop_elem_position)
if self._is_prop_map(col_name):
(_, prop_name, prop_elem_position) = col_name.split(':', 2)
if prop_name in new_props:
# delete all old values of prop list
self._delete_from_prop_map(
bch, obj_uuid, prop_name, prop_elem_position)
if self._is_ref(col_name):
(_, ref_type, ref_uuid) = col_name.split(':')
ret = self._update_ref(bch, obj_type, obj_uuid, ref_type,
ref_uuid, new_ref_infos)
symmetric_ref_updates.extend(ret)
# for all column names
# create new refs
for ref_type in list(new_ref_infos.keys()):
for ref_uuid in list(new_ref_infos[ref_type].keys()):
ref_data = new_ref_infos[ref_type][ref_uuid]
ret = self._create_ref(bch, obj_type, obj_uuid, ref_type,
ref_uuid, ref_data)
symmetric_ref_updates.extend(ret)
# create new props
for prop_name in list(new_props.keys()):
if prop_name in obj_class.prop_list_fields:
# store list elements in list order
# iterate on wrapped element or directly on prop field
# for wrapped lists, store without the wrapper. regenerate
# wrapper on read
if (obj_class.prop_list_field_has_wrappers[prop_name] and
new_props[prop_name]):
wrapper_field = list(new_props[prop_name].keys())[0]
list_coll = new_props[prop_name][wrapper_field]
else:
list_coll = new_props[prop_name]
for i in range(len(list_coll)):
self._add_to_prop_list(bch, obj_uuid, prop_name,
list_coll[i], str(i))
elif prop_name in obj_class.prop_map_fields:
# store map elements in key order
# iterate on wrapped element or directly on prop field
# for wrapped lists, store without the wrapper. regenerate
# wrapper on read
if (obj_class.prop_map_field_has_wrappers[prop_name] and
new_props[prop_name]):
wrapper_field = list(new_props[prop_name].keys())[0]
map_coll = new_props[prop_name][wrapper_field]
else:
map_coll = new_props[prop_name]
map_key_name = obj_class.prop_map_field_key_names[prop_name]
for map_elem in map_coll:
map_key = map_elem[map_key_name]
self._set_in_prop_map(bch, obj_uuid, prop_name,
map_elem, map_key)
else:
self._create_prop(bch, obj_uuid, prop_name, new_props[prop_name])
if not uuid_batch:
try:
bch.send()
finally:
self._obj_cache_mgr.evict(obj_type, [obj_uuid])
return (True, symmetric_ref_updates)
# end object_update
def object_list(self, obj_type, parent_uuids=None, back_ref_uuids=None,
obj_uuids=None, count=False, filters=None,
paginate_start=None, paginate_count=None):
obj_class = self._get_resource_class(obj_type)
children_fq_names_uuids = []
ret_marker = None
anchored_op = True
def filter_rows(coll_infos, filters=None):
if not coll_infos or not filters:
return coll_infos
filtered_infos = {}
columns = ['prop:%s' % filter_key for filter_key in filters if
filter_key in obj_class.prop_fields]
if not columns:
return coll_infos
rows = self._cassandra_driver.multiget(datastore_api.OBJ_UUID_CF_NAME,
list(coll_infos.keys()),
columns=columns)
for obj_uuid, properties in list(rows.items()):
# give chance for zk heartbeat/ping
gevent.sleep(0)
full_match = True
for filter_key, filter_values in list(filters.items()):
property = 'prop:%s' % filter_key
if property not in properties:
full_match = False
break
prop_value = properties[property]
if isinstance(prop_value, dict):
for filter_value in filter_values:
try:
filter_dict = json.loads(filter_value)
except ValueError:
continue
if (six.viewitems(filter_dict) <=
six.viewitems(prop_value)):
break
else:
full_match = False
break
elif prop_value not in filter_values:
full_match = False
break
if full_match:
filtered_infos[obj_uuid] = coll_infos[obj_uuid]
return filtered_infos
# end filter_rows
def get_fq_name_uuid_list(obj_uuids):
ret_list = []
for obj_uuid in obj_uuids:
try:
if obj_type != self.uuid_to_obj_type(obj_uuid):
continue
obj_fq_name = self.uuid_to_fq_name(obj_uuid)
ret_list.append((obj_fq_name, obj_uuid))
except NoIdError:
pass
return ret_list
# end get_fq_name_uuid_list
if parent_uuids:
# go from parent to child
## tune start and count if paginated on same row
#if paginate_start and (len(parent_uuids) == 1):
if paginate_start and paginate_start != '0':
start = 'children:%s:%s' % (obj_type,
paginate_start[:-1]+chr(ord(paginate_start[-1])+1))
num_columns = paginate_count
else:
start = 'children:%s:' % (obj_type)
num_columns = None
obj_rows = self._cassandra_driver.multiget(
datastore_api.OBJ_UUID_CF_NAME,
parent_uuids,
start=start,
finish='children:%s;' % (obj_type),
num_columns=num_columns,
timestamp=True)
def filter_rows_parent_anchor(sort=False):
# flatten to [('children:<type>:<uuid>', (<val>,<ts>), *]
all_cols = [cols for obj_key in list(obj_rows.keys())
for cols in list(obj_rows[obj_key].items())]
all_child_infos = {}
for col_name, col_val_ts in all_cols:
# give chance for zk heartbeat/ping
gevent.sleep(0)
child_uuid = col_name.split(':')[2]
if obj_uuids and child_uuid not in obj_uuids:
continue
if back_ref_uuids:
child_cols = self._cassandra_driver.get(
datastore_api.OBJ_UUID_CF_NAME,
child_uuid,
start='ref:',
finish='ref;')
child_ref_ids = {col.split(':')[2]
for col in child_cols or []}
if not set(back_ref_uuids) & child_ref_ids:
continue
all_child_infos[child_uuid] = {'uuid': child_uuid,
'tstamp': col_val_ts[1]}
filt_child_infos = filter_rows(all_child_infos, filters)
if not sort:
ret_child_infos = list(filt_child_infos.values())
else:
ret_child_infos = sorted(list(filt_child_infos.values()),
key=itemgetter('tstamp'))
return get_fq_name_uuid_list(r['uuid'] for r in ret_child_infos)
# end filter_rows_parent_anchor
children_fq_names_uuids.extend(filter_rows_parent_anchor(sort=True))
elif back_ref_uuids:
# go from anchor to backrefs
if paginate_start and paginate_start != '0':
# get next lexical value of marker
start = 'backref:%s:%s' % (obj_type,
paginate_start[:-1]+chr(ord(paginate_start[-1])+1))
num_columns = paginate_count
else:
start = 'backref:%s:' % (obj_type)
num_columns = None
obj_rows = self._cassandra_driver.multiget(
datastore_api.OBJ_UUID_CF_NAME,
back_ref_uuids,
start=start,
finish='backref:%s;' % (obj_type),
num_columns=num_columns,
timestamp=True)
def filter_rows_backref_anchor():
# flatten to [('backref:<obj-type>:<uuid>', (<val>,<ts>), *]
all_cols = [cols for obj_key in list(obj_rows.keys())
for cols in list(obj_rows[obj_key].items())]
all_backref_infos = {}
for col_name, col_val_ts in all_cols:
# give chance for zk heartbeat/ping
gevent.sleep(0)
backref_uuid = col_name.split(':')[2]
if obj_uuids and backref_uuid not in obj_uuids:
continue
all_backref_infos[backref_uuid] = \
{'uuid': backref_uuid, 'tstamp': col_val_ts[1]}
filt_backref_infos = filter_rows(all_backref_infos, filters)
return get_fq_name_uuid_list(r['uuid'] for r in
list(filt_backref_infos.values()))
# end filter_rows_backref_anchor
children_fq_names_uuids.extend(filter_rows_backref_anchor())
else:
anchored_op = False
if obj_uuids:
# exact objects specified
def filter_rows_object_list():
all_obj_infos = {}
marker = None
read_in = 0
start_idx = 0
if paginate_start and paginate_start != '0':
# paginate through objects
# in list order of obj_uuids
try:
start_idx = obj_uuids.index(paginate_start) + 1
except ValueError:
# simulate end of pagination
start_idx = len(obj_uuids)
for obj_uuid in obj_uuids[start_idx:]:
all_obj_infos[obj_uuid] = None
read_in += 1
if paginate_start and read_in >= paginate_count:
marker = obj_uuid
break
filt_obj_infos = filter_rows(all_obj_infos, filters)
return get_fq_name_uuid_list(list(filt_obj_infos.keys())), marker
# end filter_rows_object_list
filtered_rows, ret_marker = filter_rows_object_list()
children_fq_names_uuids.extend(filtered_rows)
else: # grab all resources of this type
if paginate_start and paginate_start != '0':
start = paginate_start[:-1] + \
chr(ord(paginate_start[-1]) + 1)
else:
start = ''
cols = self._cassandra_driver.xget(
datastore_api.OBJ_FQ_NAME_CF_NAME, '%s' %(obj_type),
start=start)
def filter_rows_no_anchor():
marker = None
all_obj_infos = {}
read_in = 0
for col_name, _ in cols:
# give chance for zk heartbeat/ping
gevent.sleep(0)
col_name_arr = utils.decode_string(col_name).split(':')
obj_uuid = col_name_arr[-1]
all_obj_infos[obj_uuid] = (col_name_arr[:-1], obj_uuid)
read_in += 1
if paginate_start and read_in >= paginate_count:
marker = col_name
break
filt_obj_infos = filter_rows(all_obj_infos, filters)
return list(filt_obj_infos.values()), marker
# end filter_rows_no_anchor
if count and not filters:
# when listing all objects of a type
# return early if only count query is in request
return (True, sum(1 for col in cols), None)
filtered_rows, ret_marker = filter_rows_no_anchor()
children_fq_names_uuids.extend(filtered_rows)
if count:
return (True, len(children_fq_names_uuids), None)
# for anchored list with pagination,
# prune from union of anchors and last uuid is marker
if paginate_start and anchored_op:
children_fq_names_uuids = sorted(children_fq_names_uuids,
key=lambda fqn_uuid: fqn_uuid[1])
if len(children_fq_names_uuids) > paginate_count:
children_fq_names_uuids = children_fq_names_uuids[:paginate_count]
if not children_fq_names_uuids:
ret_marker = None
else:
ret_marker = children_fq_names_uuids[-1][1]
return (True, children_fq_names_uuids, ret_marker)
# end object_list
def object_delete(self, obj_type, obj_uuid):
obj_class = self._get_resource_class(obj_type)
fq_name = self._cassandra_driver.get_one_col(datastore_api.OBJ_UUID_CF_NAME,
obj_uuid,
'fq_name')
bch = self._cassandra_driver.get_cf_batch(datastore_api.OBJ_UUID_CF_NAME)
# unlink from parent
col_start = 'parent:'
col_fin = 'parent;'
col_name_iter = self._cassandra_driver.xget(datastore_api.OBJ_UUID_CF_NAME,
obj_uuid, start=col_start, finish=col_fin)
for (col_name, col_val) in col_name_iter:
(_, parent_type, parent_uuid) = col_name.split(':')
self._delete_child(
bch, parent_type, parent_uuid, obj_type, obj_uuid)
# remove refs
col_start = 'ref:'
col_fin = 'ref;'
col_name_iter = self._cassandra_driver.xget(datastore_api.OBJ_UUID_CF_NAME,
obj_uuid, start=col_start, finish=col_fin)
symmetric_ref_updates = []
for (col_name, col_val) in col_name_iter:
(_, ref_type, ref_uuid) = col_name.split(':')
ret = self._delete_ref(bch, obj_type, obj_uuid, ref_type, ref_uuid)
symmetric_ref_updates.extend(ret)
# remove link from relaxed back refs
col_start = 'relaxbackref:'
col_fin = 'relaxbackref;'
col_name_iter = self._cassandra_driver.xget(datastore_api.OBJ_UUID_CF_NAME,
obj_uuid, start=col_start, finish=col_fin)
for (col_name, col_val) in col_name_iter:
(_, backref_uuid) = col_name.split(':')
self._delete_ref(bch, None, backref_uuid, obj_type, obj_uuid)
self._cassandra_driver.remove(obj_uuid, batch=bch)
try:
bch.send()
finally:
self._obj_cache_mgr.evict(obj_type, [obj_uuid])
# Update fqname table
fq_name_str = ':'.join(fq_name)
fq_name_col = utils.encode_string(fq_name_str) + ':' + obj_uuid
self._cassandra_driver.remove(
cf_name=datastore_api.OBJ_FQ_NAME_CF_NAME,
key=obj_type,
columns=[fq_name_col])
# Purge map naming cache
self.cache_uuid_to_fq_name_del(obj_uuid)
return (True, symmetric_ref_updates)
# end object_delete
def prop_collection_read(self, obj_type, obj_uuid, obj_fields, position):
obj_class = self._get_resource_class(obj_type)
result = {}
# always read-in id-perms for upper-layers to do rbac/visibility
result['id_perms'] = self._cassandra_driver.get_one_col(
datastore_api.OBJ_UUID_CF_NAME,
obj_uuid,
'prop:id_perms')
# read in prop-list or prop-map fields
for field in obj_fields:
if field in obj_class.prop_list_fields:
prop_pfx = 'propl'
elif field in obj_class.prop_map_fields:
prop_pfx = 'propm'
else:
continue
if position:
col_start = '%s:%s:%s' % (prop_pfx, field, position)
col_end = '%s:%s:%s' % (prop_pfx, field, position)
else:
col_start = '%s:%s:' % (prop_pfx, field)
col_end = '%s:%s;' % (prop_pfx, field)
obj_cols = self._cassandra_driver.xget(
datastore_api.OBJ_UUID_CF_NAME,
obj_uuid,
start=col_start,
finish=col_end)
result[field] = []
for name, value in obj_cols:
# tuple of col_value, position. result is already sorted
# lexically by position (necessary only for list property)
result[field].append((json.loads(value), name.split(':', 2)[-1]))
return (True, result)
# end prop_collection_read
def cache_uuid_to_fq_name_add(self, id, fq_name, obj_type):
self._cache_uuid_to_fq_name[id] = (fq_name, obj_type)
# end cache_uuid_to_fq_name_add
def cache_uuid_to_fq_name_del(self, id):
self._cache_uuid_to_fq_name.pop(id, None)
# end cache_uuid_to_fq_name_del
def uuid_to_fq_name(self, id):
try:
return copy.copy(self._cache_uuid_to_fq_name[id][0])
except KeyError:
obj = self._cassandra_driver.get(datastore_api.OBJ_UUID_CF_NAME, id,
columns=['fq_name', 'type'])
if not obj:
raise NoIdError(id)
if 'type' not in obj or 'fq_name' not in obj:
raise NoIdError(id)
fq_name = obj['fq_name']
obj_type = obj['type']
self.cache_uuid_to_fq_name_add(id, fq_name, obj_type)
return copy.copy(fq_name)
# end uuid_to_fq_name
def uuid_to_obj_type(self, id):
try:
return self._cache_uuid_to_fq_name[id][1]
except KeyError:
obj = self._cassandra_driver.get(datastore_api.OBJ_UUID_CF_NAME, id,
columns=['fq_name', 'type'])
if not obj:
raise NoIdError(id)
if 'type' not in obj or 'fq_name' not in obj:
raise NoIdError(id)
fq_name = obj['fq_name']
obj_type = obj['type']
self.cache_uuid_to_fq_name_add(id, fq_name, obj_type)
return obj_type
# end uuid_to_obj_type
def fq_name_to_uuid(self, obj_type, fq_name):
fq_name_str = utils.encode_string(':'.join(fq_name))
col_infos = self._cassandra_driver.get(datastore_api.OBJ_FQ_NAME_CF_NAME,
obj_type,
start=fq_name_str + ':',
finish=fq_name_str + ';')
if not col_infos:
raise NoIdError('%s %s' % (obj_type, fq_name_str))
if len(col_infos) > 1:
raise VncError('Multi match %s for %s' % (fq_name_str, obj_type))
fq_name_uuid = utils.decode_string(col_infos.popitem()[0]).split(':')
if obj_type != 'route_target' and fq_name_uuid[:-1] != fq_name:
raise NoIdError('%s %s' % (obj_type, fq_name_str))
return fq_name_uuid[-1]
# end fq_name_to_uuid
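    # Hedged usage sketch for the lookup helpers above (names/uuid invented):
    #
    #   uuid = db.fq_name_to_uuid('virtual_network',
    #                             ['default-domain', 'proj1', 'vn1'])
    #   fq_name = db.uuid_to_fq_name(uuid)  # ['default-domain', 'proj1', 'vn1']
    #
    # Both directions are backed by the OBJ_FQ_NAME_CF / OBJ_UUID_CF column
    # families, with _cache_uuid_to_fq_name short-circuiting repeat lookups.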
# return all objects shared with a (share_type, share_id)
def get_shared(self, obj_type, share_id='', share_type='global'):
result = []
column = '%s:%s' % (share_type, share_id)
col_infos = self._cassandra_driver.get(datastore_api.OBJ_SHARED_CF_NAME,
obj_type,
start=column + ':',
finish=column + ';')
if not col_infos:
return None
for (col_name, col_val) in list(col_infos.items()):
# ('*:*:f7963198-08a4-4b96-a02e-41cc66593163', u'7')
obj_uuid = col_name.split(':')[-1]
result.append((obj_uuid, col_val))
return result
# share an object 'obj_id' with <share_type:share_id>
# rwx indicate type of access (sharing) allowed
def set_shared(self, obj_type, obj_id, share_id = '', share_type = 'global', rwx = 7):
col_name = '%s:%s:%s' % (share_type, share_id, obj_id)
self._cassandra_driver.insert(
cf_name=datastore_api.OBJ_SHARED_CF_NAME,
key=obj_type,
columns={col_name:json.dumps(rwx)})
# delete share of 'obj_id' object with <share_type:share_id>
def del_shared(self, obj_type, obj_id, share_id = '', share_type = 'global'):
col_name = '%s:%s:%s' % (share_type, share_id, obj_id)
self._cassandra_driver.remove(
cf_name=datastore_api.OBJ_SHARED_CF_NAME,
key=obj_type,
columns=[col_name])
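    # For example (hypothetical uuids; 'tenant' as a share_type is an
    # assumption here, the code only hard-codes 'global' as the default):
    #
    #   db.set_shared('virtual_network', vn_uuid,
    #                 share_id=tenant_uuid, share_type='tenant', rwx=5)
    #   db.get_shared('virtual_network', share_id=tenant_uuid,
    #                 share_type='tenant')  # -> [(vn_uuid, 5)]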
def _render_obj_from_db(self, obj_class, obj_rows, field_names=None,
include_backrefs_children=False):
ref_fields = obj_class.ref_fields
backref_fields = obj_class.backref_fields
children_fields = obj_class.children_fields
list_fields = obj_class.prop_list_fields
map_fields = obj_class.prop_map_fields
prop_fields = obj_class.prop_fields - (list_fields | map_fields)
results = {}
for obj_uuid, obj_cols in list(obj_rows.items()):
if 'type' not in obj_cols or 'fq_name' not in obj_cols:
# if object has been deleted, these fields may not
# be present
continue
if obj_class.object_type != obj_cols.pop('type')[0]:
continue
id_perms_ts = 0
row_latest_ts = 0
result = {}
result['uuid'] = obj_uuid
result['fq_name'] = obj_cols.pop('fq_name')[0]
for col_name in list(obj_cols.keys()):
if self._is_parent(col_name):
# non config-root child
(_, _, parent_uuid) = col_name.split(':')
try:
result['parent_type'] = obj_cols['parent_type'][0]
except KeyError:
# parent_type may not be present in obj_cols
pass
result['parent_uuid'] = parent_uuid
continue
if self._is_prop(col_name):
(_, prop_name) = col_name.split(':')
if prop_name == 'id_perms':
id_perms_ts = obj_cols[col_name][1]
if ((prop_name not in prop_fields) or
(field_names and prop_name not in field_names)):
continue
result[prop_name] = obj_cols[col_name][0]
continue
if self._is_prop_list(col_name):
(_, prop_name, prop_elem_position) = col_name.split(':')
if field_names and prop_name not in field_names:
continue
if obj_class.prop_list_field_has_wrappers[prop_name]:
prop_field_types = obj_class.prop_field_types[prop_name]
wrapper_type = prop_field_types['xsd_type']
wrapper_cls = self._get_xsd_class(wrapper_type)
wrapper_field = wrapper_cls.attr_fields[0]
if prop_name not in result:
result[prop_name] = {wrapper_field: []}
result[prop_name][wrapper_field].append(
(obj_cols[col_name][0], prop_elem_position))
else:
if prop_name not in result:
result[prop_name] = []
result[prop_name].append((obj_cols[col_name][0],
prop_elem_position))
continue
if self._is_prop_map(col_name):
(_, prop_name, _) = col_name.split(':', 2)
if field_names and prop_name not in field_names:
continue
if obj_class.prop_map_field_has_wrappers[prop_name]:
prop_field_types = obj_class.prop_field_types[prop_name]
wrapper_type = prop_field_types['xsd_type']
wrapper_cls = self._get_xsd_class(wrapper_type)
wrapper_field = wrapper_cls.attr_fields[0]
if prop_name not in result:
result[prop_name] = {wrapper_field: []}
result[prop_name][wrapper_field].append(
obj_cols[col_name][0])
else:
if prop_name not in result:
result[prop_name] = []
result[prop_name].append(obj_cols[col_name][0])
continue
if self._is_children(col_name):
(_, child_type, child_uuid) = col_name.split(':')
if field_names and '%ss' %(child_type) not in field_names:
continue
if child_type+'s' not in children_fields:
continue
child_tstamp = obj_cols[col_name][1]
try:
self._read_child(result, obj_uuid, child_type,
child_uuid, child_tstamp)
except NoIdError:
continue
continue
if self._is_ref(col_name):
(_, ref_type, ref_uuid) = col_name.split(':')
if ((ref_type+'_refs' not in ref_fields) or
(field_names and ref_type + '_refs' not in field_names)):
continue
self._read_ref(result, obj_uuid, ref_type, ref_uuid,
obj_cols[col_name][0])
continue
if self._is_backref(col_name):
(_, back_ref_type, back_ref_uuid) = col_name.split(':')
if back_ref_type+'_back_refs' not in backref_fields:
continue
if (field_names and
'%s_back_refs' %(back_ref_type) not in field_names):
continue
try:
self._read_back_ref(result, obj_uuid, back_ref_type,
back_ref_uuid, obj_cols[col_name][0])
except NoIdError:
continue
continue
if self._is_metadata(col_name):
(_, meta_type) = col_name.split(':')
if meta_type == 'latest_col_ts':
row_latest_ts = obj_cols[col_name][1]
continue
# for all column names
# sort children by creation time
for child_field in obj_class.children_fields:
if child_field not in result:
continue
sorted_children = sorted(result[child_field],
key = itemgetter('tstamp'))
# re-write result's children without timestamp
result[child_field] = sorted_children
[child.pop('tstamp') for child in result[child_field]]
# for all children
# Ordering property lists by position attribute
for prop_name in (obj_class.prop_list_fields & set(result.keys())):
if isinstance(result[prop_name], list):
result[prop_name] = [el[0] for el in
sorted(result[prop_name],
key=itemgetter(1))]
elif isinstance(result[prop_name], dict):
wrapper, unsorted_list = result[prop_name].popitem()
result[prop_name][wrapper] = [el[0] for el in
sorted(unsorted_list,
key=itemgetter(1))]
# 'id_perms_ts' tracks timestamp of id-perms column
# i.e. latest update of *any* prop or ref.
# 'row_latest_ts' tracks timestamp of last modified column
# so any backref/children column is also captured. 0=>unknown
results[obj_uuid] = {'obj_dict': result,
'id_perms_ts': id_perms_ts}
if include_backrefs_children:
# update our copy of ts only if we read the
# corresponding fields from db
results[obj_uuid]['row_latest_ts'] = row_latest_ts
# end for all rows
return results
# end _render_obj_from_db
def _read_child(self, result, obj_uuid, child_obj_type, child_uuid,
child_tstamp):
if '%ss' % (child_obj_type) not in result:
result['%ss' % (child_obj_type)] = []
child_res_type = self._get_resource_class(child_obj_type).resource_type
child_info = {}
child_info['to'] = self.uuid_to_fq_name(child_uuid)
child_info['uuid'] = child_uuid
child_info['tstamp'] = child_tstamp
result['%ss' % (child_obj_type)].append(child_info)
# end _read_child
def _read_ref(self, result, obj_uuid, ref_obj_type, ref_uuid, ref_data_json):
if '%s_refs' % (ref_obj_type) not in result:
result['%s_refs' % (ref_obj_type)] = []
ref_data = ref_data_json
ref_info = {}
try:
ref_info['to'] = self.uuid_to_fq_name(ref_uuid)
except NoIdError:
ref_info['to'] = ['ERROR']
if ref_data:
try:
ref_info['attr'] = ref_data['attr']
except KeyError:
# TODO remove backward compat old format had attr directly
ref_info['attr'] = ref_data
ref_info['uuid'] = ref_uuid
result['%s_refs' % (ref_obj_type)].append(ref_info)
# end _read_ref
def _read_back_ref(self, result, obj_uuid, back_ref_obj_type, back_ref_uuid,
back_ref_data_json):
if '%s_back_refs' % (back_ref_obj_type) not in result:
result['%s_back_refs' % (back_ref_obj_type)] = []
back_ref_info = {}
back_ref_info['to'] = self.uuid_to_fq_name(back_ref_uuid)
back_ref_data = back_ref_data_json
if back_ref_data:
try:
back_ref_info['attr'] = back_ref_data['attr']
except KeyError:
# TODO remove backward compat old format had attr directly
back_ref_info['attr'] = back_ref_data
back_ref_info['uuid'] = back_ref_uuid
result['%s_back_refs' % (back_ref_obj_type)].append(back_ref_info)
# end _read_back_ref
def walk(self, fn=None):
type_to_object = {}
for obj_uuid, obj_col in self._cassandra_driver.get_range(
datastore_api.OBJ_UUID_CF_NAME,
columns=['type', 'fq_name']):
try:
obj_type = json.loads(obj_col['type'])
obj_fq_name = json.loads(obj_col['fq_name'])
# prep cache to avoid n/w round-trip in db.read for ref
self.cache_uuid_to_fq_name_add(obj_uuid, obj_fq_name, obj_type)
try:
type_to_object[obj_type].append(obj_uuid)
except KeyError:
type_to_object[obj_type] = [obj_uuid]
except Exception as e:
self._logger('Error in db walk read %s' % (str(e)),
level=SandeshLevel.SYS_ERR)
continue
if fn is None:
return []
walk_results = []
for obj_type, uuid_list in list(type_to_object.items()):
try:
self._logger('DB walk: obj_type %s len %s'
% (obj_type, len(uuid_list)),
level=SandeshLevel.SYS_INFO)
result = fn(obj_type, uuid_list)
if result:
walk_results.append(result)
except Exception as e:
self._logger('Error in db walk invoke %s' % (str(e)),
level=SandeshLevel.SYS_ERR)
continue
return walk_results
# end walk
# end class VncCassandraClient
class ObjectCacheManager(object):
class CachedObject(object):
# provide a read-only copy in so far as
# top level keys cannot be add/mod/del
class RODict(dict):
def __readonly__(self, *args, **kwargs):
raise RuntimeError("Cannot modify ReadOnlyDict")
__setitem__ = __readonly__
__delitem__ = __readonly__
pop = __readonly__
popitem = __readonly__
clear = __readonly__
update = __readonly__
setdefault = __readonly__
del __readonly__
# end RODict
def __init__(self, obj_dict, id_perms_ts, row_latest_ts):
self.obj_dict = self.RODict(obj_dict)
self.id_perms_ts = id_perms_ts
self.row_latest_ts = row_latest_ts
# end __init__
def update_obj_dict(self, new_obj_dict):
self.obj_dict = self.RODict(new_obj_dict)
# end update_obj_dict
def get_filtered_copy(self, field_names=None):
if not field_names:
return self.obj_dict
# TODO filter with field_names
return {k: copy.deepcopy(self.obj_dict[k])
for k in field_names if k in self.obj_dict}
# end get_filtered_copy
# end class CachedObject
def __init__(self, logger, db_client, max_entries,
obj_cache_exclude_types=None, debug_obj_cache_types=None):
self._logger = logger
self.max_entries = max_entries
self._db_client = db_client
self._cache = OrderedDict()
self._obj_cache_exclude_types = set(obj_cache_exclude_types or [])
self._debug_obj_cache_types = set(debug_obj_cache_types or [])
self._debug_obj_cache_types -= self._obj_cache_exclude_types
# end __init__
def _log(self, msg, level=SandeshLevel.SYS_DEBUG):
msg = 'Object UUID cache manager: %s' % msg
self._logger(msg, level)
def evict(self, obj_type, obj_uuids):
for obj_uuid in obj_uuids:
try:
obj_dict = self._cache.pop(obj_uuid).obj_dict
if obj_type in self._debug_obj_cache_types:
self._log("%s %s (%s) was evicted from cache. Cache "
"contained: %s" % (
obj_type.replace('_', '-').title(),
':'.join(obj_dict['fq_name']),
obj_uuid,
pformat(obj_dict),
),
)
except KeyError:
continue
# end evict
def set(self, obj_type, db_rendered_objs, req_fields,
include_backrefs_children):
# build up results with field filter
result_obj_dicts = []
if req_fields:
result_fields = set(req_fields) | set(['fq_name', 'uuid',
'parent_type', 'parent_uuid'])
for obj_uuid, render_info in list(db_rendered_objs.items()):
id_perms_ts = render_info.get('id_perms_ts', 0)
row_latest_ts = render_info.get('row_latest_ts', 0)
cached_obj = self._cache.pop(obj_uuid, None)
if cached_obj is not None:
# if we had stale, just update from new db value
cached_obj.update_obj_dict(render_info['obj_dict'])
cached_obj.id_perms_ts = id_perms_ts
if include_backrefs_children:
cached_obj.row_latest_ts = row_latest_ts
else:
# this was a miss in cache
cached_obj = self.CachedObject(
render_info['obj_dict'],
id_perms_ts,
row_latest_ts,
)
                if len(self._cache) >= self.max_entries:
                    # evict the least recently used entry (the first key in
                    # the OrderedDict) without copying the whole key list;
                    # iterating the dict directly works on both PY2 and PY3
                    key = next(iter(self._cache))
                    self.evict(obj_type, [key])
self._cache[obj_uuid] = cached_obj
if obj_type in self._debug_obj_cache_types:
self._log("%s %s (%s) was set in cache with values: %s" % (
obj_type.replace('_', ' ').title(),
':'.join(cached_obj.obj_dict['fq_name']),
obj_uuid,
pformat(cached_obj.obj_dict),
),
)
if req_fields:
result_obj_dicts.append(
cached_obj.get_filtered_copy(result_fields))
else:
result_obj_dicts.append(cached_obj.get_filtered_copy())
# end for all rendered objects
return result_obj_dicts
# end set
def read(self, obj_class, obj_uuids, req_fields, include_backrefs_children):
# find which keys are a hit, find which hit keys are not stale
# return hit entries and miss+stale uuids.
hit_uuids = []
miss_uuids = []
for obj_uuid in obj_uuids:
if obj_uuid in self._cache:
hit_uuids.append(obj_uuid)
else:
miss_uuids.append(obj_uuid)
stale_uuids = []
# staleness when include_backrefs_children is False = id_perms tstamp
# when include_backrefs_children is True = latest_col_ts tstamp
if include_backrefs_children:
stale_check_col_name = 'META:latest_col_ts'
stale_check_ts_attr = 'row_latest_ts'
else:
stale_check_col_name = 'prop:id_perms'
stale_check_ts_attr = 'id_perms_ts'
hit_rows_in_db = self._db_client._cassandra_driver.multiget(
datastore_api.OBJ_UUID_CF_NAME, hit_uuids,
columns=[stale_check_col_name], timestamp=True)
obj_dicts = []
result_fields = {'fq_name', 'uuid', 'parent_type', 'parent_uuid'}
if req_fields:
result_fields = set(req_fields) | result_fields
for hit_uuid in hit_uuids:
try:
obj_cols = hit_rows_in_db[hit_uuid]
cached_obj = self._cache[hit_uuid]
except KeyError:
# Either stale check column missing, treat as miss
# Or entry could have been evicted while context switched
# for reading stale-check-col, treat as miss
miss_uuids.append(hit_uuid)
continue
if (getattr(cached_obj, stale_check_ts_attr) !=
obj_cols[stale_check_col_name][1]):
miss_uuids.append(hit_uuid)
stale_uuids.append(hit_uuid)
continue
if req_fields:
obj_dicts.append(cached_obj.get_filtered_copy(result_fields))
else:
obj_dicts.append(cached_obj.get_filtered_copy())
if obj_class.object_type in self._debug_obj_cache_types:
obj_rows = self._db_client._cassandra_driver.multiget(
datastore_api.OBJ_UUID_CF_NAME,
[hit_uuid],
timestamp=True)
rendered_objs = self._db_client._render_obj_from_db(
obj_class, obj_rows, req_fields, include_backrefs_children)
db_obj_dict = rendered_objs[hit_uuid]['obj_dict']
self._log("%s %s (%s) was read from cache.\nDB values: %s\n"
"Cache value: %s\n" % (
obj_class.object_type.replace('_', ' ').title(),
':'.join(cached_obj.obj_dict['fq_name']),
hit_uuid,
pformat(db_obj_dict),
pformat(cached_obj.obj_dict),
),
)
# end for all hit in cache
self.evict(obj_class.object_type, stale_uuids)
return obj_dicts, miss_uuids
# end read
def dump_cache(self, obj_uuids=None, count=10):
obj_dicts = {}
i = 1
if obj_uuids:
for obj_uuid in obj_uuids:
try:
obj = self._cache[obj_uuid]
except KeyError:
continue
obj_json = json.dumps(obj, default=lambda o: dict((k, v)
for k, v in list(o.__dict__.items())))
obj_dicts[i] = json.loads(obj_json)
i += 1
else:
for key in self._cache:
if i > count:
break
obj = self._cache[key]
obj_json = json.dumps(obj, default=lambda o: dict((k, v)
for k, v in list(o.__dict__.items())))
obj_dicts[i] = json.loads(obj_json)
i += 1
return obj_dicts
# end class ObjectCacheManager
| 42.319405 | 100 | 0.542985 | [
"Apache-2.0"
] | atsgen/tf-controller | src/config/common/cfgm_common/vnc_cassandra.py | 73,932 | Python |
import django
import six
from django.http import HttpResponseRedirect
if django.VERSION[0] < 2:
from django.core.urlresolvers import reverse
else:
from django.urls import reverse
from django.db import transaction
from django.utils import timezone
import logging
from processlib.assignment import inherit
from processlib.tasks import run_async_activity
logger = logging.getLogger(__name__)
@six.python_2_unicode_compatible
class Activity(object):
def __init__(
self,
flow,
process,
instance,
name,
verbose_name=None,
permission=None,
auto_create_permission=True,
permission_name=None,
skip_if=None,
assign_to=inherit,
):
self.flow = flow
self.process = process
self.verbose_name = verbose_name
self.permission = permission
self.auto_create_permission = auto_create_permission
self.permission_name = permission_name or verbose_name or name
self.name = name
self.instance = instance
# ensure that we have a single referenced process object
if self.instance:
self.instance.process = self.process
self._skip = skip_if
self._get_assignment = assign_to
def should_skip(self):
if not self._skip:
return False
return self._skip(self)
def should_wait(self):
return False
def has_view(self):
return False
def __str__(self):
return six.text_type(self.verbose_name or self.name)
def __repr__(self):
return '{}(name="{}")'.format(self.__class__.__name__, self.name)
def instantiate(
self, predecessor=None, instance_kwargs=None, request=None, **kwargs
):
assert not self.instance
instance_kwargs = instance_kwargs or {}
request_user = (
request.user if request and request.user.is_authenticated else None
)
user, group = self._get_assignment(
request_user=request_user, predecessor=predecessor
)
if "assigned_user" not in instance_kwargs:
instance_kwargs["assigned_user"] = user
if "assigned_group" not in instance_kwargs:
instance_kwargs["assigned_group"] = group
self.instance = self.flow.activity_model(
process=self.process, activity_name=self.name, **(instance_kwargs or {})
)
self.instance.save()
if predecessor:
self.instance.predecessors.add(predecessor.instance)
def assign_to(self, user, group):
self.instance.assigned_user = user
self.instance.assigned_group = group
self.instance.save()
def start(self, **kwargs):
assert self.instance.status in (
self.instance.STATUS_INSTANTIATED,
self.instance.STATUS_SCHEDULED,
)
if not self.instance.started_at:
self.instance.started_at = timezone.now()
self.instance.status = self.instance.STATUS_STARTED
def finish(self, **kwargs):
assert self.instance.status == self.instance.STATUS_STARTED
if not self.instance.finished_at:
self.instance.finished_at = timezone.now()
self.instance.status = self.instance.STATUS_DONE
self.instance.modified_by = kwargs.get("user", None)
self.instance.save()
self._instantiate_next_activities()
def cancel(self, **kwargs):
assert self.instance.status in (
self.instance.STATUS_INSTANTIATED,
self.instance.STATUS_ERROR,
)
self.instance.status = self.instance.STATUS_CANCELED
self.instance.modified_by = kwargs.get("user", None)
self.instance.save()
def undo(self, **kwargs):
assert self.instance.status == self.instance.STATUS_DONE
self.instance.finished_at = None
self.instance.status = self.instance.STATUS_INSTANTIATED
self.instance.modified_by = kwargs.get("user", None)
self.instance.save()
undo_callback = getattr(self.process, "undo_{}".format(self.name), None)
if undo_callback is not None:
undo_callback()
def error(self, **kwargs):
assert self.instance.status != self.instance.STATUS_DONE
self.instance.status = self.instance.STATUS_ERROR
self.instance.finished_at = timezone.now()
self.instance.modified_by = kwargs.get("user", None)
self.instance.save()
def _get_next_activities(self):
for activity_name in self.flow._out_edges[self.name]:
activity = self.flow._get_activity_by_name(
process=self.process, activity_name=activity_name
)
if activity.should_skip():
for later_activity in activity._get_next_activities():
yield later_activity
else:
yield activity
def _instantiate_next_activities(self):
for activity in self._get_next_activities():
activity.instantiate(predecessor=self)
class State(Activity):
"""
    An activity that simply serves as a marker for a certain state being reached, e.g.
if the activity before it was conditional.
"""
def instantiate(self, **kwargs):
super(State, self).instantiate(**kwargs)
self.start()
self.finish()
class ViewActivity(Activity):
def __init__(self, view=None, **kwargs):
super(ViewActivity, self).__init__(**kwargs)
if view is None:
raise ValueError(
"A ViewActivity requires a view, non given for {}.{}".format(
self.flow.label, self.name
)
)
self.view = view
def has_view(self):
return True
def get_absolute_url(self):
return reverse(
"processlib:process-activity",
kwargs={"flow_label": self.flow.label, "activity_id": self.instance.pk},
)
def dispatch(self, request, *args, **kwargs):
kwargs["activity"] = self
return self.view(request, *args, **kwargs)
class FunctionActivity(Activity):
def __init__(self, callback=None, **kwargs):
self.callback = callback
super(FunctionActivity, self).__init__(**kwargs)
def instantiate(self, **kwargs):
super(FunctionActivity, self).instantiate(**kwargs)
self.start()
def start(self, **kwargs):
super(FunctionActivity, self).start(**kwargs)
try:
self.callback(self)
except Exception as e:
logger.exception(e)
self.error(exception=e)
return
self.finish()
def retry(self):
assert self.instance.status == self.instance.STATUS_ERROR
self.instance.status = self.instance.STATUS_INSTANTIATED
self.instance.finished_at = None
self.instance.save()
self.start()
class AsyncActivity(Activity):
def __init__(self, callback=None, **kwargs):
self.callback = callback
super(AsyncActivity, self).__init__(**kwargs)
def instantiate(self, **kwargs):
super(AsyncActivity, self).instantiate(**kwargs)
self.schedule()
def schedule(self, **kwargs):
self.instance.status = self.instance.STATUS_SCHEDULED
self.instance.scheduled_at = timezone.now()
self.instance.save()
transaction.on_commit(
lambda: run_async_activity.delay(self.flow.label, self.instance.pk)
)
def retry(self, **kwargs):
assert self.instance.status == self.instance.STATUS_ERROR
self.instance.status = self.instance.STATUS_INSTANTIATED
self.instance.finished_at = None
self.schedule(**kwargs)
def start(self, **kwargs):
super(AsyncActivity, self).start(**kwargs)
self.callback(self)
class AsyncViewActivity(AsyncActivity):
"""
An async activity that renders a view while the async task is running.
The view could be AsyncActivityView with a custom template_name
"""
def __init__(self, view=None, **kwargs):
super(AsyncViewActivity, self).__init__(**kwargs)
if view is None:
raise ValueError(
"An AsyncViewActivity requires a view, non given for {}.{}".format(
self.flow.label, self.name
)
)
self.view = view
def has_view(self):
return True
def get_absolute_url(self):
return reverse(
"processlib:process-activity",
kwargs={"flow_label": self.flow.label, "activity_id": self.instance.pk},
)
def dispatch(self, request, *args, **kwargs):
kwargs["activity"] = self
return self.view(request, *args, **kwargs)
class StartMixin(Activity):
def instantiate(
self, predecessor=None, instance_kwargs=None, request=None, **kwargs
):
assert not self.instance
assert not predecessor
instance_kwargs = instance_kwargs or {}
request_user = (
request.user if request and request.user.is_authenticated else None
)
user, group = self._get_assignment(
request_user=request_user, predecessor=predecessor
)
if "assigned_user" not in instance_kwargs:
instance_kwargs["assigned_user"] = user
if "assigned_group" not in instance_kwargs:
instance_kwargs["assigned_group"] = group
self.instance = self.flow.activity_model(
process=self.process, activity_name=self.name, **(instance_kwargs or {})
)
def finish(self, **kwargs):
assert self.instance.status == self.instance.STATUS_STARTED
if not self.instance.finished_at:
self.instance.finished_at = timezone.now()
self.process.save()
self.instance.process = self.process
self.instance.status = self.instance.STATUS_DONE
self.instance.modified_by = kwargs.get("user", None)
self.instance.save()
self._instantiate_next_activities()
class StartActivity(StartMixin, Activity):
pass
class StartViewActivity(StartMixin, ViewActivity):
pass
class EndActivity(Activity):
def instantiate(self, **kwargs):
super(EndActivity, self).instantiate(**kwargs)
self.start()
self.finish()
def finish(self, **kwargs):
super(EndActivity, self).finish(**kwargs)
update_fields = []
if not self.process.finished_at:
self.process.finished_at = self.instance.finished_at
update_fields.append("finished_at")
if not self.process.status == self.process.STATUS_DONE:
self.process.status = self.process.STATUS_DONE
update_fields.append("status")
self.process.save(update_fields=update_fields)
class EndRedirectActivity(EndActivity):
def __init__(self, redirect_url_callback=None, **kwargs):
self.redirect_url_callback = redirect_url_callback
super(EndActivity, self).__init__(**kwargs)
def instantiate(self, **kwargs):
# HACK: we skip the EndActivity implementation
# because it would finish the activity right away
super(EndActivity, self).instantiate(**kwargs)
def has_view(self):
return True
def get_absolute_url(self):
return reverse(
"processlib:process-activity",
kwargs={"flow_label": self.flow.label, "activity_id": self.instance.pk},
)
def dispatch(self, request, *args, **kwargs):
self.start()
url = reverse(
"processlib:process-detail", kwargs={"pk": self.instance.process.pk}
)
try:
if self.redirect_url_callback:
url = self.redirect_url_callback(self)
self.finish()
except Exception as e:
logger.exception(e)
self.error(exception=e)
return HttpResponseRedirect(url)
class FormActivity(Activity):
def __init__(self, form_class=None, **kwargs):
self.form_class = form_class
super(FormActivity, self).__init__(**kwargs)
def get_form(self, **kwargs):
return self.form_class(**kwargs)
class StartFormActivity(StartMixin, FormActivity):
pass
class IfElse(Activity):
def __init__(self, flow, process, instance, name, **kwargs):
super(IfElse, self).__init__(flow, process, instance, name, **kwargs)
class Wait(Activity):
def __init__(self, flow, process, instance, name, **kwargs):
wait_for = kwargs.pop("wait_for", None)
if not wait_for:
raise ValueError("Wait activity needs to wait for something.")
super(Wait, self).__init__(flow, process, instance, name, **kwargs)
self._wait_for = set(wait_for) if wait_for else None
def _find_existing_instance(self, predecessor):
candidates = list(
self.flow.activity_model.objects.filter(
process=self.process, activity_name=self.name
)
)
for candidate in candidates:
# FIXME this only corrects for simple loops, may fail with more complex scenarios
if not candidate.successors.filter(
status=candidate.STATUS_DONE, activity_name=self.name
).exists():
return candidate
raise self.flow.activity_model.DoesNotExist()
def instantiate(self, predecessor=None, instance_kwargs=None, **kwargs):
if predecessor is None:
raise ValueError("Can't wait for something without a predecessor.")
# find the instance
try:
self.instance = self._find_existing_instance(predecessor)
except self.flow.activity_model.DoesNotExist:
self.instance = self.flow.activity_model(
process=self.process, activity_name=self.name, **(instance_kwargs or {})
)
self.instance.save()
self.instance.predecessors.add(predecessor.instance)
self.start()
def start(self, **kwargs):
if not self.instance.started_at:
self.instance.started_at = timezone.now()
self.instance.status = self.instance.STATUS_STARTED
self.instance.save()
predecessor_names = {
instance.activity_name for instance in self.instance.predecessors.all()
}
if self._wait_for.issubset(predecessor_names):
self.finish()
| 31.793407 | 93 | 0.635144 | [
"BSD-3-Clause"
] | RaphaelKimmig/processlib | processlib/activity.py | 14,466 | Python |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import os
import sys
def update_allure_feature_name(results_dir: str, prefix: str):
"""Make Allure JSON results unique by pre-pending a prefix to: name, historyId & uuid.
Use it when not all of the test results show up in the Allure report.
This is because tests from different workers can actually have the same: historyId & uuid values.
You can use e.g. browser name as the prefix.
"""
results_dir_path = os.path.join(".", results_dir)
update_count = 0
for filename in os.listdir(results_dir_path):
if filename.endswith(".json"):
result_file = os.path.join(results_dir_path, filename)
with open(result_file, "r") as json_file:
report = json.loads(json_file.read())
report["name"] = f"{prefix} - {report['name']}"
report["historyId"] = f"{prefix}{report['historyId']}"
report["uuid"] = f"{prefix}{report['uuid']}"
with open(result_file, "w") as json_file:
json.dump(report, json_file, indent=2, ensure_ascii=False)
update_count += 1
print(f"Updated {update_count} JSON reports")
if __name__ == "__main__":
update_allure_feature_name(results_dir=sys.argv[1], prefix=sys.argv[2])
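    # Illustrative invocation (the directory and prefix values below are assumptions):
    #   python update_results.py allure-results chrome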
| 38.941176 | 101 | 0.638218 | [
"Apache-2.0"
] | tomaszwozniak/behave-docker-parallel | update_results.py | 1,324 | Python |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('forms', '0016_auto_20150330_1413'),
]
operations = [
migrations.AlterField(
model_name='radiosheet',
name='item_number',
field=models.PositiveIntegerField(help_text='Write in the number that describes the position of the story within the newscast. E.g. the first story in the newscast is item 1; the seventh story is item 7.', verbose_name='(1) Item Number', choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10), (11, 11), (12, 12), (13, 13), (14, 14), (15, 15), (16, 16), (17, 17), (18, 18), (19, 19), (20, 20), (21, 21), (22, 22), (23, 23), (24, 24), (25, 25), (26, 26), (27, 27), (28, 28), (29, 29)]),
preserve_default=True,
),
migrations.AlterField(
model_name='televisionsheet',
name='item_number',
field=models.PositiveIntegerField(help_text='Write in the number that describes the position of the story within the newscast. E.g. the first story in the newscast is item 1; the seventh story is item 7.', verbose_name='(1) Item Number', choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10), (11, 11), (12, 12), (13, 13), (14, 14), (15, 15), (16, 16), (17, 17), (18, 18), (19, 19), (20, 20), (21, 21), (22, 22), (23, 23), (24, 24), (25, 25), (26, 26), (27, 27), (28, 28), (29, 29)]),
preserve_default=True,
),
]
| 59.62963 | 532 | 0.560248 | [
"Apache-2.0"
] | Code4SA/gmmp | forms/migrations/0017_auto_20150331_1815.py | 1,610 | Python |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['PrivateEndpointConnectionArgs', 'PrivateEndpointConnection']
@pulumi.input_type
class PrivateEndpointConnectionArgs:
def __init__(__self__, *,
factory_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input['PrivateLinkConnectionApprovalRequestArgs']] = None):
"""
The set of arguments for constructing a PrivateEndpointConnection resource.
:param pulumi.Input[str] factory_name: The factory name.
:param pulumi.Input[str] resource_group_name: The resource group name.
:param pulumi.Input[str] private_endpoint_connection_name: The private endpoint connection name.
:param pulumi.Input['PrivateLinkConnectionApprovalRequestArgs'] properties: Core resource properties
"""
pulumi.set(__self__, "factory_name", factory_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if private_endpoint_connection_name is not None:
pulumi.set(__self__, "private_endpoint_connection_name", private_endpoint_connection_name)
if properties is not None:
pulumi.set(__self__, "properties", properties)
@property
@pulumi.getter(name="factoryName")
def factory_name(self) -> pulumi.Input[str]:
"""
The factory name.
"""
return pulumi.get(self, "factory_name")
@factory_name.setter
def factory_name(self, value: pulumi.Input[str]):
pulumi.set(self, "factory_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The resource group name.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="privateEndpointConnectionName")
def private_endpoint_connection_name(self) -> Optional[pulumi.Input[str]]:
"""
The private endpoint connection name.
"""
return pulumi.get(self, "private_endpoint_connection_name")
@private_endpoint_connection_name.setter
def private_endpoint_connection_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "private_endpoint_connection_name", value)
@property
@pulumi.getter
def properties(self) -> Optional[pulumi.Input['PrivateLinkConnectionApprovalRequestArgs']]:
"""
Core resource properties
"""
return pulumi.get(self, "properties")
@properties.setter
def properties(self, value: Optional[pulumi.Input['PrivateLinkConnectionApprovalRequestArgs']]):
pulumi.set(self, "properties", value)
class PrivateEndpointConnection(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
factory_name: Optional[pulumi.Input[str]] = None,
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['PrivateLinkConnectionApprovalRequestArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Private Endpoint Connection ARM resource.
API Version: 2018-06-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] factory_name: The factory name.
:param pulumi.Input[str] private_endpoint_connection_name: The private endpoint connection name.
:param pulumi.Input[pulumi.InputType['PrivateLinkConnectionApprovalRequestArgs']] properties: Core resource properties
:param pulumi.Input[str] resource_group_name: The resource group name.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PrivateEndpointConnectionArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Private Endpoint Connection ARM resource.
API Version: 2018-06-01.
:param str resource_name: The name of the resource.
:param PrivateEndpointConnectionArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PrivateEndpointConnectionArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
factory_name: Optional[pulumi.Input[str]] = None,
private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['PrivateLinkConnectionApprovalRequestArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PrivateEndpointConnectionArgs.__new__(PrivateEndpointConnectionArgs)
if factory_name is None and not opts.urn:
raise TypeError("Missing required property 'factory_name'")
__props__.__dict__["factory_name"] = factory_name
__props__.__dict__["private_endpoint_connection_name"] = private_endpoint_connection_name
__props__.__dict__["properties"] = properties
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["etag"] = None
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:datafactory:PrivateEndpointConnection"), pulumi.Alias(type_="azure-native:datafactory/v20180601:PrivateEndpointConnection"), pulumi.Alias(type_="azure-nextgen:datafactory/v20180601:PrivateEndpointConnection")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(PrivateEndpointConnection, __self__).__init__(
'azure-native:datafactory:PrivateEndpointConnection',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'PrivateEndpointConnection':
"""
Get an existing PrivateEndpointConnection resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = PrivateEndpointConnectionArgs.__new__(PrivateEndpointConnectionArgs)
__props__.__dict__["etag"] = None
__props__.__dict__["name"] = None
__props__.__dict__["properties"] = None
__props__.__dict__["type"] = None
return PrivateEndpointConnection(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
Etag identifies change in the resource.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.RemotePrivateEndpointConnectionResponse']:
"""
Core resource properties
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The resource type.
"""
return pulumi.get(self, "type")
| 43.659091 | 297 | 0.669755 | [
"Apache-2.0"
] | polivbr/pulumi-azure-native | sdk/python/pulumi_azure_native/datafactory/private_endpoint_connection.py | 9,605 | Python |
# By @HYPER-MOD
from PIL import Image, ImageEnhance, ImageDraw
import numpy as np
import os
import cv2
import shutil
async def circle_with_bg(client, message):
try:
userid = str(message.chat.id)
if not os.path.isdir(f"./DOWNLOADS/{userid}"):
os.makedirs(f"./DOWNLOADS/{userid}")
download_location = "./DOWNLOADS" + "/" + userid + "/" + userid + ".jpg"
edit_img_loc = "./DOWNLOADS" + "/" + userid + "/" + "circle.png"
if not message.reply_to_message.empty:
msg = await message.reply_to_message.reply_text(
"Downloading image", quote=True
)
a = await client.download_media(
message=message.reply_to_message, file_name=download_location
)
await msg.edit("Processing Image...")
img = Image.open(a).convert("RGB")
npImage = np.array(img)
h, w = img.size
alpha = Image.new("L", img.size, 0)
draw = ImageDraw.Draw(alpha)
draw.pieslice([0, 0, h, w], 0, 360, fill=255)
npAlpha = np.array(alpha)
npImage = np.dstack((npImage, npAlpha))
Image.fromarray(npImage).save(edit_img_loc)
await message.reply_chat_action("upload_photo")
await message.reply_to_message.reply_photo(edit_img_loc, quote=True)
await msg.delete()
else:
await message.reply_text("Why did you delete that??")
try:
shutil.rmtree(f"./DOWNLOADS/{userid}")
except Exception:
pass
except Exception as e:
print("circle_with_bg-error - " + str(e))
if "USER_IS_BLOCKED" in str(e):
return
else:
try:
await message.reply_to_message.reply_text(
"Something went wrong!", quote=True
)
except Exception:
return
async def circle_without_bg(client, message):
try:
userid = str(message.chat.id)
if not os.path.isdir(f"./DOWNLOADS/{userid}"):
os.makedirs(f"./DOWNLOADS/{userid}")
download_location = "./DOWNLOADS" + "/" + userid + "/" + userid + ".jpg"
edit_img_loc = "./DOWNLOADS" + "/" + userid + "/" + "circle.png"
if not message.reply_to_message.empty:
msg = await message.reply_to_message.reply_text(
"Downloading image", quote=True
)
a = await client.download_media(
message=message.reply_to_message, file_name=download_location
)
await msg.edit("Processing Image...")
img = Image.open(a).convert("RGB")
npImage = np.array(img)
h, w = img.size
alpha = Image.new("L", img.size, 0)
draw = ImageDraw.Draw(alpha)
draw.pieslice([0, 0, h, w], 0, 360, fill=255)
npAlpha = np.array(alpha)
npImage = np.dstack((npImage, npAlpha))
Image.fromarray(npImage).save(edit_img_loc)
await message.reply_chat_action("upload_document")
await message.reply_to_message.reply_document(edit_img_loc, quote=True)
await msg.delete()
else:
await message.reply_text("Why did you delete that??")
try:
shutil.rmtree(f"./DOWNLOADS/{userid}")
except Exception:
pass
except Exception as e:
print("circle_without_bg-error - " + str(e))
if "USER_IS_BLOCKED" in str(e):
return
else:
try:
await message.reply_to_message.reply_text(
"Something went wrong!", quote=True
)
except Exception:
return
async def sticker(client, message):
try:
userid = str(message.chat.id)
if not os.path.isdir(f"./DOWNLOADS/{userid}"):
os.makedirs(f"./DOWNLOADS/{userid}")
download_location = "./DOWNLOADS" + "/" + userid + "/" + userid + ".jpg"
edit_img_loc = "./DOWNLOADS" + "/" + userid + "/" + "sticker.webp"
if not message.reply_to_message.empty:
msg = await message.reply_to_message.reply_text(
"Downloading image", quote=True
)
a = await client.download_media(
message=message.reply_to_message, file_name=download_location
)
await msg.edit("Processing Image...")
os.rename(a, edit_img_loc)
await message.reply_to_message.reply_sticker(edit_img_loc, quote=True)
await msg.delete()
else:
await message.reply_text("Why did you delete that??")
try:
shutil.rmtree(f"./DOWNLOADS/{userid}")
except Exception:
pass
except Exception as e:
print("sticker-error - " + str(e))
if "USER_IS_BLOCKED" in str(e):
return
else:
try:
await message.reply_to_message.reply_text(
"Something went wrong!", quote=True
)
except Exception:
return
def add_corners(im, rad):
circle = Image.new("L", (rad * 2, rad * 2), 0)
draw = ImageDraw.Draw(circle)
draw.ellipse((0, 0, rad * 2, rad * 2), fill=255)
alpha = Image.new("L", im.size, 255)
w, h = im.size
alpha.paste(circle.crop((0, 0, rad, rad)), (0, 0))
alpha.paste(circle.crop((0, rad, rad, rad * 2)), (0, h - rad))
alpha.paste(circle.crop((rad, 0, rad * 2, rad)), (w - rad, 0))
alpha.paste(circle.crop((rad, rad, rad * 2, rad * 2)), (w - rad, h - rad))
im.putalpha(alpha)
return im
async def edge_curved(client, message):
try:
userid = str(message.chat.id)
if not os.path.isdir(f"./DOWNLOADS/{userid}"):
os.makedirs(f"./DOWNLOADS/{userid}")
download_location = "./DOWNLOADS" + "/" + userid + "/" + userid + ".jpg"
edit_img_loc = "./DOWNLOADS" + "/" + userid + "/" + "edge_curved.webp"
if not message.reply_to_message.empty:
msg = await message.reply_to_message.reply_text(
"Downloading image", quote=True
)
a = await client.download_media(
message=message.reply_to_message, file_name=download_location
)
await msg.edit("Processing Image...")
im = Image.open(a)
im = add_corners(im, 100)
im.save(edit_img_loc)
await message.reply_chat_action("upload_photo")
await message.reply_to_message.reply_sticker(edit_img_loc, quote=True)
await msg.delete()
else:
await message.reply_text("Why did you delete that??")
try:
shutil.rmtree(f"./DOWNLOADS/{userid}")
except Exception:
pass
except Exception as e:
print("edge_curved-error - " + str(e))
if "USER_IS_BLOCKED" in str(e):
return
else:
try:
await message.reply_to_message.reply_text(
"Something went wrong!", quote=True
)
except Exception:
return
async def contrast(client, message):
try:
userid = str(message.chat.id)
if not os.path.isdir(f"./DOWNLOADS/{userid}"):
os.makedirs(f"./DOWNLOADS/{userid}")
download_location = "./DOWNLOADS" + "/" + userid + "/" + userid + ".jpg"
edit_img_loc = "./DOWNLOADS" + "/" + userid + "/" + "contrast.jpg"
if not message.reply_to_message.empty:
msg = await message.reply_to_message.reply_text(
"Downloading image", quote=True
)
a = await client.download_media(
message=message.reply_to_message, file_name=download_location
)
await msg.edit("Processing Image...")
image = Image.open(a)
contrast = ImageEnhance.Contrast(image)
contrast.enhance(1.5).save(edit_img_loc)
await message.reply_chat_action("upload_photo")
await message.reply_to_message.reply_photo(edit_img_loc, quote=True)
await msg.delete()
else:
await message.reply_text("Why did you delete that??")
try:
shutil.rmtree(f"./DOWNLOADS/{userid}")
except Exception:
pass
except Exception as e:
print("contrast-error - " + str(e))
if "USER_IS_BLOCKED" in str(e):
return
else:
try:
await message.reply_to_message.reply_text(
"Something went wrong!", quote=True
)
except Exception:
return
def sepia(img):
width, height = img.size
new_img = img.copy()
for x in range(width):
for y in range(height):
red, green, blue = img.getpixel((x, y))
new_val = 0.3 * red + 0.59 * green + 0.11 * blue
new_red = int(new_val * 2)
if new_red > 255:
new_red = 255
new_green = int(new_val * 1.5)
if new_green > 255:
new_green = 255
new_blue = int(new_val)
if new_blue > 255:
new_blue = 255
new_img.putpixel((x, y), (new_red, new_green, new_blue))
return new_img
async def sepia_mode(client, message):
try:
userid = str(message.chat.id)
if not os.path.isdir(f"./DOWNLOADS/{userid}"):
os.makedirs(f"./DOWNLOADS/{userid}")
download_location = "./DOWNLOADS" + "/" + userid + "/" + userid + ".jpg"
edit_img_loc = "./DOWNLOADS" + "/" + userid + "/" + "sepia.jpg"
if not message.reply_to_message.empty:
msg = await message.reply_to_message.reply_text(
"Downloading image", quote=True
)
a = await client.download_media(
message=message.reply_to_message, file_name=download_location
)
await msg.edit("Processing Image...")
image = Image.open(a)
new_img = sepia(image)
new_img.save(edit_img_loc)
await message.reply_chat_action("upload_photo")
await message.reply_to_message.reply_photo(edit_img_loc, quote=True)
await msg.delete()
else:
await message.reply_text("Why did you delete that??")
try:
shutil.rmtree(f"./DOWNLOADS/{userid}")
except Exception:
pass
except Exception as e:
print("sepia_mode-error - " + str(e))
if "USER_IS_BLOCKED" in str(e):
return
else:
try:
await message.reply_to_message.reply_text(
"Something went wrong!", quote=True
)
except Exception:
return
def dodgeV2(x, y):
    # "colour dodge" blend: divide the grayscale image by the inverted blur,
    # which brightens everything except strong edges (the pencil-sketch effect)
    return cv2.divide(x, 255 - y, scale=256)
async def pencil(client, message):
try:
userid = str(message.chat.id)
if not os.path.isdir(f"./DOWNLOADS/{userid}"):
os.makedirs(f"./DOWNLOADS/{userid}")
download_location = "./DOWNLOADS" + "/" + userid + "/" + userid + ".jpg"
edit_img_loc = "./DOWNLOADS" + "/" + userid + "/" + "pencil.jpg"
if not message.reply_to_message.empty:
msg = await message.reply_to_message.reply_text(
"Downloading image", quote=True
)
a = await client.download_media(
message=message.reply_to_message, file_name=download_location
)
await msg.edit("Processing Image...")
img = cv2.imread(a)
img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img_invert = cv2.bitwise_not(img_gray)
img_smoothing = cv2.GaussianBlur(img_invert, (21, 21), sigmaX=0, sigmaY=0)
final_img = dodgeV2(img_gray, img_smoothing)
cv2.imwrite(edit_img_loc, final_img)
await message.reply_chat_action("upload_photo")
await message.reply_to_message.reply_photo(edit_img_loc, quote=True)
await msg.delete()
else:
await message.reply_text("Why did you delete that??")
try:
shutil.rmtree(f"./DOWNLOADS/{userid}")
except Exception:
pass
except Exception as e:
print("pencil-error - " + str(e))
if "USER_IS_BLOCKED" in str(e):
return
else:
try:
await message.reply_to_message.reply_text(
"Something went wrong!", quote=True
)
except Exception:
return
def color_quantization(img, k):
    # reduce the image to k representative colours using k-means clustering
    # on the flattened BGR pixel values
    data = np.float32(img).reshape((-1, 3))
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 20, 1.0)
_, label, center = cv2.kmeans(
data, k, None, criteria, 10, cv2.KMEANS_RANDOM_CENTERS
)
center = np.uint8(center)
result = center[label.flatten()]
result = result.reshape(img.shape)
return result
async def cartoon(client, message):
try:
userid = str(message.chat.id)
if not os.path.isdir(f"./DOWNLOADS/{userid}"):
os.makedirs(f"./DOWNLOADS/{userid}")
download_location = "./DOWNLOADS" + "/" + userid + "/" + userid + ".jpg"
edit_img_loc = "./DOWNLOADS" + "/" + userid + "/" + "kang.jpg"
if not message.reply_to_message.empty:
msg = await message.reply_to_message.reply_text(
"Downloading image", quote=True
)
a = await client.download_media(
message=message.reply_to_message, file_name=download_location
)
await msg.edit("Processing Image...")
img = cv2.imread(a)
edges = cv2.Canny(img, 100, 200)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
edges = cv2.adaptiveThreshold(
gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 9, 5
)
color = cv2.bilateralFilter(img, d=9, sigmaColor=200, sigmaSpace=200)
cv2.bitwise_and(color, color, mask=edges)
img_1 = color_quantization(img, 7)
cv2.imwrite(edit_img_loc, img_1)
await message.reply_chat_action("upload_photo")
await message.reply_to_message.reply_photo(edit_img_loc, quote=True)
await msg.delete()
else:
await message.reply_text("Why did you delete that??")
try:
shutil.rmtree(f"./DOWNLOADS/{userid}")
except Exception:
pass
except Exception as e:
print("cartoon-error - " + str(e))
if "USER_IS_BLOCKED" in str(e):
return
else:
try:
await message.reply_to_message.reply_text(
"Something went wrong!", quote=True
)
except Exception:
return
| 37.626566 | 86 | 0.550057 | [
"MIT"
] | HYPER-MODZ/image-editor-bot | image/edit_2.py | 15,013 | Python |
#!/usr/bin/env python
# coding: utf-8
# # WORKFLOW PROCEDURE
# In[ ]:
# import utilities
from ds_utils import *
# to plot results
get_ipython().run_line_magic('matplotlib', 'inline')
# ## How to use this code:
#
# ### Step 1
#
# From a list of train and test datasets, run the baseline_generator function and check the results
# in the output file. This file is sorted by AUC value for each dataset and algorithm. You will probably also want to
# run ROC_baseline_plot to get a visualization of the baseline results. This gives us
# an idea of the general performance, so the next step is to optimize the best model(s) on the best dataset
# according to those results. If you want to optimize more than one model, store them in a list and run a grid search
# over all of them with the nestedCV function.
#
#
# ### Step 2
#
# Pick the dataset and the algorithm to optimize and pass them to the nestedCV function. This function will find the best combination of
# parameters and train a model based on it. The fitted model is returned as output, so there is no need to fit it again. This
# output can be used in the next step to test these models on an unseen test set that was not used in the nestedCV phase.
#
#
# ### Step 3
#
# From a list of models optimized by the nestedCV function, predict classes on an unseen test set using check_predictions_unseen_test_set.
# This function will return a file sorted by AUC value as well as a ROC curve plot. This file tells us which model achieves the best performance on the
# test set.
#
#
# ### Step 4
#
# Further analysis: plot some graphs such as the ROC curve, PR curve, etc. (a sketch is included in the last cell of this notebook).
# In[ ]:
# set list of train and test files
listFiles_tr = ['minitrain.csv', 's.ds_MA_tr.csv']
listFiles_ts = ['minitest.csv', 's.ds_MA_ts.csv']
# run a baseline with datasets from above
baseline_generator(listFiles_tr, listFiles_ts)
# In[ ]:
# plot the ROC curves for the dataset which achieves the best performance
# as we can see, 'minitrain.csv' is the dataset which seems to achieve the best performance,
# so let's plot ROC curves on it.
newFile_tr = 'minitrain.csv' # new training data
newFile_ts = 'minitest.csv' # new testing data
ROC_baseline_plot(newFile_tr, newFile_ts)
# According to these baseline results it seems that GradientBoostingClassifier is a good candidate, as it is one of the models with the highest AUC, so we could try to optimize its parameters on the minitrain dataset since that is the one that suits GradientBoostingClassifier best. For simplicity we will look for parameters on an algorithm which is faster to train, say Logistic Regression, and another more complex one such as Random Forest.
#
# So we should proceed as follows:
# Once we have decided on a dataset we can extract its values just once. By doing this we can use some
# useful functions like the ones described below.
# In[ ]:
# Until now we have been using just one dataset, so we keep newFile_tr and newFile_ts from above
# Get data from that datasets
values = datasets_parser(newFile_tr, newFile_ts, outVar=outVar)
X_tr_data = values[0] # X_train data
y_tr_data = values[1] # y_train data
X_ts_data = values[2] # X_test data
y_ts_data = values[3] # y_test data
# In[ ]:
def gridsearchCV_strategy(X_tr_data, y_tr_data, list_estimators, list_params):
"""
    The lengths of list_estimators and list_params must be the same: for each
    estimator you need a dict of parameters to optimize. E.g.
    list_estimators = [RandomForestClassifier(),
                       LogisticRegression()]
    list_params = [{'n_estimators': [500, 1000],
                    'max_features': [8, 10],
                    'max_depth': [4, 6, 8],
                    'criterion': ['gini', 'entropy']},
                   {'C': [100, 1000], 'solver': ['lbfgs'],
                    'max_iter': [1000, 2000], 'n_jobs': [-1]}]
"""
    # First check that both lists have the same length
if len(list_estimators) != len(list_params):
raise ValueError("list_estimators and list_params must have the same length")
# Estimate weights in the data used to look for parameters
class_weights = set_weights(y_tr_data)
    # iterate through the list of estimators to see if any of them has parameters such as random_state,
    # class_weight or n_jobs; if so, set them to the chosen seed for this run, the class weights estimated
    # in this function (obtained from the training data used), and all available cores respectively
for est in list_estimators:
est_params = est.get_params()
if 'class_weight' in est_params:
est.set_params(class_weight = class_weights)
if 'n_jobs' in est_params:
est.set_params(n_jobs = -1)
if 'random_state' in est_params:
est.set_params(random_state = seed)
dict_estimators_to_optimize = {}
for estimator, parameters in zip(list_estimators, list_params):
dict_estimators_to_optimize[estimator] = parameters
list_optimized_models = [nestedCV(estimator, X_tr_data, y_tr_data, param_grid=parameters)
for estimator, parameters in dict_estimators_to_optimize.items()]
#check which params were used in the list_optimized_models
#for op_model in list_optimized_models:
# print(op_model.get_params())
return list_optimized_models
# In[ ]:
# Example of execution
list_estimators = [RandomForestClassifier(),LogisticRegression()]
list_params = [{'n_estimators': [500],
'max_features': [8],
'max_depth' : [8],
'criterion' :['entropy']}, {'C': [1000], 'solver' : ['lbfgs'],
'max_iter' : [200]
}]
list_optimized_models = gridsearchCV_strategy(X_tr_data, y_tr_data, list_estimators, list_params)
# Convergence warnings are due to the scale of the dataset. It would converge faster using the StandardScaler
# transformation from scikit-learn.
# In[ ]:
# Make predictions on unseen dataset
check_predictions_unseen_test_set(list_optimized_models, X_ts_data, y_ts_data, newFile_ts)
# In[ ]:
| 35.111732 | 436 | 0.694829 | [
"Apache-2.0"
] | kennethriva/Machine-Learning-for-drugs-cytokines | workflow_procedure_example.py | 6,285 | Python |
import logging
from logging import basicConfig, getLogger
from lib import do_something
def main():
log_fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
basicConfig(format=log_fmt,
level="DEBUG")
logger = getLogger(__name__)
for i in range(3):
logger.info("---------------{}-th trial------------".format(i))
logger.debug("debugging...")
logger.info("tell information")
logger.warn("warn it comes from something like ...")
logger.error("Ops some error is occured")
logger.critical("critical event happened")
logger.debug("It's raining again")
logger.info("with hail the size of hailstones")
do_something()
if __name__ == '__main__':
main()
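# Illustrative output line produced by the log format above (the timestamp is just an example):
#   2021-01-01 12:00:00,000 - __main__ - INFO - tell information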
| 28.296296 | 71 | 0.602094 | [
"MIT"
] | terasakisatoshi/pythonCodes | loggingExer/moduleExer/start.py | 764 | Python |
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
# pylint:disable=protected-access
import json
import re
import urllib.parse
from collections import namedtuple
from pathlib import Path
from random import randint
from typing import Callable, List
from uuid import uuid4
import pytest
import respx
from fastapi import FastAPI, status
from models_library.services import ServiceDockerData, ServiceKeyVersion
from simcore_service_director_v2.models.schemas.services import (
RunningServiceDetails,
ServiceExtras,
)
from simcore_service_director_v2.modules.director_v0 import DirectorV0Client
@pytest.fixture(autouse=True)
def minimal_director_config(project_env_devel_environment, monkeypatch):
"""set a minimal configuration for testing the director connection only"""
monkeypatch.setenv("DIRECTOR_ENABLED", "1")
monkeypatch.setenv("POSTGRES_ENABLED", "0")
monkeypatch.setenv("CELERY_ENABLED", "0")
monkeypatch.setenv("REGISTRY_ENABLED", "0")
@pytest.fixture
def mocked_director_v0_service_api(minimal_app, entrypoint, exp_data, resp_alias):
with respx.mock(
base_url=minimal_app.state.settings.director_v0.base_url(include_tag=False),
assert_all_called=False,
assert_all_mocked=True,
) as respx_mock:
# lists services
respx_mock.get(
urllib.parse.unquote(entrypoint),
content=exp_data,
alias=resp_alias,
)
yield respx_mock
ForwardToDirectorParams = namedtuple(
"ForwardToDirectorParams", "entrypoint,exp_status,exp_data,resp_alias"
)
def _get_list_services_calls() -> List[ForwardToDirectorParams]:
return [
ForwardToDirectorParams(
entrypoint="/v0/services",
exp_status=status.HTTP_200_OK,
exp_data={"data": ["service1", "service2"]},
resp_alias="list_all_services",
),
ForwardToDirectorParams(
entrypoint="/v0/services?service_type=computational",
exp_status=status.HTTP_200_OK,
exp_data={"data": ["service1", "service2"]},
resp_alias="list_computational_services",
),
ForwardToDirectorParams(
entrypoint="/v0/services?service_type=dynamic",
exp_status=status.HTTP_200_OK,
exp_data={"data": ["service1", "service2"]},
resp_alias="list_dynamic_services",
),
]
def _get_service_version_calls() -> List[ForwardToDirectorParams]:
# TODO: here we see the return value is currently not validated
return [
ForwardToDirectorParams(
entrypoint="/v0/services/simcore%2Fservices%2Fdynamic%2Fmyservice/1.3.4",
exp_status=status.HTTP_200_OK,
exp_data={"data": ["stuff about my service"]},
resp_alias="get_service_version",
)
]
def _get_service_version_extras_calls() -> List[ForwardToDirectorParams]:
# TODO: here we see the return value is currently not validated
return [
ForwardToDirectorParams(
entrypoint="/v0/services/simcore%2Fservices%2Fdynamic%2Fmyservice/1.3.4/extras",
exp_status=status.HTTP_200_OK,
exp_data={"data": "extra stuff about my service"},
resp_alias="get_service_extras",
)
]
@pytest.mark.parametrize(
"entrypoint,exp_status,exp_data,resp_alias",
_get_list_services_calls()
+ _get_service_version_calls()
+ _get_service_version_extras_calls(),
)
def test_forward_to_director(
client, mocked_director_v0_service_api, entrypoint, exp_status, exp_data, resp_alias
):
response = client.get(entrypoint)
assert response.status_code == exp_status
assert response.json() == exp_data
assert mocked_director_v0_service_api[resp_alias].called
@pytest.fixture(scope="session")
def fake_service_details(mocks_dir: Path) -> ServiceDockerData:
fake_service_path = mocks_dir / "fake_service.json"
assert fake_service_path.exists()
fake_service_data = json.loads(fake_service_path.read_text())
return ServiceDockerData(**fake_service_data)
@pytest.fixture
def fake_service_extras(random_json_from_schema: Callable) -> ServiceExtras:
random_extras = ServiceExtras(
**random_json_from_schema(ServiceExtras.schema_json(indent=2))
)
return random_extras
@pytest.fixture
def fake_running_service_details(
random_json_from_schema: Callable,
) -> RunningServiceDetails:
random_data = random_json_from_schema(RunningServiceDetails.schema_json(indent=2))
# fix port stuff, the randomiser does not understand positive ints
KEYS_TO_FIX = ["published_port", "service_port"]
for k in KEYS_TO_FIX:
if k in random_data:
random_data[k] = randint(1, 50000)
random_details = RunningServiceDetails(**random_data)
return random_details
@pytest.fixture
def mocked_director_service_fcts(
minimal_app: FastAPI,
fake_service_details: ServiceDockerData,
fake_service_extras: ServiceExtras,
fake_running_service_details: RunningServiceDetails,
):
with respx.mock(
base_url=minimal_app.state.settings.director_v0.base_url(include_tag=False),
assert_all_called=False,
assert_all_mocked=True,
) as respx_mock:
respx_mock.get(
"/v0/services/simcore%2Fservices%2Fdynamic%2Fmyservice/1.3.4",
content={"data": [fake_service_details.dict(by_alias=True)]},
alias="get_service_version",
)
respx_mock.get(
"/v0/service_extras/simcore%2Fservices%2Fdynamic%2Fmyservice/1.3.4",
content={"data": fake_service_extras.dict(by_alias=True)},
alias="get_service_extras",
)
pattern = re.compile(
r"v0/running_interactive_services/[0-9a-fA-F]{8}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{4}-?[0-9a-fA-F]{12}$"
)
respx_mock.get(
pattern,
content={"data": fake_running_service_details.dict(by_alias=True)},
alias="get_running_service_details",
)
yield respx_mock
async def test_get_service_details(
minimal_app: FastAPI,
mocked_director_service_fcts,
fake_service_details: ServiceDockerData,
):
director_client: DirectorV0Client = minimal_app.state.director_v0_client
service = ServiceKeyVersion(
key="simcore/services/dynamic/myservice", version="1.3.4"
)
service_details: ServiceDockerData = await director_client.get_service_details(
service
)
assert mocked_director_service_fcts["get_service_version"].called
assert fake_service_details == service_details
async def test_get_service_extras(
minimal_app: FastAPI,
mocked_director_service_fcts,
fake_service_extras: ServiceExtras,
):
director_client: DirectorV0Client = minimal_app.state.director_v0_client
service = ServiceKeyVersion(
key="simcore/services/dynamic/myservice", version="1.3.4"
)
service_extras: ServiceExtras = await director_client.get_service_extras(service)
assert mocked_director_service_fcts["get_service_extras"].called
assert fake_service_extras == service_extras
async def test_get_running_service_details(
minimal_app: FastAPI,
mocked_director_service_fcts,
fake_running_service_details: RunningServiceDetails,
):
director_client: DirectorV0Client = minimal_app.state.director_v0_client
service_details: RunningServiceDetails = (
await director_client.get_running_service_details(str(uuid4()))
)
assert mocked_director_service_fcts["get_running_service_details"].called
assert fake_running_service_details == service_details
| 33.733624 | 127 | 0.71767 | [
"MIT"
] | GitHK/osparc-simcore-forked | services/director-v2/tests/unit/test_modules_director_v0.py | 7,725 | Python |
#!/usr/bin/env python3
# Copyright (c) 2015-2018 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.blocktools import get_masternode_payment, create_coinbase, create_block
from test_framework.mininode import *
from test_framework.test_framework import cryptocauseTestFramework
from test_framework.util import *
from time import *
'''
llmq-is-cl-conflicts.py
Checks conflict handling between ChainLocks and InstantSend
'''
class TestNode(SingleNodeConnCB):
def __init__(self):
SingleNodeConnCB.__init__(self)
self.clsigs = {}
self.islocks = {}
def send_clsig(self, clsig):
hash = uint256_from_str(hash256(clsig.serialize()))
self.clsigs[hash] = clsig
inv = msg_inv([CInv(29, hash)])
self.send_message(inv)
def send_islock(self, islock):
hash = uint256_from_str(hash256(islock.serialize()))
self.islocks[hash] = islock
inv = msg_inv([CInv(30, hash)])
self.send_message(inv)
def on_getdata(self, conn, message):
for inv in message.inv:
if inv.hash in self.clsigs:
self.send_message(self.clsigs[inv.hash])
if inv.hash in self.islocks:
self.send_message(self.islocks[inv.hash])
class LLMQ_IS_CL_Conflicts(cryptocauseTestFramework):
def __init__(self):
super().__init__(6, 5, [], fast_dip3_enforcement=True)
#disable_mocktime()
def run_test(self):
while self.nodes[0].getblockchaininfo()["bip9_softforks"]["dip0008"]["status"] != "active":
self.nodes[0].generate(10)
sync_blocks(self.nodes, timeout=60*5)
self.test_node = TestNode()
self.test_node.add_connection(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node))
NetworkThread().start() # Start up network handling in another thread
self.test_node.wait_for_verack()
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.nodes[0].spork("SPORK_19_CHAINLOCKS_ENABLED", 0)
self.nodes[0].spork("SPORK_20_INSTANTSEND_LLMQ_BASED", 0)
self.wait_for_sporks_same()
self.mine_quorum()
# mine single block, wait for chainlock
self.nodes[0].generate(1)
self.wait_for_chainlock_tip_all_nodes()
self.test_chainlock_overrides_islock(False)
self.test_chainlock_overrides_islock(True)
self.test_islock_overrides_nonchainlock()
def test_chainlock_overrides_islock(self, test_block_conflict):
# create three raw TXs, they will conflict with each other
rawtx1 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx2 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx3 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx1_obj = FromHex(CTransaction(), rawtx1)
rawtx2_obj = FromHex(CTransaction(), rawtx2)
rawtx3_obj = FromHex(CTransaction(), rawtx3)
rawtx1_txid = self.nodes[0].sendrawtransaction(rawtx1)
rawtx2_txid = encode(hash256(hex_str_to_bytes(rawtx2))[::-1], 'hex_codec').decode('ascii')
rawtx3_txid = encode(hash256(hex_str_to_bytes(rawtx3))[::-1], 'hex_codec').decode('ascii')
# Create a chained TX on top of tx1
inputs = []
n = 0
for out in rawtx1_obj.vout:
if out.nValue == 100000000:
inputs.append({"txid": rawtx1_txid, "vout": n})
n += 1
rawtx4 = self.nodes[0].createrawtransaction(inputs, {self.nodes[0].getnewaddress(): 0.999})
rawtx4 = self.nodes[0].signrawtransaction(rawtx4)['hex']
rawtx4_txid = self.nodes[0].sendrawtransaction(rawtx4)
for node in self.nodes:
self.wait_for_instantlock(rawtx1_txid, node)
self.wait_for_instantlock(rawtx4_txid, node)
block = self.create_block(self.nodes[0], [rawtx2_obj])
if test_block_conflict:
submit_result = self.nodes[0].submitblock(ToHex(block))
assert(submit_result == "conflict-tx-lock")
cl = self.create_chainlock(self.nodes[0].getblockcount() + 1, block.sha256)
self.test_node.send_clsig(cl)
# Give the CLSIG some time to propagate. We unfortunately can't check propagation here as "getblock/getblockheader"
# is required to check for CLSIGs, but this requires the block header to be propagated already
sleep(1)
# The block should get accepted now, and at the same time prune the conflicting ISLOCKs
submit_result = self.nodes[1].submitblock(ToHex(block))
if test_block_conflict:
assert(submit_result == "duplicate")
else:
assert(submit_result is None)
for node in self.nodes:
self.wait_for_chainlock(node, "%064x" % block.sha256)
# Create a chained TX on top of tx2
inputs = []
n = 0
for out in rawtx2_obj.vout:
if out.nValue == 100000000:
inputs.append({"txid": rawtx2_txid, "vout": n})
n += 1
rawtx5 = self.nodes[0].createrawtransaction(inputs, {self.nodes[0].getnewaddress(): 0.999})
rawtx5 = self.nodes[0].signrawtransaction(rawtx5)['hex']
rawtx5_txid = self.nodes[0].sendrawtransaction(rawtx5)
for node in self.nodes:
self.wait_for_instantlock(rawtx5_txid, node)
# Lets verify that the ISLOCKs got pruned
for node in self.nodes:
assert_raises_jsonrpc(-5, "No such mempool or blockchain transaction", node.getrawtransaction, rawtx1_txid, True)
assert_raises_jsonrpc(-5, "No such mempool or blockchain transaction", node.getrawtransaction, rawtx4_txid, True)
rawtx = node.getrawtransaction(rawtx2_txid, True)
assert(rawtx['chainlock'])
assert(rawtx['instantlock'])
assert(not rawtx['instantlock_internal'])
def test_islock_overrides_nonchainlock(self):
# create two raw TXs, they will conflict with each other
rawtx1 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx2 = self.create_raw_tx(self.nodes[0], self.nodes[0], 1, 1, 100)['hex']
rawtx1_txid = encode(hash256(hex_str_to_bytes(rawtx1))[::-1], 'hex_codec').decode('ascii')
rawtx2_txid = encode(hash256(hex_str_to_bytes(rawtx2))[::-1], 'hex_codec').decode('ascii')
# Create an ISLOCK but don't broadcast it yet
islock = self.create_islock(rawtx2)
# Stop enough MNs so that ChainLocks don't work anymore
for i in range(3):
self.stop_node(len(self.nodes) - 1)
self.nodes.pop(len(self.nodes) - 1)
self.mninfo.pop(len(self.mninfo) - 1)
# Send tx1, which will later conflict with the ISLOCK
self.nodes[0].sendrawtransaction(rawtx1)
# fast forward 11 minutes, so that the TX is considered safe and included in the next block
set_mocktime(get_mocktime() + int(60 * 11))
set_node_times(self.nodes, get_mocktime())
# Mine the conflicting TX into a block
good_tip = self.nodes[0].getbestblockhash()
self.nodes[0].generate(2)
self.sync_all()
# Assert that the conflicting tx got mined and the locked TX is not valid
assert(self.nodes[0].getrawtransaction(rawtx1_txid, True)['confirmations'] > 0)
assert_raises_jsonrpc(-25, "Missing inputs", self.nodes[0].sendrawtransaction, rawtx2)
# Send the ISLOCK, which should result in the last 2 blocks to be invalidated, even though the nodes don't know
# the locked transaction yet
self.test_node.send_islock(islock)
sleep(5)
assert(self.nodes[0].getbestblockhash() == good_tip)
assert(self.nodes[1].getbestblockhash() == good_tip)
# Send the actual transaction and mine it
self.nodes[0].sendrawtransaction(rawtx2)
self.nodes[0].generate(1)
self.sync_all()
assert(self.nodes[0].getrawtransaction(rawtx2_txid, True)['confirmations'] > 0)
assert(self.nodes[1].getrawtransaction(rawtx2_txid, True)['confirmations'] > 0)
assert(self.nodes[0].getrawtransaction(rawtx2_txid, True)['instantlock'])
assert(self.nodes[1].getrawtransaction(rawtx2_txid, True)['instantlock'])
assert(self.nodes[0].getbestblockhash() != good_tip)
assert(self.nodes[1].getbestblockhash() != good_tip)
def wait_for_chainlock_tip_all_nodes(self):
for node in self.nodes:
tip = node.getbestblockhash()
self.wait_for_chainlock(node, tip)
def wait_for_chainlock_tip(self, node):
tip = node.getbestblockhash()
self.wait_for_chainlock(node, tip)
def wait_for_chainlock(self, node, block_hash):
t = time()
while time() - t < 15:
try:
block = node.getblockheader(block_hash)
if block["confirmations"] > 0 and block["chainlock"]:
return
except:
# block might not be on the node yet
pass
sleep(0.1)
raise AssertionError("wait_for_chainlock timed out")
def create_block(self, node, vtx=[]):
bt = node.getblocktemplate()
height = bt['height']
tip_hash = bt['previousblockhash']
coinbasevalue = bt['coinbasevalue']
miner_address = node.getnewaddress()
mn_payee = bt['masternode'][0]['payee']
# calculate the fees that the block template included (we'll have to remove them from the coinbase as we won't
# include the template's transactions)
bt_fees = 0
for tx in bt['transactions']:
bt_fees += tx['fee']
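# Recompute the fees for the transactions we are actually including
# (input value minus output value for each supplied tx)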
new_fees = 0
for tx in vtx:
in_value = 0
out_value = 0
for txin in tx.vin:
txout = node.gettxout("%064x" % txin.prevout.hash, txin.prevout.n, False)
in_value += int(txout['value'] * COIN)
for txout in tx.vout:
out_value += txout.nValue
new_fees += in_value - out_value
# fix fees
coinbasevalue -= bt_fees
coinbasevalue += new_fees
mn_amount = get_masternode_payment(height, coinbasevalue)
miner_amount = coinbasevalue - mn_amount
outputs = {miner_address: str(Decimal(miner_amount) / COIN)}
if mn_amount > 0:
outputs[mn_payee] = str(Decimal(mn_amount) / COIN)
coinbase = FromHex(CTransaction(), node.createrawtransaction([], outputs))
coinbase.vin = create_coinbase(height).vin
# We can't really use this one as it would result in invalid merkle roots for masternode lists
if len(bt['coinbase_payload']) != 0:
cbtx = FromHex(CCbTx(version=1), bt['coinbase_payload'])
coinbase.nVersion = 3
coinbase.nType = 5 # CbTx
coinbase.vExtraPayload = cbtx.serialize()
coinbase.calc_sha256()
block = create_block(int(tip_hash, 16), coinbase, nTime=bt['curtime'])
block.vtx += vtx
# Add quorum commitments from template
for tx in bt['transactions']:
tx2 = FromHex(CTransaction(), tx['data'])
if tx2.nType == 6:
block.vtx.append(tx2)
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
return block
def create_chainlock(self, height, blockHash):
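# The signing request id commits to the "clsig" prefix plus the little-endian
# block height (mirroring the node-side ChainLocks request-id derivation);
# the message hash signed by the quorum is the block hash itself.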
request_id = "%064x" % uint256_from_str(hash256(ser_string(b"clsig") + struct.pack("<I", height)))
message_hash = "%064x" % blockHash
for mn in self.mninfo:
mn.node.quorum('sign', 100, request_id, message_hash)
recSig = None
t = time()
while time() - t < 10:
try:
recSig = self.nodes[0].quorum('getrecsig', 100, request_id, message_hash)
break
except:
sleep(0.1)
assert(recSig is not None)
clsig = msg_clsig(height, blockHash, hex_str_to_bytes(recSig['sig']))
return clsig
def create_islock(self, hextx):
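# Build the InstantSend signing request id: it commits to the "islock" prefix,
# the number of inputs and each spent outpoint, so transactions spending the
# same inputs map to the same request id.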
tx = FromHex(CTransaction(), hextx)
tx.rehash()
request_id_buf = ser_string(b"islock") + ser_compact_size(len(tx.vin))
inputs = []
for txin in tx.vin:
request_id_buf += txin.prevout.serialize()
inputs.append(txin.prevout)
request_id = "%064x" % uint256_from_str(hash256(request_id_buf))
message_hash = "%064x" % tx.sha256
for mn in self.mninfo:
mn.node.quorum('sign', 100, request_id, message_hash)
recSig = None
t = time()
while time() - t < 10:
try:
recSig = self.nodes[0].quorum('getrecsig', 100, request_id, message_hash)
break
except:
sleep(0.1)
assert(recSig is not None)
islock = msg_islock(inputs, tx.sha256, hex_str_to_bytes(recSig['sig']))
return islock
if __name__ == '__main__':
LLMQ_IS_CL_Conflicts().main()
| 39.00885 | 125 | 0.628176 | [
"MIT"
] | cryptowithacause/cryptocause-coin | qa/rpc-tests/llmq-is-cl-conflicts.py | 13,224 | Python |
#!/usr/bin/env python
#
# @file test_signals.py
#
# @author Matt Gigli <[email protected]>
#
# @section LICENSE
#
# The MIT License (MIT)
# Copyright (c) 2016 Matt Gigli
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
import unittest
from ao.signals import dispatcher
class test_signals(unittest.TestCase):
def setUp(self):
self.cb1 = 0
self.cb2 = 0
self.cb3 = 0
self.cb4 = 0
self.cb5 = 0
self.cb6 = 0
self.cb_arg1 = None
self.cb_arg2 = None
def tearDown(self):
dispatcher.unsubscribe_all()
def callback_1(self):
self.cb1 = 1
def callback_2(self):
self.cb2 = 2
def callback_1234(self):
self.cb1 = 1
self.cb2 = 2
self.cb3 = 3
self.cb4 = 4
def callback_34(self):
self.cb3 = 3
self.cb4 = 4
def callback_56(self):
self.cb5 = 5
self.cb6 = 6
def callback_args(self, arg1, arg2=None):
self.cb_arg1 = arg1
self.cb_arg2 = arg2
def test_one_receiver(self):
# verify test class members are reset
self.assertEqual(self.cb1, 0)
# subscribe to the cb1_sig signal
dispatcher.subscribe('cb1_sig', self.callback_1)
# publish the signal to call the callback
dispatcher.publish('cb1_sig')
# verify callback was called
self.assertEqual(self.cb1, 1)
def test_arguments(self):
# verify test class members are reset
self.assertEqual(self.cb_arg1, None)
self.assertEqual(self.cb_arg2, None)
# subscribe to the cbarg_sig signal
dispatcher.subscribe('cbarg_sig', self.callback_args)
# publish the signal to call the callback
dispatcher.publish('cbarg_sig', 'arg1', arg2='arg2')
# verify callback was called
self.assertEqual(self.cb_arg1, 'arg1')
self.assertEqual(self.cb_arg2, 'arg2')
def test_multiple_receivers(self):
# verify test class members are reset
self.assertEqual(self.cb1, 0)
self.assertEqual(self.cb2, 0)
self.assertEqual(self.cb3, 0)
self.assertEqual(self.cb4, 0)
# subscribe to the signals
dispatcher.subscribe('cb2_sig', self.callback_1234)
# publish the signal to call the callbacks
dispatcher.publish('cb2_sig')
# verify callbacks were called
self.assertEqual(self.cb1, 1)
self.assertEqual(self.cb2, 2)
self.assertEqual(self.cb3, 3)
self.assertEqual(self.cb4, 4)
def test_publish_unsubscribed_signal(self):
# publish a signal that hasn't been subscribed to, to verify that no
# error occurs when publishing such a signal
dispatcher.publish('lonely_sig')
def test_unsubscribe_unsubscribed_signal(self):
# verify no exception is raised when unsubscribing a receiver from a
# signal that was never subscribed to
dispatcher.unsubscribe('lonely_sig', self.callback_1)
def test_unsubscribe(self):
# subscribe, publish and check that callback was called
dispatcher.subscribe('cb1_sig', self.callback_1)
dispatcher.publish('cb1_sig')
self.assertEqual(self.cb1, 1)
# reset cb1, unsubscribe and show that callback is not called
self.cb1 = 0
dispatcher.unsubscribe('cb1_sig', self.callback_1)
dispatcher.publish('cb1_sig')
self.assertEqual(self.cb1, 0)
def test_unsubscribe_all_for_signal(self):
# subscribe some receivers for some signals
dispatcher.subscribe('cb1_sig', self.callback_1)
dispatcher.subscribe('cb1_sig', self.callback_2)
dispatcher.subscribe('cb3_sig', self.callback_34)
dispatcher.subscribe('cb3_sig', self.callback_56)
# unsubscribe just for cb1_sig
dispatcher.unsubscribe_all('cb1_sig')
# verify only cb1_sig receivers were unsubscribed
dispatcher.publish('cb1_sig')
dispatcher.publish('cb3_sig')
self.assertEqual(self.cb1, 0)
self.assertEqual(self.cb2, 0)
self.assertEqual(self.cb3, 3)
self.assertEqual(self.cb4, 4)
self.assertEqual(self.cb5, 5)
self.assertEqual(self.cb6, 6)
def test_unsubscribe_all(self):
dispatcher.subscribe('cb1_sig', self.callback_1)
dispatcher.subscribe('cb1_sig', self.callback_2)
dispatcher.subscribe('cb3_sig', self.callback_34)
dispatcher.subscribe('cb3_sig', self.callback_56)
# unsubscribe all signals
dispatcher.unsubscribe_all()
# verify all receivers were unsubscribed
dispatcher.publish('cb1_sig')
dispatcher.publish('cb3_sig')
self.assertEqual(self.cb1, 0)
self.assertEqual(self.cb2, 0)
self.assertEqual(self.cb3, 0)
self.assertEqual(self.cb4, 0)
self.assertEqual(self.cb5, 0)
self.assertEqual(self.cb6, 0)
| 33.005556 | 79 | 0.666723 | [
"MIT"
] | mjgigli/active-object | test/test_signals.py | 5,941 | Python |
import asyncio
import sys
import click
from pydantic import SecretStr
from server.application.auth.commands import ChangePassword
from server.config.di import bootstrap, resolve
from server.domain.auth.entities import User
from server.domain.auth.repositories import UserRepository
from server.seedwork.application.messages import MessageBus
async def _prompt_user() -> User:
repository = resolve(UserRepository)
email = click.prompt("Email")
user = await repository.get_by_email(email)
if user is None:
click.echo(click.style(f"User does not exist: {email}", fg="red"))
sys.exit(1)
return user
def _prompt_password() -> SecretStr:
return click.prompt(
"Password",
confirmation_prompt="Password (repeat)",
value_proc=SecretStr,
hide_input=True,
)
async def main() -> None:
bus = resolve(MessageBus)
user = await _prompt_user()
password = _prompt_password()
await bus.execute(ChangePassword(email=user.email, password=password))
if __name__ == "__main__":
bootstrap()
asyncio.run(main())
| 22.510204 | 74 | 0.710789 | [
"MIT"
] | multi-coop/catalogage-donnees | tools/changepassword.py | 1,103 | Python |
"""Various functions that interact with Slack, e.g. posting messages."""
import asyncio
import logging
import socket
from pathlib import Path
from typing import Union, Optional
from slack_sdk.errors import SlackApiError
from lsw_slackbot.plots import plot_resource_use
from lsw_slackbot.resources import current_memory_fraction, _get_resource_usage_dataframe
from lsw_slackbot.util import string_time
async def _send_message(client, channel: str, message: str):
"""Sends a message to a channel, with basic logging & error handling."""
try:
await client.chat_postMessage(channel=channel, text=message)
# Handle various different errors, *some* of which are non-critical...
except SlackApiError as e:
logging.exception(f"error from slack API when trying to send message: {e.response['error']}")
print("Encountered SlackApiError when trying to send message (see logs.)")
except AttributeError:
logging.exception("suspected issue in Slack API when trying to send message. This bug has occured before!")
print("Encountered AttributeError when trying to send message (see logs.)")
async def _send_file(client, channel: str, file: Union[Path, str], title):
"""Sends a file to a channel, with basic logging & error handling."""
if isinstance(file, Path):
file = str(file.absolute())
try:
await client.files_upload(channels=channel, file=file, title=title)
# Handle various different errors, *some* of which are non-critical...
except SlackApiError as e:
logging.exception(f"error from Slack API when trying to upload file: {e.response['error']}")
print("Encountered SlackApiError when trying to upload file (see logs.)")
except AttributeError:
logging.exception("suspected issue in Slack API when trying to upload file. This bug has occured before!")
print("Encountered AttributeError when trying to upload file (see logs.)")
async def hello_world(client, channel: str):
"""Basic function to post an init message to a channel."""
# Todo: it would be really cool if hello_world also printed the latest commit message.
# This could be done by running the command `git log -1` from Python?
# See https://stackoverflow.com/questions/7293008/display-last-git-commit-comment
logging.info(f"Saying hello world in {channel}!")
system_name = socket.gethostname()
await _send_message(
client, channel, f"Server time & date: {string_time()}\nApp is running on system {system_name}.")
async def send_resource_use_plot(client, channel: str, plot_kwargs: dict, title: Optional[str] = None):
"""Sends a resource usage plot to a given channel."""
if title is None:
title = f"Resource usage plot generated at {string_time()}"
else:
title = title + f" (plot generated at {string_time()})"
# Firstly, let's generate a plot
logging.info("Generating a resource usage plot")
logging.debug(f"plot kwargs: {plot_kwargs}")
location_plot = await plot_resource_use(**plot_kwargs)
# Now, let's try and send it to slack
logging.info(f"Sending to Slack in channel {channel}")
await _send_file(client, channel, location_plot, title)
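# Module-level record of the memory fraction seen on the previous check; used by
# check_memory() so a warning is only sent when usage first crosses the threshold.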
_LAST_MEMORY_FRACTION = 0.0
async def check_memory(client, channel: str, memory_warn_fraction=0.8, sleep_time=3600):
"""Quick function for checking current server memory and sending a warning to a desired channel if it's
too high."""
global _LAST_MEMORY_FRACTION # Sorry for using global variables =(
current_usage = current_memory_fraction()
# Only warn if we didn't warn before
if _LAST_MEMORY_FRACTION < memory_warn_fraction:
if current_usage > memory_warn_fraction:
# Firstly, prioritise sending a basic warning
await _send_message(client, channel, f"WARNING: current memory usage at {current_usage:.2%}!")
# Next, grab info on currently running threads
thread_df = await _get_resource_usage_dataframe(measurement_time=1.0)
thread_df = thread_df.sort_values("memory")
# ... and format it into something we can send
message = ["Users with something currently running:"]
for i, a_row in thread_df.iterrows():
message.append(f"{a_row.name}: {a_row['cpu_percent']:.2f}% CPU "
f"-- {a_row['memory']:.2f} GB"
f"-- {a_row['threads']} threads")
message.append(f"\n(no further warnings will be sent for a sleep period of {sleep_time/60**2:.2f} hour(s))")
# Send it!
await _send_message(client, channel, "\n".join(message))
# Sleep so we don't spam the chat
await asyncio.sleep(sleep_time)
_LAST_MEMORY_FRACTION = current_usage
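# Illustrative scheduling sketch (the channel name "#server-alerts" and the 60 s
# polling interval are assumptions, not part of this module):
#
#     async def memory_watchdog(client):
#         while True:
#             await check_memory(client, "#server-alerts")
#             await asyncio.sleep(60)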
| 41.444444 | 120 | 0.690864 | [
"BSD-3-Clause"
] | emilyhunt/lsw-slackbot | lsw_slackbot/slack.py | 4,849 | Python |
from contextlib import contextmanager
import platform
import shlex
from subprocess import PIPE, Popen
from shutil import which
class ShellCommandResult(tuple):
"""
The result of a :func:`coalib.misc.run_shell_command` call.
It is based on a ``(stdout, stderr)`` string tuple like it is returned
form ``subprocess.Popen.communicate`` and was originally returned from
:func:`coalib.misc.run_shell_command`. So it is backwards-compatible.
It additionally stores the return ``.code``:
>>> process = Popen(['python', '-c',
... 'import sys; print(sys.stdin.readline().strip() +'
... ' " processed")'],
... stdin=PIPE, stdout=PIPE, stderr=PIPE,
... universal_newlines=True)
>>> stdout, stderr = process.communicate(input='data')
>>> stderr
''
>>> result = ShellCommandResult(process.returncode, stdout, stderr)
>>> result[0]
'data processed\\n'
>>> result[1]
''
>>> result.code
0
"""
def __new__(cls, code, stdout, stderr):
"""
Creates the basic tuple from `stdout` and `stderr`.
"""
return tuple.__new__(cls, (stdout, stderr))
def __init__(self, code, stdout, stderr):
"""
Stores the return `code`.
"""
self.code = code
@contextmanager
def run_interactive_shell_command(command, **kwargs):
"""
Runs a single command in shell and provides stdout, stderr and stdin
streams.
This function creates a context manager that sets up the process (using
``subprocess.Popen()``), returns to the caller and waits for the process to exit on
leaving.
By default the process is opened in ``universal_newlines`` mode and creates
pipes for all streams (stdout, stderr and stdin) using ``subprocess.PIPE``
special value. These pipes are closed automatically, so if you want to get
the contents of the streams you should retrieve them before the context
manager exits.
>>> with run_interactive_shell_command(["echo", "TEXT"]) as p:
... stdout = p.stdout
... stdout_text = stdout.read()
>>> stdout_text
'TEXT\\n'
>>> stdout.closed
True
Custom streams provided are not closed, except for ``subprocess.PIPE``.
>>> from tempfile import TemporaryFile
>>> stream = TemporaryFile()
>>> with run_interactive_shell_command(["echo", "TEXT"],
... stdout=stream) as p:
... stderr = p.stderr
>>> stderr.closed
True
>>> stream.closed
False
:param command: The command to run on shell. This parameter can either
be a sequence of arguments that are directly passed to
the process or a string. A string gets splitted beforehand
using ``shlex.split()``. If providing ``shell=True`` as a
keyword-argument, no ``shlex.split()`` is performed and the
command string goes directly to ``subprocess.Popen()``.
:param kwargs: Additional keyword arguments to pass to
``subprocess.Popen`` that are used to spawn the process.
:return: A context manager yielding the process started from the
command.
"""
if not kwargs.get('shell', False) and isinstance(command, str):
command = shlex.split(command)
else:
command = list(command)
if platform.system() == 'Windows': # pragma: no cover
# subprocess doesn't implicitly look for .bat and .cmd scripts when
# running commands under Windows
command[0] = which(command[0])
args = {'stdout': PIPE,
'stderr': PIPE,
'stdin': PIPE,
'universal_newlines': True}
args.update(kwargs)
process = Popen(command, **args)
try:
yield process
finally:
if args['stdout'] is PIPE:
process.stdout.close()
if args['stderr'] is PIPE:
process.stderr.close()
if args['stdin'] is PIPE:
process.stdin.close()
process.wait()
def run_shell_command(command, stdin=None, **kwargs):
"""
Runs a single command in shell and returns the read stdout and stderr data.
This function waits for the process (created using ``subprocess.Popen()``)
to exit. Effectively it wraps ``run_interactive_shell_command()`` and uses
``communicate()`` on the process.
See also ``run_interactive_shell_command()``.
:param command: The command to run on shell. This parameter can either
be a sequence of arguments that are directly passed to
the process or a string. A string gets splitted beforehand
using ``shlex.split()``.
:param stdin: Initial input to send to the process.
:param kwargs: Additional keyword arguments to pass to
``subprocess.Popen`` that is used to spawn the process.
:return: A tuple with ``(stdoutstring, stderrstring)``.
"""
with run_interactive_shell_command(command, **kwargs) as p:
ret = p.communicate(stdin)
return ShellCommandResult(p.returncode, *ret)
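# Minimal usage sketch (illustrative; assumes a POSIX-style `echo` command is available):
#
#     result = run_shell_command('echo TEXT')
#     result[0]    # 'TEXT\n'
#     result.code  # 0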
def get_shell_type(): # pragma: no cover
"""
Finds the current shell type based on the output of pre-defined variables
common to each shell. This is useful to identify which sort of escaping
is required for strings.
:return: The shell type. This can be either "powershell" if Windows
Powershell is detected, "cmd" if Command Prompt is detected,
or "sh" if it's neither of these.
"""
out = run_shell_command('echo $host.name', shell=True)[0]
if out.strip() == 'ConsoleHost':
return 'powershell'
out = run_shell_command('echo $0', shell=True)[0]
if out.strip() == '$0':
return 'cmd'
return 'sh'
| 35.42515 | 79 | 0.615619 | [
"MIT"
] | prashant0598/CoffeeApp | venv/lib/python3.5/site-packages/coalib/misc/Shell.py | 5,916 | Python |
#!/usr/bin/env python
import numpy as np
def initialize_hyper_parameters(layer_acts, learning_rate):
"""
Initialize hyper-parameters for the different layers of the network
Arguments:
layer_acts -- python array (list) containing the activation functions of each layer in the network
learning_rate -- float value used as constant for gradient descent
Returns:
hyper_parameters -- python dictionary containing hyper_parameters (can be further extended)
"""
hyper_parameters = {}
activations = {}
L = len(layer_acts) # number of layers in the network
for l in range(0, L):
activations[l+1] = layer_acts[l]
hyper_parameters["activations"] = activations
hyper_parameters["learning_rate"] = learning_rate
return hyper_parameters
def test_initialize_hyper_parameters():
print("\033[92m" + "\nTest initialize_hyper_parameters() ..." + "\033[0m")
layer_acts = ["relu", "relu", "sigmoid"]
learning_rate = 0.0075
hyper_parameters = initialize_hyper_parameters(layer_acts, learning_rate)
print(hyper_parameters["activations"])
assert len(hyper_parameters["activations"]) == 3
assert hyper_parameters["activations"][1] == "relu"
print("\033[92m" + "... end test" + "\033[0m")
def initialize_parameters(layer_dims):
"""
Initialize parameters for the different layers of the network
Arguments:
layer_dims -- python array (list) containing the dimensions of each layer in the network
Returns:
parameters -- python dictionary containing your parameters "W1", "b1", ..., "WL", "bL", ...:
Wl -- weight matrix of shape (layer_dims[l], layer_dims[l-1])
bl -- bias vector of shape (layer_dims[l], 1)
"""
np.random.seed(1)
parameters = {}
L = len(layer_dims) # number of layers in the network
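# Small random weights (scaled by 0.01, a common heuristic to keep early
# activations away from saturating regions) and zero biases for each layer.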
for l in range(1, L):
parameters['W' + str(l)] = np.random.randn(layer_dims[l], layer_dims[l-1]) * 0.01
parameters['b' + str(l)] = np.zeros((layer_dims[l], 1))
assert(parameters['W' + str(l)].shape == (layer_dims[l], layer_dims[l-1]))
assert(parameters['b' + str(l)].shape == (layer_dims[l], 1))
return parameters
def test_initialize_parameters():
print("\n" + "\033[92m" + "Test initialize_parameters() ..." + "\033[0m")
np.random.seed(1)
parameters = initialize_parameters([3,2,1])
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
W1 = parameters["W1"]
W1_expected = np.array([[0.01624345,-0.00611756,-0.00528172],[-0.01072969,0.00865408,-0.02301539]])
assert np.allclose(W1, W1_expected, rtol=1e-05, atol=1e-06)
b1 = parameters["b1"]
b1_expected = np.array([[0.],[0.]])
assert np.allclose(b1, b1_expected, rtol=1e-05, atol=1e-06)
W2 = parameters["W2"]
W2_expected = np.array([[0.01744812, -0.00761207]])
assert np.allclose(W2, W2_expected, rtol=1e-05, atol=1e-06)
b2 = parameters["b2"]
b2_expected = np.array([[ 0.]])
assert np.allclose(b2, b2_expected, rtol=1e-05, atol=1e-06)
print("\033[92m" + "... end test" + "\033[0m")
if __name__ == "__main__":
test_initialize_hyper_parameters()
test_initialize_parameters()
| 32.861386 | 103 | 0.647484 | [
"Apache-2.0"
] | giuseppefutia/word2vec | utils/init_parameters.py | 3,319 | Python |
"""
Code for particle tracking, designed for ROMS output. This new version
makes extensive use of nearest-neighbor KDTree algorithms for interpolation.
This results is significantly (36x) faster runtimes compared with old version.
PERFORMANCE: about 3 minutes per day for a 3D cas6 experiment with 10k particles.
NOTE: You have to have run make_KDTrees.py for the grid (e.g. cas6) before running.
NOTE: There is some issue, perhaps with garbage collection, which causes
the loading of NetCDF files to happen slower after running a few times
interactively from ipython. It appears that this can be avoided by running
from the terminal as: python tracker.py [args].
This program is a driver where you specify:
- an experiment (ROMS run + release locations + other choices)
- a release or set of releases within that experiment (start day, etc.)
The main argument you provide is -exp, which is the experiment name, and
is used by experiments.get_exp_info() and .get_ic() to get the gtagex and initial particle
locations. Other possible commmand line arguments and their defaults
are explained in the argparse section below.
NOTE: To improve usefulness for people other than me, this driver will
first look for:
- LiveOcean_user/tracker/user_trackfun.py
before loading my versions.
This allows you to create your own modifications to the tracking
(e.g. for diurnal depth behavior) while still being able to use git pull to update the main code.
It can be run on its own, or with command line arguments to facilitate
large, automated jobs.
Examples:
python tracker.py -clb True
the same command, with all the arguments typed out instead of taken as defaults:
python tracker.py -gtx cas6_v3_lo8b -ro 2 -d 2019.07.04 -exp jdf0 -clb True
"""
import sys
from datetime import datetime, timedelta
from time import time
import argparse
import numpy as np
from lo_tools import Lfun, zfun
Ldir = Lfun.Lstart()
from importlib import reload
pth = Ldir['LOu'] / 'tracker'
if str(pth) not in sys.path:
sys.path.append(str(pth))
import experiments as exp
reload(exp)
import trackfun_nc as tfnc
reload(tfnc)
# The import of trackfun or user_trackfun is done later in this program,
# about 100 lines down.
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# command line arguments, can be input in any order
parser = argparse.ArgumentParser()
# Set the experiment name
# (details set in experiments.py, or, if it exists, user_experiments.py)
parser.add_argument('-gtx', '--gtagex', default='cas6_v0_live', type=str)
parser.add_argument('-ro', '--roms_out_num', default=2, type=int)
# 1 = Ldir['roms_out1'], etc.
# this is the first starting day
parser.add_argument('-d', '--date_string', default='2021.10.15', type=str)
parser.add_argument('-exp', '--exp_name', default='elb', type=str)
parser.add_argument('-clb', '--clobber', default=False, type=zfun.boolean_string)
# overwrite existing output folder if clobber == True
parser.add_argument('-sub_tag', default='', type=str)
# append an optional tag to the end of the output folder name
# These are False unless the flags are used with the argument True
# so if you do NOT use these flags the run will be:
# - trapped to the surface
# - no vertical turbulent diffusion
parser.add_argument('-3d', default=False, type=zfun.boolean_string) # do 3d tracking
parser.add_argument('-laminar', default=False, type=zfun.boolean_string) # no turbulence
parser.add_argument('-no_advection', default=False, type=zfun.boolean_string) # no advection
parser.add_argument('-sink', default=0, type=float) # particle sinking speed (m per day, e.g. 40)
# windage = a small number: 0 <= windage << 1 (e.g. 0.03)
# fraction of windspeed added to advection, only for 3d=False
parser.add_argument('-wnd', '--windage', default=0, type=float)
# You can make multiple releases using:
# number_of_start_days > 1 & days_between_starts, and which hour (UTC) to start on
parser.add_argument('-nsd', '--number_of_start_days', default=1, type=int)
parser.add_argument('-dbs', '--days_between_starts', default=1, type=int)
parser.add_argument('-dtt', '--days_to_track', default=1, type=int)
parser.add_argument('-sh', '--start_hour', default=0, type=int)
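# Example (illustrative): release particles on 3 consecutive days and track each
# release for 2 days:
#   python tracker.py -exp jdf0 -nsd 3 -dbs 1 -dtt 2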
# number of divisions to make between saves for the integration
# e.g. if ndiv = 12 and we have hourly saves, we use a 300 sec step
# for the integration. 300 s seems like a good default value,
# based on Banas et al. (2009, CSR RISE paper).
parser.add_argument('-ndiv', default=12, type=int)
parser.add_argument('-sph', default=1, type=int)
# sph = saves per hour, a new argument to allow more frequent writing of output.
args = parser.parse_args()
TR = args.__dict__
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# set where to look for model output
if args.roms_out_num == 0:
TR['roms_out'] = Ldir['roms_out']
elif args.roms_out_num > 0:
TR['roms_out'] = Ldir['roms_out' + str(args.roms_out_num)]
# set dependent and default fields
TR['turb'] = False
# make sure sph is no greater than ndiv
TR['sph'] = np.min((TR['sph'],TR['ndiv']))
# overrides
if TR['3d']:
TR['windage'] = 0
TR['turb'] = True # default is that 3d is always turbulent
if TR['laminar']:
TR['turb'] = False
# get experiment info
TR['gridname'], TR['tag'], TR['ex_name'] = TR['gtagex'].split('_')
# pass some info to Ldir
Ldir['gtagex'] = TR['gtagex']
Ldir['roms_out'] = TR['roms_out']
# get the full path to a valid history file
fn00 = Ldir['roms_out'] / TR['gtagex'] / ('f' + TR['date_string']) / 'ocean_his_0001.nc'
TR['fn00'] = fn00
# set the name of the output folder
out_name = TR['exp_name']
# modify the output folder name, based on other choices
if TR['3d']:
out_name += '_3d'
elif not TR['3d']:
out_name += '_surf'
if TR['laminar']:
out_name += '_laminar'
if TR['windage'] > 0:
out_name += '_wind' + str(int(100*TR['windage']))
if TR['start_hour'] > 0:
out_name += '_sh' + str(int(TR['start_hour']))
if TR['sink'] > 0:
out_name += '_sink' + str(int(TR['sink']))
if TR['no_advection']:
out_name += '_nadv'
if TR['ndiv'] != 12: # only mention ndiv if it is NOT 12
out_name += '_ndiv' + str(TR['ndiv'])
if len(TR['sub_tag']) > 0:
out_name += '_' + TR['sub_tag']
# make the list of start days (datetimes) for separate releases
idt_list = []
dt = datetime.strptime(TR['date_string'], '%Y.%m.%d')
for nic in range(TR['number_of_start_days']):
idt_list.append(dt)
dt = dt + timedelta(TR['days_between_starts'])
# make the output directory (empty)
outdir0 = Ldir['LOo'] / 'tracks'
outdir1 = out_name
outdir = outdir0 / outdir1
if outdir.is_dir():
if args.clobber:
pass # continue and overwrite if clobber is True
else:
print('Warning: output directory exists - rename if you want to keep it!!')
print('-- tracker run not started --')
sys.exit()
Lfun.make_dir(outdir, clean=True)
print(50*'*' + '\nWriting to ' + str(outdir))
sys.stdout.flush()
# Write some info to outdir0 for use by trackfun.py
Lfun.dict_to_csv(TR, outdir0 / 'exp_info.csv')
# and write the same info to outdir as part of the archived run output
Lfun.dict_to_csv(TR, outdir / 'exp_info.csv')
# Load the trackfun module.
# NOTE: we have to load this module AFTER we write [outdir0]/exp_info.csv
# because it uses that information to decide which KDTrees to load. Crude.
if (Ldir['LOu'] / 'tracker' / 'user_trackfun.py').is_file():
sys.path.append(str(Ldir['LOu'] / 'tracker'))
import user_trackfun as tfun
else:
import trackfun as tfun
reload(tfun)
# get the initial particle location vectors
EI = exp.get_exp_info(TR['exp_name'])
plon00, plat00, pcs00 = exp.get_ic(EI, TR['fn00'])
# step through the releases, one for each start day
write_grid = True
for idt0 in idt_list:
tt0 = time() # monitor integration time
# name the release file by start day
idt0_str = datetime.strftime(idt0,'%Y.%m.%d')
outname = ('release_' + idt0_str + '.nc')
print('-- ' + outname)
sys.stdout.flush()
out_fn = outdir / outname
# we do the calculation in one-day segments, but write complete
# output for a release to a single NetCDF file.
for nd in range(TR['days_to_track']):
# get or replace the history file list for this day
idt = idt0 + timedelta(days=nd)
idt_str = datetime.strftime(idt,'%Y.%m.%d')
print(' - working on ' + idt_str)
sys.stdout.flush()
fn_list = tfun.get_fn_list(idt, Ldir)
# write the grid file (once per experiment) for plotting
if write_grid:
g_infile = fn_list[0]
g_outfile = outdir / 'grid.nc'
tfnc.write_grid(g_infile, g_outfile)
write_grid = False
# DO THE TRACKING
if nd == 0: # first day
# set IC
plon0 = plon00.copy()
plat0 = plat00.copy()
pcs0 = pcs00.copy()
# do the tracking
if TR['start_hour'] > 0:
fn_list = fn_list[TR['start_hour']:]
P = tfun.get_tracks(fn_list, plon0, plat0, pcs0, TR, trim_loc=True)
# save the results to NetCDF
tfnc.start_outfile(out_fn, P)
else: # subsequent days
# set IC
plon0 = P['lon'][-1,:]
plat0 = P['lat'][-1,:]
pcs0 = P['cs'][-1,:]
# do the tracking
P = tfun.get_tracks(fn_list, plon0, plat0, pcs0, TR)
tfnc.append_to_outfile(out_fn, P)
print(' - Took %0.1f sec for %s day(s)' %
(time() - tt0, str(TR['days_to_track'])))
print(50*'=')
print(50*'*' + '\nWrote to ' + str(outdir))
| 35.465201 | 97 | 0.673001 | [
"MIT"
] | PuffyPuffin/LO_user | tracker/tracker/user_tracker.py | 9,682 | Python |
"""
Custom Decorators
"""
# Django
from django.shortcuts import redirect, reverse
from django.http import JsonResponse
from django.utils.translation import gettext as _
from django.http import Http404
# local Django
from app.modules.util.helpers import Helpers
from app.modules.core.response import Response
from app.modules.entity.option_entity import Option_Entity
def redirect_if_authenticated(function):
def wrap(controller, request, *args, **kwargs):
if request.user and request.user.is_authenticated:
if "redirect" in request.GET:
return redirect(request.GET["redirect"])
return redirect("app.web.admin.dashboard")
return function(controller, request, *args, **kwargs)
return wrap
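# Illustrative usage sketch (the controller class below is hypothetical):
#
#   class Login(View):
#       @redirect_if_authenticated
#       def get(self, request):
#           ...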
def login_if_not_authenticated(function):
def wrap(controller, request, *args, **kwargs):
if not request.user or not request.user.is_authenticated:
return redirect(reverse("app.web.login") + "?redirect=" + request.get_full_path())
return function(controller, request, *args, **kwargs)
return wrap
def stop_request_if_authenticated(function):
def wrap(controller, request, *args, **kwargs):
if request.user and request.user.is_authenticated:
response = Response()
return JsonResponse(response.send_private_failure([{
"type": "error",
"message": _("Error! Access forbidden for authenticated users.")
}]))
return function(controller, request, *args, **kwargs)
return wrap
def redirect_if_not_installed(function):
def wrap(controller, request, *args, **kwargs):
installed = Option_Entity().get_one_by_key("app_installed") is not False
if not installed:
return redirect("app.web.install")
return function(controller, request, *args, **kwargs)
return wrap
def protect_metric_with_auth_key(function):
def wrap(controller, request, *args, **kwargs):
if kwargs["type"] == "prometheus":
prometheus_token = Option_Entity().get_one_by_key("prometheus_token")
if prometheus_token.value != "" and ("HTTP_AUTHORIZATION" not in request.META or prometheus_token.value != request.META["HTTP_AUTHORIZATION"]):
raise Http404("Host not found.")
return function(controller, request, *args, **kwargs)
return wrap
def stop_request_if_installed(function):
def wrap(controller, request, *args, **kwargs):
installed = Option_Entity().get_one_by_key("app_installed") is not False
if installed:
response = Response()
return JsonResponse(response.send_private_failure([{
"type": "error",
"message": _("Error! Application is already installed.")
}]))
return function(controller, request, *args, **kwargs)
return wrap
def log_request_data(function):
def wrap(controller, request, *args, **kwargs):
_helper = Helpers()
_logger = _helper.get_logger(__name__)
_logger.debug(_("Request Method: %s") % request.method)
_logger.debug(_("Request URL: %s") % request.path)
_logger.debug(_("Request Body: %s") % request.body)
return function(controller, request, *args, **kwargs)
return wrap
| 37.943182 | 155 | 0.668763 | [
"Apache-2.0"
] | Clivern/Kraven | app/modules/core/decorators.py | 3,339 | Python |
from datetime import datetime, timedelta
from django.test import TestCase
from mock import patch
from corehq.apps.domain.models import Domain
from corehq.apps.hqcase.utils import update_case
from corehq.apps.sms.mixin import PhoneNumberInUseException
from corehq.apps.sms.models import (
PhoneNumber,
SQLMobileBackend,
SQLMobileBackendMapping,
)
from corehq.apps.sms.tasks import (
delete_phone_numbers_for_owners,
sync_case_phone_number,
)
from corehq.apps.sms.tests.util import delete_domain_phone_numbers
from corehq.apps.users.models import CommCareUser, WebUser
from corehq.apps.users.tasks import tag_cases_as_deleted_and_remove_indices
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.form_processor.tests.utils import run_with_all_backends
from corehq.form_processor.utils import is_commcarecase
from corehq.messaging.smsbackends.test.models import SQLTestSMSBackend
from corehq.util.test_utils import create_test_case
class PhoneNumberCacheClearTestCase(TestCase):
def assertNoMatch(self, phone_search, suffix_search, owner_id_search):
self.assertIsNone(PhoneNumber.get_two_way_number(phone_search))
self.assertIsNone(PhoneNumber.get_two_way_number_by_suffix(suffix_search))
self.assertEqual(PhoneNumber.by_owner_id(owner_id_search), [])
def assertPhoneNumbersEqual(self, phone1, phone2):
for field in phone1._meta.fields:
self.assertEqual(getattr(phone1, field.name), getattr(phone2, field.name))
def assertMatch(self, match, phone_search, suffix_search, owner_id_search):
lookedup = PhoneNumber.get_two_way_number(phone_search)
self.assertPhoneNumbersEqual(match, lookedup)
lookedup = PhoneNumber.get_two_way_number_by_suffix(suffix_search)
self.assertPhoneNumbersEqual(match, lookedup)
[lookedup] = PhoneNumber.by_owner_id(owner_id_search)
self.assertPhoneNumbersEqual(match, lookedup)
def _test_cache_clear(self, refresh_each_time=True):
"""
A test to make sure that the cache clearing is working as expected.
This test gets run twice using different values for refresh_each_time.
This makes sure that the mechanism used for clearing the cache works
whether you're updating a document you just saved or getting a document
fresh from the database and updating it.
"""
created = PhoneNumber(
domain='phone-number-test',
owner_doc_type='CommCareCase',
owner_id='fake-owner-id1',
phone_number='99912341234',
backend_id=None,
ivr_backend_id=None,
verified=True,
pending_verification=False,
is_two_way=True,
contact_last_modified=datetime.utcnow()
)
created.save()
self.assertNoMatch('99952345234', '52345234', 'fake-owner-id2')
self.assertMatch(created, '99912341234', '12341234', 'fake-owner-id1')
# Update Phone Number
if refresh_each_time:
created = PhoneNumber.objects.get(pk=created.pk)
created.phone_number = '99952345234'
created.save()
self.assertNoMatch('99912341234', '12341234', 'fake-owner-id2')
self.assertMatch(created, '99952345234', '52345234', 'fake-owner-id1')
# Update Owner Id
if refresh_each_time:
created = PhoneNumber.objects.get(pk=created.pk)
created.owner_id = 'fake-owner-id2'
created.save()
self.assertNoMatch('99912341234', '12341234', 'fake-owner-id1')
self.assertMatch(created, '99952345234', '52345234', 'fake-owner-id2')
created.delete()
self.assertNoMatch('99952345234', '52345234', 'fake-owner-id2')
def test_cache_clear_with_refresh(self):
self._test_cache_clear(refresh_each_time=True)
def test_cache_clear_without_refresh(self):
self._test_cache_clear(refresh_each_time=False)
class CaseContactPhoneNumberTestCase(TestCase):
def setUp(self):
self.domain = 'case-phone-number-test'
def tearDown(self):
delete_domain_phone_numbers(self.domain)
def set_case_property(self, case, property_name, value):
update_case(self.domain, case.case_id, case_properties={property_name: value})
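# Re-fetch the case so callers see the updated properties and the new server_modified_on.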
return CaseAccessors(self.domain).get_case(case.case_id)
def get_case_phone_number(self, case):
return case.get_phone_number()
def assertPhoneNumberDetails(self, case, phone_number, sms_backend_id, ivr_backend_id,
verified, pending_verification, is_two_way, pk=None):
v = self.get_case_phone_number(case)
self.assertEqual(v.domain, case.domain)
self.assertEqual(v.owner_doc_type, case.doc_type)
self.assertEqual(v.owner_id, case.case_id)
self.assertEqual(v.phone_number, phone_number)
self.assertEqual(v.backend_id, sms_backend_id)
self.assertEqual(v.ivr_backend_id, ivr_backend_id)
self.assertEqual(v.verified, verified)
self.assertEqual(v.pending_verification, pending_verification)
self.assertEqual(v.is_two_way, is_two_way)
self.assertEqual(v.contact_last_modified, case.server_modified_on)
if pk:
self.assertEqual(v.pk, pk)
@run_with_all_backends
def test_case_phone_number_updates(self):
extra_number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
with create_test_case(self.domain, 'participant', 'test1', drop_signals=False) as case:
self.assertIsNone(self.get_case_phone_number(case))
case = self.set_case_property(case, 'contact_phone_number', '99987658765')
self.assertPhoneNumberDetails(case, '99987658765', None, None, False, False, False)
pk = self.get_case_phone_number(case).pk
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
case = self.set_case_property(case, 'contact_phone_number_is_verified', '1')
self.assertPhoneNumberDetails(case, '99987658765', None, None, True, False, True, pk=pk)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
case = self.set_case_property(case, 'contact_phone_number', '99987698769')
self.assertPhoneNumberDetails(case, '99987698769', None, None, True, False, True)
pk = self.get_case_phone_number(case).pk
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
case = self.set_case_property(case, 'contact_backend_id', 'sms-backend')
self.assertPhoneNumberDetails(case, '99987698769', 'sms-backend', None, True, False, True, pk=pk)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
case = self.set_case_property(case, 'contact_ivr_backend_id', 'ivr-backend')
self.assertPhoneNumberDetails(case, '99987698769', 'sms-backend', 'ivr-backend', True, False, True,
pk=pk)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
# If phone entry is ahead of the case in terms of contact_last_modified, no update should happen
v = self.get_case_phone_number(case)
v.contact_last_modified += timedelta(days=1)
v.save()
with patch('corehq.apps.sms.models.PhoneNumber.save') as mock_save:
case = self.set_case_property(case, 'contact_phone_number', '99912341234')
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
mock_save.assert_not_called()
self.assertEqual(PhoneNumber.get_two_way_number('999123'), extra_number)
@run_with_all_backends
def test_close_case(self):
extra_number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
with create_test_case(self.domain, 'participant', 'test1', drop_signals=False) as case:
case = self.set_case_property(case, 'contact_phone_number', '99987658765')
case = self.set_case_property(case, 'contact_phone_number_is_verified', '1')
self.assertIsNotNone(self.get_case_phone_number(case))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
update_case(self.domain, case.case_id, close=True)
self.assertIsNone(self.get_case_phone_number(case))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
self.assertEqual(PhoneNumber.get_two_way_number('999123'), extra_number)
@run_with_all_backends
def test_case_soft_delete(self):
extra_number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
with create_test_case(self.domain, 'participant', 'test1', drop_signals=False) as case:
case = self.set_case_property(case, 'contact_phone_number', '99987658765')
case = self.set_case_property(case, 'contact_phone_number_is_verified', '1')
self.assertIsNotNone(self.get_case_phone_number(case))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
tag_cases_as_deleted_and_remove_indices(self.domain, [case.case_id], '123', datetime.utcnow())
self.assertIsNone(self.get_case_phone_number(case))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
self.assertEqual(PhoneNumber.get_two_way_number('999123'), extra_number)
@run_with_all_backends
def test_case_zero_phone_number(self):
extra_number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
with create_test_case(self.domain, 'participant', 'test1', drop_signals=False) as case:
case = self.set_case_property(case, 'contact_phone_number', '99987658765')
case = self.set_case_property(case, 'contact_phone_number_is_verified', '1')
self.assertIsNotNone(self.get_case_phone_number(case))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
case = self.set_case_property(case, 'contact_phone_number', '0')
self.assertIsNone(self.get_case_phone_number(case))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
self.assertEqual(PhoneNumber.get_two_way_number('999123'), extra_number)
@run_with_all_backends
def test_invalid_phone_format(self):
extra_number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
with create_test_case(self.domain, 'participant', 'test1', drop_signals=False) as case:
case = self.set_case_property(case, 'contact_phone_number', '99987658765')
case = self.set_case_property(case, 'contact_phone_number_is_verified', '1')
self.assertIsNotNone(self.get_case_phone_number(case))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
case = self.set_case_property(case, 'contact_phone_number', 'xyz')
self.assertIsNone(self.get_case_phone_number(case))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
self.assertEqual(PhoneNumber.get_two_way_number('999123'), extra_number)
@run_with_all_backends
def test_phone_number_already_in_use(self):
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 0)
with create_test_case(self.domain, 'participant', 'test1', drop_signals=False) as case1, \
create_test_case(self.domain, 'participant', 'test2', drop_signals=False) as case2:
case1 = self.set_case_property(case1, 'contact_phone_number', '99987658765')
case1 = self.set_case_property(case1, 'contact_phone_number_is_verified', '1')
case2 = self.set_case_property(case2, 'contact_phone_number', '99987698769')
case2 = self.set_case_property(case2, 'contact_phone_number_is_verified', '1')
self.assertIsNotNone(self.get_case_phone_number(case1))
self.assertIsNotNone(self.get_case_phone_number(case2))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
case2 = self.set_case_property(case2, 'contact_phone_number', '99987658765')
self.assertIsNotNone(self.get_case_phone_number(case1))
self.assertIsNotNone(self.get_case_phone_number(case2))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
self.assertPhoneNumberDetails(case1, '99987658765', None, None, True, False, True)
self.assertPhoneNumberDetails(case2, '99987658765', None, None, False, False, False)
@run_with_all_backends
def test_multiple_entries(self):
extra_number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999124',
verified=False,
pending_verification=False,
is_two_way=False
)
with create_test_case(self.domain, 'participant', 'test1', drop_signals=False) as case:
case = self.set_case_property(case, 'contact_phone_number', '999124')
case = self.set_case_property(case, 'contact_phone_number_is_verified', '1')
case.create_phone_entry('999125')
self.assertEqual(PhoneNumber.objects.count(), 3)
sync_case_phone_number(case)
self.assertEqual(PhoneNumber.objects.count(), 2)
number1 = PhoneNumber.objects.get(pk=extra_number.pk)
self.assertEqual(number1.owner_id, 'X')
number2 = PhoneNumber.objects.get(owner_id=case.case_id)
self.assertTrue(number2.verified)
self.assertTrue(number2.is_two_way)
self.assertFalse(number2.pending_verification)
class SQLPhoneNumberTestCase(TestCase):
def setUp(self):
self.domain = 'sql-phone-number-test'
self.domain_obj = Domain(name=self.domain)
self.domain_obj.save()
def delete_objects(self, result):
for obj in result:
# Delete and clear cache
obj.delete()
def tearDown(self):
self.delete_objects(PhoneNumber.objects.filter(domain=self.domain))
self.delete_objects(SQLMobileBackend.objects.filter(domain=self.domain))
SQLMobileBackendMapping.objects.filter(domain=self.domain).delete()
self.domain_obj.delete()
def test_backend(self):
backend1 = SQLTestSMSBackend.objects.create(
hq_api_id=SQLTestSMSBackend.get_api_id(),
is_global=False,
domain=self.domain,
name='BACKEND1'
)
backend2 = SQLTestSMSBackend.objects.create(
hq_api_id=SQLTestSMSBackend.get_api_id(),
is_global=False,
domain=self.domain,
name='BACKEND2'
)
SQLMobileBackendMapping.set_default_domain_backend(self.domain, backend1)
number = PhoneNumber(domain=self.domain, phone_number='+999123')
self.assertEqual(number.backend, backend1)
number.backend_id = backend2.name
self.assertEqual(number.backend, backend2)
number.backend_id = ' '
self.assertEqual(number.backend, backend1)
@run_with_all_backends
def test_case_owner(self):
with create_test_case(self.domain, 'participant', 'test') as case:
number = PhoneNumber(owner_doc_type='CommCareCase', owner_id=case.case_id)
owner = number.owner
self.assertTrue(is_commcarecase(owner))
self.assertEqual(owner.case_id, case.case_id)
def test_user_owner(self):
mobile_user = CommCareUser.create(self.domain, 'abc', 'def', None, None)
number = PhoneNumber(owner_doc_type='CommCareUser', owner_id=mobile_user.get_id)
owner = number.owner
self.assertTrue(isinstance(owner, CommCareUser))
self.assertEqual(owner.get_id, mobile_user.get_id)
web_user = WebUser.create(self.domain, 'ghi', 'jkl', None, None)
number = PhoneNumber(owner_doc_type='WebUser', owner_id=web_user.get_id)
owner = number.owner
self.assertTrue(isinstance(owner, WebUser))
self.assertEqual(owner.get_id, web_user.get_id)
number = PhoneNumber(owner_doc_type='X')
self.assertIsNone(number.owner)
def test_get_two_way_number(self):
number1 = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=False,
pending_verification=False,
is_two_way=False
)
self.assertEqual(PhoneNumber.get_two_way_number('999123'), number1)
self.assertEqual(PhoneNumber.get_two_way_number('+999 123'), number1)
self.assertIsNone(PhoneNumber.get_two_way_number('999124'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999123'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999124'))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
# test cache clear on save
number1.phone_number = '999124'
number1.save()
self.assertIsNone(PhoneNumber.get_two_way_number('999123'))
self.assertEqual(PhoneNumber.get_two_way_number('999124'), number1)
self.assertIsNone(PhoneNumber.get_number_pending_verification('999123'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999124'))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
# test cache clear on delete
number1.delete()
self.assertIsNone(PhoneNumber.get_two_way_number('999123'))
self.assertIsNone(PhoneNumber.get_two_way_number('999124'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999123'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999124'))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
def test_get_number_pending_verification(self):
number1 = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=False,
pending_verification=True,
is_two_way=False
)
PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=False,
pending_verification=False,
is_two_way=False
)
self.assertIsNone(PhoneNumber.get_two_way_number('999123'))
self.assertIsNone(PhoneNumber.get_two_way_number('999124'))
self.assertEqual(PhoneNumber.get_number_pending_verification('999123'), number1)
self.assertEqual(PhoneNumber.get_number_pending_verification('+999 123'), number1)
self.assertIsNone(PhoneNumber.get_number_pending_verification('999124'))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
# test cache clear on save
number1.phone_number = '999124'
number1.save()
self.assertIsNone(PhoneNumber.get_two_way_number('999123'))
self.assertIsNone(PhoneNumber.get_two_way_number('999124'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999123'))
self.assertEqual(PhoneNumber.get_number_pending_verification('999124'), number1)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
# test promotion to two-way
number1.set_two_way()
number1.set_verified()
number1.save()
self.assertIsNone(PhoneNumber.get_two_way_number('999123'))
self.assertEqual(PhoneNumber.get_two_way_number('999124'), number1)
self.assertIsNone(PhoneNumber.get_number_pending_verification('999123'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999124'))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
# test cache clear on delete
number1.delete()
self.assertIsNone(PhoneNumber.get_two_way_number('999123'))
self.assertIsNone(PhoneNumber.get_two_way_number('999124'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999123'))
self.assertIsNone(PhoneNumber.get_number_pending_verification('999124'))
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
def test_suffix_lookup(self):
number1 = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
number2 = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999223',
verified=True,
pending_verification=False,
is_two_way=True
)
self.assertEqual(PhoneNumber.get_two_way_number_by_suffix('1 23'), number1)
self.assertEqual(PhoneNumber.get_two_way_number_by_suffix('2 23'), number2)
self.assertIsNone(PhoneNumber.get_two_way_number_by_suffix('23'))
# test update
number1.phone_number = '999124'
number1.save()
number2.phone_number = '999224'
number2.save()
self.assertIsNone(PhoneNumber.get_two_way_number_by_suffix('1 23'))
self.assertIsNone(PhoneNumber.get_two_way_number_by_suffix('2 23'))
self.assertEqual(PhoneNumber.get_two_way_number_by_suffix('124'), number1)
self.assertEqual(PhoneNumber.get_two_way_number_by_suffix('224'), number2)
def test_extensive_search(self):
number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
self.assertEqual(PhoneNumber.by_extensive_search('999123'), number)
self.assertEqual(PhoneNumber.by_extensive_search('0999123'), number)
self.assertEqual(PhoneNumber.by_extensive_search('00999123'), number)
self.assertEqual(PhoneNumber.by_extensive_search('000999123'), number)
self.assertEqual(PhoneNumber.by_extensive_search('123'), number)
self.assertIsNone(PhoneNumber.by_extensive_search('999124'))
def test_by_domain(self):
number1 = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
number2 = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999124',
verified=False,
pending_verification=False,
is_two_way=False
)
number3 = PhoneNumber.objects.create(
domain=self.domain + 'X',
owner_doc_type='X',
owner_id='X',
phone_number='999124',
verified=True,
pending_verification=False,
is_two_way=True
)
self.addCleanup(number3.delete)
self.assertEqual(
set(PhoneNumber.by_domain(self.domain)),
set([number1, number2])
)
self.assertEqual(
set(PhoneNumber.by_domain(self.domain, ids_only=True)),
set([number1.couch_id, number2.couch_id])
)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 2)
def test_by_owner_id(self):
number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='owner1',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
[lookup] = PhoneNumber.by_owner_id('owner1')
self.assertEqual(lookup, number)
# test cache clear
number.owner_id = 'owner2'
number.save()
self.assertEqual(PhoneNumber.by_owner_id('owner1'), [])
[lookup] = PhoneNumber.by_owner_id('owner2')
self.assertEqual(lookup, number)
number.verified = False
number.is_two_way = False
number.save()
[lookup] = PhoneNumber.by_owner_id('owner2')
self.assertFalse(lookup.verified)
self.assertFalse(lookup.is_two_way)
def create_case_contact(self, phone_number):
return create_test_case(
self.domain,
'participant',
'test',
case_properties={
'contact_phone_number': phone_number,
'contact_phone_number_is_verified': '1',
},
drop_signals=False
)
@run_with_all_backends
def test_delete_phone_numbers_for_owners(self):
with self.create_case_contact('9990001') as case1, \
self.create_case_contact('9990002') as case2, \
self.create_case_contact('9990003') as case3:
self.assertEqual(len(PhoneNumber.by_owner_id(case1.case_id)), 1)
self.assertEqual(len(PhoneNumber.by_owner_id(case2.case_id)), 1)
self.assertEqual(len(PhoneNumber.by_owner_id(case3.case_id)), 1)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 3)
delete_phone_numbers_for_owners([case2.case_id, case3.case_id])
self.assertEqual(len(PhoneNumber.by_owner_id(case1.case_id)), 1)
self.assertEqual(len(PhoneNumber.by_owner_id(case2.case_id)), 0)
self.assertEqual(len(PhoneNumber.by_owner_id(case3.case_id)), 0)
self.assertEqual(PhoneNumber.count_by_domain(self.domain), 1)
def test_verify_uniqueness(self):
number1 = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
number2 = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='X',
phone_number='999123',
verified=False,
pending_verification=False,
is_two_way=False
)
# Raises no exception
number1.verify_uniqueness()
# Raises PhoneNumberInUseException
with self.assertRaises(PhoneNumberInUseException):
number2.verify_uniqueness()
class TestUserPhoneNumberSync(TestCase):
def setUp(self):
self.domain = 'user-phone-number-test'
self.domain_obj = Domain(name=self.domain)
self.domain_obj.save()
self.mobile_worker1 = CommCareUser.create(self.domain, 'mobile1', 'mobile1', None, None)
self.mobile_worker2 = CommCareUser.create(self.domain, 'mobile2', 'mobile2', None, None)
def tearDown(self):
delete_domain_phone_numbers(self.domain)
self.domain_obj.delete()
def assertPhoneEntries(self, user, phone_numbers):
entries = user.get_phone_entries()
self.assertEqual(len(entries), len(phone_numbers))
self.assertEqual(set(entries.keys()), set(phone_numbers))
def testSync(self):
extra_number = PhoneNumber.objects.create(
domain=self.domain,
owner_doc_type='X',
owner_id='owner1',
phone_number='999123',
verified=True,
pending_verification=False,
is_two_way=True
)
user = self.mobile_worker1
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 1)
user.phone_numbers = ['9990001']
user.save()
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 2)
self.assertPhoneEntries(user, ['9990001'])
before = user.get_phone_entries()['9990001']
user.phone_numbers = ['9990001', '9990002']
user.save()
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 3)
self.assertPhoneEntries(user, ['9990001', '9990002'])
after = user.get_phone_entries()['9990001']
self.assertEqual(before.pk, after.pk)
user.phone_numbers = ['9990002']
user.save()
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 2)
self.assertPhoneEntries(user, ['9990002'])
self.assertEqual(PhoneNumber.get_two_way_number('999123'), extra_number)
def testRetire(self):
self.mobile_worker1.phone_numbers = ['9990001']
self.mobile_worker1.save()
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 1)
self.assertPhoneEntries(self.mobile_worker1, ['9990001'])
self.mobile_worker2.phone_numbers = ['9990002']
self.mobile_worker2.save()
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 2)
self.assertPhoneEntries(self.mobile_worker2, ['9990002'])
self.mobile_worker1.retire(deleted_by=None)
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 1)
self.assertPhoneEntries(self.mobile_worker2, ['9990002'])
class TestGenericContactMethods(TestCase):
def setUp(self):
self.domain = 'contact-phone-number-test'
self.domain_obj = Domain(name=self.domain)
self.domain_obj.save()
self.mobile_worker1 = CommCareUser.create(self.domain, 'mobile1', 'mobile1', None, None)
self.mobile_worker2 = CommCareUser.create(self.domain, 'mobile2', 'mobile2', None, None)
def tearDown(self):
delete_domain_phone_numbers(self.domain)
self.domain_obj.delete()
def testGetOrCreate(self):
before = self.mobile_worker1.get_or_create_phone_entry('999123')
self.assertEqual(before.owner_doc_type, 'CommCareUser')
self.assertEqual(before.owner_id, self.mobile_worker1.get_id)
self.assertEqual(before.phone_number, '999123')
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 1)
after = self.mobile_worker1.get_or_create_phone_entry('999123')
self.assertEqual(before.pk, after.pk)
self.assertEqual(after.owner_doc_type, 'CommCareUser')
self.assertEqual(after.owner_id, self.mobile_worker1.get_id)
self.assertEqual(after.phone_number, '999123')
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 1)
def testGetPhoneEntries(self):
number1 = self.mobile_worker1.get_or_create_phone_entry('999123')
number2 = self.mobile_worker1.get_or_create_phone_entry('999124')
self.mobile_worker1.get_or_create_phone_entry('999125')
number4 = self.mobile_worker2.get_or_create_phone_entry('999126')
number1.set_two_way()
number2.set_pending_verification()
number4.set_two_way()
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 4)
entries = self.mobile_worker1.get_phone_entries()
self.assertEqual(set(entries.keys()), set(['999123', '999124', '999125']))
entries = self.mobile_worker1.get_two_way_numbers()
self.assertEqual(set(entries.keys()), set(['999123']))
def testDelete(self):
self.mobile_worker1.get_or_create_phone_entry('999123')
self.mobile_worker1.get_or_create_phone_entry('999124')
self.mobile_worker1.get_or_create_phone_entry('999125')
self.mobile_worker2.get_or_create_phone_entry('999126')
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 4)
self.mobile_worker1.delete_phone_entry('999124')
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 3)
entries = self.mobile_worker1.get_phone_entries()
self.assertEqual(set(entries.keys()), set(['999123', '999125']))
entries = self.mobile_worker2.get_phone_entries()
self.assertEqual(set(entries.keys()), set(['999126']))
def testUserSyncNoChange(self):
before = self.mobile_worker1.get_or_create_phone_entry('999123')
before.set_two_way()
before.set_verified()
before.save()
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 1)
self.mobile_worker1.phone_numbers = ['999123']
self.mobile_worker1.save()
self.assertEqual(PhoneNumber.by_domain(self.domain).count(), 1)
after = self.mobile_worker1.get_phone_entries()['999123']
self.assertEqual(before.pk, after.pk)
self.assertTrue(after.is_two_way)
self.assertTrue(after.verified)
self.assertFalse(after.pending_verification)
| 41.17485 | 111 | 0.666647 | [
"BSD-3-Clause"
] | AliRizvi1/commcare-hq | corehq/apps/sms/tests/test_phone_numbers.py | 34,381 | Python |
from hash_map_base_class import *
class ProbeHashMap(HashMapBase):
"""Hash map implemented with linear probing for collision resolution."""
    _AVAIL = object()               # sentinel marks locations of previous deletions
def _is_available(self,j):
"""Return True if the index j is available in the table."""
return self._table[j] is None or self._table[j] is ProbeHashMap._AVAIL
def _find_slot(self,j,k):
"""Search for key k in bucket at index j.
Return (success, index) tuple, described as follows:
If match was found, success is True and index denotes its location.
If no match found, success is False and index denotes first available slot.
"""
firstAvail = None
while True:
if self._is_available(j):
if firstAvail is None:
firstAvail = j # mark this as first avail
if self._table[j] is None:
return (False, firstAvail) # search has failed
elif k == self._table[j]._key:
return (True,j) # found a match
j = (j+1)%len(self._table) # keep looking (cyclically)
def _bucket_getitem(self,j,k):
found,s = self._find_slot(j,k)
if not found:
raise KeyError("Key Error: " + repr(k)) # no match found
return self._table[s]._value
def _bucket_setitem(self,j,k,v):
found,s = self._find_slot(j,k)
if not found:
self._table[s] = self._Item(k,v) # insert new item
self._n += 1 # size has increased
else:
self._table[s]._value = v # overwrite existing
def _bucket_delitem(self,j,k):
found,s = self._find_slot(j,k)
if not found:
raise KeyError("Key Error: " + repr(k)) # no match found
self._table[s] = ProbeHashMap._AVAIL # mark as vacated
def __iter__(self):
for j in range(len(self._table)): # scan entire table
if not self._is_available(j):
yield self._table[j]._key
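# Example usage (a minimal sketch): HashMapBase from hash_map_base_class is assumed
# to supply the _Item record and to route __setitem__ / __getitem__ / __delitem__
# through the _bucket_* methods above, so the map behaves like a dict.
#
#   m = ProbeHashMap()
#   m['alpha'] = 1       # insert via _bucket_setitem
#   m['alpha'] = 2       # overwrite the existing item
#   print(m['alpha'])    # -> 2
#   del m['alpha']       # slot is marked with the _AVAIL sentinel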
| 40.481481 | 83 | 0.558097 | [
"MIT"
] | ahammadshawki8/DSA-Implementations-in-Python | CHAPTER 10 (maps, hash tables and skip lists)/probe_hash_map_class.py | 2,186 | Python |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('filer', '0009_auto_20171220_1635'),
]
operations = [
migrations.AlterField(
model_name='image',
name='file_ptr',
field=models.OneToOneField(primary_key=True, serialize=False, related_name='filer_image_file', parent_link=True, to='filer.File', on_delete=models.CASCADE),
),
]
| 25.95 | 168 | 0.653179 | [
"BSD-3-Clause"
] | ip-dolganov/django-filer2.0 | filer/migrations/0010_auto_20180414_2058.py | 519 | Python |
import numpy as np
from plotly.subplots import make_subplots
import plotly.graph_objects as go
class BrownianStockSimulator:
plot_title = "Simulated White noise, Brownian Motion and Stock Price"
plotly_template = "plotly_white"
plot_width = 1500
plot_height = 1000
def __init__(self, time_horizon, steps_count, sigma):
self.time_horizon = time_horizon
self.steps_count = steps_count
self.sigma = sigma
self.sampling_points = self.time_horizon * self.steps_count
self.dt = self.time_horizon / self.sampling_points
self.time_grid = self._get_time_grid()
def _get_time_grid(self):
time_grid = np.arange(0, self.time_horizon + self.dt, self.dt)
return time_grid
def _get_white_noise(self):
white_noise = np.sqrt(self.dt) * np.random.normal(
loc=0, scale=1.0, size=self.sampling_points
)
return white_noise
def _get_brownian_motion(self, white_noise):
brownian_motion = np.cumsum(white_noise)
brownian_motion = np.append(0, brownian_motion)
return brownian_motion
def _get_stock_price(self, init_stock_price):
output = (
self.sigma * self.brownian_motion - 0.5 * self.sigma ** 2 * self.time_grid
)
return init_stock_price * np.exp(output)
def simulate(self, init_stock_price, random_seed=42):
np.random.seed(random_seed)
self.white_noise = self._get_white_noise()
self.brownian_motion = self._get_brownian_motion(self.white_noise)
self.price = self._get_stock_price(init_stock_price)
def plot(self):
fig = make_subplots(rows=3, cols=1)
fig.append_trace(
go.Scatter(x=self.time_grid, y=self.white_noise, name="White Noise"),
row=1,
col=1,
        )
fig.append_trace(
go.Scatter(
x=self.time_grid, y=self.brownian_motion, name="Brownian Motion"
),
row=2,
col=1,
        )
fig.append_trace(
go.Scatter(x=self.time_grid, y=self.price, name="Stock Price"), row=3, col=1
)
fig.update_layout(
height=self.plot_height,
width=self.plot_width,
title_text=self.plot_title,
template=self.plotly_template,
)
fig.show()
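# Example usage (a minimal sketch; the parameter values below are purely illustrative):
#
#   simulator = BrownianStockSimulator(time_horizon=1, steps_count=252, sigma=0.2)
#   simulator.simulate(init_stock_price=100.0, random_seed=42)
#   simulator.plot()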
| 33.408451 | 88 | 0.63027 | [
"MIT"
] | coinflip112/stoch_models_for_finance | brownian_stock_simulator.py | 2,372 | Python |
"""Tests for solver module
"""
# from mathgrid import solver
from mathgrid import solver
def test_calculator_01():
assert solver.calculator('=((1+3)*2)/(6-4)') == 4
assert solver.calculator('((1+3)*2)/(6-4)') == '((1+3)*2)/(6-4)'
assert solver.calculator('=hola') == 'hola'
| 24.083333 | 68 | 0.619377 | [
"BSD-2-Clause"
] | logiflo/mathgrid | tests/test_solver.py | 289 | Python |
"""
Tests for various datasette helper functions.
"""
from datasette.app import Datasette
from datasette import utils
from datasette.utils.asgi import Request
from datasette.utils.sqlite import sqlite3
import json
import os
import pathlib
import pytest
import tempfile
from unittest.mock import patch
@pytest.mark.parametrize(
"path,expected",
[
("foo", ["foo"]),
("foo,bar", ["foo", "bar"]),
("123,433,112", ["123", "433", "112"]),
("123~2C433,112", ["123,433", "112"]),
("123~2F433~2F112", ["123/433/112"]),
],
)
def test_urlsafe_components(path, expected):
assert expected == utils.urlsafe_components(path)
@pytest.mark.parametrize(
"path,added_args,expected",
[
("/foo", {"bar": 1}, "/foo?bar=1"),
("/foo?bar=1", {"baz": 2}, "/foo?bar=1&baz=2"),
("/foo?bar=1&bar=2", {"baz": 3}, "/foo?bar=1&bar=2&baz=3"),
("/foo?bar=1", {"bar": None}, "/foo"),
# Test order is preserved
(
"/?_facet=prim_state&_facet=area_name",
(("prim_state", "GA"),),
"/?_facet=prim_state&_facet=area_name&prim_state=GA",
),
(
"/?_facet=state&_facet=city&state=MI",
(("city", "Detroit"),),
"/?_facet=state&_facet=city&state=MI&city=Detroit",
),
(
"/?_facet=state&_facet=city",
(("_facet", "planet_int"),),
"/?_facet=state&_facet=city&_facet=planet_int",
),
],
)
def test_path_with_added_args(path, added_args, expected):
request = Request.fake(path)
actual = utils.path_with_added_args(request, added_args)
assert expected == actual
@pytest.mark.parametrize(
"path,args,expected",
[
("/foo?bar=1", {"bar"}, "/foo"),
("/foo?bar=1&baz=2", {"bar"}, "/foo?baz=2"),
("/foo?bar=1&bar=2&bar=3", {"bar": "2"}, "/foo?bar=1&bar=3"),
],
)
def test_path_with_removed_args(path, args, expected):
request = Request.fake(path)
actual = utils.path_with_removed_args(request, args)
assert expected == actual
# Run the test again but this time use the path= argument
request = Request.fake("/")
actual = utils.path_with_removed_args(request, args, path=path)
assert expected == actual
@pytest.mark.parametrize(
"path,args,expected",
[
("/foo?bar=1", {"bar": 2}, "/foo?bar=2"),
("/foo?bar=1&baz=2", {"bar": None}, "/foo?baz=2"),
],
)
def test_path_with_replaced_args(path, args, expected):
request = Request.fake(path)
actual = utils.path_with_replaced_args(request, args)
assert expected == actual
@pytest.mark.parametrize(
"row,pks,expected_path",
[
({"A": "foo", "B": "bar"}, ["A", "B"], "foo,bar"),
({"A": "f,o", "B": "bar"}, ["A", "B"], "f~2Co,bar"),
({"A": 123}, ["A"], "123"),
(
utils.CustomRow(
["searchable_id", "tag"],
[
("searchable_id", {"value": 1, "label": "1"}),
("tag", {"value": "feline", "label": "feline"}),
],
),
["searchable_id", "tag"],
"1,feline",
),
],
)
def test_path_from_row_pks(row, pks, expected_path):
actual_path = utils.path_from_row_pks(row, pks, False)
assert expected_path == actual_path
@pytest.mark.parametrize(
"obj,expected",
[
(
{
"Description": "Soft drinks",
"Picture": b"\x15\x1c\x02\xc7\xad\x05\xfe",
"CategoryID": 1,
},
"""
{"CategoryID": 1, "Description": "Soft drinks", "Picture": {"$base64": true, "encoded": "FRwCx60F/g=="}}
""".strip(),
)
],
)
def test_custom_json_encoder(obj, expected):
actual = json.dumps(obj, cls=utils.CustomJSONEncoder, sort_keys=True)
assert expected == actual
@pytest.mark.parametrize(
"bad_sql",
[
"update blah;",
"-- sql comment to skip\nupdate blah;",
"update blah set some_column='# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown",
"PRAGMA case_sensitive_like = true",
"SELECT * FROM pragma_not_on_allow_list('idx52')",
],
)
def test_validate_sql_select_bad(bad_sql):
with pytest.raises(utils.InvalidSql):
utils.validate_sql_select(bad_sql)
@pytest.mark.parametrize(
"good_sql",
[
"select count(*) from airports",
"select foo from bar",
"--sql comment to skip\nselect foo from bar",
"select '# Hello there\n\n* This is a list\n* of items\n--\n[And a link](https://github.com/simonw/datasette-render-markdown).'\nas demo_markdown",
"select 1 + 1",
"explain select 1 + 1",
"explain\nselect 1 + 1",
"explain query plan select 1 + 1",
"explain query plan\nselect 1 + 1",
"SELECT\nblah FROM foo",
"WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;",
"explain WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;",
"explain query plan WITH RECURSIVE cnt(x) AS (SELECT 1 UNION ALL SELECT x+1 FROM cnt LIMIT 10) SELECT x FROM cnt;",
"SELECT * FROM pragma_index_info('idx52')",
"select * from pragma_table_xinfo('table')",
],
)
def test_validate_sql_select_good(good_sql):
utils.validate_sql_select(good_sql)
@pytest.mark.parametrize("open_quote,close_quote", [('"', '"'), ("[", "]")])
def test_detect_fts(open_quote, close_quote):
sql = """
CREATE TABLE "Dumb_Table" (
"TreeID" INTEGER,
"qSpecies" TEXT
);
CREATE TABLE "Street_Tree_List" (
"TreeID" INTEGER,
"qSpecies" TEXT,
"qAddress" TEXT,
"SiteOrder" INTEGER,
"qSiteInfo" TEXT,
"PlantType" TEXT,
"qCaretaker" TEXT
);
CREATE VIEW Test_View AS SELECT * FROM Dumb_Table;
CREATE VIRTUAL TABLE {open}Street_Tree_List_fts{close} USING FTS4 ("qAddress", "qCaretaker", "qSpecies", content={open}Street_Tree_List{close});
CREATE VIRTUAL TABLE r USING rtree(a, b, c);
""".format(
open=open_quote, close=close_quote
)
conn = utils.sqlite3.connect(":memory:")
conn.executescript(sql)
assert None is utils.detect_fts(conn, "Dumb_Table")
assert None is utils.detect_fts(conn, "Test_View")
assert None is utils.detect_fts(conn, "r")
assert "Street_Tree_List_fts" == utils.detect_fts(conn, "Street_Tree_List")
@pytest.mark.parametrize("table", ("regular", "has'single quote"))
def test_detect_fts_different_table_names(table):
sql = """
CREATE TABLE [{table}] (
"TreeID" INTEGER,
"qSpecies" TEXT
);
CREATE VIRTUAL TABLE [{table}_fts] USING FTS4 ("qSpecies", content="{table}");
""".format(
table=table
)
conn = utils.sqlite3.connect(":memory:")
conn.executescript(sql)
assert "{table}_fts".format(table=table) == utils.detect_fts(conn, table)
@pytest.mark.parametrize(
"url,expected",
[
("http://www.google.com/", True),
("https://example.com/", True),
("www.google.com", False),
("http://www.google.com/ is a search engine", False),
],
)
def test_is_url(url, expected):
assert expected == utils.is_url(url)
@pytest.mark.parametrize(
"s,expected",
[
("simple", "simple"),
("MixedCase", "MixedCase"),
("-no-leading-hyphens", "no-leading-hyphens-65bea6"),
("_no-leading-underscores", "no-leading-underscores-b921bc"),
("no spaces", "no-spaces-7088d7"),
("-", "336d5e"),
("no $ characters", "no--characters-59e024"),
],
)
def test_to_css_class(s, expected):
assert expected == utils.to_css_class(s)
def test_temporary_docker_directory_uses_hard_link():
with tempfile.TemporaryDirectory() as td:
os.chdir(td)
with open("hello", "w") as fp:
fp.write("world")
# Default usage of this should use symlink
with utils.temporary_docker_directory(
files=["hello"],
name="t",
metadata=None,
extra_options=None,
branch=None,
template_dir=None,
plugins_dir=None,
static=[],
install=[],
spatialite=False,
version_note=None,
secret="secret",
) as temp_docker:
hello = os.path.join(temp_docker, "hello")
with open(hello) as fp:
assert "world" == fp.read()
# It should be a hard link
assert 2 == os.stat(hello).st_nlink
@patch("os.link")
def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
# Copy instead if os.link raises OSError (normally due to different device)
mock_link.side_effect = OSError
with tempfile.TemporaryDirectory() as td:
os.chdir(td)
with open("hello", "w") as fp:
fp.write("world")
# Default usage of this should use symlink
with utils.temporary_docker_directory(
files=["hello"],
name="t",
metadata=None,
extra_options=None,
branch=None,
template_dir=None,
plugins_dir=None,
static=[],
install=[],
spatialite=False,
version_note=None,
secret=None,
) as temp_docker:
hello = os.path.join(temp_docker, "hello")
with open(hello) as fp:
assert "world" == fp.read()
# It should be a copy, not a hard link
assert 1 == os.stat(hello).st_nlink
def test_temporary_docker_directory_quotes_args():
with tempfile.TemporaryDirectory() as td:
os.chdir(td)
with open("hello", "w") as fp:
fp.write("world")
with utils.temporary_docker_directory(
files=["hello"],
name="t",
metadata=None,
extra_options="--$HOME",
branch=None,
template_dir=None,
plugins_dir=None,
static=[],
install=[],
spatialite=False,
version_note="$PWD",
secret="secret",
) as temp_docker:
df = os.path.join(temp_docker, "Dockerfile")
with open(df) as fp:
df_contents = fp.read()
assert "'$PWD'" in df_contents
assert "'--$HOME'" in df_contents
assert "ENV DATASETTE_SECRET 'secret'" in df_contents
def test_compound_keys_after_sql():
assert "((a > :p0))" == utils.compound_keys_after_sql(["a"])
assert """
((a > :p0)
or
(a = :p0 and b > :p1))
""".strip() == utils.compound_keys_after_sql(
["a", "b"]
)
assert """
((a > :p0)
or
(a = :p0 and b > :p1)
or
(a = :p0 and b = :p1 and c > :p2))
""".strip() == utils.compound_keys_after_sql(
["a", "b", "c"]
)
async def table_exists(table):
return table == "exists.csv"
@pytest.mark.asyncio
@pytest.mark.parametrize(
"table_and_format,expected_table,expected_format",
[
("blah", "blah", None),
("blah.csv", "blah", "csv"),
("blah.json", "blah", "json"),
("blah.baz", "blah.baz", None),
("exists.csv", "exists.csv", None),
],
)
async def test_resolve_table_and_format(
table_and_format, expected_table, expected_format
):
actual_table, actual_format = await utils.resolve_table_and_format(
table_and_format, table_exists, ["json"]
)
assert expected_table == actual_table
assert expected_format == actual_format
def test_table_columns():
conn = sqlite3.connect(":memory:")
conn.executescript(
"""
create table places (id integer primary key, name text, bob integer)
"""
)
assert ["id", "name", "bob"] == utils.table_columns(conn, "places")
@pytest.mark.parametrize(
"path,format,extra_qs,expected",
[
("/foo?sql=select+1", "csv", {}, "/foo.csv?sql=select+1"),
("/foo?sql=select+1", "json", {}, "/foo.json?sql=select+1"),
("/foo/bar", "json", {}, "/foo/bar.json"),
("/foo/bar", "csv", {}, "/foo/bar.csv"),
("/foo/bar", "csv", {"_dl": 1}, "/foo/bar.csv?_dl=1"),
(
"/sf-trees/Street_Tree_List?_search=cherry&_size=1000",
"csv",
{"_dl": 1},
"/sf-trees/Street_Tree_List.csv?_search=cherry&_size=1000&_dl=1",
),
],
)
def test_path_with_format(path, format, extra_qs, expected):
request = Request.fake(path)
actual = utils.path_with_format(request=request, format=format, extra_qs=extra_qs)
assert expected == actual
@pytest.mark.parametrize(
"bytes,expected",
[
(120, "120 bytes"),
(1024, "1.0 KB"),
(1024 * 1024, "1.0 MB"),
(1024 * 1024 * 1024, "1.0 GB"),
(1024 * 1024 * 1024 * 1.3, "1.3 GB"),
(1024 * 1024 * 1024 * 1024, "1.0 TB"),
],
)
def test_format_bytes(bytes, expected):
assert expected == utils.format_bytes(bytes)
@pytest.mark.parametrize(
"query,expected",
[
("dog", '"dog"'),
("cat,", '"cat,"'),
("cat dog", '"cat" "dog"'),
# If a phrase is already double quoted, leave it so
('"cat dog"', '"cat dog"'),
('"cat dog" fish', '"cat dog" "fish"'),
# Sensibly handle unbalanced double quotes
('cat"', '"cat"'),
('"cat dog" "fish', '"cat dog" "fish"'),
],
)
def test_escape_fts(query, expected):
assert expected == utils.escape_fts(query)
@pytest.mark.parametrize(
"input,expected",
[
("dog", "dog"),
('dateutil_parse("1/2/2020")', r"dateutil_parse(\0000221/2/2020\000022)"),
("this\r\nand\r\nthat", r"this\00000Aand\00000Athat"),
],
)
def test_escape_css_string(input, expected):
assert expected == utils.escape_css_string(input)
def test_check_connection_spatialite_raises():
path = str(pathlib.Path(__file__).parent / "spatialite.db")
conn = sqlite3.connect(path)
with pytest.raises(utils.SpatialiteConnectionProblem):
utils.check_connection(conn)
def test_check_connection_passes():
conn = sqlite3.connect(":memory:")
utils.check_connection(conn)
def test_call_with_supported_arguments():
def foo(a, b):
return f"{a}+{b}"
assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2)
assert "1+2" == utils.call_with_supported_arguments(foo, a=1, b=2, c=3)
with pytest.raises(TypeError):
utils.call_with_supported_arguments(foo, a=1)
@pytest.mark.parametrize(
"data,should_raise",
[
([["foo", "bar"], ["foo", "baz"]], False),
([("foo", "bar"), ("foo", "baz")], False),
((["foo", "bar"], ["foo", "baz"]), False),
([["foo", "bar"], ["foo", "baz", "bax"]], True),
({"foo": ["bar", "baz"]}, False),
({"foo": ("bar", "baz")}, False),
({"foo": "bar"}, True),
],
)
def test_multi_params(data, should_raise):
if should_raise:
with pytest.raises(AssertionError):
utils.MultiParams(data)
return
p1 = utils.MultiParams(data)
assert "bar" == p1["foo"]
assert ["bar", "baz"] == list(p1.getlist("foo"))
@pytest.mark.parametrize(
"actor,allow,expected",
[
# Default is to allow:
(None, None, True),
# {} means deny-all:
(None, {}, False),
({"id": "root"}, {}, False),
# true means allow-all
({"id": "root"}, True, True),
(None, True, True),
# false means deny-all
({"id": "root"}, False, False),
(None, False, False),
# Special case for "unauthenticated": true
(None, {"unauthenticated": True}, True),
(None, {"unauthenticated": False}, False),
# Match on just one property:
(None, {"id": "root"}, False),
({"id": "root"}, None, True),
({"id": "simon", "staff": True}, {"staff": True}, True),
({"id": "simon", "staff": False}, {"staff": True}, False),
# Special "*" value for any key:
({"id": "root"}, {"id": "*"}, True),
({}, {"id": "*"}, False),
({"name": "root"}, {"id": "*"}, False),
# Supports single strings or list of values:
({"id": "root"}, {"id": "bob"}, False),
({"id": "root"}, {"id": ["bob"]}, False),
({"id": "root"}, {"id": "root"}, True),
({"id": "root"}, {"id": ["root"]}, True),
# Any matching role will work:
({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["staff"]}, True),
({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["dev"]}, True),
({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["otter"]}, False),
({"id": "garry", "roles": ["staff", "dev"]}, {"roles": ["dev", "otter"]}, True),
({"id": "garry", "roles": []}, {"roles": ["staff"]}, False),
({"id": "garry"}, {"roles": ["staff"]}, False),
# Any single matching key works:
({"id": "root"}, {"bot_id": "my-bot", "id": ["root"]}, True),
],
)
def test_actor_matches_allow(actor, allow, expected):
assert expected == utils.actor_matches_allow(actor, allow)
@pytest.mark.parametrize(
"config,expected",
[
({"foo": "bar"}, {"foo": "bar"}),
({"$env": "FOO"}, "x"),
({"k": {"$env": "FOO"}}, {"k": "x"}),
([{"k": {"$env": "FOO"}}, {"z": {"$env": "FOO"}}], [{"k": "x"}, {"z": "x"}]),
({"k": [{"in_a_list": {"$env": "FOO"}}]}, {"k": [{"in_a_list": "x"}]}),
],
)
def test_resolve_env_secrets(config, expected):
assert expected == utils.resolve_env_secrets(config, {"FOO": "x"})
@pytest.mark.parametrize(
"actor,expected",
[
({"id": "blah"}, "blah"),
({"id": "blah", "login": "l"}, "l"),
({"id": "blah", "login": "l"}, "l"),
({"id": "blah", "login": "l", "username": "u"}, "u"),
({"login": "l", "name": "n"}, "n"),
(
{"id": "blah", "login": "l", "username": "u", "name": "n", "display": "d"},
"d",
),
({"weird": "shape"}, "{'weird': 'shape'}"),
],
)
def test_display_actor(actor, expected):
assert expected == utils.display_actor(actor)
@pytest.mark.asyncio
@pytest.mark.parametrize(
"dbs,expected_path",
[
(["one_table"], "/one/one"),
(["two_tables"], "/two"),
(["one_table", "two_tables"], "/"),
],
)
async def test_initial_path_for_datasette(tmp_path_factory, dbs, expected_path):
db_dir = tmp_path_factory.mktemp("dbs")
one_table = str(db_dir / "one.db")
sqlite3.connect(one_table).execute("create table one (id integer primary key)")
two_tables = str(db_dir / "two.db")
sqlite3.connect(two_tables).execute("create table two (id integer primary key)")
sqlite3.connect(two_tables).execute("create table three (id integer primary key)")
datasette = Datasette(
[{"one_table": one_table, "two_tables": two_tables}[db] for db in dbs]
)
path = await utils.initial_path_for_datasette(datasette)
assert path == expected_path
@pytest.mark.parametrize(
"content,expected",
(
("title: Hello", {"title": "Hello"}),
('{"title": "Hello"}', {"title": "Hello"}),
("{{ this }} is {{ bad }}", None),
),
)
def test_parse_metadata(content, expected):
if expected is None:
with pytest.raises(utils.BadMetadataError):
utils.parse_metadata(content)
else:
assert utils.parse_metadata(content) == expected
@pytest.mark.asyncio
@pytest.mark.parametrize(
"sql,expected",
(
("select 1", []),
("select 1 + :one", ["one"]),
("select 1 + :one + :two", ["one", "two"]),
("select 'bob' || '0:00' || :cat", ["cat"]),
("select this is invalid :one, :two, :three", ["one", "two", "three"]),
),
)
async def test_derive_named_parameters(sql, expected):
ds = Datasette([], memory=True)
db = ds.get_database("_memory")
params = await utils.derive_named_parameters(db, sql)
assert params == expected
@pytest.mark.parametrize(
"original,expected",
(
("abc", "abc"),
("/foo/bar", "~2Ffoo~2Fbar"),
("/-/bar", "~2F-~2Fbar"),
("-/db-/table.csv", "-~2Fdb-~2Ftable~2Ecsv"),
(r"%~-/", "~25~7E-~2F"),
("~25~7E~2D~2F", "~7E25~7E7E~7E2D~7E2F"),
),
)
def test_tilde_encoding(original, expected):
actual = utils.tilde_encode(original)
assert actual == expected
# And test round-trip
assert original == utils.tilde_decode(actual)
| 31.74732 | 176 | 0.556268 | [
"Apache-2.0"
] | chriswedgwood/datasette | tests/test_utils.py | 20,731 | Python |
import pgzero
import pgzrun
import random
from pgzero.actor import Actor
__all__ = ["pgzrun", "pgzero"]
from pgzero.clock import clock
from pgzero.keyboard import keyboard
from pgzero.loaders import sounds
clouds = [Actor('cloud1', (200, 200)),
Actor('cloud2', (400, 300)),
Actor('cloud3', (600, 200)),
Actor('cloud1', (800, 300))]
obstacles = [Actor('cactus', (random.randint(900, 1000), 495)),
Actor('cactus', (random.randint(1200, 1500), 495)),
Actor('cactus', (random.randint(1500, 2000), 495))]
player = Actor('p3_stand', (100, 484))
# 0 - game not started
# 1 - game just started
# 2 - finished
game = 0
# frame that is currently running
frame = 0
# player movement speed and direction
jump = 0
# 0 - jump is available
# 1 - jump is forbidden
jump_blocked = 0
cloud_speed = 2
game_time = 0
# cactus movement speed
game_speed = 8
# 0 - space key input is accepted
# 1 - space key input is blocked (briefly, right after game over)
jump_unblocked = 0
def draw():
global game
screen.clear()
screen.fill('#cff4f7')
for i in range((screen.width // 70) + 1):
screen.blit('grass', (i * 70, screen.height - 70))
for cloud in clouds:
cloud.draw()
for obstacle in obstacles:
obstacle.draw()
screen.draw.text(
align_text_time(game_time),
midright=(screen.width - 50, 50),
fontname="roboto_mono_bold",
color="orange",
fontsize=45
)
player.draw()
if game == 0:
screen.draw.text(
"Wcisnij spacje",
center=(screen.width / 2, screen.height / 2),
color="orange",
fontsize=60
)
if game == 2:
screen.draw.text(
"Koniec gry",
center=(screen.width / 2, screen.height / 2),
color="red",
fontsize=60
)
screen.draw.text(
"Wcisnij spacje aby zagrac jeszcze raz",
center=(screen.width / 2, screen.height - 200),
color="red",
fontsize=30
)
def update():
global game
global jump
global jump_blocked
global jump_unblocked
if keyboard.SPACE and jump_unblocked == 0:
if game == 0 or game == 2:
jump_blocked = 1
clock.schedule_unique(unblock_jump, 0.3)
reset()
game = 1
if jump_blocked == 0:
jump = -18
jump_blocked = 1
sounds.jingles_jump.play()
animation()
jump_fall()
move_cloud()
move_obstacle()
check_collision()
# change difficulty level, increase game and clouds speed
def change_difficulty_level():
global game_speed
global cloud_speed
if game_speed < 16:
game_speed += 1
cloud_speed += 1
# reset global variables
def reset():
global frame
global game
global jump
global jump_blocked
global cloud_speed
global game_speed
global game_time
if game == 2:
frame = 0
game = 0
jump = 0
jump_blocked = 1
cloud_speed = 2
game_speed = 8
game_time = 0
player.pos = (100, 484)
clouds[0].pos = (200, 200)
clouds[1].pos = (400, 300)
clouds[2].pos = (600, 200)
clouds[3].pos = (800, 300)
obstacles[0].pos = (random.randint(900, 1000), 495)
obstacles[1].pos = (random.randint(1200, 1500), 495)
obstacles[2].pos = (random.randint(1500, 2000), 495)
clock.unschedule(change_difficulty_level)
# change difficulty level every 20s
clock.schedule_interval(change_difficulty_level, 20)
def unblock_game():
global jump_unblocked
jump_unblocked = 0
# check collision with cactus
def check_collision():
global game
global jump_unblocked
if game == 1:
for i in obstacles:
if player.collidepoint(i.x, i.y):
game = 2
sounds.jingles_end.play()
jump_unblocked = 1
# unblock game in 2 sec
clock.schedule_unique(unblock_game, 2.0)
def move_obstacle():
global game_speed
global game
if game == 1:
for i in range(len(obstacles)):
            # move every obstacle left by the current game speed
obstacles[i].x -= game_speed
            # if an obstacle has left the screen, give it a new random position off the right edge
if obstacles[i].x + 35 < 0:
obstacles[i].x = random.randint(900, 1500)
            # if an obstacle overlaps or is too close to another one, shift it right by 400
            for j in range(0, len(obstacles)):
                if j != i and abs(obstacles[i].x - obstacles[j].x) < 300:
obstacles[i].x += 400
# triggered every 0.1s; increments the game time counter by 1
def measure_time():
global game_time
global game
if game == 0:
game_time = 0
elif game == 1:
        game_time += 1
def align_text_time(time):
text = "0" * (5 - len(str(time)))
text += str(time)
return text
def move_cloud():
global cloud_speed
global game
if game == 1:
        # move each cloud left by the cloud speed
for cloud in clouds:
cloud.x -= cloud_speed
# if cloud out of screen move it to right side
if cloud.x + 64 < 0:
cloud.x = screen.width + 32
def unblock_jump():
global jump_blocked
jump_blocked = 0
def jump_fall():
global jump
global frame
if jump != 0:
# block animation
frame = 0
player.y += jump
# if player on the ground unblock
if player.y >= 484:
unblock_jump()
jump = 0
# if player jumped start falling
if player.y <= 250:
jump *= (-1)
# player animation
def animation():
global frame
if game == 1:
if frame == 0:
player.image = 'p3_walk01'
if frame == 1:
player.image = 'p3_walk02'
if frame == 2:
player.image = 'p3_walk03'
if frame == 3:
player.image = 'p3_walk04'
if frame == 4:
player.image = 'p3_walk05'
if frame == 5:
player.image = 'p3_walk06'
if frame == 6:
player.image = 'p3_walk07'
if frame == 7:
player.image = 'p3_walk08'
if frame == 8:
player.image = 'p3_walk09'
if frame == 9:
player.image = 'p3_walk10'
if frame == 10:
player.image = 'p3_walk11'
frame += 1
        # wrap frame back into the 0-10 range
frame %= 11
clock.schedule_interval(measure_time, 0.1)
clock.schedule_interval(change_difficulty_level, 20)
pgzrun.go() | 24.791822 | 97 | 0.562453 | [
"MIT"
] | bartlomiej-kedziora/games | dino/main.py | 6,669 | Python |
"""
A pure python (slow) implementation of rijndael with a decent interface
To include -
from rijndael import rijndael
To do a key setup -
r = rijndael(key, block_size = 16)
key must be a string of length 16, 24, or 32
blocksize must be 16, 24, or 32. Default is 16
To use -
ciphertext = r.encrypt(plaintext)
plaintext = r.decrypt(ciphertext)
If any strings are of the wrong length a ValueError is thrown
"""
# ported from the Java reference code by Bram Cohen, April 2001
# this code is public domain, unless someone makes
# an intellectual property claim against the reference
# code, in which case it can be made public domain by
# deleting all the comments and renaming all the variables
import copy
import string
shifts = [[[0, 0], [1, 3], [2, 2], [3, 1]],
[[0, 0], [1, 5], [2, 4], [3, 3]],
[[0, 0], [1, 7], [3, 5], [4, 4]]]
# [keysize][block_size]
num_rounds = {16: {16: 10, 24: 12, 32: 14}, 24: {16: 12, 24: 12, 32: 14}, 32: {16: 14, 24: 14, 32: 14}}
A = [[1, 1, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 1, 1, 0],
[0, 0, 0, 1, 1, 1, 1, 1],
[1, 0, 0, 0, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 1, 1],
[1, 1, 1, 1, 0, 0, 0, 1]]
# produce log and alog tables, needed for multiplying in the
# field GF(2^m) (generator = 3)
alog = [1]
for i in range(255):
j = (alog[-1] << 1) ^ alog[-1]
if j & 0x100 != 0:
j ^= 0x11B
alog.append(j)
log = [0] * 256
for i in range(1, 255):
log[alog[i]] = i
# multiply two elements of GF(2^m)
def mul(a, b):
if a == 0 or b == 0:
return 0
return alog[(log[a & 0xFF] + log[b & 0xFF]) % 255]
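# For example, mul(0x57, 0x83) evaluates to 0xC1 here - the worked GF(2^8)
# multiplication example given in the AES specification (FIPS-197).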
# substitution box based on F^{-1}(x)
box = [[0] * 8 for i in range(256)]
box[1][7] = 1
for i in range(2, 256):
j = alog[255 - log[i]]
for t in range(8):
box[i][t] = (j >> (7 - t)) & 0x01
B = [0, 1, 1, 0, 0, 0, 1, 1]
# affine transform: box[i] <- B + A*box[i]
cox = [[0] * 8 for i in range(256)]
for i in range(256):
for t in range(8):
cox[i][t] = B[t]
for j in range(8):
cox[i][t] ^= A[t][j] * box[i][j]
# S-boxes and inverse S-boxes
S = [0] * 256
Si = [0] * 256
for i in range(256):
S[i] = cox[i][0] << 7
for t in range(1, 8):
S[i] ^= cox[i][t] << (7-t)
Si[S[i] & 0xFF] = i
# T-boxes
G = [[2, 1, 1, 3],
[3, 2, 1, 1],
[1, 3, 2, 1],
[1, 1, 3, 2]]
AA = [[0] * 8 for i in range(4)]
for i in range(4):
for j in range(4):
AA[i][j] = G[i][j]
AA[i][i+4] = 1
for i in range(4):
pivot = AA[i][i]
if pivot == 0:
t = i + 1
while AA[t][i] == 0 and t < 4:
t += 1
assert t != 4, 'G matrix must be invertible'
for j in range(8):
AA[i][j], AA[t][j] = AA[t][j], AA[i][j]
pivot = AA[i][i]
for j in range(8):
if AA[i][j] != 0:
AA[i][j] = alog[(255 + log[AA[i][j] & 0xFF] - log[pivot & 0xFF]) % 255]
for t in range(4):
if i != t:
for j in range(i+1, 8):
AA[t][j] ^= mul(AA[i][j], AA[t][i])
AA[t][i] = 0
iG = [[0] * 4 for i in range(4)]
for i in range(4):
for j in range(4):
iG[i][j] = AA[i][j + 4]
def mul4(a, bs):
if a == 0:
return 0
r = 0
for b in bs:
r <<= 8
if b != 0:
r = r | mul(a, b)
return r
T1 = []
T2 = []
T3 = []
T4 = []
T5 = []
T6 = []
T7 = []
T8 = []
U1 = []
U2 = []
U3 = []
U4 = []
for t in range(256):
s = S[t]
T1.append(mul4(s, G[0]))
T2.append(mul4(s, G[1]))
T3.append(mul4(s, G[2]))
T4.append(mul4(s, G[3]))
s = Si[t]
T5.append(mul4(s, iG[0]))
T6.append(mul4(s, iG[1]))
T7.append(mul4(s, iG[2]))
T8.append(mul4(s, iG[3]))
U1.append(mul4(t, iG[0]))
U2.append(mul4(t, iG[1]))
U3.append(mul4(t, iG[2]))
U4.append(mul4(t, iG[3]))
# round constants
rcon = [1]
r = 1
for t in range(1, 30):
r = mul(2, r)
rcon.append(r)
del A
del AA
del pivot
del B
del G
del box
del log
del alog
del i
del j
del r
del s
del t
del mul
del mul4
del cox
del iG
class rijndael(object):
def __init__(self, key, block_size = 16):
if block_size != 16 and block_size != 24 and block_size != 32:
raise ValueError('Invalid block size: ' + str(block_size))
if len(key) != 16 and len(key) != 24 and len(key) != 32:
raise ValueError('Invalid key size: ' + str(len(key)))
self.block_size = block_size
ROUNDS = num_rounds[len(key)][block_size]
BC = block_size // 4
# encryption round keys
Ke = [[0] * BC for i in range(ROUNDS + 1)]
# decryption round keys
Kd = [[0] * BC for i in range(ROUNDS + 1)]
ROUND_KEY_COUNT = (ROUNDS + 1) * BC
KC = len(key) // 4
# copy user material bytes into temporary ints
tk = []
for i in range(0, KC):
tk.append((ord(key[i * 4]) << 24) | (ord(key[i * 4 + 1]) << 16) |
(ord(key[i * 4 + 2]) << 8) | ord(key[i * 4 + 3]))
# copy values into round key arrays
t = 0
j = 0
while j < KC and t < ROUND_KEY_COUNT:
Ke[t // BC][t % BC] = tk[j]
Kd[ROUNDS - (t // BC)][t % BC] = tk[j]
j += 1
t += 1
tt = 0
rconpointer = 0
while t < ROUND_KEY_COUNT:
# extrapolate using phi (the round key evolution function)
tt = tk[KC - 1]
tk[0] ^= (S[(tt >> 16) & 0xFF] & 0xFF) << 24 ^ \
(S[(tt >> 8) & 0xFF] & 0xFF) << 16 ^ \
(S[ tt & 0xFF] & 0xFF) << 8 ^ \
(S[(tt >> 24) & 0xFF] & 0xFF) ^ \
(rcon[rconpointer] & 0xFF) << 24
rconpointer += 1
if KC != 8:
for i in range(1, KC):
tk[i] ^= tk[i-1]
else:
for i in range(1, KC // 2):
tk[i] ^= tk[i-1]
tt = tk[KC // 2 - 1]
tk[KC // 2] ^= (S[ tt & 0xFF] & 0xFF) ^ \
(S[(tt >> 8) & 0xFF] & 0xFF) << 8 ^ \
(S[(tt >> 16) & 0xFF] & 0xFF) << 16 ^ \
(S[(tt >> 24) & 0xFF] & 0xFF) << 24
for i in range(KC // 2 + 1, KC):
tk[i] ^= tk[i-1]
# copy values into round key arrays
j = 0
while j < KC and t < ROUND_KEY_COUNT:
Ke[t // BC][t % BC] = tk[j]
Kd[ROUNDS - (t // BC)][t % BC] = tk[j]
j += 1
t += 1
# inverse MixColumn where needed
for r in range(1, ROUNDS):
for j in range(BC):
tt = Kd[r][j]
Kd[r][j] = U1[(tt >> 24) & 0xFF] ^ \
U2[(tt >> 16) & 0xFF] ^ \
U3[(tt >> 8) & 0xFF] ^ \
U4[ tt & 0xFF]
self.Ke = Ke
self.Kd = Kd
def encrypt(self, plaintext):
if len(plaintext) != self.block_size:
raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(plaintext)))
Ke = self.Ke
BC = self.block_size // 4
ROUNDS = len(Ke) - 1
if BC == 4:
SC = 0
elif BC == 6:
SC = 1
else:
SC = 2
s1 = shifts[SC][1][0]
s2 = shifts[SC][2][0]
s3 = shifts[SC][3][0]
a = [0] * BC
# temporary work array
t = []
# plaintext to ints + key
for i in range(BC):
t.append((ord(plaintext[i * 4 ]) << 24 |
ord(plaintext[i * 4 + 1]) << 16 |
ord(plaintext[i * 4 + 2]) << 8 |
ord(plaintext[i * 4 + 3]) ) ^ Ke[0][i])
# apply round transforms
for r in range(1, ROUNDS):
for i in range(BC):
a[i] = (T1[(t[ i ] >> 24) & 0xFF] ^
T2[(t[(i + s1) % BC] >> 16) & 0xFF] ^
T3[(t[(i + s2) % BC] >> 8) & 0xFF] ^
T4[ t[(i + s3) % BC] & 0xFF] ) ^ Ke[r][i]
t = copy.copy(a)
# last round is special
result = []
for i in range(BC):
tt = Ke[ROUNDS][i]
result.append((S[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
result.append((S[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
result.append((S[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
result.append((S[ t[(i + s3) % BC] & 0xFF] ^ tt ) & 0xFF)
return ''.join(map(chr, result))
def decrypt(self, ciphertext):
if len(ciphertext) != self.block_size:
raise ValueError('wrong block length, expected ' + str(self.block_size) + ' got ' + str(len(ciphertext)))
Kd = self.Kd
BC = self.block_size // 4
ROUNDS = len(Kd) - 1
if BC == 4:
SC = 0
elif BC == 6:
SC = 1
else:
SC = 2
s1 = shifts[SC][1][1]
s2 = shifts[SC][2][1]
s3 = shifts[SC][3][1]
a = [0] * BC
# temporary work array
t = [0] * BC
# ciphertext to ints + key
for i in range(BC):
t[i] = (ord(ciphertext[i * 4 ]) << 24 |
ord(ciphertext[i * 4 + 1]) << 16 |
ord(ciphertext[i * 4 + 2]) << 8 |
ord(ciphertext[i * 4 + 3]) ) ^ Kd[0][i]
# apply round transforms
for r in range(1, ROUNDS):
for i in range(BC):
a[i] = (T5[(t[ i ] >> 24) & 0xFF] ^
T6[(t[(i + s1) % BC] >> 16) & 0xFF] ^
T7[(t[(i + s2) % BC] >> 8) & 0xFF] ^
T8[ t[(i + s3) % BC] & 0xFF] ) ^ Kd[r][i]
t = copy.copy(a)
# last round is special
result = []
for i in range(BC):
tt = Kd[ROUNDS][i]
result.append((Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
result.append((Si[(t[(i + s1) % BC] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
result.append((Si[(t[(i + s2) % BC] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
result.append((Si[ t[(i + s3) % BC] & 0xFF] ^ tt ) & 0xFF)
return ''.join(map(chr, result))
def encrypt(key, block):
return rijndael(key, len(block)).encrypt(block)
def decrypt(key, block):
return rijndael(key, len(block)).decrypt(block)
def t(kl, bl):
b = 'b' * bl
r = rijndael('a' * kl, bl)
assert r.decrypt(r.encrypt(b)) == b
def multiple_calls(N):
    for _ in range(N):
t(16, 24)
t(16, 32)
t(24, 16)
t(24, 24)
t(24, 32)
t(32, 16)
t(32, 24)
t(32, 32)
if __name__ == '__main__':
n_repeats = 50
multiple_calls(n_repeats)
| 28.871728 | 117 | 0.430683 | [
"Apache-2.0"
] | codeclimate-testing/falcon | benchmarks/crypto.py | 11,029 | Python |
# (c) 2019, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit tests for Ansible module: na_ontap_rest_cli'''
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import pytest
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
from ansible_collections.netapp.ontap.tests.unit.compat import unittest
from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
from ansible_collections.netapp.ontap.plugins.modules.na_ontap_rest_cli \
import NetAppONTAPCommandREST as rest_cli_module, main # module under test
if not netapp_utils.has_netapp_lib():
pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
# REST API canned responses when mocking send_request
SRR = {
# common responses
'is_rest': (200, {}, None),
'empty_good': (200, {}, None),
'end_of_sequence': (500, None, "Ooops, the UT needs one more SRR response"),
'generic_error': (400, None, "Expected error"),
# module specific response
'allow': (200, {'Allow': ['GET', 'WHATEVER']}, None)
}
def set_module_args(args):
"""prepare arguments so that they will be picked up during module creation"""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
def exit_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs): # pylint: disable=unused-argument
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
class TestMyModule(unittest.TestCase):
    ''' Unit tests for na_ontap_rest_cli '''
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
def mock_args(self):
return {
'hostname': 'test',
'username': 'test_user',
'password': 'test_pass!',
'https': False,
'command': 'volume',
'verb': 'GET',
'params': {'fields': 'size,percent_used'}
}
def get_cli_mock_object(self):
# For rest, mocking is achieved through side_effect
return rest_cli_module()
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
rest_cli_module()
print('Info: %s' % exc.value.args[0]['msg'])
@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
def test_rest_cli(self, mock_request):
data = dict(self.mock_args())
set_module_args(data)
mock_request.side_effect = [
SRR['is_rest'],
SRR['empty_good'],
SRR['end_of_sequence']
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_cli_mock_object().apply()
assert exc.value.args[0]['changed']
@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
def test_rest_cli_options(self, mock_request):
data = dict(self.mock_args())
data['verb'] = 'OPTIONS'
set_module_args(data)
mock_request.side_effect = [
SRR['is_rest'],
SRR['allow'],
SRR['end_of_sequence']
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_cli_mock_object().apply()
assert exc.value.args[0]['changed']
assert 'Allow' in exc.value.args[0]['msg']
@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
def test_negative_connection_error(self, mock_request):
data = dict(self.mock_args())
data['verb'] = 'OPTIONS'
set_module_args(data)
mock_request.side_effect = [
SRR['generic_error'],
SRR['end_of_sequence']
]
with pytest.raises(AnsibleFailJson) as exc:
self.get_cli_mock_object().apply()
msg = "failed to connect to REST over test: ['Expected error']. Use na_ontap_command for non-rest CLI."
assert msg in exc.value.args[0]['msg']
@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
def check_verb(self, verb, mock_request):
data = dict(self.mock_args())
data['verb'] = verb
set_module_args(data)
mock_request.side_effect = [
SRR['is_rest'],
SRR['allow'],
SRR['end_of_sequence']
]
with pytest.raises(AnsibleExitJson) as exc:
self.get_cli_mock_object().apply()
assert exc.value.args[0]['changed']
assert 'Allow' in exc.value.args[0]['msg']
assert mock_request.call_args[0][0] == verb
def test_verbs(self):
for verb in ['POST', 'DELETE', 'PATCH', 'OPTIONS', 'PATCH']:
self.check_verb(verb)
@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
def test_negative_verb(self, mock_request):
data = dict(self.mock_args())
data['verb'] = 'GET'
set_module_args(data)
mock_request.side_effect = [
SRR['is_rest'],
SRR['end_of_sequence']
]
uut = self.get_cli_mock_object()
with pytest.raises(AnsibleFailJson) as exc:
uut.verb = 'INVALID'
uut.run_command()
msg = 'Error: unexpected verb INVALID'
assert msg in exc.value.args[0]['msg']
@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
def test_negative_error(self, mock_request):
data = dict(self.mock_args())
data['verb'] = 'GET'
set_module_args(data)
mock_request.side_effect = [
SRR['is_rest'],
SRR['generic_error'],
SRR['end_of_sequence']
]
with pytest.raises(AnsibleFailJson) as exc:
main()
msg = 'Error: Expected error'
assert msg in exc.value.args[0]['msg']
| 37.262032 | 112 | 0.644087 | [
"MIT"
] | elixir-no-nels/usegalaxy | venv/lib/python3.6/site-packages/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_cli.py | 6,968 | Python |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Some common SessionRunHook classes.
@@
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import numpy as np
import six
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.learn.python.learn import session_run_hook
from tensorflow.contrib.learn.python.learn.session_run_hook import SessionRunArgs
from tensorflow.contrib.learn.python.learn.summary_writer_cache import SummaryWriterCache
from tensorflow.core.framework.summary_pb2 import Summary
from tensorflow.core.util.event_pb2 import SessionLog
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import training_util
class LoggingTensorHook(session_run_hook.SessionRunHook):
"""Prints given tensors every N iteration.
The tensors will be printed to the log, with `INFO` severity.
"""
def __init__(self, tensors, every_n_iter=100):
"""Initializes a LoggingHook monitor.
Args:
tensors: `dict` of tag to tensors/names or
`iterable` of tensors/names.
every_n_iter: `int`, print every N iteration.
Raises:
ValueError: if `every_n_iter` is non-positive.
"""
if every_n_iter <= 0:
raise ValueError("Invalid every_n_iter=%s." % every_n_iter)
if not isinstance(tensors, dict):
tensors = {item: item for item in tensors}
self._tensors = tensors
self._every_n_iter = every_n_iter
def begin(self):
self._iter_count = 0
# Convert names to tensors if given
self._current_tensors = {tag: _as_graph_element(tensor)
for (tag, tensor) in self._tensors.items()}
def before_run(self, run_context): # pylint: disable=unused-argument
if self._iter_count % self._every_n_iter == 0:
return SessionRunArgs(self._current_tensors)
else:
return None
def after_run(self, run_context, run_values):
_ = run_context
if self._iter_count % self._every_n_iter == 0:
stats = []
for tag in sorted(self._current_tensors.keys()):
stats.append("%s = %s" % (tag, run_values.results[tag]))
logging.info("%s", ", ".join(stats))
self._iter_count += 1
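# Example (a minimal sketch; `loss_op` stands in for any tensor already in the
# graph): log the tensor's value every 50 steps by passing the hook to a
# monitored training session through its `hooks` argument.
#
#   logging_hook = LoggingTensorHook({'loss': loss_op}, every_n_iter=50)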
class StopAtStepHook(session_run_hook.SessionRunHook):
"""Monitor to request stop at a specified step."""
def __init__(self, num_steps=None, last_step=None):
"""Create a StopAtStep Hook.
This hook requests stop after either a number of steps have been
executed or a last step has been reached. Only of the two options can be
specified.
if `num_steps` is specified, it indicates the number of steps to execute
after `begin()` is called. If instead `last_step` is specified, it
indicates the last step we want to execute, as passed to the `after_run()`
call.
Args:
num_steps: Number of steps to execute.
last_step: Step after which to stop.
Raises:
ValueError: If one of the arguments is invalid.
"""
if num_steps is None and last_step is None:
raise ValueError("One of num_steps or last_step must be specified.")
if num_steps is not None and last_step is not None:
raise ValueError("Only one of num_steps or last_step can be specified.")
self._num_steps = num_steps
self._last_step = last_step
def begin(self):
self._global_step_tensor = contrib_variables.get_global_step()
if self._global_step_tensor is None:
raise RuntimeError("Global step should be created to use StopAtStepHook.")
def before_run(self, run_context): # pylint: disable=unused-argument
return SessionRunArgs(self._global_step_tensor)
def after_run(self, run_context, run_values):
global_step = run_values.results
if self._last_step is None:
self._last_step = global_step + self._num_steps - 1
if global_step >= self._last_step:
run_context.request_stop()
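# Example (a minimal sketch): either form below configures the hook; only one of
# the two arguments may be given, as enforced in __init__ above.
#
#   StopAtStepHook(num_steps=1000)     # stop 1000 steps after begin()
#   StopAtStepHook(last_step=20000)    # stop once the global step reaches 20000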
class CheckpointSaverHook(session_run_hook.SessionRunHook):
"""Saves checkpoints every N steps or seconds."""
def __init__(self,
checkpoint_dir,
save_secs=None,
save_steps=None,
saver=None,
checkpoint_basename="model.ckpt",
scaffold=None):
"""Initialize CheckpointSaverHook monitor.
Args:
checkpoint_dir: `str`, base directory for the checkpoint files.
save_secs: `int`, save every N secs.
save_steps: `int`, save every N steps.
saver: `Saver` object, used for saving.
checkpoint_basename: `str`, base name for the checkpoint files.
scaffold: `Scaffold`, use to get saver object.
Raises:
ValueError: One of `save_steps` or `save_secs` should be set.
"""
logging.info("Create CheckpointSaverHook.")
self._saver = saver
self._checkpoint_dir = checkpoint_dir
self._summary_writer = SummaryWriterCache.get(checkpoint_dir)
self._save_path = os.path.join(checkpoint_dir, checkpoint_basename)
self._scaffold = scaffold
self._save_secs = save_secs
self._save_steps = save_steps
self._last_saved_time = None
self._last_saved_step = None
if save_steps is None and save_secs is None:
raise ValueError("Either save_steps or save_secs should be provided")
if (save_steps is not None) and (save_secs is not None):
raise ValueError("Can not provide both save_steps and save_secs.")
def begin(self):
self._last_saved_time = None
self._last_saved_step = None
self._global_step_tensor = contrib_variables.get_global_step()
if self._global_step_tensor is None:
raise RuntimeError(
"Global step should be created to use CheckpointSaverHook.")
def before_run(self, run_context): # pylint: disable=unused-argument
if self._last_saved_time is None:
# Write graph in the first call.
training_util.write_graph(
ops.get_default_graph().as_graph_def(add_shapes=True),
self._checkpoint_dir,
"graph.pbtxt")
self._summary_writer.add_graph(ops.get_default_graph())
return SessionRunArgs(self._global_step_tensor)
def after_run(self, run_context, run_values):
global_step = run_values.results
if self._last_saved_time is None:
self._save(global_step, run_context.session)
if self._save_steps is not None:
if global_step >= self._last_saved_step + self._save_steps:
self._save(global_step, run_context.session)
if self._save_secs is not None:
if time.time() >= self._last_saved_time + self._save_secs:
self._save(global_step, run_context.session)
def end(self, session):
last_step = session.run(contrib_variables.get_global_step())
self._save(last_step, session)
def _save(self, step, session):
"""Saves the latest checkpoint."""
if step == self._last_saved_step:
return
logging.info("Saving checkpoints for %d into %s.", step, self._save_path)
self._last_saved_time = time.time()
self._last_saved_step = step
if self._saver is None:
self._scaffold.saver.save(session, self._save_path, global_step=step)
else:
self._saver.save(session, self._save_path, global_step=step)
self._summary_writer.add_session_log(
SessionLog(
status=SessionLog.CHECKPOINT, checkpoint_path=self._save_path),
step)
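# Example (a minimal sketch; the directory path and `scaffold` variable are
# illustrative assumptions): save a checkpoint every 600 seconds.
#
#   CheckpointSaverHook(checkpoint_dir='/tmp/train', save_secs=600, scaffold=scaffold)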
class StepCounterHook(session_run_hook.SessionRunHook):
"""Steps per second monitor."""
def __init__(self, every_n_steps=100, output_dir=None, summary_writer=None):
self._summary_tag = "global_step/sec"
self._every_n_steps = every_n_steps
self._summary_writer = summary_writer
if summary_writer is None and output_dir:
self._summary_writer = SummaryWriterCache.get(output_dir)
def begin(self):
self._last_reported_time = None
self._last_reported_step = None
self._global_step_tensor = contrib_variables.get_global_step()
if self._global_step_tensor is None:
raise RuntimeError(
"Global step should be created to use StepCounterHook.")
def before_run(self, run_context): # pylint: disable=unused-argument
return SessionRunArgs(self._global_step_tensor)
def after_run(self, run_context, run_values):
_ = run_context
if not self._summary_writer:
return
global_step = run_values.results
current_time = time.time()
if self._last_reported_time is None:
self._last_reported_step = global_step
self._last_reported_time = current_time
else:
if global_step >= self._every_n_steps + self._last_reported_step:
added_steps = global_step - self._last_reported_step
elapsed_time = current_time - self._last_reported_time
steps_per_sec = added_steps / elapsed_time
summary = Summary(value=[Summary.Value(
tag=self._summary_tag, simple_value=steps_per_sec)])
self._summary_writer.add_summary(summary, global_step)
self._last_reported_step = global_step
self._last_reported_time = current_time
class NanLossDuringTrainingError(RuntimeError):
def __str__(self):
return "NaN loss during training."
class NanTensorHook(session_run_hook.SessionRunHook):
"""NaN Loss monitor.
Monitors loss and stops training if loss is NaN.
  Can either fail with an exception or just stop training.
"""
def __init__(self, loss_tensor, fail_on_nan_loss=True):
"""Initializes NanLoss monitor.
Args:
loss_tensor: `Tensor`, the loss tensor.
fail_on_nan_loss: `bool`, whether to raise exception when loss is NaN.
"""
self._loss_tensor = loss_tensor
self._fail_on_nan_loss = fail_on_nan_loss
def before_run(self, run_context): # pylint: disable=unused-argument
return SessionRunArgs(self._loss_tensor)
def after_run(self, run_context, run_values):
if np.isnan(run_values.results):
failure_message = "Model diverged with loss = NaN."
if self._fail_on_nan_loss:
logging.error(failure_message)
raise NanLossDuringTrainingError
else:
logging.warning(failure_message)
# We don't raise an error but we request stop without an exception.
run_context.request_stop()
class SummarySaverHook(session_run_hook.SessionRunHook):
"""Saves summaries every N steps."""
def __init__(self,
save_steps=100,
output_dir=None,
summary_writer=None,
scaffold=None,
summary_op=None):
"""Initializes a `SummarySaver` monitor.
Args:
save_steps: `int`, save summaries every N steps. See `EveryN`.
output_dir: `string`, the directory to save the summaries to. Only used
if no `summary_writer` is supplied.
summary_writer: `SummaryWriter`. If `None` and an `output_dir` was passed,
one will be created accordingly.
scaffold: `Scaffold` to get summary_op if it's not provided.
summary_op: `Tensor` of type `string`. A serialized `Summary` protocol
buffer, as output by TF summary methods like `scalar_summary` or
`merge_all_summaries`.
"""
# TODO(ipolosukhin): Implement every N seconds.
self._summary_op = summary_op
self._summary_writer = summary_writer
if summary_writer is None and output_dir:
self._summary_writer = SummaryWriterCache.get(output_dir)
self._scaffold = scaffold
self._save_steps = save_steps
# TODO(mdan): Throw an error if output_dir and summary_writer are None.
def begin(self):
self._last_saved_step = None
self._request_summary = True
self._global_step_tensor = contrib_variables.get_global_step()
if self._global_step_tensor is None:
raise RuntimeError(
"Global step should be created to use SummarySaverHook.")
def before_run(self, run_context): # pylint: disable=unused-argument
requests = {"global_step": self._global_step_tensor}
if self._request_summary:
if self._summary_op is not None:
requests["summary"] = self._summary_op
elif self._scaffold.summary_op is not None:
requests["summary"] = self._scaffold.summary_op
return SessionRunArgs(requests)
def after_run(self, run_context, run_values):
_ = run_context
if not self._summary_writer:
return
global_step = run_values.results["global_step"]
if self._last_saved_step is None:
self._summary_writer.add_session_log(
SessionLog(status=SessionLog.START), global_step)
if self._request_summary:
self._last_saved_step = global_step
if "summary" in run_values.results:
self._summary_writer.add_summary(run_values.results["summary"],
global_step)
self._request_summary = (
global_step >= self._last_saved_step + self._save_steps - 1)
def end(self, session=None):
if self._summary_writer:
self._summary_writer.flush()
def _as_graph_element(obj):
"""Retrieves Graph element."""
graph = ops.get_default_graph()
if not isinstance(obj, six.string_types):
if not hasattr(obj, "graph") or obj.graph != graph:
raise ValueError("Passed %s should have graph attribute that is equal "
"to current graph %s." % (obj, graph))
return obj
if ":" in obj:
element = graph.as_graph_element(obj)
else:
element = graph.as_graph_element(obj + ":0")
# Check that there is no :1 (e.g. it's single output).
try:
graph.as_graph_element(obj + ":1")
except (KeyError, ValueError):
pass
else:
raise ValueError("Name %s is ambiguous, "
"as this `Operation` has multiple outputs "
"(at least 2)." % obj)
return element
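# Illustrative usage sketch (not part of this module): these hooks are designed
# to be driven by a monitored session/training loop via before_run/after_run.
# The directory, loss tensor and summary op below are assumptions for the
# example only.
#
#   hooks = [
#       CheckpointSaverHook(checkpoint_dir="/tmp/model", save_secs=600),
#       StepCounterHook(every_n_steps=100, output_dir="/tmp/model"),
#       NanTensorHook(loss_tensor, fail_on_nan_loss=True),
#       SummarySaverHook(save_steps=100, output_dir="/tmp/model",
#                        summary_op=summary_op),
#   ]
#
# Each hook requests the global step through SessionRunArgs in before_run and
# reacts to its value in after_run, so a global step tensor must exist in the
# graph before the session starts.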
| 35.992519 | 89 | 0.701933 | [
"Apache-2.0"
] | Najah-lshanableh/tensorflow | tensorflow/contrib/learn/python/learn/basic_session_run_hooks.py | 14,433 | Python |
import unittest
from my_lambdata.assignment1 import WrangledFrame
class TestWrangledFrame(unittest.TestCase):
def test_add_state_names(self):
wf = WrangledFrame({"abbrev": ["CA", "CO", "CT", "DC", "TX"]})
breakpoint()
wf.add_state_names()
# ensure there is a "name" column
self.assertEqual(list(wf.columns), ['abbrev', 'name'])
# ensure the values of WF are specific classes/values
# (string, "California")
self.assertEqual(wf["name"][0], "California")
self.assertEqual(wf["abbrev"][0], "CA")
if __name__ == '__main__':
unittest.main()
| 27.086957 | 70 | 0.627608 | [
"MIT"
] | zack-murray/lambdata-zmurray | tests/wrangled_test.py | 623 | Python |
# -*- coding:utf-8 -*-
"""
博客系统。
"""
import pymysql
pymysql.install_as_MySQLdb()
| 12.142857 | 28 | 0.611765 | [
"Apache-2.0"
] | nebula-2020/202103 | end/nebulablogs/__init__.py | 95 | Python |
# -*- coding: utf-8 -*-
import pytest
import tempfile
from jsonschema import ValidationError
from rasa.nlu import training_data
from rasa.nlu.convert import convert_training_data
from rasa.nlu.extractors.mitie_entity_extractor import MitieEntityExtractor
from rasa.nlu.tokenizers.whitespace_tokenizer import WhitespaceTokenizer
from rasa.nlu.training_data import TrainingData
from rasa.nlu.training_data.formats import MarkdownReader
from rasa.nlu.training_data.formats.rasa import validate_rasa_nlu_data
from rasa.nlu.training_data.loading import guess_format, UNK, load_data
from rasa.nlu.training_data.util import get_file_format
import rasa.utils.io as io_utils
def test_example_training_data_is_valid():
demo_json = "data/examples/rasa/demo-rasa.json"
data = io_utils.read_json_file(demo_json)
validate_rasa_nlu_data(data)
@pytest.mark.parametrize(
"invalid_data",
[
{"wrong_top_level": []},
["this is not a toplevel dict"],
{
"rasa_nlu_data": {
"common_examples": [{"intent": "some example without text"}]
}
},
{
"rasa_nlu_data": {
"common_examples": [
{
"text": "mytext",
"entities": [{"start": "INVALID", "end": 0, "entity": "x"}],
}
]
}
},
],
)
def test_validation_is_throwing_exceptions(invalid_data):
with pytest.raises(ValidationError):
validate_rasa_nlu_data(invalid_data)
def test_luis_data():
td = training_data.load_data("data/examples/luis/demo-restaurants.json")
assert not td.is_empty()
assert len(td.entity_examples) == 8
assert len(td.intent_examples) == 28
assert len(td.training_examples) == 28
assert td.entity_synonyms == {}
assert td.intents == {"affirm", "goodbye", "greet", "inform"}
assert td.entities == {"location", "cuisine"}
def test_wit_data():
td = training_data.load_data("data/examples/wit/demo-flights.json")
assert not td.is_empty()
assert len(td.entity_examples) == 4
assert len(td.intent_examples) == 1
assert len(td.training_examples) == 4
assert td.entity_synonyms == {}
assert td.intents == {"flight_booking"}
assert td.entities == {"location", "datetime"}
def test_dialogflow_data():
td = training_data.load_data("data/examples/dialogflow/")
assert not td.is_empty()
assert len(td.entity_examples) == 5
assert len(td.intent_examples) == 24
assert len(td.training_examples) == 24
assert len(td.lookup_tables) == 2
assert td.intents == {"affirm", "goodbye", "hi", "inform"}
assert td.entities == {"cuisine", "location"}
non_trivial_synonyms = {k: v for k, v in td.entity_synonyms.items() if k != v}
assert non_trivial_synonyms == {
"mexico": "mexican",
"china": "chinese",
"india": "indian",
}
    # The order varies between machines, hence the set-based comparisons below.
assert {td.lookup_tables[0]["name"], td.lookup_tables[1]["name"]} == {
"location",
"cuisine",
}
assert {
len(td.lookup_tables[0]["elements"]),
len(td.lookup_tables[1]["elements"]),
} == {4, 6}
def test_lookup_table_json():
lookup_fname = "data/test/lookup_tables/plates.txt"
td_lookup = training_data.load_data("data/test/lookup_tables/lookup_table.json")
assert not td_lookup.is_empty()
assert td_lookup.lookup_tables[0]["name"] == "plates"
assert td_lookup.lookup_tables[0]["elements"] == lookup_fname
assert td_lookup.lookup_tables[1]["name"] == "drinks"
assert td_lookup.lookup_tables[1]["elements"] == [
"mojito",
"lemonade",
"sweet berry wine",
"tea",
"club mate",
]
def test_lookup_table_md():
lookup_fname = "data/test/lookup_tables/plates.txt"
td_lookup = training_data.load_data("data/test/lookup_tables/lookup_table.md")
assert not td_lookup.is_empty()
assert td_lookup.lookup_tables[0]["name"] == "plates"
assert td_lookup.lookup_tables[0]["elements"] == lookup_fname
assert td_lookup.lookup_tables[1]["name"] == "drinks"
assert td_lookup.lookup_tables[1]["elements"] == [
"mojito",
"lemonade",
"sweet berry wine",
"tea",
"club mate",
]
@pytest.mark.parametrize(
"files",
[
[
"data/examples/rasa/demo-rasa.json",
"data/examples/rasa/demo-rasa-responses.md",
],
[
"data/examples/rasa/demo-rasa.md",
"data/examples/rasa/demo-rasa-responses.md",
],
],
)
def test_demo_data(files):
from rasa.importers.utils import training_data_from_paths
td = training_data_from_paths(files, language="en")
assert td.intents == {"affirm", "greet", "restaurant_search", "goodbye", "chitchat"}
assert td.entities == {"location", "cuisine"}
assert td.responses == {"I am Mr. Bot", "It's sunny where I live"}
assert len(td.training_examples) == 46
assert len(td.intent_examples) == 46
assert len(td.response_examples) == 4
assert len(td.entity_examples) == 11
assert len(td.nlg_stories) == 2
assert td.entity_synonyms == {
"Chines": "chinese",
"Chinese": "chinese",
"chines": "chinese",
"vegg": "vegetarian",
"veggie": "vegetarian",
}
assert td.regex_features == [
{"name": "greet", "pattern": r"hey[^\s]*"},
{"name": "zipcode", "pattern": r"[0-9]{5}"},
]
@pytest.mark.parametrize(
"filepaths",
[["data/examples/rasa/demo-rasa.md", "data/examples/rasa/demo-rasa-responses.md"]],
)
def test_train_test_split(filepaths):
from rasa.importers.utils import training_data_from_paths
td = training_data_from_paths(filepaths, language="en")
assert td.intents == {"affirm", "greet", "restaurant_search", "goodbye", "chitchat"}
assert td.entities == {"location", "cuisine"}
assert len(td.training_examples) == 46
assert len(td.intent_examples) == 46
td_train, td_test = td.train_test_split(train_frac=0.8)
assert len(td_train.training_examples) == 35
assert len(td_test.training_examples) == 11
@pytest.mark.parametrize(
"files",
[
("data/examples/rasa/demo-rasa.json", "data/test/multiple_files_json"),
("data/examples/rasa/demo-rasa.md", "data/test/multiple_files_markdown"),
],
)
def test_data_merging(files):
td_reference = training_data.load_data(files[0])
td = training_data.load_data(files[1])
assert len(td.entity_examples) == len(td_reference.entity_examples)
assert len(td.intent_examples) == len(td_reference.intent_examples)
assert len(td.training_examples) == len(td_reference.training_examples)
assert td.intents == td_reference.intents
assert td.entities == td_reference.entities
assert td.entity_synonyms == td_reference.entity_synonyms
assert td.regex_features == td_reference.regex_features
def test_markdown_single_sections():
td_regex_only = training_data.load_data(
"data/test/markdown_single_sections/regex_only.md"
)
assert td_regex_only.regex_features == [{"name": "greet", "pattern": r"hey[^\s]*"}]
td_syn_only = training_data.load_data(
"data/test/markdown_single_sections/synonyms_only.md"
)
assert td_syn_only.entity_synonyms == {"Chines": "chinese", "Chinese": "chinese"}
def test_repeated_entities():
data = """
{
"rasa_nlu_data": {
"common_examples" : [
{
"text": "book a table today from 3 to 6 for 3 people",
"intent": "unk",
"entities": [
{
"entity": "description",
"start": 35,
"end": 36,
"value": "3"
}
]
}
]
}
}"""
with tempfile.NamedTemporaryFile(suffix="_tmp_training_data.json") as f:
f.write(data.encode("utf-8"))
f.flush()
td = training_data.load_data(f.name)
assert len(td.entity_examples) == 1
example = td.entity_examples[0]
entities = example.get("entities")
assert len(entities) == 1
tokens = WhitespaceTokenizer().tokenize(example.text)
start, end = MitieEntityExtractor.find_entity(entities[0], example.text, tokens)
assert start == 9
assert end == 10
def test_multiword_entities():
data = """
{
"rasa_nlu_data": {
"common_examples" : [
{
"text": "show me flights to New York City",
"intent": "unk",
"entities": [
{
"entity": "destination",
"start": 19,
"end": 32,
"value": "New York City"
}
]
}
]
}
}"""
with tempfile.NamedTemporaryFile(suffix="_tmp_training_data.json") as f:
f.write(data.encode("utf-8"))
f.flush()
td = training_data.load_data(f.name)
assert len(td.entity_examples) == 1
example = td.entity_examples[0]
entities = example.get("entities")
assert len(entities) == 1
tokens = WhitespaceTokenizer().tokenize(example.text)
start, end = MitieEntityExtractor.find_entity(entities[0], example.text, tokens)
assert start == 4
assert end == 7
def test_nonascii_entities():
data = """
{
"luis_schema_version": "2.0",
"utterances" : [
{
"text": "I am looking for a ßäæ ?€ö) item",
"intent": "unk",
"entities": [
{
"entity": "description",
"startPos": 19,
"endPos": 26
}
]
}
]
}"""
with tempfile.NamedTemporaryFile(suffix="_tmp_training_data.json") as f:
f.write(data.encode("utf-8"))
f.flush()
td = training_data.load_data(f.name)
assert len(td.entity_examples) == 1
example = td.entity_examples[0]
entities = example.get("entities")
assert len(entities) == 1
entity = entities[0]
assert entity["value"] == "ßäæ ?€ö)"
assert entity["start"] == 19
assert entity["end"] == 27
assert entity["entity"] == "description"
def test_entities_synonyms():
data = """
{
"rasa_nlu_data": {
"entity_synonyms": [
{
"value": "nyc",
"synonyms": ["New York City", "nyc", "the big apple"]
}
],
"common_examples" : [
{
"text": "show me flights to New York City",
"intent": "unk",
"entities": [
{
"entity": "destination",
"start": 19,
"end": 32,
"value": "NYC"
}
]
},
{
"text": "show me flights to nyc",
"intent": "unk",
"entities": [
{
"entity": "destination",
"start": 19,
"end": 22,
"value": "nyc"
}
]
}
]
}
}"""
with tempfile.NamedTemporaryFile(suffix="_tmp_training_data.json") as f:
f.write(data.encode("utf-8"))
f.flush()
td = training_data.load_data(f.name)
assert td.entity_synonyms["New York City"] == "nyc"
def cmp_message_list(firsts, seconds):
assert len(firsts) == len(seconds), "Message lists have unequal length"
def cmp_dict_list(firsts, seconds):
if len(firsts) != len(seconds):
return False
for a in firsts:
for idx, b in enumerate(seconds):
if hash(a) == hash(b):
del seconds[idx]
break
else:
others = ", ".join([e.text for e in seconds])
assert False, "Failed to find message {} in {}".format(a.text, others)
return not seconds
@pytest.mark.parametrize(
"data_file,gold_standard_file,output_format,language",
[
(
"data/examples/wit/demo-flights.json",
"data/test/wit_converted_to_rasa.json",
"json",
None,
),
(
"data/examples/luis/demo-restaurants.json",
"data/test/luis_converted_to_rasa.json",
"json",
None,
),
(
"data/examples/dialogflow/",
"data/test/dialogflow_en_converted_to_rasa.json",
"json",
"en",
),
(
"data/examples/dialogflow/",
"data/test/dialogflow_es_converted_to_rasa.json",
"json",
"es",
),
(
"data/examples/rasa/demo-rasa.md",
"data/test/md_converted_to_json.json",
"json",
None,
),
(
"data/examples/rasa/demo-rasa.json",
"data/test/json_converted_to_md.md",
"md",
None,
),
(
"data/test/training_data_containing_special_chars.json",
"data/test/json_with_special_chars_convered_to_md.md",
"md",
None,
),
],
)
def test_training_data_conversion(
tmpdir, data_file, gold_standard_file, output_format, language
):
out_path = tmpdir.join("rasa_nlu_data.json")
convert_training_data(data_file, out_path.strpath, output_format, language)
td = training_data.load_data(out_path.strpath, language)
assert td.entity_examples != []
assert td.intent_examples != []
gold_standard = training_data.load_data(gold_standard_file, language)
cmp_message_list(td.entity_examples, gold_standard.entity_examples)
cmp_message_list(td.intent_examples, gold_standard.intent_examples)
assert td.entity_synonyms == gold_standard.entity_synonyms
# converting the converted file back to original
# file format and performing the same tests
rto_path = tmpdir.join("data_in_original_format.txt")
convert_training_data(out_path.strpath, rto_path.strpath, "json", language)
rto = training_data.load_data(rto_path.strpath, language)
cmp_message_list(gold_standard.entity_examples, rto.entity_examples)
cmp_message_list(gold_standard.intent_examples, rto.intent_examples)
assert gold_standard.entity_synonyms == rto.entity_synonyms
# If the above assert fails - this can be used
# to dump to the file and diff using git
# with io.open(gold_standard_file) as f:
# f.write(td.as_json(indent=2))
def test_url_data_format():
data = """
{
"rasa_nlu_data": {
"entity_synonyms": [
{
"value": "nyc",
"synonyms": ["New York City", "nyc", "the big apple"]
}
],
"common_examples" : [
{
"text": "show me flights to New York City",
"intent": "unk",
"entities": [
{
"entity": "destination",
"start": 19,
"end": 32,
"value": "NYC"
}
]
}
]
}
}"""
fname = io_utils.create_temporary_file(
data.encode("utf-8"), suffix="_tmp_training_data.json", mode="w+b"
)
data = io_utils.read_json_file(fname)
assert data is not None
validate_rasa_nlu_data(data)
def test_markdown_entity_regex():
r = MarkdownReader()
md = """
## intent:restaurant_search
- i'm looking for a place to eat
- i'm looking for a place in the [north](loc-direction) of town
- show me [chines](cuisine:chinese) restaurants
- show me [chines](22_ab-34*3.A:43er*+?df) restaurants
"""
result = r.reads(md)
assert len(result.training_examples) == 4
first = result.training_examples[0]
assert first.data == {"intent": "restaurant_search"}
assert first.text == "i'm looking for a place to eat"
second = result.training_examples[1]
assert second.data == {
"intent": "restaurant_search",
"entities": [
{"start": 31, "end": 36, "value": "north", "entity": "loc-direction"}
],
}
assert second.text == "i'm looking for a place in the north of town"
third = result.training_examples[2]
assert third.data == {
"intent": "restaurant_search",
"entities": [{"start": 8, "end": 14, "value": "chinese", "entity": "cuisine"}],
}
assert third.text == "show me chines restaurants"
fourth = result.training_examples[3]
assert fourth.data == {
"intent": "restaurant_search",
"entities": [
{"start": 8, "end": 14, "value": "43er*+?df", "entity": "22_ab-34*3.A"}
],
}
assert fourth.text == "show me chines restaurants"
def test_get_file_format():
fformat = get_file_format("data/examples/luis/demo-restaurants.json")
assert fformat == "json"
fformat = get_file_format("data/examples")
assert fformat == "json"
fformat = get_file_format("examples/restaurantbot/data/nlu.md")
assert fformat == "md"
with pytest.raises(AttributeError):
get_file_format("path-does-not-exists")
with pytest.raises(AttributeError):
get_file_format(None)
def test_guess_format_from_non_existing_file_path():
assert guess_format("not existing path") == UNK
def test_load_data_from_non_existing_file():
with pytest.raises(ValueError):
load_data("some path")
def test_is_empty():
assert TrainingData().is_empty()
def test_markdown_empty_section():
data = training_data.load_data(
"data/test/markdown_single_sections/empty_section.md"
)
assert data.regex_features == [{"name": "greet", "pattern": r"hey[^\s]*"}]
assert not data.entity_synonyms
assert len(data.lookup_tables) == 1
assert data.lookup_tables[0]["name"] == "chinese"
assert "Chinese" in data.lookup_tables[0]["elements"]
assert "Chines" in data.lookup_tables[0]["elements"]
def test_markdown_not_existing_section():
with pytest.raises(ValueError):
training_data.load_data(
"data/test/markdown_single_sections/not_existing_section.md"
)
| 30.249578 | 88 | 0.606088 | [
"Apache-2.0"
] | AppliedSoul/rasa | tests/nlu/base/test_training_data.py | 17,950 | Python |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class SasDefinitionCreateParameters(Model):
"""The SAS definition create parameters.
All required parameters must be populated in order to send to Azure.
:param template_uri: Required. The SAS definition token template signed
with an arbitrary key. Tokens created according to the SAS definition
will have the same properties as the template.
:type template_uri: str
:param sas_type: Required. The type of SAS token the SAS definition will
create. Possible values include: 'account', 'service'
:type sas_type: str or ~storage.models.SasTokenType
:param validity_period: Required. The validity period of SAS tokens
created according to the SAS definition.
:type validity_period: str
:param sas_definition_attributes: The attributes of the SAS definition.
:type sas_definition_attributes: ~storage.models.SasDefinitionAttributes
:param tags: Application specific metadata in the form of key-value pairs.
:type tags: dict[str, str]
"""
_validation = {
'template_uri': {'required': True},
'sas_type': {'required': True},
'validity_period': {'required': True},
}
_attribute_map = {
'template_uri': {'key': 'templateUri', 'type': 'str'},
'sas_type': {'key': 'sasType', 'type': 'str'},
'validity_period': {'key': 'validityPeriod', 'type': 'str'},
'sas_definition_attributes': {'key': 'attributes', 'type': 'SasDefinitionAttributes'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(self, **kwargs):
super(SasDefinitionCreateParameters, self).__init__(**kwargs)
self.template_uri = kwargs.get('template_uri', None)
self.sas_type = kwargs.get('sas_type', None)
self.validity_period = kwargs.get('validity_period', None)
self.sas_definition_attributes = kwargs.get('sas_definition_attributes', None)
self.tags = kwargs.get('tags', None)
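# Illustrative construction (a sketch only; the template URI, validity period
# format and tag values below are assumptions, not values defined by this
# module):
#
#   parameters = SasDefinitionCreateParameters(
#       template_uri="<signed SAS token template>",
#       sas_type="account",
#       validity_period="PT2H",
#       tags={"team": "storage"})
#
# The three required fields mirror the _validation map above; optional
# attributes can be supplied via sas_definition_attributes.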
| 43.175439 | 94 | 0.650549 | [
"MIT"
] | mccoyp/azure-keyvault-7.3-preview | sdk/keyvault/azure-keyvault/azure/keyvault/v7_3_preview/models/sas_definition_create_parameters.py | 2,461 | Python |
import ocdskingfisherprocess.cli.commands.base
import ocdskingfisherprocess.database
from ocdskingfisherprocess.transform import TRANSFORM_TYPE_UPGRADE_1_0_TO_1_1
class NewTransformUpgrade10To11CLICommand(ocdskingfisherprocess.cli.commands.base.CLICommand):
command = 'new-transform-upgrade-1-0-to-1-1'
def configure_subparser(self, subparser):
self.configure_subparser_for_selecting_existing_collection(subparser)
def run_command(self, args):
self.run_command_for_selecting_existing_collection(args)
if self.collection.deleted_at:
print("That collection is deleted!")
return
id = self.database.get_collection_id(
self.collection.source_id,
self.collection.data_version,
self.collection.sample,
transform_from_collection_id=self.collection.database_id,
transform_type=TRANSFORM_TYPE_UPGRADE_1_0_TO_1_1)
if id:
print("Already exists! The ID is {}".format(id))
return
id = self.database.get_or_create_collection_id(self.collection.source_id,
self.collection.data_version,
self.collection.sample,
transform_from_collection_id=self.collection.database_id,
transform_type=TRANSFORM_TYPE_UPGRADE_1_0_TO_1_1)
print("Created! The ID is {}".format(id))
print("Now run transform-collection with that ID.")
| 43.459459 | 112 | 0.636816 | [
"BSD-3-Clause"
] | matiasSanabria/kingfisher-process | ocdskingfisherprocess/cli/commands/new_transform_upgrade_1_0_to_1_1.py | 1,608 | Python |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
from iris.test_case import *
class Test(BaseTest):
def __init__(self, app):
BaseTest.__init__(self, app)
self.meta = 'This is a test for checking private browsing navigation'
def run(self):
url = 'https://www.google.com/?hl=EN'
private_browsing_image = 'private_browsing.png'
google_search_image = 'google_search.png'
# check if incognito mode works
new_private_window()
expected_1 = exists(private_browsing_image, 10)
assert_true(self, expected_1, 'Find private browsing image')
# check basic_url in incognito mode
navigate(url)
expected_2 = exists(google_search_image, 10)
assert_true(self, expected_2, 'Find google search image')
| 30.774194 | 77 | 0.677149 | [
"MPL-2.0"
] | davehunt/iris | iris/tests/experiments/private_browsing_mode.py | 954 | Python |
from flask import Flask, request, abort
from linebot import (
LineBotApi, WebhookHandler
)
from linebot.exceptions import (
InvalidSignatureError
)
from linebot.models import *
app = Flask(__name__)
# LINE BOT info
line_bot_api = LineBotApi('QCynFfsDk7My1YN72sVQyvk6ArYkD2TUQW/pUxUQqllnGFNcqjZ8tKC+qMcVa2u4Lg1WmdUVLcS124tweaXtcVWLmK/thFH1NFUZL/Olev6ugLeKG4VUVd0ee8VUUgnrqqCZD+ZBpD6j61TRW2eJEgdB04t89/1O/w1cDnyilFU=')
handler = WebhookHandler('f3b5d7b57ef1f4d1277aecd7f045db3d')
@app.route("/callback", methods=['POST'])
def callback():
signature = request.headers['X-Line-Signature']
body = request.get_data(as_text=True)
app.logger.info("Request body: " + body)
print(body)
try:
handler.handle(body, signature)
except InvalidSignatureError:
abort(400)
return 'OK'
# Message event
@handler.add(MessageEvent)
def handle_message(event):
message_type = event.message.type
user_id = event.source.user_id
reply_token = event.reply_token
message = event.message.text
line_bot_api.reply_message(reply_token, TextSendMessage(text = message))
import os
if __name__ == "__main__":
port = int(os.environ.get('PORT', 80))
    app.run(host='0.0.0.0', port=port)
 | 30.725 | 201 | 0.746949 | [
"MIT"
] | tingyo123/IoT_Project | Final_Project/hx711py/lineBotTest.py | 1,229 | Python |
#!/usr/bin/env python3
# still in development
#
import asyncio
import websockets
import json
import requests
eventsAPIPath = '/api/v1/events'
localServerIP = '0.0.0.0'
localServerAPIPort = '8000'
localServerWSPort = '8000'
localServerPath = '/sealog-server'
localToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDE0NDE3fQ.D8ja66bnLxJ3bsJlaKRtOquu8XbibjNCyFxJpI7vafc'
localClientWSID = 'localSealogReceive'
remoteServerIP = '162.243.201.175'
remoteServerAPIPort = '80'
remoteServerWSPort = '8000'
remoteServerPath = '/sealog-server'
remoteToken = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjU5ODFmMTY3MjEyYjM0OGFlZDdmYTlmNSIsInNjb3BlIjpbImFkbWluIiwiZXZlbnRfbWFuYWdlciIsImV2ZW50X2xvZ2dlciIsImV2ZW50X3dhdGNoZXIiXSwiaWF0IjoxNTI1MDEzNTUxfQ.8X-fBRUHdrwtkTLcOFAsW-vvvqCzmkZKM2gQgHNkBKk"
remoteClientWSID = 'remoteSealogReceive'
hello = {
'type': 'hello',
'id': remoteClientWSID,
'auth': {
'headers': {
'authorization': remoteToken
}
},
'version': '2',
'subs': ['/ws/status/newEvents']
}
ping = {
'type':'ping',
'id':remoteClientWSID
}
localHeaders = {'authorization': localToken}
remoteHeaders = {'authorization': remoteToken}
async def eventlog():
try:
async with websockets.connect('ws://' + remoteServerIP + ':' + remoteServerWSPort) as websocket:
await websocket.send(json.dumps(hello))
while(True):
event = await websocket.recv()
eventObj = json.loads(event)
print("eventObj:", eventObj)
if eventObj['type'] and eventObj['type'] == 'ping':
await websocket.send(json.dumps(ping))
elif eventObj['type'] and eventObj['type'] == 'pub':
r = requests.post('http://' + localServerIP + ':' + localServerAPIPort + localServerPath + eventsAPIPath, headers=localHeaders, data = json.dumps(eventObj['message']))
print(r.text)
### end of repeat
except Exception as error:
print(error)
asyncio.get_event_loop().run_until_complete(eventlog())
| 31.347222 | 251 | 0.692955 | [
"MIT"
] | OceanDataTools/sealog-server | misc/sealog_repeater_receive.py | 2,257 | Python |
PAYLOAD_BITS_120 = '111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'
KEY_BITS_192 = '010101010101010101010101011110100000010111101010101010101000010101110101010101010101010101010101010101010101010100001011111010101010101010101010101010101010101101011111010101000100010010101010'
ZEROS_64 = '0000000000000000000000000000000000000000000000000000000000000000'
PAYLOAD_RANDOM_64 = '1010101001001101010111111000101010010010110101010001101010010101'
 | 129 | 209 | 0.955426 | [
"MIT"
] | bejohi/StegoSources | cp1_code/test_constants.py | 516 | Python |
"""
https://leetcode.com/problems/powerful-integers/
Given two positive integers x and y, an integer is powerful if it is equal to x^i + y^j for some integers i >= 0 and j >= 0.
Return a list of all powerful integers that have value less than or equal to bound.
You may return the answer in any order. In your answer, each value should occur at most once.
Example 1:
Input: x = 2, y = 3, bound = 10
Output: [2,3,4,5,7,9,10]
Explanation:
2 = 2^0 + 3^0
3 = 2^1 + 3^0
4 = 2^0 + 3^1
5 = 2^1 + 3^1
7 = 2^2 + 3^1
9 = 2^3 + 3^0
10 = 2^0 + 3^2
Example 2:
Input: x = 3, y = 5, bound = 15
Output: [2,4,6,8,10,14]
Note:
1 <= x <= 100
1 <= y <= 100
0 <= bound <= 10^6
"""
from typing import List
# time/space complexity: O(log_x(bound) * log_y(bound)) candidate sums are generated
class Solution:
def powerfulIntegers(self, x: int, y: int, bound: int) -> List[int]:
xlist = [1]
ylist = [1]
i = 1
while x**i <= bound and x > 1:
xlist.append(x**i)
i += 1
i = 1
while y**i <= bound and y > 1:
ylist.append(y**i)
i += 1
return list(set(x+y for x in xlist for y in ylist if x+y <= bound)) | 22.7 | 124 | 0.557709 | [
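# Quick self-check against the two examples quoted in the docstring above
# (added for local runs only; LeetCode calls Solution directly).
if __name__ == "__main__":
    s = Solution()
    assert sorted(s.powerfulIntegers(2, 3, 10)) == [2, 3, 4, 5, 7, 9, 10]
    assert sorted(s.powerfulIntegers(3, 5, 15)) == [2, 4, 6, 8, 10, 14]
    print("docstring examples pass")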
"Apache-2.0"
] | Davidxswang/leetcode | easy/970-Powerful Integers.py | 1,135 | Python |
class RenderNodeAction(Enum,IComparable,IFormattable,IConvertible):
"""
Enumerated actions for processing a render node during custom export.
enum RenderNodeAction,values: Proceed (0),Skip (1)
"""
def __eq__(self,*args):
""" x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
pass
def __format__(self,*args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self,*args):
pass
def __gt__(self,*args):
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self,*args):
pass
def __lt__(self,*args):
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
Proceed=None
Skip=None
value__=None
| 28.470588 | 215 | 0.667355 | [
"MIT"
] | denfromufa/ironpython-stubs | stubs.min/Autodesk/Revit/DB/__init___parts/RenderNodeAction.py | 968 | Python |
import numpy as np
import torch
from dataclasses import dataclass
from typing import List
from jiant.tasks.core import (
BaseExample,
BaseTokenizedExample,
BaseDataRow,
BatchMixin,
GlueMixin,
Task,
TaskTypes,
)
from jiant.tasks.lib.templates.shared import double_sentence_featurize, labels_to_bimap
from jiant.utils.python.io import read_jsonl
@dataclass
class Example(BaseExample):
guid: str
input_premise: str
input_hypothesis: str
label: str
def tokenize(self, tokenizer):
return TokenizedExample(
guid=self.guid,
input_premise=tokenizer.tokenize(self.input_premise),
input_hypothesis=tokenizer.tokenize(self.input_hypothesis),
label_id=WnliTask.LABEL_TO_ID[self.label],
)
@dataclass
class TokenizedExample(BaseTokenizedExample):
guid: str
input_premise: List
input_hypothesis: List
label_id: int
def featurize(self, tokenizer, feat_spec):
return double_sentence_featurize(
guid=self.guid,
input_tokens_a=self.input_premise,
input_tokens_b=self.input_hypothesis,
label_id=self.label_id,
tokenizer=tokenizer,
feat_spec=feat_spec,
data_row_class=DataRow,
)
@dataclass
class DataRow(BaseDataRow):
guid: str
input_ids: np.ndarray
input_mask: np.ndarray
segment_ids: np.ndarray
label_id: int
tokens: list
@dataclass
class Batch(BatchMixin):
input_ids: torch.LongTensor
input_mask: torch.LongTensor
segment_ids: torch.LongTensor
label_id: torch.LongTensor
tokens: list
class WnliTask(GlueMixin, Task):
Example = Example
TokenizedExample = Example
DataRow = DataRow
Batch = Batch
TASK_TYPE = TaskTypes.CLASSIFICATION
LABELS = ["0", "1"]
LABEL_TO_ID, ID_TO_LABEL = labels_to_bimap(LABELS)
def get_train_examples(self):
return self._create_examples(lines=read_jsonl(self.train_path), set_type="train")
def get_val_examples(self):
return self._create_examples(lines=read_jsonl(self.val_path), set_type="val")
def get_test_examples(self):
return self._create_examples(lines=read_jsonl(self.test_path), set_type="test")
@classmethod
def _create_examples(cls, lines, set_type):
examples = []
for (i, line) in enumerate(lines):
examples.append(
Example(
# NOTE: get_glue_preds() is dependent on this guid format.
guid="%s-%s" % (set_type, i),
input_premise=line["premise"],
input_hypothesis=line["hypothesis"],
label=line["label"] if set_type != "test" else cls.LABELS[-1],
)
)
return examples
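# Illustrative end-to-end flow (a sketch; the tokenizer and feat_spec objects
# are assumed to come from the surrounding jiant pipeline, and the task
# constructor arguments are elided):
#
#   task = WnliTask(...)
#   examples = task.get_train_examples()
#   rows = [ex.tokenize(tokenizer).featurize(tokenizer, feat_spec)
#           for ex in examples]
#
# Each DataRow produced this way carries the paired-sentence features and the
# label id defined by LABEL_TO_ID above.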
| 26.688679 | 89 | 0.655002 | [
"MIT"
] | HonoMi/jiant | jiant/tasks/lib/wnli.py | 2,829 | Python |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""External task balancer.
Overall architecture is:
1. Users interact with clients.
2. Clients make requests against the frontend's REST API.
3. The FE makes a REST call against a worker or worker pool identified by
gcb_external_task_balancer_worker_url. The FE provisions a unique token,
generates a Task instance, and dispatches a REST request to the worker or
worker pool.
4. The worker or worker pool exposes a REST API for use by the FE. Worker
responses contain the name of the worker so the FE can poll a specific worker
for results using the (ticket, name) combination. Workers are in charge both
of doing work and of cleaning up their results. Clients do not talk to
workers directly.
To enable, set up a pool of workers behind a single URL. For example, this might
be a set of machines behind a balancer on GCE or an AWS ELB. Next, set
gcb_external_task_balancer_rest_enabled to True and set
gcb_external_task_balancer_worker_url to the URL of your worker pool. Secure
communication if desired, and write a client against the REST API this module
exposes.
This implementation has the following big limitations:
1. It is insecure. Currently there is no token exchange/validation at the API
level, so anyone who gets a ticket (for example, by listening to HTTP
traffic between clients and the FE) can issue API calls.
2. There is no XSSI/XSRF protection. Note that exposed endpoints will 404 by
default because gcb_external_task_balancer_rest_enabled is False, so the
behavior without overrides does *not* expose unprotected REST endpoints.
3. Old task items hang around forever. Could implement garbage collection cron
to remove them past a TTL.
4. The REST API is missing the ability to mark a single task for deletion and
   to fetch a paginated list of results (without their payloads) for a given
   user_id. Open issue: we do not expose the notion of a project in the REST
   API, but we have it in the workers. Should we expose it to allow filtering
   at the API level?
5. Add support for one balancer handling multiple pools of workers, not just
one.
6. Manager.mark* methods don't all check that the requested status transition is
valid. This means buggy handlers/workers/clients could cause invalid status
transitions. Fix is to have the Manager throw TransitionError in those cases
and modify the handlers to 400/500.
TODO(johncox): add URL of sample worker implementation once it's finished.
"""
__author__ = [
'[email protected] (John Cox)',
]
import logging
import urllib
from controllers import utils
from models import config
from models import custom_modules
from models import entities
from models import transforms
from google.appengine.api import urlfetch
from google.appengine.ext import db
_DISABLE_CACHING_HEADERS = {
'Cache-Control': 'max-age=0, must-revalidate',
'Pragma': 'no-cache',
}
_PAYLOAD = 'payload'
_TICKET = 'ticket'
_PROJECT_NAME = 'project'
_REST_URL_BASE = '/rest/balancer/v1'
_REST_URL_PROJECT = _REST_URL_BASE + '/project'
_REST_URL_TASK = _REST_URL_BASE
_STATUS = 'status'
_USER_ID = 'user_id'
_WORKER_DEADLINE_SECONDS = 5
_WORKER_ID = 'worker_id'
_WORKER_LOCKED = 'Worker locked'
_WORKER_LOCKED_MAX_RETRIES = 3
_LOG = logging.getLogger('modules.balancer.balancer')
logging.basicConfig()
EXTERNAL_TASK_BALANCER_REST_ENABLED = config.ConfigProperty(
'gcb_external_task_balancer_rest_enabled', bool,
('Whether or not to enable the REST endpoints for the external task '
'balancer module. You must also set the external task balancer URL '
'to use this feature.'), default_value=False,
label='Enable task balancer REST endpoints')
EXTERNAL_TASK_BALANCER_WORKER_URL = config.ConfigProperty(
'gcb_external_task_balancer_worker_url', str,
'URL for the worker pool used by the external task balancer module.',
default_value='', label='External task balancer worker URL')
class Error(Exception):
"""Base error class."""
class NotFoundError(Exception):
"""Raised when an op that needs an entity is run with a missing entity."""
class TransitionError(Exception):
"""Raised when an op attempts an invalid transition on a task."""
def _from_json(json_str):
"""Turns json -> object (or None if json cannot be parsed)."""
try:
return transforms.loads(json_str)
except: # Deliberately catching everything. pylint: disable=bare-except
return None
class Manager(object):
"""DAO for external tasks."""
# Treating access as module-protected. pylint: disable=protected-access
@classmethod
def create(cls, user_id=None):
"""Creates task and returns ticket string."""
task = _ExternalTask(status=_ExternalTask.CREATED, user_id=user_id)
return _ExternalTask.get_ticket_by_key(db.put(task))
@classmethod
def get(cls, ticket):
"""Gets task for ticket (or None if no matching task)."""
external_task = db.get(_ExternalTask.get_key_by_ticket(ticket))
if not external_task:
return None
return Task._from_external_task(external_task)
@classmethod
def list(cls, user_id):
"""Returns list of Task matching user_id, ordered by create date."""
return [Task._from_external_task(et) for et in sorted(
_ExternalTask.all().filter(
'%s =' % _ExternalTask.user_id.name, user_id
).fetch(1000), key=lambda task: task.create_date)]
@classmethod
@db.transactional
def mark_deleted(cls, ticket):
task = cls._get_or_raise_not_found_error(ticket)
task.status = _ExternalTask.DELETED
db.put(task)
@classmethod
@db.transactional
def mark_done(cls, ticket, status, result):
if status not in _ExternalTask._TERMINAL_STATUSES:
raise TransitionError(
'mark_done called with non-terminal status ' + status)
task = cls._get_or_raise_not_found_error(ticket)
task.result = result
task.status = status
db.put(task)
@classmethod
@db.transactional
def mark_failed(cls, ticket):
task = cls._get_or_raise_not_found_error(ticket)
task.status = _ExternalTask.FAILED
db.put(task)
@classmethod
@db.transactional
def mark_running(cls, ticket, worker_id):
task = cls._get_or_raise_not_found_error(ticket)
task.status = _ExternalTask.RUNNING
task.worker_id = worker_id
db.put(task)
@classmethod
def _delete(cls, ticket):
key = _ExternalTask.get_key_by_ticket(ticket)
db.delete(key)
@classmethod
def _get_or_raise_not_found_error(cls, ticket):
key = _ExternalTask.get_key_by_ticket(ticket)
task = db.get(key)
if not task:
raise NotFoundError
return task
class Task(object):
"""DTO for external tasks."""
def __init__(
self, change_date, create_date, result, status, ticket, user_id,
worker_id):
self.change_date = change_date
self.create_date = create_date
self.result = result
self.status = status
self.ticket = ticket
self.user_id = user_id
self.worker_id = worker_id
@classmethod
def _from_external_task(cls, external_task):
return cls(
external_task.change_date, external_task.create_date,
external_task.result, external_task.status,
external_task.get_ticket(), external_task.user_id,
external_task.worker_id)
def is_done(self):
return _ExternalTask.is_status_terminal(self.status)
def for_json(self):
return {
'change_date': self.change_date.strftime(
transforms.ISO_8601_DATETIME_FORMAT),
'create_date': self.create_date.strftime(
transforms.ISO_8601_DATETIME_FORMAT),
'result': self.result,
'status': self.status,
'ticket': self.ticket,
'user_id': self.user_id,
'worker_id': self.worker_id,
}
def __eq__(self, other):
return (
isinstance(other, Task) and
self.change_date == other.change_date and
self.create_date == other.create_date and
self.result == other.result and
self.status == other.status and
self.ticket == other.ticket and
self.user_id == other.user_id and
self.worker_id == other.worker_id)
def __ne__(self, other):
return not self.__eq__(other)
def __str__(self):
return (
'Task - change_date: %(change_date)s, '
'create_date: %(create_date)s, result: %(result)s, '
'status: %(status)s, ticket: %(ticket)s, user_id: %(user_id)s, '
'worker_id: %(worker_id)s' % self.to_dict())
class _ExternalTask(entities.BaseEntity):
"""Storage for external tasks."""
# States a task may be in.
COMPLETE = 'complete' # Done running and in known success state.
CREATED = 'created' # Datastore entity created, but task not yet running.
DELETED = 'deleted' # Marked for deletion; could be deleted later.
FAILED = 'failed' # Done running and in known failure state.
RUNNING = 'running' # Currently running on a worker.
_PENDING_STATUSES = frozenset([
CREATED,
RUNNING,
])
_TERMINAL_STATUSES = frozenset([
COMPLETE,
DELETED,
FAILED,
])
STATUSES = _PENDING_STATUSES.union(_TERMINAL_STATUSES)
# When the task was last edited.
change_date = db.DateTimeProperty(required=True, auto_now=True)
# When the task was created.
create_date = db.DateTimeProperty(required=True, auto_now_add=True)
# Output of the task in JSON.
result = db.TextProperty()
# Last observed status of the task. Can be inaccurate: for example, if a
# user creates a new task but navigates away before the task completes and
# their client never fetches the task when it's done, we'll still show it
# running.
status = db.StringProperty(required=True, choices=STATUSES)
# Optional identifier for the user who owns the task. We impose no
# restrictions beyond the identifier being a string <= 500B, per datastore.
user_id = db.StringProperty()
# Identifier for the worker.
worker_id = db.StringProperty()
@classmethod
def get_key_by_ticket(cls, ticket_str):
try:
return db.Key(encoded=ticket_str)
except:
raise ValueError(
'Cannot make _ExternalTask key from ticket value: %s' % (
ticket_str))
@classmethod
def get_ticket_by_key(cls, key):
return str(key)
@classmethod
def is_status_terminal(cls, status):
return status in cls._TERMINAL_STATUSES
def get_ticket(self):
"""Returns string identifier for the task; raises NotSavedError."""
return self.get_ticket_by_key(self.key())
class _Operation(object):
"""Base class for wire operation payloads."""
@classmethod
def from_str(cls, raw_str):
return cls._from_json(transforms.loads(raw_str))
@classmethod
def _from_json(cls, parsed):
# Parse and validate raw input, raising ValueError if necessary.
raise NotImplementedError
def ready(self):
"""True iff the operation has all data it needs to be issued."""
raise NotImplementedError
def to_json(self):
return transforms.dumps(self._to_dict())
def to_url(self):
return urllib.quote_plus(self.to_json())
def update(self, updates_dict):
for k, v in updates_dict.iteritems():
if not hasattr(self, k):
raise ValueError('Cannot set name ' + k)
setattr(self, k, v)
def _to_dict(self):
raise NotImplementedError
class _CreateTaskOperation(_Operation):
def __init__(self, payload, ticket, user_id):
self.payload = payload
self.ticket = ticket
self.user_id = user_id
@classmethod
def _from_json(cls, parsed):
return cls(parsed, None, parsed.get(_USER_ID))
def ready(self):
return self.payload is not None and self.ticket is not None
def _to_dict(self):
return {
_PAYLOAD: self.payload,
_TICKET: self.ticket,
_USER_ID: self.user_id,
}
class _GetProjectOperation(_Operation):
def __init__(self, payload):
self.payload = payload
@classmethod
def _from_json(cls, parsed):
return cls(parsed)
def ready(self):
return self.payload is not None
def _to_dict(self):
return {_PAYLOAD: self.payload}
class _GetTaskOperation(_Operation):
def __init__(self, payload, ticket, worker_id):
self.payload = payload
self.ticket = ticket
self.worker_id = worker_id
@classmethod
def _from_json(cls, parsed):
ticket = parsed.get(_TICKET)
if not ticket:
raise ValueError('%s not set' % _TICKET)
return cls(parsed, ticket, parsed.get(_WORKER_ID))
def ready(self):
return (
self.payload is not None and self.ticket is not None and
self.worker_id is not None)
def _to_dict(self):
return {
_PAYLOAD: self.payload,
_TICKET: self.ticket,
_WORKER_ID: self.worker_id,
}
class _WorkerPool(object):
"""Interface for the pool of machines that do background work."""
@classmethod
def _check_response(cls, response):
return response.has_key(_PAYLOAD)
@classmethod
def _do_fetch(cls, url, method, operation):
try:
response = urlfetch.fetch(
cls._get_url(url, method, operation),
deadline=_WORKER_DEADLINE_SECONDS,
headers=_DISABLE_CACHING_HEADERS, method=method,
payload=cls._get_request_body(method, operation))
return (
response.status_code, cls._transform_response(response))
except urlfetch.DownloadError as e: # 4xx, 5xx, timeouts.
_LOG.error('Unable to dispatch request to pool; error: %s', e)
return 500, {_PAYLOAD: 'Unable to dispatch request'}
@classmethod
def _get_base_url(cls, worker_id=None):
base = (
worker_id if worker_id is not None else
EXTERNAL_TASK_BALANCER_WORKER_URL.value)
return base + '/rest/v1'
@classmethod
def _get_create_task_url(cls):
return cls._get_base_url()
@classmethod
def _get_get_project_url(cls):
return cls._get_base_url() + '/project'
@classmethod
def _get_get_task_url(cls, worker_id):
return cls._get_base_url(worker_id=worker_id)
@classmethod
def _get_request_body(cls, method, operation):
if method == 'GET':
return None
return operation.to_json()
@classmethod
def _get_url(cls, url, method, operation):
if method == 'GET':
return '%s?request=%s' % (url, operation.to_url())
return url
@classmethod
def _transform_response(cls, response):
"""Transforms worker success/error responses into a standard format."""
try:
parsed = transforms.loads(response.content)
if not cls._check_response(parsed):
raise ValueError
return {_PAYLOAD: parsed[_PAYLOAD]}
except: # Catch everything on purpose. pylint: disable=bare-except
_LOG.error(
'Unable to parse worker response: ' + response.content)
return {_PAYLOAD: 'Received invalid response'}
@classmethod
def create_task(cls, operation):
return cls._do_fetch(cls._get_create_task_url(), 'POST', operation)
@classmethod
def get_project(cls, operation):
return cls._do_fetch(cls._get_get_project_url(), 'GET', operation)
@classmethod
def get_task(cls, operation):
return cls._do_fetch(
cls._get_get_task_url(operation.worker_id), 'GET', operation)
class _BaseRestHandler(utils.BaseRESTHandler):
def _send_json_response(self, code, response):
self.response.headers['Content-Disposition'] = 'attachment'
self.response.headers['Content-Type'] = (
'application/javascript; charset=utf-8')
self.response.headers['X-Content-Type-Options'] = 'nosniff'
self.response.headers['Access-Control-Allow-Origin'] = '*'
self.response.status_code = code
self.response.write(transforms.dumps(response))
def _check_config_or_send_error(self):
if not EXTERNAL_TASK_BALANCER_REST_ENABLED.value:
self._send_json_response(404, 'Not found.')
return False
elif not EXTERNAL_TASK_BALANCER_WORKER_URL.value:
self._send_json_response(500, 'No worker pool found.')
return False
return True
class _ProjectRestHandler(_BaseRestHandler):
def get(self):
configured = self._check_config_or_send_error()
if not configured:
return
try:
op = _GetProjectOperation.from_str(self.request.get('request'))
except ValueError:
self._send_json_response(400, 'Bad request')
return
self._send_json_response(*_WorkerPool.get_project(op))
class _TaskRestHandler(_BaseRestHandler):
def _get_payload(self, response):
return response.get(_PAYLOAD)
def _get_status(self, response):
return self._get_payload(response).get(_STATUS)
def _get_task_payload(self, response):
return response.get(_PAYLOAD).get(_PAYLOAD)
def _get_ticket(self, response):
return self._get_payload(response).get(_TICKET)
def _get_worker_id(self, response):
return self._get_payload(response).get(_WORKER_ID)
def _retry_create_task(self, response, op):
tries = 0
while tries < _WORKER_LOCKED_MAX_RETRIES:
tries += 1
_LOG.info('Worker locked; retrying (tries: %s)', tries)
code, response = _WorkerPool.create_task(op)
if not self._worker_locked(response):
return code, response
return code, {_PAYLOAD: _WORKER_LOCKED}
def _worker_locked(self, response):
return response.get(_PAYLOAD) == _WORKER_LOCKED
def get(self):
configured = self._check_config_or_send_error()
if not configured:
return
try:
op = _GetTaskOperation.from_str(self.request.get('request'))
except: # pylint: disable=bare-except
self._send_json_response(400, 'Bad request')
return
task = None
try:
task = Manager.get(op.ticket)
except ValueError:
pass # Invalid ticket; handle as 404.
if not task:
self._send_json_response(
404, 'Task not found for ticket %s' % op.ticket)
return
if task.is_done():
self._send_json_response(200, task.for_json())
return
op.update({_WORKER_ID: task.worker_id})
if not op.ready():
# If the operation cannot be issued now, the most likely cause is
# that a past response from a worker contained insufficient data to
            # dispatch requests to that worker (for example, it might not have
# set the worker_id). We cannot recover; all we can do is signal
# likely programmer error.
self._send_json_response(
500, 'Unable to compose request for worker')
return
code, response = _WorkerPool.get_task(op)
if code != 200:
self._send_json_response(code, response)
return
status = self._get_status(response)
if status is None:
self._send_json_response(500, 'Worker sent partial response')
return
elif _ExternalTask.is_status_terminal(status):
try:
payload = self._get_task_payload(response)
Manager.mark_done(op.ticket, status, payload)
except: # Catch everything. pylint: disable=bare-except
# TODO(johncox): could differentiate here and transition to a
# failed state when the payload is too big so we don't force
# unnecessary refetches against workers.
self._send_json_response(
500, 'Invalid worker status or payload too big')
return
self._send_json_response(*_WorkerPool.get_task(op))
def post(self):
configured = self._check_config_or_send_error()
if not configured:
return
try:
op = _CreateTaskOperation.from_str(self.request.get('request'))
except: # pylint: disable=bare-except
self._send_json_response(400, 'Bad request')
return
# Must allocate ticket at storage level for wire ops against worker, so
# we cannot create the task in one datastore call.
ticket = Manager.create(user_id=op.user_id)
op.update({_TICKET: ticket})
if not op.ready():
self._send_json_response(
500, 'Unable to compose request for worker')
return
code, response = _WorkerPool.create_task(op)
if self._worker_locked(response):
code, response = self._retry_create_task(response, op)
if code != 200:
Manager.mark_failed(ticket)
self._send_json_response(500, self._get_payload(response))
return
request_failed = code != 200
ticket_mismatch = self._get_ticket(response) != ticket
if request_failed or ticket_mismatch:
response = 'Ticket mismatch' if ticket_mismatch else 'Worker failed'
Manager.mark_failed(ticket)
self._send_json_response(500, response)
else: # Worker response indicates success.
Manager.mark_running(ticket, self._get_worker_id(response))
self._send_json_response(code, response)
custom_module = None
def register_module():
global custom_module # pylint: disable=global-statement
global_handlers = [
(_REST_URL_TASK, _TaskRestHandler),
(_REST_URL_PROJECT, _ProjectRestHandler),
]
namespaced_handlers = []
custom_module = custom_modules.Module(
'External Task Balancer', 'External Task Balancer', global_handlers,
namespaced_handlers)
return custom_module
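# Illustrative client flow (a sketch of the REST contract implemented above;
# the host, payload fields and ticket value are assumptions):
#
#   1. Create a task:
#        POST /rest/balancer/v1  with  request={"user_id": "user-1", ...}
#      The handler allocates a ticket, forwards the operation to the worker
#      pool, and echoes the pool's payload (including the ticket and
#      worker_id) back to the client.
#
#   2. Poll for completion:
#        GET /rest/balancer/v1?request=<url-encoded {"ticket": "<ticket>"}>
#      While the task is pending the FE re-queries the worker that owns it;
#      once the worker reports a terminal status the result is persisted via
#      Manager.mark_done and later polls return it directly.
#
#   3. Project metadata is available at GET /rest/balancer/v1/project using
#      the same request=<json> convention.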
| 33.253923 | 80 | 0.658616 | [
"Apache-2.0"
] | ehiller/CourseBuilderV19-TeacherDashboard | modules/balancer/balancer.py | 23,311 | Python |
"""
CLI tests
"""
from tso.tsocli import __main__ as tsocli
import pytest
from unittest.mock import patch, MagicMock, mock_open
mock_configurqation = "{}"
class TestCli:
def test_cli_should_exit_with_no_args(self):
with pytest.raises(SystemExit) as pytest_wrapped_e:
tsocli.main([])
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == 1
def test_cli_should_exit_with_only_one_arg(self):
with pytest.raises(SystemExit) as pytest_wrapped_e_pseudo_name:
tsocli.main(['s'])
with pytest.raises(SystemExit) as pytest_wrapped_e_full_name:
tsocli.main(['schedule'])
# Both Exceptions should be the same
assert pytest_wrapped_e_pseudo_name.type == pytest_wrapped_e_full_name.type
assert pytest_wrapped_e_pseudo_name.value.code == pytest_wrapped_e_full_name.value.code
# The exceptions should be a System Exit
assert pytest_wrapped_e_pseudo_name.type == SystemExit
assert pytest_wrapped_e_pseudo_name.value.code == 1
@patch('configuration.configuration_parser.parse', return_value=mock_configurqation)
@patch('tso.tsocli.command.cli_pipeline')
def test_cli_should_call_pipeline_when_successful(self, mock_pipeline, mock_config_parser):
tsocli.main([
'schedule',
'--start-date-time',
'2019-03-01 19:00',
'--end-date-time',
'2019-03-12 19:00',
'--export-to-file',
'--export-to-browser'
])
assert mock_pipeline.called
@patch('configuration.configuration_parser.parse', return_value=mock_configurqation)
@patch('tso.tsocli.command.cli_pipeline')
def test_cli_should_have_default_date_time_values(self, mock_pipeline, mock_config_parser):
tsocli.main([
'schedule',
'--export-to-file'
])
assert mock_pipeline.call_args.start_date_time
assert mock_pipeline.call_args.end_date_time
| 30.787879 | 95 | 0.683071 | [
"MIT"
] | elijah-ward/TSO | src/tso/tsocli/tests/test_cli.py | 2,032 | Python |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import datetime
import unittest
from unittest import mock
from airflow.models.dag import DAG
from airflow.providers.microsoft.azure.transfers.local_to_wasb import LocalFilesystemToWasbOperator
class TestLocalFilesystemToWasbOperator(unittest.TestCase):
_config = {
'file_path': 'file',
'container_name': 'container',
'blob_name': 'blob',
'wasb_conn_id': 'wasb_default',
'retries': 3,
}
def setUp(self):
args = {'owner': 'airflow', 'start_date': datetime.datetime(2017, 1, 1)}
self.dag = DAG('test_dag_id', default_args=args)
def test_init(self):
operator = LocalFilesystemToWasbOperator(task_id='wasb_operator_1', dag=self.dag, **self._config)
assert operator.file_path == self._config['file_path']
assert operator.container_name == self._config['container_name']
assert operator.blob_name == self._config['blob_name']
assert operator.wasb_conn_id == self._config['wasb_conn_id']
assert operator.load_options == {}
assert operator.retries == self._config['retries']
operator = LocalFilesystemToWasbOperator(
task_id='wasb_operator_2', dag=self.dag, load_options={'timeout': 2}, **self._config
)
assert operator.load_options == {'timeout': 2}
@mock.patch('airflow.providers.microsoft.azure.transfers.local_to_wasb.WasbHook', autospec=True)
def test_execute(self, mock_hook):
mock_instance = mock_hook.return_value
operator = LocalFilesystemToWasbOperator(
task_id='wasb_sensor', dag=self.dag, load_options={'timeout': 2}, **self._config
)
operator.execute(None)
mock_instance.load_file.assert_called_once_with('file', 'container', 'blob', timeout=2)
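    # A minimal additional sketch (not part of the original suite): it assumes, like the
    # test above, that execute() forwards load_options verbatim to WasbHook.load_file,
    # and checks the call made when no load_options are configured.
    @mock.patch('airflow.providers.microsoft.azure.transfers.local_to_wasb.WasbHook', autospec=True)
    def test_execute_without_load_options(self, mock_hook):
        mock_instance = mock_hook.return_value
        operator = LocalFilesystemToWasbOperator(task_id='wasb_sensor_no_options', dag=self.dag, **self._config)
        operator.execute(None)
        # With the default (empty) load_options, only the three positional arguments
        # from _config should reach load_file.
        mock_instance.load_file.assert_called_once_with('file', 'container', 'blob')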
| 40.171875 | 105 | 0.710618 | [
"Apache-2.0"
] | 0411tony/airflow | tests/providers/microsoft/azure/transfers/test_local_to_wasb.py | 2,571 | Python |
import sys
sys.path.append("./stqft")
sys.path.append("./qcnn")
import os
#Activate the cuda env
os.environ["LD_LIBRARY_PATH"] = "$LD_LIBRARY_PATH:/usr/local/cuda/lib64/:/usr/lib64:/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda-11.2/lib64:/usr/local/cuda/targets/x86_64-linux/lib/"
import time
import multiprocessing
import glob
import numpy as np
datasetPath = "/storage/mstrobl/dataset"
featurePath = "/storage/mstrobl/features"
checkpointsPath = "/storage/mstrobl/checkpoints"
modelsPath = "/storage/mstrobl/models"
quantumPath = "/storage/mstrobl/dataQuantum"
waveformPath = "/storage/mstrobl/waveforms"
exportPath = "/storage/mstrobl/versioning"
TOPIC = "PrepGenTrain"
batchSize = 28
kernelSize = 2
epochs = 40
portion = 1
PoolSize = int(multiprocessing.cpu_count()*0.6) #be gentle..
# PoolSize = 1 #be gentle..
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--waveform", default = 1, help = "Generate Waveforms")
parser.add_argument("--quantum", default= 1, help = "Generate Quantum Data")
parser.add_argument("--train", default = 1, action='store_true', help = "Fit the model")
parser.add_argument("--checkTree", default = 1, help = "Checks if the working tree is dirty")
args = parser.parse_args()
from stqft.frontend import export
if int(args.checkTree) == 1:
export.checkWorkingTree(exportPath)
print(f"\n\n\n-----------------------\n\n\n")
print(f"Train Time @{time.time()}")
print(f"\n\n\n-----------------------\n\n\n")
multiprocessing.set_start_method('spawn')
print(f"Running {PoolSize} processes")
datasetFiles = glob.glob(datasetPath + "/**/*.wav", recursive=True)
print(f"Found {len(datasetFiles)} files in the dataset")
exp = export(topic=TOPIC, identifier="dataset", dataDir=exportPath)
exp.setData(export.DESCRIPTION, f"Dataset {len(datasetFiles)} in {datasetPath}")
exp.setData(export.GENERICDATA, datasetFiles)
exp.doExport()
print(f"\n\n\n-----------------------\n\n\n")
print(f"Generating Waveforms @{time.time()}")
print(f"\n\n\n-----------------------\n\n\n")
from generateFeatures import gen_features, gen_quantum, reportSettings, samplingRate
from qcnn.small_qsr import labels
if int(args.waveform)==1:
x_train, x_valid, y_train, y_valid = gen_features(labels, datasetPath, featurePath, PoolSize, waveformPath=waveformPath, portion=portion)
else:
print("Loading from disk...")
x_train = np.load(f"{featurePath}/x_train_speech.npy")
x_valid = np.load(f"{featurePath}/x_valid_speech.npy")
y_train = np.load(f"{featurePath}/y_train_speech.npy")
y_valid = np.load(f"{featurePath}/y_valid_speech.npy")
exp = export(topic=TOPIC, identifier="waveformData", dataDir=exportPath)
exp.setData(export.DESCRIPTION, f"Waveforms generated (T)/ loaded (F): {args.waveform}; Labels used: {labels}; FeaturePath: {featurePath}; PoolSize: {PoolSize}; WaveformPath: {waveformPath}; Portioning: {portion}, SamplingRate: {samplingRate}, {reportSettings()}")
exp.setData(export.GENERICDATA, {"x_train":x_train, "x_valid":x_valid, "y_train":y_train, "y_valid":y_valid})
exp.doExport()
print(f"\n\n\n-----------------------\n\n\n")
print(f"Generating Quantum Data @{time.time()}")
print(f"\n\n\n-----------------------\n\n\n")
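    # Summary of the --quantum flag semantics used by the branches below (derived from
    # the code that follows; the numeric values are this script's own convention):
    #    1 -> run the quanvolution preprocessing on the training/validation features
    #   -1 -> only apply the pixel/channel mapping (quanv itself disabled)
    #   -2 -> skip quantum preprocessing entirely and reuse the raw features
    #   else -> load previously generated quantum data from disk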
    # disable quanv and pix chan map
if int(args.quantum)==-2:
q_train = x_train
q_valid = x_valid
# enable quanv
elif int(args.quantum)==1:
q_train, q_valid = gen_quantum(x_train, x_valid, kernelSize, output=quantumPath, poolSize=PoolSize)
# pix chan map
elif int(args.quantum)==-1:
q_train, q_valid = gen_quantum(x_train, x_valid, kernelSize, output=quantumPath, poolSize=PoolSize, quanv=False)
# load from disk
else:
print("Loading from disk...")
q_train = np.load(f"{quantumPath}/quanv_train.npy")
q_valid = np.load(f"{quantumPath}/quanv_valid.npy")
exp = export(topic=TOPIC, identifier="quantumData", dataDir=exportPath)
exp.setData(export.DESCRIPTION, f"Quantum data generated (T)/ loaded (F): {args.quantum}; FeaturePath: {quantumPath}; PoolSize: {PoolSize};")
exp.setData(export.GENERICDATA, {"q_train":q_train, "q_valid":q_valid})
exp.doExport()
print(f"\n\n\n-----------------------\n\n\n")
print(f"Starting Training @{time.time()}")
print(f"\n\n\n-----------------------\n\n\n")
from fitModel import fit_model
if args.train:
#if quanv completely disabled and no pix channel map
if int(args.quantum)==-2 or q_train.shape[3]==1:
print("using ablation")
# pass quanv data for training and validation
model, history = fit_model(q_train, y_train, q_valid, y_valid, checkpointsPath, epochs=epochs, batchSize=batchSize, ablation=True)
else:
# pass quanv data for training and validation
model, history = fit_model(q_train, y_train, q_valid, y_valid, checkpointsPath, epochs=epochs, batchSize=batchSize, ablation=False)
data_ix = time.strftime("%Y%m%d_%H%M")
model.save(f"{modelsPath}/model_{time.time()}")
else:
print("Training disabled")
exp = export(topic=TOPIC, identifier="model", dataDir=exportPath)
exp.setData(export.DESCRIPTION, f"Model trained (T)/ loaded (F): {args.train}; CheckpointsPath: {checkpointsPath}; ModelsPath: {modelsPath}")
exp.setData(export.GENERICDATA, {"history_acc":history.history['accuracy'], "history_val_acc":history.history['val_accuracy'], "history_loss":history.history['loss'], "history_val_loss":history.history['val_loss']})
exp.doExport() | 42.595588 | 268 | 0.669256 | [
"MIT"
] | stroblme/hqsp-main | train.py | 5,793 | Python |
# coding: utf-8
"""
ThingsBoard REST API
ThingsBoard Professional Edition IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3PAAS-RC1
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class EntityGroupControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def add_entities_to_entity_group_using_post(self, entity_group_id, **kwargs): # noqa: E501
"""Add entities to the group (addEntitiesToEntityGroup) # noqa: E501
Add entities to the specified entity group. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'ADD_TO_GROUP' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_entities_to_entity_group_using_post(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param list[str] body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_entities_to_entity_group_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.add_entities_to_entity_group_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
return data
def add_entities_to_entity_group_using_post_with_http_info(self, entity_group_id, **kwargs): # noqa: E501
"""Add entities to the group (addEntitiesToEntityGroup) # noqa: E501
Add entities to the specified entity group. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'ADD_TO_GROUP' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_entities_to_entity_group_using_post_with_http_info(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param list[str] body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_entities_to_entity_group_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `add_entities_to_entity_group_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}/addEntities', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
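    # A minimal usage sketch (comment only; the authenticated api_client and the ids are
    # placeholders/assumptions, not values defined in this module):
    #
    #   api = EntityGroupControllerApi(api_client)
    #   api.add_entities_to_entity_group_using_post(
    #       entity_group_id='784f394c-42b6-435a-983c-b7beff2784f9',
    #       body=['<entity-id-1>', '<entity-id-2>'])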
def assign_entity_group_to_edge_using_post(self, edge_id, group_type, entity_group_id, **kwargs): # noqa: E501
"""Assign entity group to edge (assignEntityGroupToEdge) # noqa: E501
        Creates assignment of an existing entity group to an instance of The Edge. Assignment works in an async way - first, notification event pushed to edge service queue on platform. Second, remote edge service will receive a copy of assignment entity group (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once entity group will be delivered to edge service, edge will request entities of this group to be sent to the edge. Once entities will be delivered to edge service, they are going to be available for usage on remote edge instance. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_group_to_edge_using_post(edge_id, group_type, entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: A string value representing the edge id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str group_type: EntityGroup type (required)
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_entity_group_to_edge_using_post_with_http_info(edge_id, group_type, entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.assign_entity_group_to_edge_using_post_with_http_info(edge_id, group_type, entity_group_id, **kwargs) # noqa: E501
return data
def assign_entity_group_to_edge_using_post_with_http_info(self, edge_id, group_type, entity_group_id, **kwargs): # noqa: E501
"""Assign entity group to edge (assignEntityGroupToEdge) # noqa: E501
        Creates assignment of an existing entity group to an instance of The Edge. Assignment works in an async way - first, notification event pushed to edge service queue on platform. Second, remote edge service will receive a copy of assignment entity group (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once entity group will be delivered to edge service, edge will request entities of this group to be sent to the edge. Once entities will be delivered to edge service, they are going to be available for usage on remote edge instance. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_group_to_edge_using_post_with_http_info(edge_id, group_type, entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: A string value representing the edge id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str group_type: EntityGroup type (required)
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['edge_id', 'group_type', 'entity_group_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_entity_group_to_edge_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'edge_id' is set
if ('edge_id' not in params or
params['edge_id'] is None):
raise ValueError("Missing the required parameter `edge_id` when calling `assign_entity_group_to_edge_using_post`") # noqa: E501
# verify the required parameter 'group_type' is set
if ('group_type' not in params or
params['group_type'] is None):
raise ValueError("Missing the required parameter `group_type` when calling `assign_entity_group_to_edge_using_post`") # noqa: E501
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `assign_entity_group_to_edge_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edge_id' in params:
path_params['edgeId'] = params['edge_id'] # noqa: E501
if 'group_type' in params:
path_params['groupType'] = params['group_type'] # noqa: E501
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/edge/{edgeId}/entityGroup/{entityGroupId}/{groupType}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_entity_group_using_delete(self, entity_group_id, **kwargs): # noqa: E501
"""Delete Entity Group (deleteEntityGroup) # noqa: E501
Deletes the entity group but does not delete the entities in the group, since they are also present in reserved group 'All'. Referencing non-existing Entity Group Id will cause an error. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'DELETE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_entity_group_using_delete(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_entity_group_using_delete_with_http_info(entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.delete_entity_group_using_delete_with_http_info(entity_group_id, **kwargs) # noqa: E501
return data
def delete_entity_group_using_delete_with_http_info(self, entity_group_id, **kwargs): # noqa: E501
"""Delete Entity Group (deleteEntityGroup) # noqa: E501
Deletes the entity group but does not delete the entities in the group, since they are also present in reserved group 'All'. Referencing non-existing Entity Group Id will cause an error. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'DELETE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_entity_group_using_delete_with_http_info(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_entity_group_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `delete_entity_group_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_edge_entity_groups_using_get(self, edge_id, group_type, **kwargs): # noqa: E501
"""Get All Edge Entity Groups by entity type (getAllEdgeEntityGroups) # noqa: E501
Fetch the list of Entity Group Info objects based on the provided Entity Type and assigned to the provided Edge entity. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_edge_entity_groups_using_get(edge_id, group_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: A string value representing the edge id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str group_type: EntityGroup type (required)
:return: list[EntityGroupInfo]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_edge_entity_groups_using_get_with_http_info(edge_id, group_type, **kwargs) # noqa: E501
else:
(data) = self.get_all_edge_entity_groups_using_get_with_http_info(edge_id, group_type, **kwargs) # noqa: E501
return data
def get_all_edge_entity_groups_using_get_with_http_info(self, edge_id, group_type, **kwargs): # noqa: E501
"""Get All Edge Entity Groups by entity type (getAllEdgeEntityGroups) # noqa: E501
Fetch the list of Entity Group Info objects based on the provided Entity Type and assigned to the provided Edge entity. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_edge_entity_groups_using_get_with_http_info(edge_id, group_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: A string value representing the edge id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str group_type: EntityGroup type (required)
:return: list[EntityGroupInfo]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['edge_id', 'group_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_edge_entity_groups_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'edge_id' is set
if ('edge_id' not in params or
params['edge_id'] is None):
raise ValueError("Missing the required parameter `edge_id` when calling `get_all_edge_entity_groups_using_get`") # noqa: E501
# verify the required parameter 'group_type' is set
if ('group_type' not in params or
params['group_type'] is None):
raise ValueError("Missing the required parameter `group_type` when calling `get_all_edge_entity_groups_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edge_id' in params:
path_params['edgeId'] = params['edge_id'] # noqa: E501
if 'group_type' in params:
path_params['groupType'] = params['group_type'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/allEntityGroups/edge/{edgeId}/{groupType}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[EntityGroupInfo]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_edge_entity_groups_using_get(self, edge_id, group_type, page_size, page, **kwargs): # noqa: E501
"""Get Edge Entity Groups by entity type (getEdgeEntityGroups) # noqa: E501
Returns a page of Entity Group Info objects based on the provided Entity Type and assigned to the provided Edge entity. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids.You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_edge_entity_groups_using_get(edge_id, group_type, page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: A string value representing the edge id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str group_type: EntityGroup type (required)
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataEntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_edge_entity_groups_using_get_with_http_info(edge_id, group_type, page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_edge_entity_groups_using_get_with_http_info(edge_id, group_type, page_size, page, **kwargs) # noqa: E501
return data
def get_edge_entity_groups_using_get_with_http_info(self, edge_id, group_type, page_size, page, **kwargs): # noqa: E501
"""Get Edge Entity Groups by entity type (getEdgeEntityGroups) # noqa: E501
Returns a page of Entity Group Info objects based on the provided Entity Type and assigned to the provided Edge entity. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids.You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_edge_entity_groups_using_get_with_http_info(edge_id, group_type, page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: A string value representing the edge id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str group_type: EntityGroup type (required)
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataEntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['edge_id', 'group_type', 'page_size', 'page', 'sort_property', 'sort_order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_edge_entity_groups_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'edge_id' is set
if ('edge_id' not in params or
params['edge_id'] is None):
raise ValueError("Missing the required parameter `edge_id` when calling `get_edge_entity_groups_using_get`") # noqa: E501
# verify the required parameter 'group_type' is set
if ('group_type' not in params or
params['group_type'] is None):
raise ValueError("Missing the required parameter `group_type` when calling `get_edge_entity_groups_using_get`") # noqa: E501
# verify the required parameter 'page_size' is set
if ('page_size' not in params or
params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_edge_entity_groups_using_get`") # noqa: E501
# verify the required parameter 'page' is set
if ('page' not in params or
params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_edge_entity_groups_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edge_id' in params:
path_params['edgeId'] = params['edge_id'] # noqa: E501
if 'group_type' in params:
path_params['groupType'] = params['group_type'] # noqa: E501
query_params = []
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'sort_property' in params:
query_params.append(('sortProperty', params['sort_property'])) # noqa: E501
if 'sort_order' in params:
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroups/edge/{edgeId}/{groupType}{?page,pageSize,sortOrder,sortProperty}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDataEntityGroupInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_entities_using_get(self, entity_group_id, page_size, page, **kwargs): # noqa: E501
"""Get Group Entities (getEntities) # noqa: E501
Returns a page of Short Entity View objects that belongs to specified Entity Group Id. Short Entity View object contains the entity id and number of fields (attributes, telemetry, etc). List of those fields is configurable and defined in the group configuration.You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entities_using_get(entity_group_id, page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str text_search: The case insensitive 'startsWith' filter based on the entity group name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataShortEntityView
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entities_using_get_with_http_info(entity_group_id, page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_entities_using_get_with_http_info(entity_group_id, page_size, page, **kwargs) # noqa: E501
return data
def get_entities_using_get_with_http_info(self, entity_group_id, page_size, page, **kwargs): # noqa: E501
"""Get Group Entities (getEntities) # noqa: E501
Returns a page of Short Entity View objects that belongs to specified Entity Group Id. Short Entity View object contains the entity id and number of fields (attributes, telemetry, etc). List of those fields is configurable and defined in the group configuration.You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entities_using_get_with_http_info(entity_group_id, page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str text_search: The case insensitive 'startsWith' filter based on the entity group name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataShortEntityView
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id', 'page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entities_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `get_entities_using_get`") # noqa: E501
# verify the required parameter 'page_size' is set
if ('page_size' not in params or
params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_entities_using_get`") # noqa: E501
# verify the required parameter 'page' is set
if ('page' not in params or
params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_entities_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'text_search' in params:
query_params.append(('textSearch', params['text_search'])) # noqa: E501
if 'sort_property' in params:
query_params.append(('sortProperty', params['sort_property'])) # noqa: E501
if 'sort_order' in params:
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}/entities{?page,pageSize,sortOrder,sortProperty,textSearch}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDataShortEntityView', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
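    # A minimal pagination sketch (comment only; assumes the generated
    # PageDataShortEntityView model exposes 'data' and 'has_next', and the group id is
    # a placeholder):
    #
    #   page = 0
    #   while True:
    #       result = api.get_entities_using_get('784f394c-42b6-435a-983c-b7beff2784f9',
    #                                           page_size=100, page=page)
    #       for view in result.data:
    #           ...  # process each ShortEntityView
    #       if not result.has_next:
    #           break
    #       page += 1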
def get_entity_group_all_by_owner_and_type_using_get(self, owner_type, owner_id, group_type, **kwargs): # noqa: E501
"""Get special group All by owner and entity type (getEntityGroupsByOwnerAndType) # noqa: E501
Fetch reserved group 'All' based on the provided Owner Id and Entity Type. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_group_all_by_owner_and_type_using_get(owner_type, owner_id, group_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner_type: Tenant or Customer (required)
:param str owner_id: A string value representing the Tenant or Customer id (required)
:param str group_type: Entity Group type (required)
:return: EntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entity_group_all_by_owner_and_type_using_get_with_http_info(owner_type, owner_id, group_type, **kwargs) # noqa: E501
else:
(data) = self.get_entity_group_all_by_owner_and_type_using_get_with_http_info(owner_type, owner_id, group_type, **kwargs) # noqa: E501
return data
def get_entity_group_all_by_owner_and_type_using_get_with_http_info(self, owner_type, owner_id, group_type, **kwargs): # noqa: E501
"""Get special group All by owner and entity type (getEntityGroupsByOwnerAndType) # noqa: E501
Fetch reserved group 'All' based on the provided Owner Id and Entity Type. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_group_all_by_owner_and_type_using_get_with_http_info(owner_type, owner_id, group_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner_type: Tenant or Customer (required)
:param str owner_id: A string value representing the Tenant or Customer id (required)
:param str group_type: Entity Group type (required)
:return: EntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_type', 'owner_id', 'group_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entity_group_all_by_owner_and_type_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_type' is set
if ('owner_type' not in params or
params['owner_type'] is None):
raise ValueError("Missing the required parameter `owner_type` when calling `get_entity_group_all_by_owner_and_type_using_get`") # noqa: E501
# verify the required parameter 'owner_id' is set
if ('owner_id' not in params or
params['owner_id'] is None):
raise ValueError("Missing the required parameter `owner_id` when calling `get_entity_group_all_by_owner_and_type_using_get`") # noqa: E501
# verify the required parameter 'group_type' is set
if ('group_type' not in params or
params['group_type'] is None):
raise ValueError("Missing the required parameter `group_type` when calling `get_entity_group_all_by_owner_and_type_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_type' in params:
path_params['ownerType'] = params['owner_type'] # noqa: E501
if 'owner_id' in params:
path_params['ownerId'] = params['owner_id'] # noqa: E501
if 'group_type' in params:
path_params['groupType'] = params['group_type'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/all/{ownerType}/{ownerId}/{groupType}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityGroupInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_entity_group_by_id_using_get(self, entity_group_id, **kwargs): # noqa: E501
"""Get Entity Group Info (getEntityGroupById) # noqa: E501
Fetch the Entity Group object based on the provided Entity Group Id. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids. Entity group name is unique in the scope of owner and entity type. For example, you can't create two tenant device groups called 'Water meters'. However, you may create device and asset group with the same name. And also you may create groups with the same name for two different customers of the same tenant. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_group_by_id_using_get(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entity_group_by_id_using_get_with_http_info(entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.get_entity_group_by_id_using_get_with_http_info(entity_group_id, **kwargs) # noqa: E501
return data
def get_entity_group_by_id_using_get_with_http_info(self, entity_group_id, **kwargs): # noqa: E501
"""Get Entity Group Info (getEntityGroupById) # noqa: E501
Fetch the Entity Group object based on the provided Entity Group Id. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids. Entity group name is unique in the scope of owner and entity type. For example, you can't create two tenant device groups called 'Water meters'. However, you may create device and asset group with the same name. And also you may create groups with the same name for two different customers of the same tenant. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_group_by_id_using_get_with_http_info(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entity_group_by_id_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `get_entity_group_by_id_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityGroupInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_entity_group_by_owner_and_name_and_type_using_get(self, owner_type, owner_id, group_type, group_name, **kwargs): # noqa: E501
"""Get Entity Group by owner, type and name (getEntityGroupByOwnerAndNameAndType) # noqa: E501
        Fetch the Entity Group object based on the provided owner, group type and group name. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids. Entity group name is unique in the scope of owner and entity type. For example, you can't create two tenant device groups called 'Water meters'. However, you may create device and asset group with the same name. And also you may create groups with the same name for two different customers of the same tenant. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_group_by_owner_and_name_and_type_using_get(owner_type, owner_id, group_type, group_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner_type: Tenant or Customer (required)
:param str owner_id: A string value representing the Tenant or Customer id (required)
:param str group_type: Entity Group type (required)
:param str group_name: Entity Group name (required)
:return: EntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entity_group_by_owner_and_name_and_type_using_get_with_http_info(owner_type, owner_id, group_type, group_name, **kwargs) # noqa: E501
else:
(data) = self.get_entity_group_by_owner_and_name_and_type_using_get_with_http_info(owner_type, owner_id, group_type, group_name, **kwargs) # noqa: E501
return data
def get_entity_group_by_owner_and_name_and_type_using_get_with_http_info(self, owner_type, owner_id, group_type, group_name, **kwargs): # noqa: E501
"""Get Entity Group by owner, type and name (getEntityGroupByOwnerAndNameAndType) # noqa: E501
        Fetch the Entity Group object based on the provided owner, group type and group name. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always have an owner - particular Tenant or Customer. Each entity may belong to multiple groups simultaneously.Entity Group Info extends Entity Group object and adds 'ownerIds' - a list of owner ids. Entity group name is unique in the scope of owner and entity type. For example, you can't create two tenant device groups called 'Water meters'. However, you may create device and asset group with the same name. And also you may create groups with the same name for two different customers of the same tenant. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_group_by_owner_and_name_and_type_using_get_with_http_info(owner_type, owner_id, group_type, group_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner_type: Tenant or Customer (required)
:param str owner_id: A string value representing the Tenant or Customer id (required)
:param str group_type: Entity Group type (required)
:param str group_name: Entity Group name (required)
:return: EntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_type', 'owner_id', 'group_type', 'group_name'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entity_group_by_owner_and_name_and_type_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_type' is set
if ('owner_type' not in params or
params['owner_type'] is None):
raise ValueError("Missing the required parameter `owner_type` when calling `get_entity_group_by_owner_and_name_and_type_using_get`") # noqa: E501
# verify the required parameter 'owner_id' is set
if ('owner_id' not in params or
params['owner_id'] is None):
raise ValueError("Missing the required parameter `owner_id` when calling `get_entity_group_by_owner_and_name_and_type_using_get`") # noqa: E501
# verify the required parameter 'group_type' is set
if ('group_type' not in params or
params['group_type'] is None):
raise ValueError("Missing the required parameter `group_type` when calling `get_entity_group_by_owner_and_name_and_type_using_get`") # noqa: E501
# verify the required parameter 'group_name' is set
if ('group_name' not in params or
params['group_name'] is None):
raise ValueError("Missing the required parameter `group_name` when calling `get_entity_group_by_owner_and_name_and_type_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_type' in params:
path_params['ownerType'] = params['owner_type'] # noqa: E501
if 'owner_id' in params:
path_params['ownerId'] = params['owner_id'] # noqa: E501
if 'group_type' in params:
path_params['groupType'] = params['group_type'] # noqa: E501
if 'group_name' in params:
path_params['groupName'] = params['group_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{ownerType}/{ownerId}/{groupType}/{groupName}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityGroupInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_entity_groups_by_ids_using_get(self, entity_group_ids, **kwargs): # noqa: E501
"""Get Entity Groups by Ids (getDevicesByIds) # noqa: E501
        Requested entity groups must be owned by the tenant or assigned to the customer of the user performing the request. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_groups_by_ids_using_get(entity_group_ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_ids: A list of group ids, separated by comma ',' (required)
:return: list[EntityGroup]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entity_groups_by_ids_using_get_with_http_info(entity_group_ids, **kwargs) # noqa: E501
else:
(data) = self.get_entity_groups_by_ids_using_get_with_http_info(entity_group_ids, **kwargs) # noqa: E501
return data
def get_entity_groups_by_ids_using_get_with_http_info(self, entity_group_ids, **kwargs): # noqa: E501
"""Get Entity Groups by Ids (getDevicesByIds) # noqa: E501
Requested devices must be owned by tenant or assigned to customer which user is performing the request. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_groups_by_ids_using_get_with_http_info(entity_group_ids, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_ids: A list of group ids, separated by comma ',' (required)
:return: list[EntityGroup]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_ids'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entity_groups_by_ids_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_ids' is set
if ('entity_group_ids' not in params or
params['entity_group_ids'] is None):
raise ValueError("Missing the required parameter `entity_group_ids` when calling `get_entity_groups_by_ids_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'entity_group_ids' in params:
query_params.append(('entityGroupIds', params['entity_group_ids'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroups{?entityGroupIds}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[EntityGroup]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
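    # Note (illustrative): despite the list[EntityGroup] return type, `entity_group_ids`
    # is passed as a single comma-separated string (see the :param: description above),
    # not a Python list. A minimal sketch with two hypothetical ids:
    #
    #     groups = api.get_entity_groups_by_ids_using_get(
    #         entity_group_ids='784f394c-42b6-435a-983c-b7beff2784f9,'
    #                          '1e2a3b4c-5d6e-7f80-9a0b-1c2d3e4f5a6b')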
def get_entity_groups_by_owner_and_type_using_get(self, owner_type, owner_id, group_type, **kwargs): # noqa: E501
"""Get Entity Groups by owner and entity type (getEntityGroupsByOwnerAndType) # noqa: E501
        Fetch the list of Entity Group Info objects based on the provided Owner Id and Entity Type. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always has an owner - a particular Tenant or Customer. Each entity may belong to multiple groups simultaneously. Entity Group Info extends the Entity Group object and adds 'ownerIds' - a list of owner ids. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_groups_by_owner_and_type_using_get(owner_type, owner_id, group_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner_type: Tenant or Customer (required)
:param str owner_id: A string value representing the Tenant or Customer id (required)
:param str group_type: Entity Group type (required)
:return: list[EntityGroupInfo]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entity_groups_by_owner_and_type_using_get_with_http_info(owner_type, owner_id, group_type, **kwargs) # noqa: E501
else:
(data) = self.get_entity_groups_by_owner_and_type_using_get_with_http_info(owner_type, owner_id, group_type, **kwargs) # noqa: E501
return data
def get_entity_groups_by_owner_and_type_using_get_with_http_info(self, owner_type, owner_id, group_type, **kwargs): # noqa: E501
"""Get Entity Groups by owner and entity type (getEntityGroupsByOwnerAndType) # noqa: E501
        Fetch the list of Entity Group Info objects based on the provided Owner Id and Entity Type. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always has an owner - a particular Tenant or Customer. Each entity may belong to multiple groups simultaneously. Entity Group Info extends the Entity Group object and adds 'ownerIds' - a list of owner ids. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_groups_by_owner_and_type_using_get_with_http_info(owner_type, owner_id, group_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str owner_type: Tenant or Customer (required)
:param str owner_id: A string value representing the Tenant or Customer id (required)
:param str group_type: Entity Group type (required)
:return: list[EntityGroupInfo]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['owner_type', 'owner_id', 'group_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entity_groups_by_owner_and_type_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'owner_type' is set
if ('owner_type' not in params or
params['owner_type'] is None):
raise ValueError("Missing the required parameter `owner_type` when calling `get_entity_groups_by_owner_and_type_using_get`") # noqa: E501
# verify the required parameter 'owner_id' is set
if ('owner_id' not in params or
params['owner_id'] is None):
raise ValueError("Missing the required parameter `owner_id` when calling `get_entity_groups_by_owner_and_type_using_get`") # noqa: E501
# verify the required parameter 'group_type' is set
if ('group_type' not in params or
params['group_type'] is None):
raise ValueError("Missing the required parameter `group_type` when calling `get_entity_groups_by_owner_and_type_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'owner_type' in params:
path_params['ownerType'] = params['owner_type'] # noqa: E501
if 'owner_id' in params:
path_params['ownerId'] = params['owner_id'] # noqa: E501
if 'group_type' in params:
path_params['groupType'] = params['group_type'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroups/{ownerType}/{ownerId}/{groupType}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[EntityGroupInfo]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_entity_groups_by_type_using_get(self, group_type, **kwargs): # noqa: E501
"""Get Entity Groups by entity type (getEntityGroupsByType) # noqa: E501
        Fetch the list of Entity Group Info objects based on the provided Entity Type. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always has an owner - a particular Tenant or Customer. Each entity may belong to multiple groups simultaneously. Entity Group Info extends the Entity Group object and adds 'ownerIds' - a list of owner ids. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_groups_by_type_using_get(group_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group_type: Entity Group type (required)
:return: list[EntityGroupInfo]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entity_groups_by_type_using_get_with_http_info(group_type, **kwargs) # noqa: E501
else:
(data) = self.get_entity_groups_by_type_using_get_with_http_info(group_type, **kwargs) # noqa: E501
return data
def get_entity_groups_by_type_using_get_with_http_info(self, group_type, **kwargs): # noqa: E501
"""Get Entity Groups by entity type (getEntityGroupsByType) # noqa: E501
        Fetch the list of Entity Group Info objects based on the provided Entity Type. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always has an owner - a particular Tenant or Customer. Each entity may belong to multiple groups simultaneously. Entity Group Info extends the Entity Group object and adds 'ownerIds' - a list of owner ids. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_groups_by_type_using_get_with_http_info(group_type, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str group_type: Entity Group type (required)
:return: list[EntityGroupInfo]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entity_groups_by_type_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group_type' is set
if ('group_type' not in params or
params['group_type'] is None):
raise ValueError("Missing the required parameter `group_type` when calling `get_entity_groups_by_type_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'group_type' in params:
path_params['groupType'] = params['group_type'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroups/{groupType}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[EntityGroupInfo]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_entity_groups_for_entity_using_get(self, entity_type, entity_id, **kwargs): # noqa: E501
"""Get Entity Groups by Entity Id (getEntityGroupsForEntity) # noqa: E501
        Returns a list of groups that contain the specified Entity Id. For example, all device groups that contain a specific device. The list always contains at least one element - the special group 'All'. You can specify parameters to filter the results. The result is wrapped with a PageData object that allows you to iterate over the result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_groups_for_entity_using_get(entity_type, entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_type: Entity Group type (required)
:param str entity_id: A string value representing the entity id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: list[EntityGroupId]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_entity_groups_for_entity_using_get_with_http_info(entity_type, entity_id, **kwargs) # noqa: E501
else:
(data) = self.get_entity_groups_for_entity_using_get_with_http_info(entity_type, entity_id, **kwargs) # noqa: E501
return data
def get_entity_groups_for_entity_using_get_with_http_info(self, entity_type, entity_id, **kwargs): # noqa: E501
"""Get Entity Groups by Entity Id (getEntityGroupsForEntity) # noqa: E501
        Returns a list of groups that contain the specified Entity Id. For example, all device groups that contain a specific device. The list always contains at least one element - the special group 'All'. You can specify parameters to filter the results. The result is wrapped with a PageData object that allows you to iterate over the result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_entity_groups_for_entity_using_get_with_http_info(entity_type, entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_type: Entity Group type (required)
:param str entity_id: A string value representing the entity id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: list[EntityGroupId]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_type', 'entity_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_entity_groups_for_entity_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_type' is set
if ('entity_type' not in params or
params['entity_type'] is None):
raise ValueError("Missing the required parameter `entity_type` when calling `get_entity_groups_for_entity_using_get`") # noqa: E501
# verify the required parameter 'entity_id' is set
if ('entity_id' not in params or
params['entity_id'] is None):
raise ValueError("Missing the required parameter `entity_id` when calling `get_entity_groups_for_entity_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_type' in params:
path_params['entityType'] = params['entity_type'] # noqa: E501
if 'entity_id' in params:
path_params['entityId'] = params['entity_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroups/{entityType}/{entityId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[EntityGroupId]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
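    # Note (illustrative): this endpoint returns lightweight EntityGroupId references
    # rather than full groups; resolving names or configuration requires a separate
    # get-by-id call. Sketch with a hypothetical device id:
    #
    #     group_ids = api.get_entity_groups_for_entity_using_get(
    #         entity_type='DEVICE',
    #         entity_id='784f394c-42b6-435a-983c-b7beff2784f9')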
def get_group_entity_using_get(self, entity_group_id, entity_id, **kwargs): # noqa: E501
"""Get Group Entity (getGroupEntity) # noqa: E501
        Fetch the Short Entity View object based on the group and entity id. The Short Entity View object contains the entity id and a number of fields (attributes, telemetry, etc). The list of those fields is configurable and defined in the group configuration. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_group_entity_using_get(entity_group_id, entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str entity_id: A string value representing the entity id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: ShortEntityView
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_group_entity_using_get_with_http_info(entity_group_id, entity_id, **kwargs) # noqa: E501
else:
(data) = self.get_group_entity_using_get_with_http_info(entity_group_id, entity_id, **kwargs) # noqa: E501
return data
def get_group_entity_using_get_with_http_info(self, entity_group_id, entity_id, **kwargs): # noqa: E501
"""Get Group Entity (getGroupEntity) # noqa: E501
        Fetch the Short Entity View object based on the group and entity id. The Short Entity View object contains the entity id and a number of fields (attributes, telemetry, etc). The list of those fields is configurable and defined in the group configuration. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_group_entity_using_get_with_http_info(entity_group_id, entity_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str entity_id: A string value representing the entity id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: ShortEntityView
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id', 'entity_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_group_entity_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `get_group_entity_using_get`") # noqa: E501
# verify the required parameter 'entity_id' is set
if ('entity_id' not in params or
params['entity_id'] is None):
raise ValueError("Missing the required parameter `entity_id` when calling `get_group_entity_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
if 'entity_id' in params:
path_params['entityId'] = params['entity_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}/{entityId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ShortEntityView', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_owners_using_get(self, page_size, page, **kwargs): # noqa: E501
"""Get Owners (getOwners) # noqa: E501
        Provides a page view of Customers that the current user has READ access to. If the current user is a Tenant administrator, the result set also contains the tenant. The call is designed for the UI auto-complete component to show the tenant and all possible Customers that the user may select to change the owner of the particular entity or entity group. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_owners_using_get(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str text_search: The case insensitive 'startsWith' filter based on the entity group name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataContactBasedobject
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_owners_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_owners_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501
return data
def get_owners_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501
"""Get Owners (getOwners) # noqa: E501
        Provides a page view of Customers that the current user has READ access to. If the current user is a Tenant administrator, the result set also contains the tenant. The call is designed for the UI auto-complete component to show the tenant and all possible Customers that the user may select to change the owner of the particular entity or entity group. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'READ' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_owners_using_get_with_http_info(page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str text_search: The case insensitive 'startsWith' filter based on the entity group name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataContactBasedobject
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_owners_using_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'page_size' is set
if ('page_size' not in params or
params['page_size'] is None):
raise ValueError("Missing the required parameter `page_size` when calling `get_owners_using_get`") # noqa: E501
# verify the required parameter 'page' is set
if ('page' not in params or
params['page'] is None):
raise ValueError("Missing the required parameter `page` when calling `get_owners_using_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'page_size' in params:
query_params.append(('pageSize', params['page_size'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'text_search' in params:
query_params.append(('textSearch', params['text_search'])) # noqa: E501
if 'sort_property' in params:
query_params.append(('sortProperty', params['sort_property'])) # noqa: E501
if 'sort_order' in params:
query_params.append(('sortOrder', params['sort_order'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/owners{?page,pageSize,sortOrder,sortProperty,textSearch}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageDataContactBasedobject', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
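    # Pagination sketch (illustrative): the attribute names on the returned
    # PageDataContactBasedobject (`data`, `has_next`) are assumptions based on the
    # generated page-data models and may differ by client version:
    #
    #     page = 0
    #     while True:
    #         owners = api.get_owners_using_get(page_size=100, page=page)
    #         for owner in owners.data:
    #             print(owner)
    #         if not owners.has_next:
    #             break
    #         page += 1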
def make_entity_group_private_using_post(self, entity_group_id, **kwargs): # noqa: E501
"""Make Entity Group Private (makeEntityGroupPrivate) # noqa: E501
        Make the entity group not available for non-authorized users. Every group is private by default. This call is useful to hide a group that was previously made public. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.make_entity_group_private_using_post(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.make_entity_group_private_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.make_entity_group_private_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
return data
def make_entity_group_private_using_post_with_http_info(self, entity_group_id, **kwargs): # noqa: E501
"""Make Entity Group Private (makeEntityGroupPrivate) # noqa: E501
        Make the entity group not available for non-authorized users. Every group is private by default. This call is useful to hide a group that was previously made public. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.make_entity_group_private_using_post_with_http_info(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method make_entity_group_private_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `make_entity_group_private_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}/makePrivate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def make_entity_group_public_using_post(self, entity_group_id, **kwargs): # noqa: E501
"""Make Entity Group Publicly available (makeEntityGroupPublic) # noqa: E501
        Make the entity group available for non-authorized users. Useful for public dashboards that will be embedded into public websites. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.make_entity_group_public_using_post(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.make_entity_group_public_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.make_entity_group_public_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
return data
def make_entity_group_public_using_post_with_http_info(self, entity_group_id, **kwargs): # noqa: E501
"""Make Entity Group Publicly available (makeEntityGroupPublic) # noqa: E501
        Make the entity group available for non-authorized users. Useful for public dashboards that will be embedded into public websites. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.make_entity_group_public_using_post_with_http_info(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method make_entity_group_public_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `make_entity_group_public_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}/makePublic', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
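    # Typical use (illustrative): making a dashboard group public so its dashboards can
    # be embedded into public websites without authentication; the id is hypothetical:
    #
    #     api.make_entity_group_public_using_post(
    #         entity_group_id='784f394c-42b6-435a-983c-b7beff2784f9')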
def remove_entities_from_entity_group_using_post(self, entity_group_id, **kwargs): # noqa: E501
"""Remove entities from the group (removeEntitiesFromEntityGroup) # noqa: E501
        Removes entities from the specified entity group. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always has an owner - a particular Tenant or Customer. Each entity may belong to multiple groups simultaneously. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'REMOVE_FROM_GROUP' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_entities_from_entity_group_using_post(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param list[str] body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_entities_from_entity_group_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.remove_entities_from_entity_group_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
return data
def remove_entities_from_entity_group_using_post_with_http_info(self, entity_group_id, **kwargs): # noqa: E501
"""Remove entities from the group (removeEntitiesFromEntityGroup) # noqa: E501
        Removes entities from the specified entity group. Entity group allows you to group multiple entities of the same entity type (Device, Asset, Customer, User, Dashboard, etc). Entity Group always has an owner - a particular Tenant or Customer. Each entity may belong to multiple groups simultaneously. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'REMOVE_FROM_GROUP' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_entities_from_entity_group_using_post_with_http_info(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param list[str] body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_entities_from_entity_group_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `remove_entities_from_entity_group_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}/deleteEntities', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
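    # Note (illustrative): the request body is a plain list of entity id strings of the
    # group's entity type. Sketch with hypothetical ids:
    #
    #     api.remove_entities_from_entity_group_using_post(
    #         entity_group_id='784f394c-42b6-435a-983c-b7beff2784f9',
    #         body=['1e2a3b4c-5d6e-7f80-9a0b-1c2d3e4f5a6b'])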
def save_entity_group_using_post(self, **kwargs): # noqa: E501
"""Create Or Update Entity Group (saveEntityGroup) # noqa: E501
        Create or update the Entity Group. When creating an Entity Group, the platform generates the Entity Group Id as a [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Entity Group Id will be present in the response. Specify an existing Entity Group Id to update the group. Referencing a non-existing Entity Group Id will cause a 'Not Found' error. Entity group name is unique in the scope of owner and entity type. For example, you can't create two tenant device groups called 'Water meters'. However, you may create a device and an asset group with the same name. You may also create groups with the same name for two different customers of the same tenant. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_entity_group_using_post(async_req=True)
>>> result = thread.get()
:param async_req bool
:param EntityGroup body:
:return: EntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.save_entity_group_using_post_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.save_entity_group_using_post_with_http_info(**kwargs) # noqa: E501
return data
def save_entity_group_using_post_with_http_info(self, **kwargs): # noqa: E501
"""Create Or Update Entity Group (saveEntityGroup) # noqa: E501
        Create or update the Entity Group. When creating an Entity Group, the platform generates the Entity Group Id as a [time-based UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier#Version_1_(date-time_and_MAC_address)). The newly created Entity Group Id will be present in the response. Specify an existing Entity Group Id to update the group. Referencing a non-existing Entity Group Id will cause a 'Not Found' error. Entity group name is unique in the scope of owner and entity type. For example, you can't create two tenant device groups called 'Water meters'. However, you may create a device and an asset group with the same name. You may also create groups with the same name for two different customers of the same tenant. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.save_entity_group_using_post_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param EntityGroup body:
:return: EntityGroupInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method save_entity_group_using_post" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityGroupInfo', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
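    # Creation sketch (illustrative): the EntityGroup import path and constructor
    # arguments below are assumptions based on the generated models and may differ by
    # client version; the group name reuses the docstring example above:
    #
    #     from tb_rest_client.models.models_pe import EntityGroup
    #     new_group = api.save_entity_group_using_post(
    #         body=EntityGroup(name='Water meters', type='DEVICE'))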
def share_entity_group_to_child_owner_user_group_using_post(self, entity_group_id, user_group_id, role_id, **kwargs): # noqa: E501
"""Share the Entity Group with User group (shareEntityGroupToChildOwnerUserGroup) # noqa: E501
        Share the entity group with the specified user group using the specified role. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.share_entity_group_to_child_owner_user_group_using_post(entity_group_id, user_group_id, role_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id that you would like to share. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str user_group_id: A string value representing the Entity(User) Group Id that you would like to share with. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str role_id: A string value representing the Role Id that describes set of permissions you would like to share (read, write, etc). For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.share_entity_group_to_child_owner_user_group_using_post_with_http_info(entity_group_id, user_group_id, role_id, **kwargs) # noqa: E501
else:
(data) = self.share_entity_group_to_child_owner_user_group_using_post_with_http_info(entity_group_id, user_group_id, role_id, **kwargs) # noqa: E501
return data
def share_entity_group_to_child_owner_user_group_using_post_with_http_info(self, entity_group_id, user_group_id, role_id, **kwargs): # noqa: E501
"""Share the Entity Group with User group (shareEntityGroupToChildOwnerUserGroup) # noqa: E501
        Share the entity group with the specified user group using the specified role. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.share_entity_group_to_child_owner_user_group_using_post_with_http_info(entity_group_id, user_group_id, role_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id that you would like to share. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str user_group_id: A string value representing the Entity(User) Group Id that you would like to share with. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str role_id: A string value representing the Role Id that describes set of permissions you would like to share (read, write, etc). For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id', 'user_group_id', 'role_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method share_entity_group_to_child_owner_user_group_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `share_entity_group_to_child_owner_user_group_using_post`") # noqa: E501
# verify the required parameter 'user_group_id' is set
if ('user_group_id' not in params or
params['user_group_id'] is None):
raise ValueError("Missing the required parameter `user_group_id` when calling `share_entity_group_to_child_owner_user_group_using_post`") # noqa: E501
# verify the required parameter 'role_id' is set
if ('role_id' not in params or
params['role_id'] is None):
raise ValueError("Missing the required parameter `role_id` when calling `share_entity_group_to_child_owner_user_group_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
if 'user_group_id' in params:
path_params['userGroupId'] = params['user_group_id'] # noqa: E501
if 'role_id' in params:
path_params['roleId'] = params['role_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}/{userGroupId}/{roleId}/share', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def share_entity_group_using_post(self, entity_group_id, **kwargs): # noqa: E501
"""Share the Entity Group (shareEntityGroup) # noqa: E501
        Share the entity group with a certain user group based on the provided Share Group Request. The request is quite flexible and processing of the request involves multiple security checks using the platform RBAC feature. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.share_entity_group_using_post(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param ShareGroupRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.share_entity_group_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.share_entity_group_using_post_with_http_info(entity_group_id, **kwargs) # noqa: E501
return data
def share_entity_group_using_post_with_http_info(self, entity_group_id, **kwargs): # noqa: E501
"""Share the Entity Group (shareEntityGroup) # noqa: E501
        Share the entity group with a certain user group based on the provided Share Group Request. The request is quite flexible and processing of the request involves multiple security checks using the platform RBAC feature. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for specified group. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.share_entity_group_using_post_with_http_info(entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param ShareGroupRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_group_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method share_entity_group_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `share_entity_group_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityGroup/{entityGroupId}/share', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
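    # Sharing sketch (illustrative): the ShareGroupRequest import path and field names
    # below are assumptions based on the generated models and may differ by client
    # version; the group id is hypothetical:
    #
    #     from tb_rest_client.models.models_pe import ShareGroupRequest
    #     api.share_entity_group_using_post(
    #         entity_group_id='784f394c-42b6-435a-983c-b7beff2784f9',
    #         body=ShareGroupRequest(all_user_group=True, read_else_write=True))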
def unassign_entity_group_from_edge_using_delete(self, edge_id, group_type, entity_group_id, **kwargs): # noqa: E501
"""Unassign entity group from edge (unassignEntityGroupFromEdge) # noqa: E501
        Clears assignment of the entity group to the edge. Unassignment works in an async way - first, an 'unassign' notification event is pushed to the edge queue on the platform. Second, the remote edge service will receive an 'unassign' command to remove the entity group (the Edge will receive this instantly if it is currently connected, or once it connects to the platform). Third, once the 'unassign' command is delivered to the edge service, it removes the entity group and the entities inside this group locally. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.unassign_entity_group_from_edge_using_delete(edge_id, group_type, entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: A string value representing the edge id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str group_type: EntityGroup type (required)
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.unassign_entity_group_from_edge_using_delete_with_http_info(edge_id, group_type, entity_group_id, **kwargs) # noqa: E501
else:
(data) = self.unassign_entity_group_from_edge_using_delete_with_http_info(edge_id, group_type, entity_group_id, **kwargs) # noqa: E501
return data
def unassign_entity_group_from_edge_using_delete_with_http_info(self, edge_id, group_type, entity_group_id, **kwargs): # noqa: E501
"""Unassign entity group from edge (unassignEntityGroupFromEdge) # noqa: E501
        Clears assignment of the entity group to the edge. Unassignment works in an async way - first, an 'unassign' notification event is pushed to the edge queue on the platform. Second, the remote edge service will receive an 'unassign' command to remove the entity group (the Edge will receive this instantly if it is currently connected, or once it connects to the platform). Third, once the 'unassign' command is delivered to the edge service, it removes the entity group and the entities inside this group locally. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. Security check is performed to verify that the user has 'WRITE' permission for the entity (entities). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.unassign_entity_group_from_edge_using_delete_with_http_info(edge_id, group_type, entity_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: A string value representing the edge id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str group_type: EntityGroup type (required)
:param str entity_group_id: A string value representing the Entity Group Id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['edge_id', 'group_type', 'entity_group_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method unassign_entity_group_from_edge_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'edge_id' is set
if ('edge_id' not in params or
params['edge_id'] is None):
raise ValueError("Missing the required parameter `edge_id` when calling `unassign_entity_group_from_edge_using_delete`") # noqa: E501
# verify the required parameter 'group_type' is set
if ('group_type' not in params or
params['group_type'] is None):
raise ValueError("Missing the required parameter `group_type` when calling `unassign_entity_group_from_edge_using_delete`") # noqa: E501
# verify the required parameter 'entity_group_id' is set
if ('entity_group_id' not in params or
params['entity_group_id'] is None):
raise ValueError("Missing the required parameter `entity_group_id` when calling `unassign_entity_group_from_edge_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edge_id' in params:
path_params['edgeId'] = params['edge_id'] # noqa: E501
if 'group_type' in params:
path_params['groupType'] = params['group_type'] # noqa: E501
if 'entity_group_id' in params:
path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/edge/{edgeId}/entityGroup/{entityGroupId}/{groupType}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityGroup', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
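# Illustrative usage sketch (comments only, not part of the generated client).
# Assuming `api` is an instance of this generated controller wired to an
# authenticated ApiClient, the unassign operation above can be invoked as shown
# below; the UUIDs come from the docstrings, the group type value is an assumption.
#
#     api.unassign_entity_group_from_edge_using_delete(
#         edge_id="784f394c-42b6-435a-983c-b7beff2784f9",
#         group_type="DEVICE",
#         entity_group_id="784f394c-42b6-435a-983c-b7beff2784f9")
#
#     # or, non-blocking, as shown in the docstring:
#     thread = api.unassign_entity_group_from_edge_using_delete(
#         edge_id, group_type, entity_group_id, async_req=True)
#     entity_group = thread.get()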
| 56.113752 | 911 | 0.670978 | [
"Apache-2.0"
] | D34DPlayer/thingsboard-python-rest-client | tb_rest_client/api/api_pe/entity_group_controller_api.py | 132,204 | Python |
from setuptools import setup
setup(
name='filekit',
version='0.1',
packages=['filekit'],
license='MIT',
long_description=open('README.md').read(),
long_description_content_type='md'
)
| 19.363636 | 46 | 0.661972 | [
"MIT"
] | foundling/filekit | setup.py | 213 | Python |
#==========================================================
#
# This prepare the hdf5 datasets of the DRIVE database
#
#============================================================
import os
import h5py
import numpy as np
from PIL import Image
#content/add2/E2/DRIVE_datasets_training_testing
def write_hdf5(arr,outfile):
with h5py.File(outfile,"w") as f:
f.create_dataset("image", data=arr, dtype=arr.dtype)
#------------Path of the images --------------------------------------------------------------
#train
original_imgs_train = "/content/add2/E2/training/images/"
groundTruth_imgs_train = "/content/add2/E2/training/1st_manual/"
borderMasks_imgs_train = "/content/add2/E2/training/mask/"
#test
original_imgs_test = "/content/add2/E2/test/images/"
groundTruth_imgs_test = "/content/add2/E2/test/1st_manual/"
borderMasks_imgs_test = "/content/add2/E2/test/mask/"
#---------------------------------------------------------------------------------------------
Nimgs = 20
channels = 3
height = 584
width = 565
dataset_path = "/content/add2/E2/DRIVE_datasets_training_testing/"
def get_datasets(imgs_dir,groundTruth_dir,borderMasks_dir,train_test="null"):
imgs = np.empty((Nimgs,height,width,channels))
groundTruth = np.empty((Nimgs,height,width))
border_masks = np.empty((Nimgs,height,width))
for path, subdirs, files in os.walk(imgs_dir): #list all files, directories in the path
for i in range(len(files)):
#original
print ("original image: " +files[i])
img = Image.open(imgs_dir+files[i])
imgs[i] = np.asarray(img)
#corresponding ground truth
groundTruth_name = files[i][0:2] + "_manual1.gif"
print ("ground truth name: " + groundTruth_name)
g_truth = Image.open(groundTruth_dir + groundTruth_name)
groundTruth[i] = np.asarray(g_truth)
#corresponding border masks
border_masks_name = ""
if train_test=="train":
border_masks_name = files[i][0:2] + "_training_mask.gif"
elif train_test=="test":
border_masks_name = files[i][0:2] + "_test_mask.gif"
else:
print ("specify if train or test!!")
exit()
print ("border masks name: " + border_masks_name)
b_mask = Image.open(borderMasks_dir + border_masks_name)
border_masks[i] = np.asarray(b_mask)
print ("imgs max: " +str(np.max(imgs)))
print ("imgs min: " +str(np.min(imgs)))
assert(np.max(groundTruth)==255 and np.max(border_masks)==255)
assert(np.min(groundTruth)==0 and np.min(border_masks)==0)
print ("ground truth and border masks are correctly withih pixel value range 0-255 (black-white)")
#reshaping for my standard tensors
imgs = np.transpose(imgs,(0,3,1,2))
assert(imgs.shape == (Nimgs,channels,height,width))
groundTruth = np.reshape(groundTruth,(Nimgs,1,height,width))
border_masks = np.reshape(border_masks,(Nimgs,1,height,width))
assert(groundTruth.shape == (Nimgs,1,height,width))
assert(border_masks.shape == (Nimgs,1,height,width))
return imgs, groundTruth, border_masks
if not os.path.exists(dataset_path):
os.makedirs(dataset_path)
#getting the training datasets
imgs_train, groundTruth_train, border_masks_train = get_datasets(original_imgs_train,groundTruth_imgs_train,borderMasks_imgs_train,"train")
print ("saving train datasets")
write_hdf5(imgs_train, dataset_path + "DRIVE_dataset_imgs_train.hdf5")
write_hdf5(groundTruth_train, dataset_path + "DRIVE_dataset_groundTruth_train.hdf5")
write_hdf5(border_masks_train,dataset_path + "DRIVE_dataset_borderMasks_train.hdf5")
#getting the testing datasets
imgs_test, groundTruth_test, border_masks_test = get_datasets(original_imgs_test,groundTruth_imgs_test,borderMasks_imgs_test,"test")
print ("saving test datasets")
write_hdf5(imgs_test,dataset_path + "DRIVE_dataset_imgs_test.hdf5")
write_hdf5(groundTruth_test, dataset_path + "DRIVE_dataset_groundTruth_test.hdf5")
write_hdf5(border_masks_test,dataset_path + "DRIVE_dataset_borderMasks_test.hdf5")
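# Optional sanity check (an illustrative addition, not part of the original
# pipeline): re-open one of the freshly written HDF5 files and confirm that the
# stored tensor has the expected (Nimgs, channels, height, width) shape.
with h5py.File(dataset_path + "DRIVE_dataset_imgs_train.hdf5", "r") as f:
    print ("imgs_train shape on disk: " + str(f["image"].shape))  # expect (20, 3, 584, 565)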
| 44.462366 | 139 | 0.662636 | [
"MIT"
] | Hacker-007/E2 | prepare_datasets_DRIVE.py | 4,135 | Python |
import unittest
from niedoida_test_case import NiedoidaTestCase
class NiedoidaSmokeCaseMethodsRhf(NiedoidaTestCase):
@classmethod
def setUpClass(cls):
cls.data = cls.run_calculations("smoke-methods-rhf")
def test_energy(self):
self.assertAlmostEqual(self.data.scfenergies[0], -2056.7986203616, 3)
if __name__ == '__main__':
unittest.main()
| 25.066667 | 77 | 0.739362 | [
"MPL-2.0"
] | grzegorzmazur/niedoida | tests/niedoida/smoke-methods-rhf.py | 376 | Python |
import cv2
cap = cv2.VideoCapture(1)
cap.set(3, 640) #WIDTH
cap.set(4, 480) #HEIGHT
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
while True:
# while True:
# ret, frame = cap.read()
#
# # Our operations on the frame come here
# gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# faces = face_cascade.detectMultiScale(gray, 1.3, 5)
# try:
# number = len(faces)
# size = [faces[0][2], faces[0][3]]
# position = [faces[0][0], faces[0][1]]
# break
# except:
# a = 1
ret, frame = cap.read()
# Our operations on the frame come here
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
# print(number)
# print(size)
# print(position)
#print(len(faces))
# Display the resulting frame
for (x,y,w,h) in faces:
cv2.rectangle(frame,(x,y),(x+w,y+h),(255,0,0),2)
roi_gray = gray[y:y+h, x:x+w]
roi_color = frame[y:y+h, x:x+w]
cv2.imshow('frame',frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
| 26.468085 | 75 | 0.578778 | [
"MIT"
] | clevtech/Zhuldyz-Upper-RPI | faces.py | 1,244 | Python |
#!/usr/bin/env python
# Copyright (c) 2015-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree.
from __future__ import print_function
from clang.cindex import Cursor, CursorKind, TokenKind
from utils import range_dict_relative
import ctypes
import itertools
import re
# Function/method cursor kinds.
FUNCTION_KINDS = set([
'FUNCTION_DECL',
'FUNCTION_TEMPLATE',
'CXX_METHOD',
'CONSTRUCTOR',
'DESTRUCTOR',
'OBJC_INSTANCE_METHOD_DECL',
'OBJC_CLASS_METHOD_DECL',
])
# Class-like cursors.
CLASS_KINDS = set([
'STRUCT_DECL',
'UNION_DECL',
'CLASS_DECL',
'ENUM_DECL',
'OBJC_INTERFACE_DECL',
'OBJC_CATEGORY_DECL',
'OBJC_PROTOCOL_DECL',
'OBJC_IMPLEMENTATION_DECL',
'OBJC_CATEGORY_IMPL_DECL',
'CLASS_TEMPLATE',
'CLASS_TEMPLATE_PARTIAL_SPECIALIZATION',
'NAMESPACE',
])
# (Possibly external) members of CLASS_KINDS.
MEMBER_KINDS = set([
'CXX_METHOD',
'CONSTRUCTOR',
'DESTRUCTOR',
'FIELD_DECL',
'VAR_DECL',
'ENUM_CONSTANT_DECL',
])
# Variables and fields.
VAR_KINDS = set([
'OBJC_IVAR_DECL',
'FIELD_DECL',
'VAR_DECL',
])
# Capture the ubiquitous GTest-style TEST/TEST_F macros.
GTEST_MACROS = set(['TEST', 'TEST_F'])
MACRO_INSTANTIATION = 'MACRO_INSTANTIATION'
OTHER_KINDS = set([
MACRO_INSTANTIATION,
])
# Record any of the cursor types listed above.
ALL_KINDS = FUNCTION_KINDS | CLASS_KINDS | MEMBER_KINDS | VAR_KINDS | OTHER_KINDS
# People like adding a '-' by convention, but strip that out.
PRAGMA_MARK_REGEX = re.compile(
'^[ \t]*#[ \t]*pragma[ \t]+mark[ \t]+(?:-[ \t]*)?(.+)$', re.MULTILINE)
def visit_cursor(libclang, cursor):
try:
kind = cursor.kind.name
except:
# Some cursor kinds aren't supported by the Python binding.
return None
if kind not in ALL_KINDS:
return None
# Skip symbols from other files.
if not libclang.clang_Location_isFromMainFile(cursor.location):
return None
# Names of function parameters.
params = None
# Names of template parameters.
tparams = None
children = None
name = cursor.spelling
# Display types for variables and typedefs.
cursor_type = cursor.type.spelling if kind in VAR_KINDS else None
if kind in FUNCTION_KINDS:
# We can't use displayname as it also includes the arguments.
params = []
tparams = []
for child in cursor.get_children():
if child.kind == CursorKind.PARM_DECL:
# Use the param name, but fall back to the raw type if unnamed.
params.append(child.spelling or child.type.spelling)
elif child.kind == CursorKind.TEMPLATE_TYPE_PARAMETER:
tparams.append(child.spelling)
# TODO(hansonw): non-type and "template template" params?
if kind in MEMBER_KINDS:
# Name should be fully qualified if outside the parent.
if cursor.semantic_parent != cursor.lexical_parent:
name = cursor.semantic_parent.spelling + '::' + name
elif kind in CLASS_KINDS:
# Include template information.
name = cursor.displayname
children = []
for child in cursor.get_children():
child_outline = visit_cursor(libclang, child)
if child_outline is not None:
children.append(child_outline)
if kind == MACRO_INSTANTIATION:
params = []
if name in GTEST_MACROS:
# Should look like TEST(id, id).
tokens = list(itertools.islice(cursor.get_tokens(), 1, 6))
if len(tokens) == 5 and (
tokens[0].kind == TokenKind.PUNCTUATION and
tokens[1].kind == TokenKind.IDENTIFIER and
tokens[2].kind == TokenKind.PUNCTUATION and
tokens[3].kind == TokenKind.IDENTIFIER and
tokens[4].kind == TokenKind.PUNCTUATION
):
params = [tokens[1].spelling, tokens[3].spelling]
else:
return None
else:
# TODO(hansonw): Handle other special macros like DEFINE_ params.
return None
ret = {
'name': name,
'cursor_kind': kind,
'cursor_type': cursor_type,
'extent': range_dict_relative(cursor.extent),
'params': params,
'tparams': tparams,
'children': children,
}
return {k: v for k, v in ret.items() if v is not None}
# Scan through the outline tree and insert pragma marks as we pass by them.
def insert_pragma_marks(marks, outline_tree, tree_end=None):
new_result = []
for node in outline_tree:
while len(marks) > 0:
if marks[-1]['extent']['start']['row'] > node['extent']['start']['row']:
break
new_result.append(marks.pop())
children = node.get('children')
if children:
children[:] = insert_pragma_marks(marks, children, node['extent']['end']['row'])
new_result.append(node)
# Consume all remaining marks included in this subtree.
while len(marks) > 0:
if tree_end is not None and marks[-1]['extent']['start']['row'] > tree_end:
break
new_result.append(marks.pop())
return new_result
def get_outline(libclang, translation_unit, contents):
root_cursor = translation_unit.cursor
# This is the same as Cursor.get_children minus an assert in visitor().
# This results in a ~2x speedup!
callback_type = ctypes.CFUNCTYPE(ctypes.c_int, Cursor, Cursor, ctypes.py_object)
def visitor(child, parent, result):
child._tu = translation_unit
child_outline = visit_cursor(libclang, child)
if child_outline is not None:
result.append(child_outline)
return 1 # continue
result = []
libclang.clang_visitChildren(root_cursor, callback_type(visitor), result)
# Look for pragma marks. These are not detectable in the AST.
line = 0
lastpos = 0
pragma_marks = []
for mark in PRAGMA_MARK_REGEX.finditer(contents):
while lastpos < mark.start():
if contents[lastpos] == '\n':
line += 1
lastpos += 1
pragma_marks.append({
'name': mark.group(1),
'cursor_kind': 'PRAGMA_MARK',
'extent': {
'start': {'row': line, 'column': 0},
'end': {'row': line + 1, 'column': 0},
},
})
# Top-level macro instantiations appear out of order.
result = sorted(result, key=lambda x: (
x['extent']['start']['row'],
x['extent']['start']['column'],
x['extent']['end']['row'],
x['extent']['end']['column'],
))
# Convert into a stack for efficient removal.
pragma_marks.reverse()
return insert_pragma_marks(pragma_marks, result)
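# Illustrative shape of the data returned by get_outline (values are invented;
# the keys mirror the dict built in visit_cursor and the pragma-mark entries
# above):
#
#     [{'name': 'Foo',
#       'cursor_kind': 'CLASS_DECL',
#       'extent': {'start': {'row': 0, 'column': 0},
#                  'end': {'row': 12, 'column': 1}},
#       'children': [{'name': 'bar',
#                     'cursor_kind': 'CXX_METHOD',
#                     'params': ['arg'],
#                     'tparams': [],
#                     'extent': {'start': {'row': 2, 'column': 2},
#                                'end': {'row': 4, 'column': 3}}}]},
#      {'name': 'Tests',
#       'cursor_kind': 'PRAGMA_MARK',
#       'extent': {'start': {'row': 14, 'column': 0},
#                  'end': {'row': 15, 'column': 0}}}]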
| 31.035714 | 92 | 0.621116 | [
"Unlicense"
] | kevingatera/kgatewebapp | node_modules/nuclide/pkg/nuclide-clang-rpc/python/outline.py | 6,952 | Python |
# -*- coding: utf-8 -*-
"""
@date Created on Fri May 22 2020
@author martin_g for Eomys
"""
# Third party imports
import numpy as np
from scipy import signal
# Local application imports
from mosqito.sq_metrics.loudness.loudness_zwtv._square_and_smooth import (
_square_and_smooth,
)
def _third_octave_levels(sig, fs):
"""3rd octave filtering, squaring, smoothing, level calculation and
downsampling to temporal resolution: 0,5 ms, i.e. sampling rate: 2 kHz
See ISO 532-1 section 6.3
Parameters
----------
sig : numpy.ndarray
time signal sampled at 48 kHz[pa]
fs : int
time signal sampling frequency
    Outputs
    -------
    third_octave_level : numpy.ndarray
        Third-octave-band levels over time [dB re 2e-5 Pa], shape (28, n_time)
    time_axis : numpy.ndarray
        Time axis of the downsampled levels [s]
    freq : list
        Preferred third-octave-band centre frequencies [Hz]
    """
# Sampling frequency shall be equal to 48 kHz (as per ISO 532)
if fs != 48000:
raise ValueError("""ERROR: Sampling frequency shall be equal to 48 kHz""")
# Constants
n_level_band = 28
n_filter_coeff = 6
dec_factor = int(fs / 2000)
# Initialisation
coeff = np.zeros(n_filter_coeff)
# Filter coefficients of one-third-octave-band filters (reference
# table)
# ISO 532-1 Table A.1
third_octave_filter_ref = np.array(
[[1, 2, 1, 1, -2, 1], [1, 0, -1, 1, -2, 1], [1, -2, 1, 1, -2, 1]]
)
# Filter coefficients of one-third-octave-band filters (difference to
# reference table for 28 one-third-octave-band filters)
# ISO 532-1 Table A.2
third_octave_filter = np.array(
[
[
[0, 0, 0, 0, -6.70260e-004, 6.59453e-004],
[0, 0, 0, 0, -3.75071e-004, 3.61926e-004],
[0, 0, 0, 0, -3.06523e-004, 2.97634e-004],
],
[
[0, 0, 0, 0, -8.47258e-004, 8.30131e-004],
[0, 0, 0, 0, -4.76448e-004, 4.55616e-004],
[0, 0, 0, 0, -3.88773e-004, 3.74685e-004],
],
[
[0, 0, 0, 0, -1.07210e-003, 1.04496e-003],
[0, 0, 0, 0, -6.06567e-004, 5.73553e-004],
[0, 0, 0, 0, -4.94004e-004, 4.71677e-004],
],
[
[0, 0, 0, 0, -1.35836e-003, 1.31535e-003],
[0, 0, 0, 0, -7.74327e-004, 7.22007e-004],
[0, 0, 0, 0, -6.29154e-004, 5.93771e-004],
],
[
[0, 0, 0, 0, -1.72380e-003, 1.65564e-003],
[0, 0, 0, 0, -9.91780e-004, 9.08866e-004],
[0, 0, 0, 0, -8.03529e-004, 7.47455e-004],
],
[
[0, 0, 0, 0, -2.19188e-003, 2.08388e-003],
[0, 0, 0, 0, -1.27545e-003, 1.14406e-003],
[0, 0, 0, 0, -1.02976e-003, 9.40900e-004],
],
[
[0, 0, 0, 0, -2.79386e-003, 2.62274e-003],
[0, 0, 0, 0, -1.64828e-003, 1.44006e-003],
[0, 0, 0, 0, -1.32520e-003, 1.18438e-003],
],
[
[0, 0, 0, 0, -3.57182e-003, 3.30071e-003],
[0, 0, 0, 0, -2.14252e-003, 1.81258e-003],
[0, 0, 0, 0, -1.71397e-003, 1.49082e-003],
],
[
[0, 0, 0, 0, -4.58305e-003, 4.15355e-003],
[0, 0, 0, 0, -2.80413e-003, 2.28135e-003],
[0, 0, 0, 0, -2.23006e-003, 1.87646e-003],
],
[
[0, 0, 0, 0, -5.90655e-003, 5.22622e-003],
[0, 0, 0, 0, -3.69947e-003, 2.87118e-003],
[0, 0, 0, 0, -2.92205e-003, 2.36178e-003],
],
[
[0, 0, 0, 0, -7.65243e-003, 6.57493e-003],
[0, 0, 0, 0, -4.92540e-003, 3.61318e-003],
[0, 0, 0, 0, -3.86007e-003, 2.97240e-003],
],
[
[0, 0, 0, 0, -1.00023e-002, 8.29610e-003],
[0, 0, 0, 0, -6.63788e-003, 4.55999e-003],
[0, 0, 0, 0, -5.15982e-003, 3.75306e-003],
],
[
[0, 0, 0, 0, -1.31230e-002, 1.04220e-002],
[0, 0, 0, 0, -9.02274e-003, 5.73132e-003],
[0, 0, 0, 0, -6.94543e-003, 4.71734e-003],
],
[
[0, 0, 0, 0, -1.73693e-002, 1.30947e-002],
[0, 0, 0, 0, -1.24176e-002, 7.20526e-003],
[0, 0, 0, 0, -9.46002e-003, 5.93145e-003],
],
[
[0, 0, 0, 0, -2.31934e-002, 1.64308e-002],
[0, 0, 0, 0, -1.73009e-002, 9.04761e-003],
[0, 0, 0, 0, -1.30358e-002, 7.44926e-003],
],
[
[0, 0, 0, 0, -3.13292e-002, 2.06370e-002],
[0, 0, 0, 0, -2.44342e-002, 1.13731e-002],
[0, 0, 0, 0, -1.82108e-002, 9.36778e-003],
],
[
[0, 0, 0, 0, -4.28261e-002, 2.59325e-002],
[0, 0, 0, 0, -3.49619e-002, 1.43046e-002],
[0, 0, 0, 0, -2.57855e-002, 1.17912e-002],
],
[
[0, 0, 0, 0, -5.91733e-002, 3.25054e-002],
[0, 0, 0, 0, -5.06072e-002, 1.79513e-002],
[0, 0, 0, 0, -3.69401e-002, 1.48094e-002],
],
[
[0, 0, 0, 0, -8.26348e-002, 4.05894e-002],
[0, 0, 0, 0, -7.40348e-002, 2.24476e-002],
[0, 0, 0, 0, -5.34977e-002, 1.85371e-002],
],
[
[0, 0, 0, 0, -1.17018e-001, 5.08116e-002],
[0, 0, 0, 0, -1.09516e-001, 2.81387e-002],
[0, 0, 0, 0, -7.85097e-002, 2.32872e-002],
],
[
[0, 0, 0, 0, -1.67714e-001, 6.37872e-002],
[0, 0, 0, 0, -1.63378e-001, 3.53729e-002],
[0, 0, 0, 0, -1.16419e-001, 2.93723e-002],
],
[
[0, 0, 0, 0, -2.42528e-001, 7.98576e-002],
[0, 0, 0, 0, -2.45161e-001, 4.43370e-002],
[0, 0, 0, 0, -1.73972e-001, 3.70015e-002],
],
[
[0, 0, 0, 0, -3.53142e-001, 9.96330e-002],
[0, 0, 0, 0, -3.69163e-001, 5.53535e-002],
[0, 0, 0, 0, -2.61399e-001, 4.65428e-002],
],
[
[0, 0, 0, 0, -5.16316e-001, 1.24177e-001],
[0, 0, 0, 0, -5.55473e-001, 6.89403e-002],
[0, 0, 0, 0, -3.93998e-001, 5.86715e-002],
],
[
[0, 0, 0, 0, -7.56635e-001, 1.55023e-001],
[0, 0, 0, 0, -8.34281e-001, 8.58123e-002],
[0, 0, 0, 0, -5.94547e-001, 7.43960e-002],
],
[
[0, 0, 0, 0, -1.10165e000, 1.91713e-001],
[0, 0, 0, 0, -1.23939e000, 1.05243e-001],
[0, 0, 0, 0, -8.91666e-001, 9.40354e-002],
],
[
[0, 0, 0, 0, -1.58477e000, 2.39049e-001],
[0, 0, 0, 0, -1.80505e000, 1.28794e-001],
[0, 0, 0, 0, -1.32500e000, 1.21333e-001],
],
[
[0, 0, 0, 0, -2.50630e000, 1.42308e-001],
[0, 0, 0, 0, -2.19464e000, 2.76470e-001],
[0, 0, 0, 0, -1.90231e000, 1.47304e-001],
],
]
)
# Filter gain values
# ISO 532-1 Table A.2
filter_gain = np.array(
[
4.30764e-011,
8.59340e-011,
1.71424e-010,
3.41944e-010,
6.82035e-010,
1.36026e-009,
2.71261e-009,
5.40870e-009,
1.07826e-008,
2.14910e-008,
4.28228e-008,
8.54316e-008,
1.70009e-007,
3.38215e-007,
6.71990e-007,
1.33531e-006,
2.65172e-006,
5.25477e-006,
1.03780e-005,
2.04870e-005,
4.05198e-005,
7.97914e-005,
1.56511e-004,
3.04954e-004,
5.99157e-004,
1.16544e-003,
2.27488e-003,
3.91006e-003,
]
)
# Definition of the range of preferred filter center frequency
freq = [
25,
31.5,
40,
50,
63,
80,
100,
125,
160,
200,
250,
315,
400,
500,
630,
800,
1000,
1250,
1600,
2000,
2500,
3150,
4000,
5000,
6300,
8000,
10000,
12500,
]
n_time = len(sig[::dec_factor])
time_axis = np.linspace(0, len(sig) / fs, num=n_time)
third_octave_level = np.zeros((n_level_band, n_time))
for i_bands in range(n_level_band):
# Initialisation
tiny_value = 10 ** -12
i_ref = 4 * 10 ** -10
        # 2nd order filtering (See ISO 532-1 section 6.3 and A.2)
coeff = third_octave_filter_ref - third_octave_filter[i_bands, :, :]
sig_filt = filter_gain[i_bands] * signal.sosfilt(coeff, sig)
# Calculate center frequency of filter
center_freq = 10 ** ((i_bands - 16) / 10) * 1000
# Squaring and smoothing of filtered signal
sig_filt = _square_and_smooth(sig_filt, center_freq, 48000)
# SPL calculation and decimation
third_octave_level[i_bands, :] = 10 * np.log10(
(sig_filt[::dec_factor] + tiny_value) / i_ref
)
return third_octave_level, time_axis, freq
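if __name__ == "__main__":  # pragma: no cover
    # Minimal usage sketch (not part of MoSQITo itself): compute the
    # third-octave levels of a 1 kHz tone sampled at the mandatory 48 kHz.
    fs_demo = 48000
    t = np.arange(0, 1.0, 1.0 / fs_demo)
    tone = np.sin(2 * np.pi * 1000.0 * t)
    levels, t_axis, freqs = _third_octave_levels(tone, fs_demo)
    # 28 bands, one level value every 0.5 ms; band index 16 is 1000 Hz
    print(levels.shape, t_axis[-1], freqs[16])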
| 33.388112 | 82 | 0.428003 | [
"Apache-2.0"
] | Igarciac117/MoSQITo | mosqito/sq_metrics/loudness/loudness_zwtv/_third_octave_levels.py | 9,549 | Python |
# -*- coding: utf-8 -*-
import uuid
import pytz
from faker import Faker
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.gis.db.models import PointField
from django.contrib.postgres.indexes import BrinIndex
from django.contrib.postgres.fields import JSONField
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.template.defaultfilters import slugify
from django.urls import reverse
from django.utils.crypto import get_random_string
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from oauthlib.common import generate_token
from oauth2_provider.models import (
Application,
AbstractApplication,
AbstractAccessToken,
AccessToken,
RefreshToken
)
from foundation.constants import *
class ProblemDataSheetManager(models.Manager):
def delete_all(self):
items = ProblemDataSheet.objects.all()
for item in items.all():
item.delete()
# def seed(self, user, product, length=25):
# results = []
# faker = Faker('en_CA')
# for i in range(0,length):
# farm = ProblemDataSheet.objects.create(
# name = faker.domain_word(),
# description = faker.sentence(nb_words=6, variable_nb_words=True, ext_word_list=None),
# user = user,
# product = product,
# )
# results.append(farm)
# return results
class ProblemDataSheet(models.Model):
"""
    Model representing a danger or destructive element affecting a production crop.
Special thanks:
(1) Preventing, Diagnosing, and Correcting Common Houseplant Problems via URL
https://extension.psu.edu/preventing-diagnosing-and-correcting-common-houseplant-problems
"""
'''
Metadata
'''
class Meta:
app_label = 'foundation'
db_table = 'mika_problem_data_sheet'
verbose_name = _('Problem Data Sheet')
verbose_name_plural = _('Problem Data Sheets')
default_permissions = ()
permissions = (
# ("can_get_opening_hours_specifications", "Can get opening hours specifications"),
# ("can_get_opening_hours_specification", "Can get opening hours specifications"),
# ("can_post_opening_hours_specification", "Can create opening hours specifications"),
# ("can_put_opening_hours_specification", "Can update opening hours specifications"),
# ("can_delete_opening_hours_specification", "Can delete opening hours specifications"),
)
'''
Constants & Choices
'''
class TYPE_OF:
PEST = 1
DISEASE = 2
ABIOTIC = 3
NONE = 4
TYPE_OF_CHOICES = (
(TYPE_OF.PEST, _('Pest')),
(TYPE_OF.DISEASE, _('Disease')),
(TYPE_OF.ABIOTIC, _('Abiotic')),
(TYPE_OF.NONE, _('None')),
)
'''
Object Managers
'''
objects = ProblemDataSheetManager()
'''
Fields
'''
#
# Internal Related Fields
#
slug = models.SlugField(
_("Slug"),
help_text=_('The unique slug used for this crop when accessing details page.'),
max_length=127,
blank=True,
null=False,
db_index=True,
unique=True,
editable=False,
)
text = models.CharField(
_("Text"),
max_length=127,
help_text=_('The variety name of the crop.'),
blank=True,
null=True,
db_index=True,
)
type_of = models.PositiveSmallIntegerField(
_("Type of"),
help_text=_('The type of production crop problem.'),
blank=False,
null=False,
choices=TYPE_OF_CHOICES,
)
'''
Methods
'''
def __str__(self):
return str(self.slug)
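# Illustrative ORM usage (comments only; assumes a configured Django project
# with migrations applied for the `foundation` app):
#
#     sheet = ProblemDataSheet.objects.create(
#         slug="spider-mites",
#         text="Spider mites",
#         type_of=ProblemDataSheet.TYPE_OF.PEST,
#     )
#     pests = ProblemDataSheet.objects.filter(
#         type_of=ProblemDataSheet.TYPE_OF.PEST)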
| 28.223022 | 103 | 0.638542 | [
"BSD-3-Clause"
] | mikaponics/mikaponics-back | mikaponics/foundation/models/problem_data_sheet.py | 3,923 | Python |
# Copyright (c) 2015 - present Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import logging
from . import util
from inferlib import jwlib
MODULE_NAME = __name__
MODULE_DESCRIPTION = '''Run analysis of code built with a command like:
ant [options] [target]
Analysis examples:
infer -- ant compile'''
LANG = ['java']
def gen_instance(*args):
return AntCapture(*args)
# This creates an empty argparser for the module, which provides only
# description/usage information and no arguments.
create_argparser = util.base_argparser(MODULE_DESCRIPTION, MODULE_NAME)
class AntCapture:
def __init__(self, args, cmd):
self.args = args
util.log_java_version()
logging.info(util.run_cmd_ignore_fail(['ant', '-version']))
# TODO: make the extraction of targets smarter
self.build_cmd = ['ant', '-verbose'] + cmd[1:]
def is_interesting(self, content):
return self.is_quoted(content) or content.endswith('.java')
def is_quoted(self, argument):
quote = '\''
return len(argument) > 2 and argument[0] == quote\
and argument[-1] == quote
def remove_quotes(self, argument):
if self.is_quoted(argument):
return argument[1:-1]
else:
return argument
def get_infer_commands(self, verbose_output):
javac_pattern = '[javac]'
argument_start_pattern = 'Compilation arguments'
calls = []
javac_arguments = []
collect = False
for line in verbose_output:
if javac_pattern in line:
if argument_start_pattern in line:
collect = True
if javac_arguments != []:
capture = jwlib.create_infer_command(self.args,
javac_arguments)
calls.append(capture)
javac_arguments = []
if collect:
pos = line.index(javac_pattern) + len(javac_pattern)
content = line[pos:].strip()
if self.is_interesting(content):
arg = self.remove_quotes(content)
javac_arguments.append(arg)
if javac_arguments != []:
capture = jwlib.create_infer_command(self.args, javac_arguments)
calls.append(capture)
javac_arguments = []
return calls
def capture(self):
cmds = self.get_infer_commands(util.get_build_output(self.build_cmd))
clean_cmd = '%s clean' % self.build_cmd[0]
return util.run_compilation_commands(cmds, clean_cmd)
| 34.297619 | 77 | 0.609511 | [
"BSD-3-Clause"
] | stefb965/infer | infer/lib/python/inferlib/capture/ant.py | 2,881 | Python |
#!/usr/bin/env python
# --coding:utf-8--
# Copyright (c) 2020 vesoft inc. All rights reserved.
#
# This source code is licensed under Apache 2.0 License,
# attached with Common Clause Condition 1.0, found in the LICENSES directory.
import logging
from nebula2.common.ttypes import ErrorCode
from nebula2.Exception import (
AuthFailedException,
IOErrorException,
NotValidConnectionException,
InValidHostname,
)
from nebula2.data.ResultSet import ResultSet
from nebula2.gclient.net.AuthResult import AuthResult
from nebula2.gclient.net.Session import Session
from nebula2.gclient.net.Connection import Connection
from nebula2.gclient.net.ConnectionPool import ConnectionPool
logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(levelname)-8s [%(filename)s:%(lineno)d]:%(message)s')
| 29.071429 | 117 | 0.782555 | [
"Apache-2.0"
] | knwng/nebula-python | nebula2/gclient/net/__init__.py | 814 | Python |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""
A family of functions used by CurvefittingAssessor
"""
import numpy as np
all_models = {}
model_para = {}
model_para_num = {}
curve_combination_models = ['vap', 'pow3', 'linear', 'logx_linear', 'dr_hill_zero_background', 'log_power', 'pow4', 'mmf',
'exp4', 'ilog2', 'weibull', 'janoschek']
def vap(x, a, b, c):
"""Vapor pressure model
Parameters
----------
x : int
a : float
b : float
c : float
Returns
-------
float
np.exp(a+b/x+c*np.log(x))
"""
return np.exp(a+b/x+c*np.log(x))
all_models['vap'] = vap
model_para['vap'] = [-0.622028, -0.470050, 0.042322]
model_para_num['vap'] = 3
def pow3(x, c, a, alpha):
"""pow3
Parameters
----------
x : int
c : float
a : float
alpha : float
Returns
-------
float
c - a * x**(-alpha)
"""
return c - a * x**(-alpha)
all_models['pow3'] = pow3
model_para['pow3'] = [0.84, 0.52, 0.01]
model_para_num['pow3'] = 3
def linear(x, a, b):
"""linear
Parameters
----------
x : int
a : float
b : float
Returns
-------
float
a*x + b
"""
return a*x + b
all_models['linear'] = linear
model_para['linear'] = [1., 0]
model_para_num['linear'] = 2
def logx_linear(x, a, b):
"""logx linear
Parameters
----------
x : int
a : float
b : float
Returns
-------
float
a * np.log(x) + b
"""
x = np.log(x)
return a*x + b
all_models['logx_linear'] = logx_linear
model_para['logx_linear'] = [0.378106, 0.046506]
model_para_num['logx_linear'] = 2
def dr_hill_zero_background(x, theta, eta, kappa):
"""dr hill zero background
Parameters
----------
x : int
theta : float
eta : float
kappa : float
Returns
-------
float
(theta* x**eta) / (kappa**eta + x**eta)
"""
return (theta* x**eta) / (kappa**eta + x**eta)
all_models['dr_hill_zero_background'] = dr_hill_zero_background
model_para['dr_hill_zero_background'] = [0.772320, 0.586449, 2.460843]
model_para_num['dr_hill_zero_background'] = 3
def log_power(x, a, b, c):
""""logistic power
Parameters
----------
x : int
a : float
b : float
c : float
Returns
-------
float
a/(1.+(x/np.exp(b))**c)
"""
return a/(1.+(x/np.exp(b))**c)
all_models['log_power'] = log_power
model_para['log_power'] = [0.77, 2.98, -0.51]
model_para_num['log_power'] = 3
def pow4(x, alpha, a, b, c):
"""pow4
Parameters
----------
x : int
alpha : float
a : float
b : float
c : float
Returns
-------
float
c - (a*x+b)**-alpha
"""
return c - (a*x+b)**-alpha
all_models['pow4'] = pow4
model_para['pow4'] = [0.1, 200, 0., 0.8]
model_para_num['pow4'] = 4
def mmf(x, alpha, beta, kappa, delta):
"""Morgan-Mercer-Flodin
http://www.pisces-conservation.com/growthhelp/index.html?morgan_mercer_floden.htm
Parameters
----------
x : int
alpha : float
beta : float
kappa : float
delta : float
Returns
-------
float
alpha - (alpha - beta) / (1. + (kappa * x)**delta)
"""
return alpha - (alpha - beta) / (1. + (kappa * x)**delta)
all_models['mmf'] = mmf
model_para['mmf'] = [0.7, 0.1, 0.01, 5]
model_para_num['mmf'] = 4
def exp4(x, c, a, b, alpha):
"""exp4
Parameters
----------
x : int
c : float
a : float
b : float
alpha : float
Returns
-------
float
c - np.exp(-a*(x**alpha)+b)
"""
return c - np.exp(-a*(x**alpha)+b)
all_models['exp4'] = exp4
model_para['exp4'] = [0.7, 0.8, -0.8, 0.3]
model_para_num['exp4'] = 4
def ilog2(x, c, a):
"""ilog2
Parameters
----------
x : int
c : float
a : float
Returns
-------
float
c - a / np.log(x)
"""
return c - a / np.log(x)
all_models['ilog2'] = ilog2
model_para['ilog2'] = [0.78, 0.43]
model_para_num['ilog2'] = 2
def weibull(x, alpha, beta, kappa, delta):
"""Weibull model
http://www.pisces-conservation.com/growthhelp/index.html?morgan_mercer_floden.htm
Parameters
----------
x : int
alpha : float
beta : float
kappa : float
delta : float
Returns
-------
float
alpha - (alpha - beta) * np.exp(-(kappa * x)**delta)
"""
return alpha - (alpha - beta) * np.exp(-(kappa * x)**delta)
all_models['weibull'] = weibull
model_para['weibull'] = [0.7, 0.1, 0.01, 1]
model_para_num['weibull'] = 4
def janoschek(x, a, beta, k, delta):
"""http://www.pisces-conservation.com/growthhelp/janoschek.htm
Parameters
----------
x : int
a : float
beta : float
k : float
delta : float
Returns
-------
float
a - (a - beta) * np.exp(-k*x**delta)
"""
return a - (a - beta) * np.exp(-k*x**delta)
all_models['janoschek'] = janoschek
model_para['janoschek'] = [0.73, 0.07, 0.355, 0.46]
model_para_num['janoschek'] = 4
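if __name__ == "__main__":
    # Usage sketch (not part of NNI): fit one of the models above to a noisy
    # synthetic learning curve with scipy, using the defaults in model_para as
    # the initial guess for the optimizer.
    from scipy.optimize import curve_fit
    xs = np.linspace(1.0, 20.0, 20)
    ys = pow3(xs, 0.84, 0.52, 0.01) + np.random.normal(0.0, 0.002, xs.size)
    popt, _ = curve_fit(pow3, xs, ys, p0=model_para['pow3'], maxfev=10000)
    print("fitted (c, a, alpha):", popt)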
| 18.644689 | 122 | 0.532417 | [
"MIT"
] | Ascarshen/nni | src/sdk/pynni/nni/curvefitting_assessor/curvefunctions.py | 5,090 | Python |
from ourstylePy import data
def our_colours(colours=[]):
'''
Extract hexcodes for our colours
    If passed a string, returns the matching hexcode.
If passed a list, returns a list of hexcodes.
Method from https://drsimonj.svbtle.com/creating-corporate-colour-palettes-for-ggplot2.
- colours, list of strings
Examples:
data.our_colours_raw
our_colours()
    our_colours(['green', 'blue', 'green'])
    our_colours(['not a colour', 'also not a colour', 'green'])  # unknown names raise KeyError
our_colors('blue')
'''
if len(colours) == 0:
return data.our_colours_raw
elif isinstance(colours, str):
return data.our_colours_raw[colours]
else:
return [data.our_colours_raw[i] for i in colours]
def our_colors(colours=[]):
'''
Alias for our_colours()
'''
return our_colours(colours)
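if __name__ == "__main__":
    # Quick usage sketch; 'green' and 'blue' are assumed to be keys of
    # data.our_colours_raw (they appear in the docstring examples above).
    print(our_colours())                   # full name -> hexcode mapping
    print(our_colours('green'))            # single hexcode
    print(our_colours(['green', 'blue']))  # list of hexcodes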
| 27.9 | 91 | 0.665472 | [
"MIT"
] | PeterGrahamJersey/ourstylePy | ourstylePy/our_colours.py | 837 | Python |
from bayesianABTest import sampleSuccessRateForBinomial
from numpy import mean
def bestOfFive(A,B,C,D,E,F):
return mean( (A > B) & (A > C) & (A > D) & (A > E) & (A > F))
############# Example: Binomial Distribution #############
# Actual data for all cases
installs = [986,1013,959,968,1029,1014]
returns = [340,298,274,287,325,291]
A = sampleSuccessRateForBinomial(installs[0],returns[0])
B = sampleSuccessRateForBinomial(installs[1],returns[1])
C = sampleSuccessRateForBinomial(installs[2],returns[2])
D = sampleSuccessRateForBinomial(installs[3],returns[3])
E = sampleSuccessRateForBinomial(installs[4],returns[4])
F = sampleSuccessRateForBinomial(installs[5],returns[5])
A_best = bestOfFive(A,B,C,D,E,F)
B_best = bestOfFive(B,A,C,D,E,F)
C_best = bestOfFive(C,B,A,D,E,F)
D_best = bestOfFive(D,B,C,A,E,F)
E_best = bestOfFive(E,B,C,D,A,F)
F_best = bestOfFive(F,B,C,D,E,A)
# Report the probability that each variant is the best
print "The probability of 20 being the best choice is {}".format(A_best)
print "The probability of 21 being the best choice is {}".format(B_best)
print "The probability of 22 being the best choice is {}".format(C_best)
print "The probability of 23 being the best choice is {}".format(D_best)
print "The probability of 24 being the best choice is {}".format(E_best)
print "The probability of 25 being the best choice is {}".format(F_best)
| 38.714286 | 72 | 0.720295 | [
"Apache-2.0"
] | hugopibernat/BayesianABTestAnalysis | code/examples/example_mikhail.py | 1,355 | Python |
__version__ = "2.2.3"
# Work around to update TensorFlow's absl.logging threshold which alters the
# default Python logging output behavior when present.
# see: https://github.com/abseil/abseil-py/issues/99
# and: https://github.com/tensorflow/tensorflow/issues/26691#issuecomment-500369493
try:
import absl.logging
absl.logging.set_verbosity('info')
absl.logging.set_stderrthreshold('info')
absl.logging._warn_preinit_stderr = False
except:
pass
import logging
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
# Files and general utilities
from .file_utils import (TRANSFORMERS_CACHE, PYTORCH_TRANSFORMERS_CACHE, PYTORCH_PRETRAINED_BERT_CACHE,
cached_path, add_start_docstrings, add_end_docstrings,
WEIGHTS_NAME, TF2_WEIGHTS_NAME, TF_WEIGHTS_NAME, CONFIG_NAME,
is_tf_available, is_torch_available)
from .data import (is_sklearn_available,
InputExample, InputFeatures, DataProcessor,
glue_output_modes, glue_convert_examples_to_features,
glue_processors, glue_tasks_num_labels,
xnli_output_modes, xnli_processors, xnli_tasks_num_labels,
squad_convert_examples_to_features, SquadFeatures,
SquadExample, SquadV1Processor, SquadV2Processor)
if is_sklearn_available():
from .data import glue_compute_metrics, xnli_compute_metrics
# ETRI modified ver
from .etri_tf_tokenization import FullTokenizer
# Tokenizers
from .tokenization_utils import (PreTrainedTokenizer)
from .tokenization_auto import AutoTokenizer
from .tokenization_bert import BertTokenizer, BasicTokenizer, WordpieceTokenizer
from .tokenization_bert_japanese import BertJapaneseTokenizer, MecabTokenizer, CharacterTokenizer
from .tokenization_openai import OpenAIGPTTokenizer
from .tokenization_transfo_xl import (TransfoXLTokenizer, TransfoXLCorpus)
from .tokenization_gpt2 import GPT2Tokenizer
from .tokenization_ctrl import CTRLTokenizer
from .tokenization_xlnet import XLNetTokenizer, SPIECE_UNDERLINE
from .tokenization_xlm import XLMTokenizer
from .tokenization_roberta import RobertaTokenizer
from .tokenization_distilbert import DistilBertTokenizer
from .tokenization_albert import AlbertTokenizer
from .tokenization_camembert import CamembertTokenizer
from .tokenization_t5 import T5Tokenizer
# Configurations
from .configuration_utils import PretrainedConfig
from .configuration_auto import AutoConfig
from .configuration_bert import BertConfig, BERT_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_openai import OpenAIGPTConfig, OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_transfo_xl import TransfoXLConfig, TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_gpt2 import GPT2Config, GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_ctrl import CTRLConfig, CTRL_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_xlnet import XLNetConfig, XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_xlm import XLMConfig, XLM_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_roberta import RobertaConfig, ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_distilbert import DistilBertConfig, DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_albert import AlbertConfig, ALBERT_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_camembert import CamembertConfig, CAMEMBERT_PRETRAINED_CONFIG_ARCHIVE_MAP
from .configuration_t5 import T5Config, T5_PRETRAINED_CONFIG_ARCHIVE_MAP
# Modeling
if is_torch_available():
from .modeling_utils import (PreTrainedModel, prune_layer, Conv1D)
from .modeling_auto import (AutoModel, AutoModelForSequenceClassification, AutoModelForQuestionAnswering,
AutoModelWithLMHead)
from .modeling_bert import (BertPreTrainedModel, BertModel, BertForPreTraining,
BertForMaskedLM, BertForNextSentencePrediction,
BertForSequenceClassification, BertForMultipleChoice,
BertForTokenClassification, BertForQuestionAnswering,
load_tf_weights_in_bert, BERT_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_openai import (OpenAIGPTPreTrainedModel, OpenAIGPTModel,
OpenAIGPTLMHeadModel, OpenAIGPTDoubleHeadsModel,
load_tf_weights_in_openai_gpt, OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_transfo_xl import (TransfoXLPreTrainedModel, TransfoXLModel, TransfoXLLMHeadModel,
AdaptiveEmbedding,
load_tf_weights_in_transfo_xl, TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_gpt2 import (GPT2PreTrainedModel, GPT2Model,
GPT2LMHeadModel, GPT2DoubleHeadsModel,
load_tf_weights_in_gpt2, GPT2_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_ctrl import (CTRLPreTrainedModel, CTRLModel,
CTRLLMHeadModel,
CTRL_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_xlnet import (XLNetPreTrainedModel, XLNetModel, XLNetLMHeadModel,
XLNetForSequenceClassification, XLNetForTokenClassification,
XLNetForMultipleChoice, XLNetForQuestionAnsweringSimple,
XLNetForQuestionAnswering, load_tf_weights_in_xlnet,
XLNET_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_xlm import (XLMPreTrainedModel , XLMModel,
XLMWithLMHeadModel, XLMForSequenceClassification,
XLMForQuestionAnswering, XLMForQuestionAnsweringSimple,
XLM_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_roberta import (RobertaForMaskedLM, RobertaModel,
RobertaForSequenceClassification, RobertaForMultipleChoice,
RobertaForTokenClassification,
ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_distilbert import (DistilBertPreTrainedModel, DistilBertForMaskedLM, DistilBertModel,
DistilBertForSequenceClassification, DistilBertForQuestionAnswering,
DistilBertForTokenClassification,
DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_camembert import (CamembertForMaskedLM, CamembertModel,
CamembertForSequenceClassification, CamembertForMultipleChoice,
CamembertForTokenClassification,
CAMEMBERT_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_encoder_decoder import PreTrainedEncoderDecoder, Model2Model
from .modeling_t5 import (T5PreTrainedModel, T5Model, T5WithLMHeadModel,
load_tf_weights_in_t5,
T5_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_albert import (AlbertPreTrainedModel, AlbertModel, AlbertForMaskedLM, AlbertForSequenceClassification,
AlbertForQuestionAnswering,
load_tf_weights_in_albert, ALBERT_PRETRAINED_MODEL_ARCHIVE_MAP)
# Optimization
from .optimization import (AdamW, get_constant_schedule, get_constant_schedule_with_warmup, get_cosine_schedule_with_warmup,
get_cosine_with_hard_restarts_schedule_with_warmup, get_linear_schedule_with_warmup)
# TensorFlow
if is_tf_available():
from .modeling_tf_utils import TFPreTrainedModel, TFSharedEmbeddings, TFSequenceSummary, shape_list
from .modeling_tf_auto import (TFAutoModel, TFAutoModelForSequenceClassification, TFAutoModelForQuestionAnswering,
TFAutoModelWithLMHead)
from .modeling_tf_bert import (TFBertPreTrainedModel, TFBertMainLayer, TFBertEmbeddings,
TFBertModel, TFBertForPreTraining,
TFBertForMaskedLM, TFBertForNextSentencePrediction,
TFBertForSequenceClassification, TFBertForMultipleChoice,
TFBertForTokenClassification, TFBertForQuestionAnswering,
TF_BERT_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_gpt2 import (TFGPT2PreTrainedModel, TFGPT2MainLayer,
TFGPT2Model, TFGPT2LMHeadModel, TFGPT2DoubleHeadsModel,
TF_GPT2_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_openai import (TFOpenAIGPTPreTrainedModel, TFOpenAIGPTMainLayer,
TFOpenAIGPTModel, TFOpenAIGPTLMHeadModel, TFOpenAIGPTDoubleHeadsModel,
TF_OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_transfo_xl import (TFTransfoXLPreTrainedModel, TFTransfoXLMainLayer,
TFTransfoXLModel, TFTransfoXLLMHeadModel,
TF_TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_xlnet import (TFXLNetPreTrainedModel, TFXLNetMainLayer,
TFXLNetModel, TFXLNetLMHeadModel,
TFXLNetForSequenceClassification,
TFXLNetForTokenClassification,
TFXLNetForQuestionAnsweringSimple,
TF_XLNET_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_xlm import (TFXLMPreTrainedModel, TFXLMMainLayer,
TFXLMModel, TFXLMWithLMHeadModel,
TFXLMForSequenceClassification,
TFXLMForQuestionAnsweringSimple,
TF_XLM_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_roberta import (TFRobertaPreTrainedModel, TFRobertaMainLayer,
TFRobertaModel, TFRobertaForMaskedLM,
TFRobertaForSequenceClassification,
TFRobertaForTokenClassification,
TF_ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_distilbert import (TFDistilBertPreTrainedModel, TFDistilBertMainLayer,
TFDistilBertModel, TFDistilBertForMaskedLM,
TFDistilBertForSequenceClassification,
TFDistilBertForTokenClassification,
TFDistilBertForQuestionAnswering,
TF_DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_ctrl import (TFCTRLPreTrainedModel, TFCTRLModel,
TFCTRLLMHeadModel,
TF_CTRL_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_albert import (TFAlbertPreTrainedModel, TFAlbertModel, TFAlbertForMaskedLM,
TFAlbertForSequenceClassification,
TF_ALBERT_PRETRAINED_MODEL_ARCHIVE_MAP)
from .modeling_tf_t5 import (TFT5PreTrainedModel, TFT5Model, TFT5WithLMHeadModel,
TF_T5_PRETRAINED_MODEL_ARCHIVE_MAP)
# Optimization
from .optimization_tf import (WarmUp, create_optimizer, AdamWeightDecay, GradientAccumulator)
# TF 2.0 <=> PyTorch conversion utilities
from .modeling_tf_pytorch_utils import (convert_tf_weight_name_to_pt_weight_name,
load_pytorch_checkpoint_in_tf2_model,
load_pytorch_weights_in_tf2_model,
load_pytorch_model_in_tf2_model,
load_tf2_checkpoint_in_pytorch_model,
load_tf2_weights_in_pytorch_model,
load_tf2_model_in_pytorch_model)
if not is_tf_available() and not is_torch_available():
logger.warning("Neither PyTorch nor TensorFlow >= 2.0 have been found."
"Models won't be available and only tokenizers, configuration"
"and file/data utilities can be used.")
| 59.990385 | 128 | 0.677432 | [
"Apache-2.0"
] | seongwookchun/transformers | transformers/__init__.py | 12,478 | Python |
from ..http import dump_header
from ..http import parse_set_header
from ..utils import environ_property
from ..utils import header_property
class CORSRequestMixin(object):
"""A mixin for :class:`~werkzeug.wrappers.BaseRequest` subclasses
that adds descriptors for Cross Origin Resource Sharing (CORS)
headers.
.. versionadded:: 1.0
"""
origin = environ_property(
"HTTP_ORIGIN",
doc=(
"The host that the request originated from. Set"
" :attr:`~CORSResponseMixin.access_control_allow_origin` on"
" the response to indicate which origins are allowed."
),
)
access_control_request_headers = environ_property(
"HTTP_ACCESS_CONTROL_REQUEST_HEADERS",
load_func=parse_set_header,
doc=(
"Sent with a preflight request to indicate which headers"
" will be sent with the cross origin request. Set"
" :attr:`~CORSResponseMixin.access_control_allow_headers`"
" on the response to indicate which headers are allowed."
),
)
access_control_request_method = environ_property(
"HTTP_ACCESS_CONTROL_REQUEST_METHOD",
doc=(
"Sent with a preflight request to indicate which method"
" will be used for the cross origin request. Set"
" :attr:`~CORSResponseMixin.access_control_allow_methods`"
" on the response to indicate which methods are allowed."
),
)
class CORSResponseMixin(object):
"""A mixin for :class:`~werkzeug.wrappers.BaseResponse` subclasses
that adds descriptors for Cross Origin Resource Sharing (CORS)
headers.
.. versionadded:: 1.0
"""
@property
def access_control_allow_credentials(self):
"""Whether credentials can be shared by the browser to
JavaScript code. As part of the preflight request it indicates
whether credentials can be used on the cross origin request.
"""
return "Access-Control-Allow-Credentials" in self.headers
@access_control_allow_credentials.setter
def access_control_allow_credentials(self, value):
if value is True:
self.headers["Access-Control-Allow-Credentials"] = "true"
else:
self.headers.pop("Access-Control-Allow-Credentials", None)
access_control_allow_headers = header_property(
"Access-Control-Allow-Headers",
load_func=parse_set_header,
dump_func=dump_header,
doc="Which headers can be sent with the cross origin request.",
)
access_control_allow_methods = header_property(
"Access-Control-Allow-Methods",
load_func=parse_set_header,
dump_func=dump_header,
doc="Which methods can be used for the cross origin request.",
)
access_control_allow_origin = header_property(
"Access-Control-Allow-Origin",
load_func=parse_set_header,
dump_func=dump_header,
doc="The origins that may make cross origin requests.",
)
access_control_expose_headers = header_property(
"Access-Control-Expose-Headers",
load_func=parse_set_header,
dump_func=dump_header,
doc="Which headers can be shared by the browser to JavaScript code.",
)
access_control_max_age = header_property(
"Access-Control-Max-Age",
load_func=int,
dump_func=str,
doc="The maximum age in seconds the access control settings can be cached for.",
)
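# Composition sketch (comments only; these mixins are meant to be combined
# with the base wrapper classes by the application):
#
#     from werkzeug.wrappers import BaseRequest, BaseResponse
#
#     class Request(CORSRequestMixin, BaseRequest):
#         pass
#
#     class Response(CORSResponseMixin, BaseResponse):
#         pass
#
# For a preflight request, `request.origin` and
# `request.access_control_request_method` can be inspected, and the matching
# `access_control_allow_*` properties set on the response before returning it.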
| 34.097087 | 88 | 0.669989 | [
"MIT"
] | 997Yi/Flask-web | venv/Lib/site-packages/werkzeug/wrappers/cors.py | 3,512 | Python |
import os
import RPi.GPIO as gpio
import time
import random
from mesafe import distance
motorhizi = 2.5
hiz = 100
aci2 = aci3 = aci4 = 6
aci = 5.5
in4 = 26
in3 = 4
in2 = 12
in1 = 8
solled = 9
sagled = 11
gpio.setwarnings(False)
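# Translation note (added for readability): the Turkish identifiers and
# comments below mean: ileri = forward, geri = backward/reverse, sol = left,
# sag = right, dur = stop, adim = step (stepper phase), ses = sound/buzzer,
# kapat = shut down, kontrol = check, cizgi = line (line following),
# mesafe = distance, hiz = speed, aci = angle, motorhizi = motor speed.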
def init():
gpio.setwarnings(False)
gpio.setmode(gpio.BCM)
gpio.setup(22,gpio.OUT)
gpio.setup(27,gpio.OUT)
gpio.setup(17,gpio.OUT)
gpio.setup(18,gpio.OUT)
gpio.setup(in4,gpio.OUT)
gpio.setup(in3,gpio.OUT)
gpio.setup(in2,gpio.OUT)
gpio.setup(in1,gpio.OUT)
gpio.setup(21,gpio.OUT)
gpio.setup(solled,gpio.OUT)
gpio.setup(sagled,gpio.OUT)
gpio.setup(23,gpio.IN)
gpio.setup(24,gpio.IN)
gpio.output(22,0)
gpio.output(18,0)
gpio.output(17,0)
gpio.output(27,0)
gpio.output(in4,0)
gpio.output(in3,0)
gpio.output(in2,0)
gpio.output(in1,0)
gpio.output(21,0)
gpio.output(solled,0)
gpio.output(sagled,0)
def ileri(tf,ff):
init()
gpio.output(17,0)
gpio.output(22,0)
ip = gpio.PWM(27,50)
ip2 = gpio.PWM(18,50)
ip.start(ff)
ip2.start(ff)
tf = float(tf)
tf = tf / motorhizi
time.sleep(tf)
gpio.cleanup()
def geri(tf,ff):
init()
gpio.output(18,0)
gpio.output(27,0)
gp = gpio.PWM(22,50)
gp2 = gpio.PWM(17,50)
gp.start(ff)
gp2.start(ff)
tf = float(tf)
tf = tf / motorhizi
time.sleep(tf)
gpio.cleanup()
def sol(tf,ff):
init()
gpio.output(17,0)
gpio.output(27,0)
sp = gpio.PWM(22,50)
sp2 = gpio.PWM(18,50)
sp.start(ff)
sp2.start(ff)
tf = float(tf)
tf = tf / motorhizi
time.sleep(tf)
gpio.cleanup()
def sag(tf,ff):
init()
gpio.output(18,0)
gpio.output(22,0)
sap = gpio.PWM(27,50)
sap2 = gpio.PWM(17,50)
sap.start(ff)
sap2.start(ff)
tf = float(tf)
tf = tf / motorhizi
time.sleep(tf)
gpio.cleanup()
def dur():
init()
gpio.output(22,0)
gpio.output(17,0)
gpio.output(18,0)
gpio.output(27,0)
gpio.cleanup()
def adim1(tf,y):
init()
if (y == 1): # sol
gpio.output(in1,1)
gpio.output(in2,0)
gpio.output(in3,0)
gpio.output(in4,0)
if (y == 0): # sag
gpio.output(in1,0)
gpio.output(in2,0)
gpio.output(in3,0)
gpio.output(in4,1)
time.sleep(tf)
gpio.cleanup()
def adim2(tf,y):
init()
if (y == 1): # sol
gpio.output(in1,0)
gpio.output(in2,1)
gpio.output(in3,0)
gpio.output(in4,0)
if (y == 0): # sag
gpio.output(in1,0)
gpio.output(in2,0)
gpio.output(in3,1)
gpio.output(in4,0)
time.sleep(tf)
gpio.cleanup()
def adim3(tf,y):
init()
if (y == 1): # sol
gpio.output(in1,0)
gpio.output(in2,0)
gpio.output(in3,1)
gpio.output(in4,0)
if (y == 0): # sag
gpio.output(in1,0)
gpio.output(in2,1)
gpio.output(in3,0)
gpio.output(in4,0)
time.sleep(tf)
gpio.cleanup()
def adim4(tf,y):
init()
if (y == 1): # sol
gpio.output(in1,0)
gpio.output(in2,0)
gpio.output(in3,0)
gpio.output(in4,1)
if (y == 0): # sag
gpio.output(in1,1)
gpio.output(in2,0)
gpio.output(in3,0)
gpio.output(in4,0)
time.sleep(tf)
gpio.cleanup()
def stepper(tf,ff,yf):
ff = float(ff)
ff = ff / 1000
if (yf == 0): # sag
for i in range(0,tf):
adim1(ff,0)
adim2(ff,0)
adim3(ff,0)
adim4(ff,0)
if (yf == 1): # sol
for i in range(0,tf):
adim1(ff,1)
adim2(ff,1)
adim3(ff,1)
adim4(ff,1)
def servo(tf):
gpio.setmode(gpio.BCM)
gpio.setup(5,gpio.OUT)
p = gpio.PWM(5,50)
p.start(5.5)
p.ChangeDutyCycle(tf)
time.sleep(0.7)
gpio.cleanup()
def servo2(tf):
gpio.setmode(gpio.BCM)
gpio.setup(6,gpio.OUT)
p2 = gpio.PWM(6,50)
p2.start(6)
p2.ChangeDutyCycle(tf)
time.sleep(0.7)
gpio.cleanup()
def servo3(tf):
gpio.setmode(gpio.BCM)
gpio.setup(20,gpio.OUT)
p3 = gpio.PWM(20,50)
p3.start(6)
p3.ChangeDutyCycle(tf)
time.sleep(0.7)
gpio.cleanup()
def servo4(tf):
gpio.setmode(gpio.BCM)
gpio.setup(16,gpio.OUT)
p3 = gpio.PWM(16,50)
p3.start(6)
p3.ChangeDutyCycle(tf)
time.sleep(0.7)
gpio.cleanup()
def ses(tf,ff):
init()
sp = gpio.PWM(21,ff)
sp.start(70)
time.sleep(tf)
gpio.cleanup()
def led(ff,tf,sf):
init()
sp = gpio.PWM(solled,500)
sap = gpio.PWM(sagled,500)
if (sf == 0):
sp.start(ff)
time.sleep(tf)
gpio.cleanup()
elif (sf == 1):
sap.start(ff)
time.sleep(tf)
gpio.cleanup()
elif (sf == 2):
sp.start(ff)
sap.start(ff)
time.sleep(tf)
gpio.cleanup()
def kapat():
os.system("pkill -9 -f main.py")
def kontrol():
x = random.randrange(1,3)
if (x == 1):
print ("sagabak")
servo(3)
time.sleep(0.05)
dis = distance('cm')
print (dis)
if dis < 15:
print ("solabak")
servo(9)
dis = distance('cm')
if dis < 15:
print ("cik")
servo(5.5)
geri(2,hiz)
else:
servo(5.5)
geri(0.5,hiz)
sol(0.7,hiz)
else:
servo(5.5)
geri(0.5,hiz)
sag(0.7,hiz)
if (x == 2):
print ("solabak")
servo(9)
time.sleep(0.05)
dis = distance('cm')
print (dis)
if dis < 15:
print ("sagabak")
servo(3)
dis = distance('cm')
if dis < 15:
print ("cik")
servo(5.5)
geri(2,hiz)
else:
servo(5.5)
geri(0.5,hiz)
sag(0.7,hiz)
else:
servo(5.5)
geri(0.5,hiz)
sol(0.7,hiz)
print (" ")
print ("otonomgorev yazilimi google speech api sesli komutlari ile robotun otonom hareket etmesi icin yazilmistir")
print (" ")
time.sleep(1)
def cizgi(rf):
for i in range(0,rf):
dis = distance('cm')
init()
if (gpio.input(23) == 0 and gpio.input(24) == 0):
ileri(0.1,hiz)
elif (gpio.input(23) == 1 and gpio.input(24) == 0):
sol(0.1,hiz)
elif (gpio.input(23) == 0 and gpio.input(24) == 1):
sag(0.1,hiz)
else:
pass
if dis < 15:
print ("cok dar",dis)
geri(0.5,hiz)
servo(5.5)
kontrol()
elif dis < 25:
print ("dar",dis)
else:
print ("temiz",dis)
dur()
aci2 = aci3 = aci4 = 6
aci = 5.5
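# Illustrative invocation (requires the Raspberry Pi GPIO hardware wired as in
# init(); shown as comments so nothing runs on import):
#
#     servo(5.5)     # centre the distance-sensor servo (aci = 5.5 above)
#     cizgi(200)     # follow the line for 200 control-loop iterations
#     dur()          # stop the motors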
| 20.848943 | 115 | 0.501522 | [
"Apache-2.0"
] | ahmetakif/Voice-Controlled-Raspberry-Pi-Robot | Robotics/src/otonomgorev.py | 6,901 | Python |
def gimme(x):
print("It was a %s indeed" % x)
| 10.4 | 35 | 0.538462 | [
"Apache-2.0"
] | DavidLeoni/iep | jup-and-py-example/local.py | 52 | Python |
import argparse
parser = argparse.ArgumentParser(description="PyTorch implementation of action recognition models")
parser.add_argument('--dataset', type=str, choices=['somethingv1','somethingv2','diving48'],
default = 'somethingv1')
parser.add_argument('--root_path', type = str, default = '../',
help = 'root path to video dataset folders')
parser.add_argument('--store_name', type=str, default="")
# ========================= Model Configs ==========================
parser.add_argument('--type', type=str, default="GST",choices=['GST','R3D','S3D', 'I3D'],
help = 'type of temporal models, currently support GST,Res3D and S3D')
parser.add_argument('--arch', type=str, default="resnet50",choices=['resnet50','resnet101'],
help = 'backbone networks, currently only support resnet')
parser.add_argument('--num_segments', type=int, default=8)
parser.add_argument('--alpha', type=int, default=4, help = 'spatial temporal split for output channels')
parser.add_argument('--beta', type=int, default=2, choices=[1,2], help = 'channel splits for input channels, 1 for GST-Large and 2 for GST')
# ========================= Learning Configs ==========================
parser.add_argument('--epochs', default=70, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('-b', '--batch-size', default=24, type=int,
metavar='N', help='mini-batch size')
parser.add_argument('--lr', '--learning-rate', default=0.01, type=float,
metavar='LR', help='initial learning rate')
parser.add_argument('--lr_steps', default=[50, 60], type=float, nargs="+",
metavar='LRSteps', help='epochs to decay learning rate by 10')
parser.add_argument('--dropout', '--dp', default=0.3, type=float,
metavar='dp', help='dropout ratio')
parser.add_argument('--warm', default=5, type=float, help='warm up epochs')
#========================= Optimizer Configs ==========================
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--weight_decay', '--wd', default=3e-4, type=float,
metavar='W', help='weight decay (default: 3e-4)')
parser.add_argument('--clip-gradient', '--gd', default=20, type=float,
metavar='W', help='gradient norm clipping (default: 20)')
# ========================= Monitor Configs ==========================
parser.add_argument('--print-freq', '-p', default=20, type=int,
metavar='N', help='print frequency (default: 20)')
parser.add_argument('--eval-freq', '-ef', default=1, type=int,
metavar='N', help='evaluation frequency (default: 1)')
# ========================= Runtime Configs ==========================
parser.add_argument('-j', '--workers', default=16, type=int, metavar='N',
help='number of data loading workers (default: 16)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('--checkpoint_dir',type=str, required=True,
help = 'folder to restore checkpoint and training log')
# ========================= Added by Qihang ==========================
parser.add_argument('--op_code', type=str, default="conv3d", help='op code to use')
parser.add_argument('--sparsity-regularization', '-sr', dest='sr', action='store_true',
help='train with channel sparsity regularization')
parser.add_argument('--s', type=float, default=0.0001,
help='scale sparse rate (default: 0.0001)')
parser.add_argument('--conv_config', type=str, default='',
help='conv config')
parser.add_argument('--search', action='store_true', default=False,
help='search mode')
parser.add_argument('--prune', action='store_true', default=False,
help='prune after training')
parser.add_argument('--prune_model_path', type=str, default='',
help='model to prune')
parser.add_argument('--reweight', action='store_true', default=False,
help='reweight the prune factor')
parser.add_argument('--finetune', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
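# Illustrative usage sketch (not part of the original file): the options defined
# above would typically be consumed elsewhere via
#   args = parser.parse_args()
#   print(args.dataset, args.type, args.arch, args.checkpoint_dir)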
| 62.773333 | 140 | 0.594095 | [
"MIT"
] | yucornetto/CAKES | opts.py | 4,708 | Python |
from typing import List, Tuple, Optional
import aiosqlite
from spare.types.blockchain_format.sized_bytes import bytes32
from spare.util.db_wrapper import DBWrapper
class WalletInterestedStore:
"""
Stores coin ids that we are interested in receiving
"""
db_connection: aiosqlite.Connection
db_wrapper: DBWrapper
@classmethod
async def create(cls, wrapper: DBWrapper):
self = cls()
self.db_connection = wrapper.db
self.db_wrapper = wrapper
await self.db_connection.execute("pragma journal_mode=wal")
await self.db_connection.execute("pragma synchronous=2")
await self.db_connection.execute("CREATE TABLE IF NOT EXISTS interested_coins(coin_name text PRIMARY KEY)")
await self.db_connection.execute(
"CREATE TABLE IF NOT EXISTS interested_puzzle_hashes(puzzle_hash text PRIMARY KEY, wallet_id integer)"
)
await self.db_connection.commit()
return self
async def _clear_database(self):
cursor = await self.db_connection.execute("DELETE FROM puzzle_hashes")
await cursor.close()
cursor = await self.db_connection.execute("DELETE FROM interested_coins")
await cursor.close()
await self.db_connection.commit()
async def get_interested_coin_ids(self) -> List[bytes32]:
cursor = await self.db_connection.execute("SELECT coin_name FROM interested_coins")
rows_hex = await cursor.fetchall()
return [bytes32(bytes.fromhex(row[0])) for row in rows_hex]
async def add_interested_coin_id(self, coin_id: bytes32, in_transaction: bool = False) -> None:
if not in_transaction:
await self.db_wrapper.lock.acquire()
try:
cursor = await self.db_connection.execute(
"INSERT OR REPLACE INTO interested_coins VALUES (?)", (coin_id.hex(),)
)
await cursor.close()
finally:
if not in_transaction:
await self.db_connection.commit()
self.db_wrapper.lock.release()
async def get_interested_puzzle_hashes(self) -> List[Tuple[bytes32, int]]:
cursor = await self.db_connection.execute("SELECT puzzle_hash, wallet_id FROM interested_puzzle_hashes")
rows_hex = await cursor.fetchall()
return [(bytes32(bytes.fromhex(row[0])), row[1]) for row in rows_hex]
async def get_interested_puzzle_hash_wallet_id(self, puzzle_hash: bytes32) -> Optional[int]:
cursor = await self.db_connection.execute(
"SELECT wallet_id FROM interested_puzzle_hashes WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
row = await cursor.fetchone()
if row is None:
return None
return row[0]
async def add_interested_puzzle_hash(
self, puzzle_hash: bytes32, wallet_id: int, in_transaction: bool = False
) -> None:
if not in_transaction:
await self.db_wrapper.lock.acquire()
try:
cursor = await self.db_connection.execute(
"INSERT OR REPLACE INTO interested_puzzle_hashes VALUES (?, ?)", (puzzle_hash.hex(), wallet_id)
)
await cursor.close()
finally:
if not in_transaction:
await self.db_connection.commit()
self.db_wrapper.lock.release()
async def remove_interested_puzzle_hash(self, puzzle_hash: bytes32, in_transaction: bool = False) -> None:
if not in_transaction:
await self.db_wrapper.lock.acquire()
try:
cursor = await self.db_connection.execute(
"DELETE FROM interested_puzzle_hashes WHERE puzzle_hash=?", (puzzle_hash.hex(),)
)
await cursor.close()
finally:
if not in_transaction:
await self.db_connection.commit()
self.db_wrapper.lock.release()
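# Illustrative usage sketch (hypothetical wrapper instance; not part of the original module):
# async def _example(wrapper: DBWrapper) -> None:
#     store = await WalletInterestedStore.create(wrapper)
#     await store.add_interested_coin_id(bytes32(b"\x00" * 32))
#     coin_ids = await store.get_interested_coin_ids()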
| 38.313725 | 115 | 0.651228 | [
"Apache-2.0"
] | Spare-Network/spare-blockchain | spare/wallet/wallet_interested_store.py | 3,908 | Python |
"""
Client for Yandex.Disk.
"""
__version__ = '0.0.1'
| 10.8 | 23 | 0.611111 | [
"MIT"
] | comalex/ProjectBacup | project_backup/__init__.py | 54 | Python |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['StorageInsightConfigArgs', 'StorageInsightConfig']
@pulumi.input_type
class StorageInsightConfigArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
storage_account: pulumi.Input['StorageAccountArgs'],
workspace_name: pulumi.Input[str],
containers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
e_tag: Optional[pulumi.Input[str]] = None,
storage_insight_name: Optional[pulumi.Input[str]] = None,
tables: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a StorageInsightConfig resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input['StorageAccountArgs'] storage_account: The storage account connection details
:param pulumi.Input[str] workspace_name: The name of the workspace.
:param pulumi.Input[Sequence[pulumi.Input[str]]] containers: The names of the blob containers that the workspace should read
:param pulumi.Input[str] e_tag: The ETag of the storage insight.
:param pulumi.Input[str] storage_insight_name: Name of the storageInsightsConfigs resource
:param pulumi.Input[Sequence[pulumi.Input[str]]] tables: The names of the Azure tables that the workspace should read
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "storage_account", storage_account)
pulumi.set(__self__, "workspace_name", workspace_name)
if containers is not None:
pulumi.set(__self__, "containers", containers)
if e_tag is not None:
pulumi.set(__self__, "e_tag", e_tag)
if storage_insight_name is not None:
pulumi.set(__self__, "storage_insight_name", storage_insight_name)
if tables is not None:
pulumi.set(__self__, "tables", tables)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group. The name is case insensitive.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="storageAccount")
def storage_account(self) -> pulumi.Input['StorageAccountArgs']:
"""
The storage account connection details
"""
return pulumi.get(self, "storage_account")
@storage_account.setter
def storage_account(self, value: pulumi.Input['StorageAccountArgs']):
pulumi.set(self, "storage_account", value)
@property
@pulumi.getter(name="workspaceName")
def workspace_name(self) -> pulumi.Input[str]:
"""
The name of the workspace.
"""
return pulumi.get(self, "workspace_name")
@workspace_name.setter
def workspace_name(self, value: pulumi.Input[str]):
pulumi.set(self, "workspace_name", value)
@property
@pulumi.getter
def containers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The names of the blob containers that the workspace should read
"""
return pulumi.get(self, "containers")
@containers.setter
def containers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "containers", value)
@property
@pulumi.getter(name="eTag")
def e_tag(self) -> Optional[pulumi.Input[str]]:
"""
The ETag of the storage insight.
"""
return pulumi.get(self, "e_tag")
@e_tag.setter
def e_tag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "e_tag", value)
@property
@pulumi.getter(name="storageInsightName")
def storage_insight_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the storageInsightsConfigs resource
"""
return pulumi.get(self, "storage_insight_name")
@storage_insight_name.setter
def storage_insight_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "storage_insight_name", value)
@property
@pulumi.getter
def tables(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The names of the Azure tables that the workspace should read
"""
return pulumi.get(self, "tables")
@tables.setter
def tables(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tables", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class StorageInsightConfig(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
containers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
e_tag: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
storage_account: Optional[pulumi.Input[pulumi.InputType['StorageAccountArgs']]] = None,
storage_insight_name: Optional[pulumi.Input[str]] = None,
tables: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
workspace_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
The top level storage insight resource container.
API Version: 2020-08-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] containers: The names of the blob containers that the workspace should read
:param pulumi.Input[str] e_tag: The ETag of the storage insight.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[pulumi.InputType['StorageAccountArgs']] storage_account: The storage account connection details
:param pulumi.Input[str] storage_insight_name: Name of the storageInsightsConfigs resource
:param pulumi.Input[Sequence[pulumi.Input[str]]] tables: The names of the Azure tables that the workspace should read
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[str] workspace_name: The name of the workspace.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: StorageInsightConfigArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
The top level storage insight resource container.
API Version: 2020-08-01.
:param str resource_name: The name of the resource.
:param StorageInsightConfigArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(StorageInsightConfigArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
containers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
e_tag: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
storage_account: Optional[pulumi.Input[pulumi.InputType['StorageAccountArgs']]] = None,
storage_insight_name: Optional[pulumi.Input[str]] = None,
tables: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
workspace_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = StorageInsightConfigArgs.__new__(StorageInsightConfigArgs)
__props__.__dict__["containers"] = containers
__props__.__dict__["e_tag"] = e_tag
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if storage_account is None and not opts.urn:
raise TypeError("Missing required property 'storage_account'")
__props__.__dict__["storage_account"] = storage_account
__props__.__dict__["storage_insight_name"] = storage_insight_name
__props__.__dict__["tables"] = tables
__props__.__dict__["tags"] = tags
if workspace_name is None and not opts.urn:
raise TypeError("Missing required property 'workspace_name'")
__props__.__dict__["workspace_name"] = workspace_name
__props__.__dict__["name"] = None
__props__.__dict__["status"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:operationalinsights:StorageInsightConfig"), pulumi.Alias(type_="azure-native:operationalinsights/v20150320:StorageInsightConfig"), pulumi.Alias(type_="azure-nextgen:operationalinsights/v20150320:StorageInsightConfig"), pulumi.Alias(type_="azure-native:operationalinsights/v20200301preview:StorageInsightConfig"), pulumi.Alias(type_="azure-nextgen:operationalinsights/v20200301preview:StorageInsightConfig"), pulumi.Alias(type_="azure-native:operationalinsights/v20200801:StorageInsightConfig"), pulumi.Alias(type_="azure-nextgen:operationalinsights/v20200801:StorageInsightConfig")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(StorageInsightConfig, __self__).__init__(
'azure-native:operationalinsights:StorageInsightConfig',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'StorageInsightConfig':
"""
Get an existing StorageInsightConfig resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = StorageInsightConfigArgs.__new__(StorageInsightConfigArgs)
__props__.__dict__["containers"] = None
__props__.__dict__["e_tag"] = None
__props__.__dict__["name"] = None
__props__.__dict__["status"] = None
__props__.__dict__["storage_account"] = None
__props__.__dict__["tables"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
return StorageInsightConfig(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def containers(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The names of the blob containers that the workspace should read
"""
return pulumi.get(self, "containers")
@property
@pulumi.getter(name="eTag")
def e_tag(self) -> pulumi.Output[Optional[str]]:
"""
The ETag of the storage insight.
"""
return pulumi.get(self, "e_tag")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def status(self) -> pulumi.Output['outputs.StorageInsightStatusResponse']:
"""
The status of the storage insight
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="storageAccount")
def storage_account(self) -> pulumi.Output['outputs.StorageAccountResponse']:
"""
The storage account connection details
"""
return pulumi.get(self, "storage_account")
@property
@pulumi.getter
def tables(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The names of the Azure tables that the workspace should read
"""
return pulumi.get(self, "tables")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
| 44.391691 | 670 | 0.654412 | [
"Apache-2.0"
] | polivbr/pulumi-azure-native | sdk/python/pulumi_azure_native/operationalinsights/storage_insight_config.py | 14,960 | Python |
# This file provides a thin wrapper around MySQL.
# It is mainly used for preparing and invoking the mysql CLI.
import logging
from mysql_autoxtrabackup.general_conf import path_config
from mysql_autoxtrabackup.general_conf.generalops import GeneralClass
from mysql_autoxtrabackup.process_runner.process_runner import ProcessRunner
logger = logging.getLogger(__name__)
class MySQLClientHelper:
def __init__(self, config: str = path_config.config_path_file):
self.conf = config
# Using Composition instead of Inheritance here
options_obj = GeneralClass(config=self.conf)
self.mysql_options = options_obj.mysql_options
def create_mysql_client_command(self, statement: str) -> str:
command_connection = "{} --defaults-file={} -u{} --password={}".format(
self.mysql_options.get("mysql"),
self.mysql_options.get("mycnf"),
self.mysql_options.get("mysql_user"),
self.mysql_options.get("mysql_password"),
)
command_execute = ' -e "{}"'
if self.mysql_options.get("mysql_socket"):
command_connection += " --socket={}"
new_command = command_connection.format(
self.mysql_options.get("mysql_socket")
)
else:
command_connection += " --host={} --port={}"
new_command = command_connection.format(
self.mysql_options.get("mysql_host"),
self.mysql_options.get("mysql_port"),
)
new_command += command_execute
return new_command.format(statement)
def mysql_run_command(self, statement: str) -> bool:
command = self.create_mysql_client_command(statement=statement)
return ProcessRunner.run_command(command)
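# Illustrative usage sketch (not part of the original module):
# helper = MySQLClientHelper()
# helper.mysql_run_command("select @@version")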
| 40.409091 | 79 | 0.669291 | [
"MIT"
] | Big-Ele/MySQL-AutoXtraBackup | mysql_autoxtrabackup/utils/mysql_cli.py | 1,778 | Python |
# Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cirq
from cirq.contrib.paulistring import (
optimized_circuit,
)
def test_optimize():
q0, q1, q2 = cirq.LineQubit.range(3)
c_orig = cirq.Circuit.from_ops(
cirq.X(q0) ** 0.5,
cirq.X(q1),
cirq.CZ(q1, q2),
cirq.X(q2) ** 0.125,
cirq.Z(q1) ** 0.5,
cirq.Y(q1) ** 0.5,
cirq.CZ(q0, q1),
cirq.Z(q1) ** 0.5,
cirq.CZ(q1, q2),
cirq.Z(q1) ** 0.5,
cirq.X(q2) ** 0.875,
cirq.CZ(q1, q2),
cirq.X(q2) ** 0.125,
)
cirq.testing.assert_has_diagram(c_orig, """
0: ───X^0.5─────────────────────────@───────────────────────────────────
│
1: ───X───────@───S─────────Y^0.5───@───S───@───S─────────@─────────────
│ │ │
2: ───────────@───X^0.125───────────────────@───X^0.875───@───X^0.125───
""")
c_opt = optimized_circuit(c_orig)
cirq.testing.assert_allclose_up_to_global_phase(
c_orig.to_unitary_matrix(),
c_opt.to_unitary_matrix(),
atol=1e-7,
)
cirq.testing.assert_has_diagram(c_opt, """
0: ───X^0.5────────────@────────────────────────────────────────
│
1: ───@───────X^-0.5───@───@────────────────@───Z^-0.5──────────
│ │ │
2: ───@────────────────────@───[X]^-0.875───@───[X]^-0.25───Z───
""")
def test_optimize_large_circuit():
q0, q1, q2 = cirq.LineQubit.range(3)
c_orig = cirq.testing.nonoptimal_toffoli_circuit(q0, q1, q2)
c_opt = optimized_circuit(c_orig)
cirq.testing.assert_allclose_up_to_global_phase(
c_orig.to_unitary_matrix(),
c_opt.to_unitary_matrix(),
atol=1e-7,
)
assert sum(1 for op in c_opt.all_operations()
if isinstance(op, cirq.GateOperation)
and isinstance(op.gate, cirq.CZPowGate)) == 10
def test_repeat_limit():
q0, q1, q2 = cirq.LineQubit.range(3)
c_orig = cirq.testing.nonoptimal_toffoli_circuit(q0, q1, q2)
c_opt = optimized_circuit(c_orig, repeat=1)
cirq.testing.assert_allclose_up_to_global_phase(
c_orig.to_unitary_matrix(),
c_opt.to_unitary_matrix(),
atol=1e-7,
)
assert sum(1 for op in c_opt.all_operations()
if isinstance(op, cirq.GateOperation)
and isinstance(op.gate, cirq.CZPowGate)) >= 10
| 30.917526 | 74 | 0.532844 | [
"Apache-2.0"
] | jaywha/Cirq | cirq/contrib/paulistring/optimize_test.py | 3,619 | Python |
# coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements 1D cubic Hermite spline interpolation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
def interpolate1d(x, values, tangents):
r"""Perform cubic hermite spline interpolation on a 1D spline.
The x coordinates of the spline knots are at [0 : 1 : len(values)-1].
Queries outside of the range of the spline are computed using linear
extrapolation. See https://en.wikipedia.org/wiki/Cubic_Hermite_spline
for details, where "x" corresponds to `x`, "p" corresponds to `values`, and
"m" corresponds to `tangents`.
Args:
x: A tensor of any size of single or double precision floats containing the
set of values to be used for interpolation into the spline.
values: A vector of single or double precision floats containing the value
of each knot of the spline being interpolated into. Must be the same
length as `tangents` and the same type as `x`.
tangents: A vector of single or double precision floats containing the
tangent (derivative) of each knot of the spline being interpolated into.
Must be the same length as `values` and the same type as `x`.
Returns:
The result of interpolating along the spline defined by `values`, and
`tangents`, using `x` as the query values. Will be the same length and type
as `x`.
"""
# if x.dtype == 'float64' or torch.as_tensor(x).dtype == torch.float64:
# float_dtype = torch.float64
# else:
# float_dtype = torch.float32
# x = torch.as_tensor(x, dtype=float_dtype)
# values = torch.as_tensor(values, dtype=float_dtype)
# tangents = torch.as_tensor(tangents, dtype=float_dtype)
assert torch.is_tensor(x)
assert torch.is_tensor(values)
assert torch.is_tensor(tangents)
float_dtype = x.dtype
assert values.dtype == float_dtype
assert tangents.dtype == float_dtype
assert len(values.shape) == 1
assert len(tangents.shape) == 1
assert values.shape[0] == tangents.shape[0]
x_lo = torch.floor(torch.clamp(x, torch.as_tensor(0),
values.shape[0] - 2)).type(torch.int64)
x_hi = x_lo + 1
# Compute the relative distance between each `x` and the knot below it.
t = x - x_lo.type(float_dtype)
# Compute the cubic hermite expansion of `t`.
t_sq = t**2
t_cu = t * t_sq
h01 = -2. * t_cu + 3. * t_sq
h00 = 1. - h01
h11 = t_cu - t_sq
h10 = h11 - t_sq + t
# Linearly extrapolate above and below the extents of the spline for all
# values.
value_before = tangents[0] * t + values[0]
value_after = tangents[-1] * (t - 1.) + values[-1]
# Cubically interpolate between the knots below and above each query point.
neighbor_values_lo = values[x_lo]
neighbor_values_hi = values[x_hi]
neighbor_tangents_lo = tangents[x_lo]
neighbor_tangents_hi = tangents[x_hi]
value_mid = (
neighbor_values_lo * h00 + neighbor_values_hi * h01 +
neighbor_tangents_lo * h10 + neighbor_tangents_hi * h11)
# Return the interpolated or extrapolated values for each query point,
# depending on whether or not the query lies within the span of the spline.
return torch.where(t < 0., value_before,
torch.where(t > 1., value_after, value_mid))
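# Illustrative usage sketch (not part of the original module): interpolate a
# 3-knot spline whose knots sit implicitly at x = 0, 1, 2.
#   values = torch.tensor([0., 1., 0.])
#   tangents = torch.zeros(3)
#   x_query = torch.tensor([-0.5, 0.25, 1.5, 2.5])
#   y = interpolate1d(x_query, values, tangents)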
| 39.346939 | 79 | 0.713174 | [
"MIT"
] | AaltoVision/automodulator | pioneer/robust_loss_pytorch/cubic_spline.py | 3,856 | Python |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteTrainingPipeline
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_v1_generated_PipelineService_DeleteTrainingPipeline_sync]
from google.cloud import aiplatform_v1
def sample_delete_training_pipeline():
# Create a client
client = aiplatform_v1.PipelineServiceClient()
# Initialize request argument(s)
request = aiplatform_v1.DeleteTrainingPipelineRequest(
name="name_value",
)
# Make the request
operation = client.delete_training_pipeline(request=request)
print("Waiting for operation to complete...")
response = operation.result()
# Handle the response
print(response)
# [END aiplatform_v1_generated_PipelineService_DeleteTrainingPipeline_sync]
| 32.14 | 85 | 0.764779 | [
"Apache-2.0"
] | TheMichaelHu/python-aiplatform | samples/generated_samples/aiplatform_v1_generated_pipeline_service_delete_training_pipeline_sync.py | 1,607 | Python |
'''
Open a csv file using file.readline()
'''
#
# def my_csv_reader(fn: str, header=True) -> list:
#     '''
#     Return the data of a csv file as a 2-D matrix (list of lists).
#
#
#     :param fn: name of the file to read (e.g. data\\exam.csv)
#     :param header: whether the csv file has a header row
#     :return: 2-D list made of the csv file's data, excluding the header
#     '''
#
#
#     if __name__ == '__main__':
#
#         # test the functions written above
#         pass
def print_data(data: list) -> None:
    '''
    Print the contents of a 2-D list, e.g.
    1 10 20 30 40
    2 11 21 31 41
    ...
    :param data: list in 2-D matrix form
    :return: None
    '''
readcsv = open('data/exam.csv', mode='r', encoding='utf-8')
line = readcsv.readline()
while line:
    print(line.strip())
    line = readcsv.readline()
readcsv.close()
# def get_sum_mean(data: list, col: int) -> tuple:
#     '''
#     Compute and return the sum and mean of the values in the given
#     column (col) of the given 2-D list (data).
#
#     :param data: list in 2-D matrix form
#     :param col: column index (0, 1, 2, ...)
#     :return: sum and mean of the column data
#     '''
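# A minimal sketch of the two helpers above (behaviour inferred from the
# docstrings; not part of the original exercise file):
# def my_csv_reader(fn: str, header=True) -> list:
#     with open(fn, mode='r', encoding='utf-8') as f:
#         if header:
#             f.readline()  # skip the header row
#         return [line.strip().split(',') for line in f if line.strip()]
#
# def get_sum_mean(data: list, col: int) -> tuple:
#     column = [float(row[col]) for row in data]
#     return sum(column), sum(column) / len(column)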
"MIT"
] | SOOIN-KIM/lab-python | lec07_file/file07.py | 1,259 | Python |
from urllib.parse import urlparse
from django.conf import settings
from django.db import models
from django_extensions.db.fields import UUIDField
from pyrabbit.http import HTTPError
from django.contrib.sites.models import Site
from apps.queues import rabbit
class Queue(models.Model):
name = models.CharField(max_length=64)
vhost = UUIDField(unique=True)
is_public = models.BooleanField(default=False)
owner = models.ForeignKey(settings.AUTH_USER_MODEL)
organizers = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='organizers',
blank=True,
null=True,
help_text="(Organizers allowed to view this queue when they assign their competition to a queue)"
)
def __str__(self):
return self.name
@property
def broker_url(self):
# Start with pyamqp://guest:guest@localhost:5672//
broker_url_parts = urlparse(settings.BROKER_URL)
# Get localhost:5672
host = Site.objects.get_current().domain
return "pyamqp://{}:{}@{}:{}/{}".format(
self.owner.rabbitmq_username,
self.owner.rabbitmq_password,
host,
settings.RABBITMQ_PORT,
self.vhost
)
def delete(self, using=None):
try:
rabbit.delete_vhost(self.vhost)
except HTTPError:
# Vhost not found or something
pass
return super(Queue, self).delete(using)
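# Illustrative shape of the URL produced by Queue.broker_url (values hypothetical):
#   pyamqp://<rabbitmq_username>:<rabbitmq_password>@<site-domain>:<RABBITMQ_PORT>/<vhost>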
| 29.32 | 105 | 0.656207 | [
"Apache-2.0"
] | OhMaley/codalab-competitions | codalab/apps/queues/models.py | 1,466 | Python |
#
# This example is again a graph coloring problem. In this case, however,
# a more object-oriented approach is adopted to show that Coopy is
# compatible with such practices.
#
import coopy
import random
class Node:
def __init__(self):
self._color = coopy.symbolic_int('c')
self._neighbors = set()
@property
def color(self):
return self._color
@property
def has_valid_connections(self):
return coopy.all([self.color != n.color for n in self._neighbors])
def direct_edge_towards(self, other):
self._neighbors.add(other)
def __repr__(self):
return str(self.color)
def construct_k_colored_graph(k, n, p):
"""
Constructs a k colored graph of n nodes in which a pair
of nodes shares an edge with probability 0 <= p <= 1.
Note: this code is for demonstrative purposes only; the
solution for such a problem will not necessarily exist,
in which case the concretization process will throw
an exception.
"""
with coopy.scope():
# Instantiate n nodes.
nodes = [Node() for i in range(n)]
# Connect nodes with probability p.
for i in range(n-1):
for j in range(i+1,n):
a = nodes[i]
b = nodes[j]
if random.uniform(0,1) < p:
a.direct_edge_towards(b)
b.direct_edge_towards(a)
# Impose restrictions over the nodes.
for node in nodes:
coopy.any([node.color == i for i in range(k)]).require()
node.has_valid_connections.require()
# Concretize the graph and return it as a list of nodes.
coopy.concretize()
return nodes
graph = construct_k_colored_graph(3, 10, 0.2)
print(graph) | 28.109375 | 74 | 0.617009 | [
"MIT"
] | abarreal/coopy | examples/example-5.py | 1,799 | Python |
# -*- coding: utf-8 -*-
#
# Electrum - lightweight Dogecoin client
# Copyright (C) 2018 The Electrum developers
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import json
from .util import inv_dict, all_subclasses
from . import bitcoin
def read_json(filename, default):
path = os.path.join(os.path.dirname(__file__), filename)
try:
with open(path, 'r') as f:
r = json.loads(f.read())
except:
r = default
return r
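# Example (illustrative): read_json('servers.json', {}) returns the parsed dict,
# or the default {} if the file is missing or malformed.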
GIT_REPO_URL = "https://github.com/spesmilo/electrum"
GIT_REPO_ISSUES_URL = "https://github.com/spesmilo/electrum/issues"
BIP39_WALLET_FORMATS = read_json('bip39_wallet_formats.json', [])
class AbstractNet:
NET_NAME: str
TESTNET: bool
WIF_PREFIX: int
ADDRTYPE_P2PKH: int
ADDRTYPE_P2SH: int
SEGWIT_HRP: str
# BOLT11_HRP: str
GENESIS: str
BLOCK_HEIGHT_FIRST_LIGHTNING_CHANNELS: int = 0
BIP44_COIN_TYPE: int
LN_REALM_BYTE: int
@classmethod
def max_checkpoint(cls) -> int:
return max(0, len(cls.CHECKPOINTS) * 2016 - 1)
@classmethod
def rev_genesis_bytes(cls) -> bytes:
return bytes.fromhex(bitcoin.rev_hex(cls.GENESIS))
class BitcoinMainnet(AbstractNet):
TESTNET = False
WIF_PREFIX = 158
ADDRTYPE_P2PKH = 30
ADDRTYPE_P2SH = 22
SEGWIT_HRP = "doge"
# GENESIS = "000000000062b72c5e2ceb45fbc8587e807c155b0da735e6483dfba2f0a9c770"
GENESIS = "1a91e3dace36e2be3bf030a65679fe821aa1d6ef92e7c9902eb318182c355691"
DEFAULT_PORTS = {'t': '50011', 's': '50022'}
DEFAULT_SERVERS = read_json('servers.json', {})
CHECKPOINTS = read_json('', [])
BLOCK_HEIGHT_FIRST_LIGHTNING_CHANNELS = 200
#BITCOIN_HEADER_PRIV = "02fac398"
#BITCOIN_HEADER_PUB = "02facafd"
XPRV_HEADERS = {
'standard': 0x02fac398, # xprv
# 'p2wpkh-p2sh': 0x02fac398, # yprv
# 'p2wsh-p2sh': 0x02fac398, # Yprv
# 'p2wpkh': 0x02fac398, # zprv
# 'p2wsh': 0x02fac398, # Zprv
}
XPRV_HEADERS_INV = inv_dict(XPRV_HEADERS)
XPUB_HEADERS = {
'standard': 0x02facafd, # xpub
# 'p2wpkh-p2sh': 0x02facafd, # ypub
# 'p2wsh-p2sh': 0x02facafd, # Ypub
# 'p2wpkh': 0x02facafd, # zpub
# 'p2wsh': 0x02facafd, # Zpub
}
XPUB_HEADERS_INV = inv_dict(XPUB_HEADERS)
# BIP44_COIN_TYPE = 1
#namecoin
# BIP44_COIN_TYPE = 7
# dogecoin
BIP44_COIN_TYPE = 1
LN_REALM_BYTE = 0
LN_DNS_SEEDS = [
'radioblockchain.info',
'radiopool.me',
]
AUXPOW_CHAIN_ID = 0x00620004
AUXPOW_START_HEIGHT = 371337
NAME_EXPIRATION = 60
class BitcoinTestnet(AbstractNet):
TESTNET = True
WIF_PREFIX = 239
ADDRTYPE_P2PKH = 111
ADDRTYPE_P2SH = 196
SEGWIT_HRP = "xdoge"
GENESIS = "00000a2ee9363d21e47bc10d5b1e39d4ae4bd950491790e522f90dad86d2d1eb"
# GENESIS = "00000007199508e34a9ff81e6ec0c477a4cccff2a4767a8eee39c11db367b008"
DEFAULT_PORTS = {'t': '51001', 's': '51002'}
DEFAULT_SERVERS = read_json('servers_testnet.json', {})
CHECKPOINTS = read_json('checkpoints_testnet.json', [])
XPRV_HEADERS = {
'standard': 0x04358394, # tprv
# 'p2wpkh-p2sh': 0x044a4e28, # uprv
# 'p2wsh-p2sh': 0x024285b5, # Uprv
# 'p2wpkh': 0x045f18bc, # vprv
# 'p2wsh': 0x02575048, # Vprv
}
XPRV_HEADERS_INV = inv_dict(XPRV_HEADERS)
XPUB_HEADERS = {
'standard': 0x043587cf, # tpub
# 'p2wpkh-p2sh': 0x044a5262, # upub
# 'p2wsh-p2sh': 0x024289ef, # Upub
# 'p2wpkh': 0x045f1cf6, # vpub
# 'p2wsh': 0x02575483, # Vpub
}
XPUB_HEADERS_INV = inv_dict(XPUB_HEADERS)
BIP44_COIN_TYPE = 3
LN_REALM_BYTE = 1
LN_DNS_SEEDS = []
AUXPOW_CHAIN_ID = 0x0062
AUXPOW_START_HEIGHT = 200
NAME_EXPIRATION = 36000
class BitcoinRegtest(BitcoinTestnet):
SEGWIT_HRP = "ncrt"
GENESIS = "0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206"
DEFAULT_SERVERS = read_json('servers_regtest.json', {})
CHECKPOINTS = []
LN_DNS_SEEDS = []
NAME_EXPIRATION = 30
class BitcoinSimnet(BitcoinTestnet):
WIF_PREFIX = 0x64
ADDRTYPE_P2PKH = 0x3f
ADDRTYPE_P2SH = 0x7b
SEGWIT_HRP = "sb"
GENESIS = "683e86bd5c6d110d91b94b97137ba6bfe02dbbdb8e3dff722a669b5d69d77af6"
DEFAULT_SERVERS = read_json('servers_regtest.json', {})
CHECKPOINTS = []
LN_DNS_SEEDS = []
NETS_LIST = tuple(all_subclasses(AbstractNet))
# don't import net directly, import the module instead (so that net is singleton)
net = BitcoinMainnet
def set_signet():
global net
net = BitcoinSignet
def set_simnet():
global net
net = BitcoinSimnet
def set_mainnet():
global net
net = BitcoinMainnet
def set_testnet():
global net
net = BitcoinTestnet
def set_regtest():
global net
net = BitcoinRegtest
| 27.890995 | 81 | 0.683772 | [
"MIT"
] | 0xAyanami/electrum | electrum/constants.py | 5,885 | Python |
# Copyright 2015 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`socket` --- Low-level socket library
==========================================
"""
# Stdlib
import logging
import os
import selectors
import struct
from abc import abstractmethod
from errno import EHOSTUNREACH, ENETUNREACH
from socket import (
AF_INET,
AF_INET6,
AF_UNIX,
MSG_DONTWAIT,
SOCK_DGRAM,
SOCK_STREAM,
SOL_SOCKET,
SO_REUSEADDR,
socket,
)
# External
from external import ipaddress
# SCION
from lib.defines import SCION_BUFLEN
from lib.dispatcher import reg_dispatcher
from lib.errors import SCIONIOError
from lib.packet.host_addr import haddr_get_type, haddr_parse_interface
from lib.packet.scmp.errors import SCMPUnreachHost, SCMPUnreachNet
from lib.util import recv_all
from lib.thread import kill_self
from lib.types import AddrType
class Socket(object):
"""
Base class for socket wrappers
"""
@abstractmethod
def bind(self, addr, *args, **kwargs):
raise NotImplementedError
@abstractmethod
def send(self, data, dst=None):
raise NotImplementedError
@abstractmethod
def recv(self, block=True):
raise NotImplementedError
def close(self): # pragma: no cover
"""
Close the socket.
"""
self.sock.close()
def settimeout(self, timeout): # pragma: no cover
prev = self.sock.gettimeout()
self.sock.settimeout(timeout)
return prev
def is_active(self):
return True
class UDPSocket(Socket):
"""
Thin wrapper around BSD/POSIX UDP sockets.
"""
def __init__(self, bind=None, addr_type=AddrType.IPV6, reuse=False):
"""
Initialize a UDP socket, then call superclass init for socket options
and binding.
:param tuple bind:
Optional tuple of (`str`, `int`, `str`) describing respectively the
address and port to bind to, and an optional description.
:param addr_type:
Socket domain. Must be one of :const:`~lib.types.AddrType.IPV4`,
:const:`~lib.types.AddrType.IPV6` (default).
:param reuse:
Boolean value indicating whether SO_REUSEADDR option should be set.
"""
assert addr_type in (AddrType.IPV4, AddrType.IPV6)
self._addr_type = addr_type
af_domain = AF_INET6
if self._addr_type == AddrType.IPV4:
af_domain = AF_INET
self.sock = socket(af_domain, SOCK_DGRAM)
if reuse:
self.sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
self.port = None
if bind:
self.bind(*bind)
self.active = True
def bind(self, addr, port=0, desc=None):
"""
Bind socket to the specified address & port. If `addr` is ``None``, the
socket will bind to all interfaces.
:param str addr: Address to bind to (can be ``None``, see above).
:param int port: Port to bind to.
:param str desc: Optional purpose of the port.
"""
if addr is None:
addr = "::"
if self._addr_type == AddrType.IPV4:
addr = ""
try:
self.sock.bind((addr, port))
except OSError as e:
logging.critical("Error binding to [%s]:%s: %s", addr, port, e)
kill_self()
self.port = self.sock.getsockname()[1]
if desc:
logging.debug("%s bound to %s:%d", desc, addr, self.port)
def send(self, data, dst=None):
"""
Send data to a specified destination.
:param bytes data: Data to send.
:param tuple dst:
Tuple of (`str`, `int`) describing the destination address and port,
respectively.
"""
try:
ret = self.sock.sendto(data, dst)
except OSError as e:
errno = e.args[0]
logging.error("Error sending %dB to %s: %s", len(data), dst, e)
if errno == ENETUNREACH:
raise SCMPUnreachNet(dst)
elif errno == EHOSTUNREACH:
raise SCMPUnreachHost(dst)
return False
if ret != len(data):
logging.error("Wanted to send %dB, only sent %dB", len(data), ret)
return False
return True
def recv(self, block=True):
"""
Read data from socket.
:returns:
Tuple of (`bytes`, (`str`, `int`) containing the data, and remote
host/port respectively.
"""
flags = 0
if not block:
flags = MSG_DONTWAIT
while True:
try:
return self.sock.recvfrom(SCION_BUFLEN, flags)
except InterruptedError:
pass
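# Illustrative usage sketch (hypothetical values; not part of the original module):
# sock = UDPSocket(bind=("::", 30041, "example service"), addr_type=AddrType.IPV6)
# sock.send(b"hello", ("::1", 30041))
# data, sender = sock.recv()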
class ReliableSocket(Socket):
"""
Wrapper around Unix socket with message framing functionality baked in
"""
COOKIE = bytes.fromhex("de00ad01be02ef03")
COOKIE_LEN = len(COOKIE)
def __init__(self, reg=None, bind_ip=(), bind_unix=None, sock=None):
"""
Initialise a socket of the specified type, and optionally bind it to an
address/port.
:param tuple reg:
            Optional tuple of (`SCIONAddr`, `int`, `bool`, `SVCType`)
            describing respectively the address, port, init value, and SVC type
to register with the dispatcher. In sockets that do not connect to
the dispatcher, this argument is None.
:param tuple bind_ip:
Optional tuple of (`SCIONAddr`, `int`) describing the address and port
of the bind address. Only needed if the bind address is different from
the public address.
:param tuple bind_unix:
Optional tuple of (`str`, `str`) describing path to bind to, and an
optional description.
:param sock:
Optional socket file object to build instance around.
"""
self.sock = sock or socket(AF_UNIX, SOCK_STREAM)
self.addr = None
if reg:
addr, port, init, svc = reg
self.registered = reg_dispatcher(
self, addr, port, bind_ip, init, svc)
if bind_unix:
self.bind(*bind_unix)
self.active = True
@classmethod
def from_socket(cls, sock):
return cls(None, sock=sock)
def bind(self, addr, desc=None):
self.addr = addr
# Use 0666 for socket permissions
old_mask = os.umask(0o111)
try:
self.sock.bind(addr)
except OSError as e:
logging.critical("Error binding to %s: %s", addr, e)
kill_self()
os.umask(old_mask)
self.sock.listen(5)
if desc:
logging.debug("%s bound to %s", desc, addr)
def accept(self, block=True):
prev = self.sock.gettimeout()
if not block:
self.sock.settimeout(0)
try:
s = self.sock.accept()[0]
except OSError as e:
logging.error("error accepting socket: %s", e)
return None
finally:
self.sock.settimeout(prev)
return ReliableSocket.from_socket(s)
def connect(self, addr):
self.sock.connect(addr)
def send(self, data, dst=None):
"""
Send data through the socket.
:param bytes data: Data to send.
"""
if dst:
dst_addr, dst_port = dst
if isinstance(dst_addr, str):
dst_addr = haddr_parse_interface(dst_addr)
addr_type = struct.pack("B", dst_addr.TYPE)
packed_dst = dst_addr.pack() + struct.pack("!H", dst_port)
else:
addr_type = struct.pack("B", AddrType.NONE)
packed_dst = b""
data_len = struct.pack("!I", len(data))
data = b"".join([self.COOKIE, addr_type, data_len, packed_dst, data])
try:
self.sock.sendall(data)
return True
except OSError as e:
logging.error("error in send: %s", e)
return False
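    # Wire format produced by send() and consumed by recv() (descriptive note):
    #   8B cookie | 1B addr type | 4B payload length | [addr bytes + 2B port] | payload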
def recv(self, block=True):
"""
Read data from socket.
:returns: bytestring containing received data.
"""
flags = 0
if not block:
flags = MSG_DONTWAIT
buf = recv_all(self.sock, self.COOKIE_LEN + 5, flags)
if not buf:
return None, None
cookie, addr_type, packet_len = struct.unpack("!8sBI", buf)
if cookie != self.COOKIE:
raise SCIONIOError("Dispatcher socket out of sync")
port_len = 0
if addr_type != AddrType.NONE:
port_len = 2
addr_len = haddr_get_type(addr_type).LEN
# We know there is data coming, block here to avoid sync problems.
buf = recv_all(self.sock, addr_len + port_len + packet_len, 0)
if addr_len > 0:
addr = buf[:addr_len]
            port = struct.unpack("!H", buf[addr_len:addr_len + port_len])[0]
sender = (str(ipaddress.ip_address(addr)), port)
else:
addr = ""
port = 0
sender = (None, None)
packet = buf[addr_len + port_len:]
return packet, sender
def close(self):
super().close()
if not self.addr:
return
try:
os.unlink(self.addr)
except OSError as e:
logging.critical("Error unlinking unix socket: %s", e)
kill_self()
class SocketMgr(object):
"""
:class:`Socket` manager.
"""
def __init__(self): # pragma: no cover
self._sel = selectors.DefaultSelector()
def add(self, sock, callback): # pragma: no cover
"""
Add new socket.
:param UDPSocket sock: UDPSocket to add.
"""
if not sock.is_active():
return
self._sel.register(sock.sock, selectors.EVENT_READ, (sock, callback))
def remove(self, sock): # pragma: no cover
"""
Remove socket.
:param UDPSocket sock: UDPSocket to remove.
"""
self._sel.unregister(sock.sock)
def select_(self, timeout=None):
"""
Return the set of UDPSockets that have data pending.
:param float timeout:
Number of seconds to wait for at least one UDPSocket to become
ready. ``None`` means wait forever.
"""
for key, _ in self._sel.select(timeout=timeout):
yield key.data
def close(self):
"""
Close all sockets.
"""
mapping = self._sel.get_map()
if mapping:
for entry in list(mapping.values()):
sock = entry.data[0]
self.remove(sock)
sock.close()
self._sel.close()
| 31.066298 | 82 | 0.577983 | [
"Apache-2.0"
] | TpmKranz/netsec-scion | python/lib/socket.py | 11,246 | Python |
#!/bin/python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <[email protected]>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Auth, Mail, PluginManager and various utilities
------------------------------------------------
"""
import base64
try:
import cPickle as pickle
except:
import pickle
import datetime
import thread
import logging
import sys
import glob
import os
import re
import time
import traceback
import smtplib
import urllib
import urllib2
import Cookie
import cStringIO
import ConfigParser
import email.utils
import random
from email import MIMEBase, MIMEMultipart, MIMEText, Encoders, Header, message_from_string, Charset
from gluon.contenttype import contenttype
from gluon.storage import Storage, StorageList, Settings, Messages
from gluon.utils import web2py_uuid
from gluon.fileutils import read_file, check_credentials
from gluon import *
from gluon.contrib.autolinks import expand_one
from gluon.contrib.markmin.markmin2html import \
replace_at_urls, replace_autolinks, replace_components
from pydal.objects import Row, Set, Query
import gluon.serializers as serializers
Table = DAL.Table
Field = DAL.Field
try:
# try stdlib (Python 2.6)
import json as json_parser
except ImportError:
try:
# try external module
import simplejson as json_parser
except:
# fallback to pure-Python module
import gluon.contrib.simplejson as json_parser
__all__ = ['Mail', 'Auth', 'Recaptcha', 'Crud', 'Service', 'Wiki',
'PluginManager', 'fetch', 'geocode', 'reverse_geocode', 'prettydate']
### mind there are two loggers here (logger and crud.settings.logger)!
logger = logging.getLogger("web2py")
DEFAULT = lambda: None
def getarg(position, default=None):
args = current.request.args
if position < 0 and len(args) >= -position:
return args[position]
elif position >= 0 and len(args) > position:
return args[position]
else:
return default
def callback(actions, form, tablename=None):
if actions:
if tablename and isinstance(actions, dict):
actions = actions.get(tablename, [])
if not isinstance(actions, (list, tuple)):
actions = [actions]
[action(form) for action in actions]
def validators(*a):
b = []
for item in a:
if isinstance(item, (list, tuple)):
b = b + list(item)
else:
b.append(item)
return b
def call_or_redirect(f, *args):
if callable(f):
redirect(f(*args))
else:
redirect(f)
def replace_id(url, form):
if url:
url = url.replace('[id]', str(form.vars.id))
if url[0] == '/' or url[:4] == 'http':
return url
return URL(url)
class Mail(object):
"""
Class for configuring and sending emails with alternative text / html
body, multiple attachments and encryption support
Works with SMTP and Google App Engine.
Args:
server: SMTP server address in address:port notation
sender: sender email address
login: sender login name and password in login:password notation
or None if no authentication is required
tls: enables/disables encryption (True by default)
In Google App Engine use ::
server='gae'
For sake of backward compatibility all fields are optional and default
to None, however, to be able to send emails at least server and sender
must be specified. They are available under following fields::
mail.settings.server
mail.settings.sender
mail.settings.login
mail.settings.timeout = 60 # seconds (default)
When server is 'logging', email is logged but not sent (debug mode)
Optionally you can use PGP encryption or X509::
mail.settings.cipher_type = None
mail.settings.gpg_home = None
mail.settings.sign = True
mail.settings.sign_passphrase = None
mail.settings.encrypt = True
mail.settings.x509_sign_keyfile = None
mail.settings.x509_sign_certfile = None
mail.settings.x509_sign_chainfile = None
mail.settings.x509_nocerts = False
mail.settings.x509_crypt_certfiles = None
cipher_type : None
gpg - need a python-pyme package and gpgme lib
x509 - smime
gpg_home : you can set a GNUPGHOME environment variable
to specify home of gnupg
sign : sign the message (True or False)
sign_passphrase : passphrase for key signing
encrypt : encrypt the message (True or False). It defaults
to True
... x509 only ...
x509_sign_keyfile : the signers private key filename or
string containing the key. (PEM format)
x509_sign_certfile: the signers certificate filename or
string containing the cert. (PEM format)
x509_sign_chainfile: sets the optional all-in-one file where you
can assemble the certificates of Certification
Authorities (CA) which form the certificate
chain of email certificate. It can be a
string containing the certs to. (PEM format)
x509_nocerts : if True then no attached certificate in mail
x509_crypt_certfiles: the certificates file or strings to encrypt
the messages with can be a file name /
string or a list of file names /
strings (PEM format)
Examples:
Create Mail object with authentication data for remote server::
mail = Mail('example.com:25', '[email protected]', 'me:password')
Notice for GAE users:
        attachments have an automatic content_id='attachment-i' where i is a progressive number;
        in this way they can be referenced from the HTML as <img src="cid:attachment-0" /> etc.
"""
class Attachment(MIMEBase.MIMEBase):
"""
Email attachment
Args:
payload: path to file or file-like object with read() method
filename: name of the attachment stored in message; if set to
None, it will be fetched from payload path; file-like
object payload must have explicit filename specified
content_id: id of the attachment; automatically contained within
`<` and `>`
content_type: content type of the attachment; if set to None,
it will be fetched from filename using gluon.contenttype
module
encoding: encoding of all strings passed to this function (except
attachment body)
Content ID is used to identify attachments within the html body;
in example, attached image with content ID 'photo' may be used in
html message as a source of img tag `<img src="cid:photo" />`.
Example::
Create attachment from text file::
attachment = Mail.Attachment('/path/to/file.txt')
Content-Type: text/plain
MIME-Version: 1.0
Content-Disposition: attachment; filename="file.txt"
Content-Transfer-Encoding: base64
SOMEBASE64CONTENT=
Create attachment from image file with custom filename and cid::
attachment = Mail.Attachment('/path/to/file.png',
filename='photo.png',
content_id='photo')
Content-Type: image/png
MIME-Version: 1.0
Content-Disposition: attachment; filename="photo.png"
Content-Id: <photo>
Content-Transfer-Encoding: base64
SOMEOTHERBASE64CONTENT=
"""
def __init__(
self,
payload,
filename=None,
content_id=None,
content_type=None,
encoding='utf-8'):
if isinstance(payload, str):
if filename is None:
filename = os.path.basename(payload)
payload = read_file(payload, 'rb')
else:
if filename is None:
raise Exception('Missing attachment name')
payload = payload.read()
filename = filename.encode(encoding)
if content_type is None:
content_type = contenttype(filename)
self.my_filename = filename
self.my_payload = payload
MIMEBase.MIMEBase.__init__(self, *content_type.split('/', 1))
self.set_payload(payload)
self['Content-Disposition'] = 'attachment; filename="%s"' % filename
if not content_id is None:
self['Content-Id'] = '<%s>' % content_id.encode(encoding)
Encoders.encode_base64(self)
def __init__(self, server=None, sender=None, login=None, tls=True):
settings = self.settings = Settings()
settings.server = server
settings.sender = sender
settings.login = login
settings.tls = tls
settings.timeout = 60 # seconds
settings.hostname = None
settings.ssl = False
settings.cipher_type = None
settings.gpg_home = None
settings.sign = True
settings.sign_passphrase = None
settings.encrypt = True
settings.x509_sign_keyfile = None
settings.x509_sign_certfile = None
settings.x509_sign_chainfile = None
settings.x509_nocerts = False
settings.x509_crypt_certfiles = None
settings.debug = False
settings.lock_keys = True
self.result = {}
self.error = None
def send(self,
to,
subject='[no subject]',
message='[no message]',
attachments=None,
cc=None,
bcc=None,
reply_to=None,
sender=None,
encoding='utf-8',
raw=False,
headers={},
from_address=None,
cipher_type=None,
sign=None,
sign_passphrase=None,
encrypt=None,
x509_sign_keyfile=None,
x509_sign_chainfile=None,
x509_sign_certfile=None,
x509_crypt_certfiles=None,
x509_nocerts=None
):
"""
Sends an email using data specified in constructor
Args:
to: list or tuple of receiver addresses; will also accept single
object
subject: subject of the email
message: email body text; depends on type of passed object:
- if 2-list or 2-tuple is passed: first element will be
source of plain text while second of html text;
- otherwise: object will be the only source of plain text
and html source will be set to None
If text or html source is:
- None: content part will be ignored,
- string: content part will be set to it,
- file-like object: content part will be fetched from it using
it's read() method
attachments: list or tuple of Mail.Attachment objects; will also
accept single object
cc: list or tuple of carbon copy receiver addresses; will also
accept single object
bcc: list or tuple of blind carbon copy receiver addresses; will
also accept single object
reply_to: address to which reply should be composed
encoding: encoding of all strings passed to this method (including
message bodies)
headers: dictionary of headers to refine the headers just before
sending mail, e.g. `{'X-Mailer' : 'web2py mailer'}`
from_address: address to appear in the 'From:' header, this is not
the envelope sender. If not specified the sender will be used
cipher_type :
gpg - need a python-pyme package and gpgme lib
x509 - smime
gpg_home : you can set a GNUPGHOME environment variable
to specify home of gnupg
sign : sign the message (True or False)
sign_passphrase : passphrase for key signing
encrypt : encrypt the message (True or False). It defaults to True.
... x509 only ...
x509_sign_keyfile : the signers private key filename or
string containing the key. (PEM format)
x509_sign_certfile: the signers certificate filename or
string containing the cert. (PEM format)
x509_sign_chainfile: sets the optional all-in-one file where you
can assemble the certificates of Certification
Authorities (CA) which form the certificate
chain of email certificate. It can be a
string containing the certs to. (PEM format)
x509_nocerts : if True then no attached certificate in mail
x509_crypt_certfiles: the certificates file or strings to encrypt
the messages with can be a file name / string or
a list of file names / strings (PEM format)
Examples:
Send plain text message to single address::
mail.send('[email protected]',
'Message subject',
'Plain text body of the message')
Send html message to single address::
mail.send('[email protected]',
'Message subject',
'<html>Plain text body of the message</html>')
Send text and html message to three addresses (two in cc)::
mail.send('[email protected]',
'Message subject',
('Plain text body', '<html>html body</html>'),
cc=['[email protected]', '[email protected]'])
Send html only message with image attachment available from the
message by 'photo' content id::
mail.send('[email protected]',
'Message subject',
(None, '<html><img src="cid:photo" /></html>'),
                      Mail.Attachment('/path/to/photo.jpg',
content_id='photo'))
Send email with two attachments and no body text::
            mail.send('[email protected]',
'Message subject',
None,
[Mail.Attachment('/path/to/fist.file'),
Mail.Attachment('/path/to/second.file')])
Returns:
True on success, False on failure.
Before return, method updates two object's fields:
- self.result: return value of smtplib.SMTP.sendmail() or GAE's
mail.send_mail() method
- self.error: Exception message or None if above was successful
"""
# We don't want to use base64 encoding for unicode mail
Charset.add_charset('utf-8', Charset.QP, Charset.QP, 'utf-8')
def encode_header(key):
if [c for c in key if 32 > ord(c) or ord(c) > 127]:
return Header.Header(key.encode('utf-8'), 'utf-8')
else:
return key
# encoded or raw text
def encoded_or_raw(text):
if raw:
text = encode_header(text)
return text
sender = sender or self.settings.sender
if not isinstance(self.settings.server, str):
raise Exception('Server address not specified')
if not isinstance(sender, str):
raise Exception('Sender address not specified')
if not raw and attachments:
# Use multipart/mixed if there is attachments
payload_in = MIMEMultipart.MIMEMultipart('mixed')
elif raw:
# no encoding configuration for raw messages
if not isinstance(message, basestring):
message = message.read()
if isinstance(message, unicode):
text = message.encode('utf-8')
elif not encoding == 'utf-8':
text = message.decode(encoding).encode('utf-8')
else:
text = message
# No charset passed to avoid transport encoding
# NOTE: some unicode encoded strings will produce
# unreadable mail contents.
payload_in = MIMEText.MIMEText(text)
if to:
if not isinstance(to, (list, tuple)):
to = [to]
else:
raise Exception('Target receiver address not specified')
if cc:
if not isinstance(cc, (list, tuple)):
cc = [cc]
if bcc:
if not isinstance(bcc, (list, tuple)):
bcc = [bcc]
if message is None:
text = html = None
elif isinstance(message, (list, tuple)):
text, html = message
elif message.strip().startswith('<html') and \
message.strip().endswith('</html>'):
text = self.settings.server == 'gae' and message or None
html = message
else:
text = message
html = None
if (not text is None or not html is None) and (not raw):
if not text is None:
if not isinstance(text, basestring):
text = text.read()
if isinstance(text, unicode):
text = text.encode('utf-8')
elif not encoding == 'utf-8':
text = text.decode(encoding).encode('utf-8')
if not html is None:
if not isinstance(html, basestring):
html = html.read()
if isinstance(html, unicode):
html = html.encode('utf-8')
elif not encoding == 'utf-8':
html = html.decode(encoding).encode('utf-8')
# Construct mime part only if needed
if text is not None and html:
# We have text and html we need multipart/alternative
attachment = MIMEMultipart.MIMEMultipart('alternative')
attachment.attach(MIMEText.MIMEText(text, _charset='utf-8'))
attachment.attach(
MIMEText.MIMEText(html, 'html', _charset='utf-8'))
elif text is not None:
attachment = MIMEText.MIMEText(text, _charset='utf-8')
elif html:
attachment = \
MIMEText.MIMEText(html, 'html', _charset='utf-8')
if attachments:
# If there are attachments, put text and html into
# multipart/mixed
payload_in.attach(attachment)
else:
# No attachments, so no multipart/mixed needed
payload_in = attachment
if (attachments is None) or raw:
pass
elif isinstance(attachments, (list, tuple)):
for attachment in attachments:
payload_in.attach(attachment)
else:
payload_in.attach(attachments)
#######################################################
# CIPHER #
#######################################################
cipher_type = cipher_type or self.settings.cipher_type
sign = sign if sign is not None else self.settings.sign
sign_passphrase = sign_passphrase or self.settings.sign_passphrase
encrypt = encrypt if encrypt is not None else self.settings.encrypt
#######################################################
# GPGME #
#######################################################
if cipher_type == 'gpg':
if self.settings.gpg_home:
# Set GNUPGHOME environment variable to set home of gnupg
import os
os.environ['GNUPGHOME'] = self.settings.gpg_home
if not sign and not encrypt:
self.error = "No sign and no encrypt is set but cipher type to gpg"
return False
# need a python-pyme package and gpgme lib
from pyme import core, errors
from pyme.constants.sig import mode
############################################
# sign #
############################################
if sign:
import string
core.check_version(None)
pin = string.replace(payload_in.as_string(), '\n', '\r\n')
plain = core.Data(pin)
sig = core.Data()
c = core.Context()
c.set_armor(1)
c.signers_clear()
# search for signing key for From:
for sigkey in c.op_keylist_all(sender, 1):
if sigkey.can_sign:
c.signers_add(sigkey)
if not c.signers_enum(0):
self.error = 'No key for signing [%s]' % sender
return False
c.set_passphrase_cb(lambda x, y, z: sign_passphrase)
try:
# make a signature
c.op_sign(plain, sig, mode.DETACH)
sig.seek(0, 0)
# make it part of the email
payload = MIMEMultipart.MIMEMultipart('signed',
boundary=None,
_subparts=None,
**dict(
micalg="pgp-sha1",
protocol="application/pgp-signature"))
# insert the origin payload
payload.attach(payload_in)
# insert the detached signature
p = MIMEBase.MIMEBase("application", 'pgp-signature')
p.set_payload(sig.read())
payload.attach(p)
# it's just a trick to handle the no encryption case
payload_in = payload
except errors.GPGMEError, ex:
self.error = "GPG error: %s" % ex.getstring()
return False
############################################
# encrypt #
############################################
if encrypt:
core.check_version(None)
plain = core.Data(payload_in.as_string())
cipher = core.Data()
c = core.Context()
c.set_armor(1)
# collect the public keys for encryption
recipients = []
rec = to[:]
if cc:
rec.extend(cc)
if bcc:
rec.extend(bcc)
for addr in rec:
c.op_keylist_start(addr, 0)
r = c.op_keylist_next()
if r is None:
self.error = 'No key for [%s]' % addr
return False
recipients.append(r)
try:
# make the encryption
c.op_encrypt(recipients, 1, plain, cipher)
cipher.seek(0, 0)
# make it a part of the email
payload = MIMEMultipart.MIMEMultipart('encrypted',
boundary=None,
_subparts=None,
**dict(protocol="application/pgp-encrypted"))
p = MIMEBase.MIMEBase("application", 'pgp-encrypted')
p.set_payload("Version: 1\r\n")
payload.attach(p)
p = MIMEBase.MIMEBase("application", 'octet-stream')
p.set_payload(cipher.read())
payload.attach(p)
except errors.GPGMEError, ex:
self.error = "GPG error: %s" % ex.getstring()
return False
#######################################################
# X.509 #
#######################################################
elif cipher_type == 'x509':
if not sign and not encrypt:
self.error = "No sign and no encrypt is set but cipher type to x509"
return False
import os
x509_sign_keyfile = x509_sign_keyfile or\
self.settings.x509_sign_keyfile
x509_sign_chainfile = x509_sign_chainfile or\
self.settings.x509_sign_chainfile
x509_sign_certfile = x509_sign_certfile or\
self.settings.x509_sign_certfile or\
x509_sign_keyfile or\
self.settings.x509_sign_certfile
# crypt certfiles could be a string or a list
x509_crypt_certfiles = x509_crypt_certfiles or\
self.settings.x509_crypt_certfiles
x509_nocerts = x509_nocerts or\
self.settings.x509_nocerts
# need m2crypto
try:
from M2Crypto import BIO, SMIME, X509
except Exception, e:
self.error = "Can't load M2Crypto module"
return False
msg_bio = BIO.MemoryBuffer(payload_in.as_string())
s = SMIME.SMIME()
# SIGN
if sign:
# key for signing
try:
keyfile_bio = BIO.openfile(x509_sign_keyfile)\
if os.path.isfile(x509_sign_keyfile)\
else BIO.MemoryBuffer(x509_sign_keyfile)
sign_certfile_bio = BIO.openfile(x509_sign_certfile)\
if os.path.isfile(x509_sign_certfile)\
else BIO.MemoryBuffer(x509_sign_certfile)
s.load_key_bio(keyfile_bio, sign_certfile_bio,
callback=lambda x: sign_passphrase)
if x509_sign_chainfile:
sk = X509.X509_Stack()
chain = X509.load_cert(x509_sign_chainfile)\
if os.path.isfile(x509_sign_chainfile)\
else X509.load_cert_string(x509_sign_chainfile)
sk.push(chain)
s.set_x509_stack(sk)
except Exception, e:
self.error = "Something went wrong on certificate / private key loading: <%s>" % str(e)
return False
try:
if x509_nocerts:
flags = SMIME.PKCS7_NOCERTS
else:
flags = 0
if not encrypt:
flags += SMIME.PKCS7_DETACHED
p7 = s.sign(msg_bio, flags=flags)
msg_bio = BIO.MemoryBuffer(payload_in.as_string(
)) # Recreate coz sign() has consumed it.
except Exception, e:
self.error = "Something went wrong on signing: <%s> %s" % (
str(e), str(flags))
return False
# ENCRYPT
if encrypt:
try:
sk = X509.X509_Stack()
if not isinstance(x509_crypt_certfiles, (list, tuple)):
x509_crypt_certfiles = [x509_crypt_certfiles]
# make an encryption cert's stack
for crypt_certfile in x509_crypt_certfiles:
certfile = X509.load_cert(crypt_certfile)\
if os.path.isfile(crypt_certfile)\
else X509.load_cert_string(crypt_certfile)
sk.push(certfile)
s.set_x509_stack(sk)
s.set_cipher(SMIME.Cipher('des_ede3_cbc'))
tmp_bio = BIO.MemoryBuffer()
if sign:
s.write(tmp_bio, p7)
else:
tmp_bio.write(payload_in.as_string())
p7 = s.encrypt(tmp_bio)
except Exception, e:
self.error = "Something went wrong on encrypting: <%s>" % str(e)
return False
# Final stage in sign and encryption
out = BIO.MemoryBuffer()
if encrypt:
s.write(out, p7)
else:
if sign:
s.write(out, p7, msg_bio, SMIME.PKCS7_DETACHED)
else:
out.write('\r\n')
out.write(payload_in.as_string())
out.close()
st = str(out.read())
payload = message_from_string(st)
else:
# no cryptography process as usual
payload = payload_in
if from_address:
payload['From'] = encoded_or_raw(from_address.decode(encoding))
else:
payload['From'] = encoded_or_raw(sender.decode(encoding))
origTo = to[:]
if to:
payload['To'] = encoded_or_raw(', '.join(to).decode(encoding))
if reply_to:
payload['Reply-To'] = encoded_or_raw(reply_to.decode(encoding))
if cc:
payload['Cc'] = encoded_or_raw(', '.join(cc).decode(encoding))
to.extend(cc)
if bcc:
to.extend(bcc)
payload['Subject'] = encoded_or_raw(subject.decode(encoding))
payload['Date'] = email.utils.formatdate()
for k, v in headers.iteritems():
payload[k] = encoded_or_raw(v.decode(encoding))
result = {}
try:
if self.settings.server == 'logging':
logger.warn('email not sent\n%s\nFrom: %s\nTo: %s\nSubject: %s\n\n%s\n%s\n' %
('-' * 40, sender,
', '.join(to), subject,
text or html, '-' * 40))
elif self.settings.server == 'gae':
xcc = dict()
if cc:
xcc['cc'] = cc
if bcc:
xcc['bcc'] = bcc
if reply_to:
xcc['reply_to'] = reply_to
from google.appengine.api import mail
attachments = attachments and [mail.Attachment(
a.my_filename,
a.my_payload,
content_id='<attachment-%s>' % k
) for k, a in enumerate(attachments) if not raw]
if attachments:
result = mail.send_mail(
sender=sender, to=origTo,
subject=subject, body=text, html=html,
attachments=attachments, **xcc)
elif html and (not raw):
result = mail.send_mail(
sender=sender, to=origTo,
subject=subject, body=text, html=html, **xcc)
else:
result = mail.send_mail(
sender=sender, to=origTo,
subject=subject, body=text, **xcc)
else:
smtp_args = self.settings.server.split(':')
kwargs = dict(timeout=self.settings.timeout)
if self.settings.ssl:
server = smtplib.SMTP_SSL(*smtp_args, **kwargs)
else:
server = smtplib.SMTP(*smtp_args, **kwargs)
if self.settings.tls and not self.settings.ssl:
server.ehlo(self.settings.hostname)
server.starttls()
server.ehlo(self.settings.hostname)
if self.settings.login:
server.login(*self.settings.login.split(':', 1))
result = server.sendmail(
sender, to, payload.as_string())
server.quit()
except Exception, e:
logger.warn('Mail.send failure:%s' % e)
self.result = result
self.error = e
return False
self.result = result
self.error = None
return True
class Recaptcha(DIV):
"""
Examples:
Use as::
form = FORM(Recaptcha(public_key='...',private_key='...'))
or::
form = SQLFORM(...)
form.append(Recaptcha(public_key='...',private_key='...'))
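A hypothetical controller sketch (the action and field names are
illustrative, not part of the API)::
def contact():
form = FORM(INPUT(_name='email'),
Recaptcha(public_key='...', private_key='...'),
INPUT(_type='submit'))
if form.accepts(request, session):
response.flash = 'thank you'
return dict(form=form)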
"""
API_SSL_SERVER = 'https://www.google.com/recaptcha/api'
API_SERVER = 'http://www.google.com/recaptcha/api'
VERIFY_SERVER = 'http://www.google.com/recaptcha/api/verify'
def __init__(self,
request=None,
public_key='',
private_key='',
use_ssl=False,
error=None,
error_message='invalid',
label='Verify:',
options='',
comment='',
ajax=False
):
request = request or current.request
self.request_vars = request and request.vars or current.request.vars
self.remote_addr = request.env.remote_addr
self.public_key = public_key
self.private_key = private_key
self.use_ssl = use_ssl
self.error = error
self.errors = Storage()
self.error_message = error_message
self.components = []
self.attributes = {}
self.label = label
self.options = options
self.comment = comment
self.ajax = ajax
def _validate(self):
# for local testing:
recaptcha_challenge_field = \
self.request_vars.recaptcha_challenge_field
recaptcha_response_field = \
self.request_vars.recaptcha_response_field
private_key = self.private_key
remoteip = self.remote_addr
if not (recaptcha_response_field and recaptcha_challenge_field
and len(recaptcha_response_field)
and len(recaptcha_challenge_field)):
self.errors['captcha'] = self.error_message
return False
params = urllib.urlencode({
'privatekey': private_key,
'remoteip': remoteip,
'challenge': recaptcha_challenge_field,
'response': recaptcha_response_field,
})
request = urllib2.Request(
url=self.VERIFY_SERVER,
data=params,
headers={'Content-type': 'application/x-www-form-urlencoded',
'User-agent': 'reCAPTCHA Python'})
httpresp = urllib2.urlopen(request)
return_values = httpresp.read().splitlines()
httpresp.close()
return_code = return_values[0]
if return_code == 'true':
del self.request_vars.recaptcha_challenge_field
del self.request_vars.recaptcha_response_field
self.request_vars.captcha = ''
return True
else:
# In case we get an error code, store it so we can get an error message
# from the /api/challenge URL as described in the reCAPTCHA api docs.
self.error = return_values[1]
self.errors['captcha'] = self.error_message
return False
def xml(self):
public_key = self.public_key
use_ssl = self.use_ssl
error_param = ''
if self.error:
error_param = '&error=%s' % self.error
if use_ssl:
server = self.API_SSL_SERVER
else:
server = self.API_SERVER
if not self.ajax:
captcha = DIV(
SCRIPT("var RecaptchaOptions = {%s};" % self.options),
SCRIPT(_type="text/javascript",
_src="%s/challenge?k=%s%s" % (server, public_key, error_param)),
TAG.noscript(
IFRAME(
_src="%s/noscript?k=%s%s" % (
server, public_key, error_param),
_height="300", _width="500", _frameborder="0"), BR(),
INPUT(
_type='hidden', _name='recaptcha_response_field',
_value='manual_challenge')), _id='recaptcha')
else: #use Google's ajax interface, needed for LOADed components
url_recaptcha_js = "%s/js/recaptcha_ajax.js" % server
RecaptchaOptions = "var RecaptchaOptions = {%s}" % self.options
script = """%(options)s;
jQuery.getScript('%(url)s',function() {
Recaptcha.create('%(public_key)s',
'recaptcha',jQuery.extend(RecaptchaOptions,{'callback':Recaptcha.focus_response_field}))
}) """ % ({'options': RecaptchaOptions, 'url': url_recaptcha_js, 'public_key': public_key})
captcha = DIV(
SCRIPT(
script,
_type="text/javascript",
),
TAG.noscript(
IFRAME(
_src="%s/noscript?k=%s%s" % (
server, public_key, error_param),
_height="300", _width="500", _frameborder="0"), BR(),
INPUT(
_type='hidden', _name='recaptcha_response_field',
_value='manual_challenge')), _id='recaptcha')
if not self.errors.captcha:
return XML(captcha).xml()
else:
captcha.append(DIV(self.errors['captcha'], _class='error'))
return XML(captcha).xml()
# this should only be used for captcha and perhaps not even for that
def addrow(form, a, b, c, style, _id, position=-1):
if style == "divs":
form[0].insert(position, DIV(DIV(LABEL(a), _class='w2p_fl'),
DIV(b, _class='w2p_fw'),
DIV(c, _class='w2p_fc'),
_id=_id))
elif style == "table2cols":
form[0].insert(position, TR(TD(LABEL(a), _class='w2p_fl'),
TD(c, _class='w2p_fc')))
form[0].insert(position + 1, TR(TD(b, _class='w2p_fw'),
_colspan=2, _id=_id))
elif style == "ul":
form[0].insert(position, LI(DIV(LABEL(a), _class='w2p_fl'),
DIV(b, _class='w2p_fw'),
DIV(c, _class='w2p_fc'),
_id=_id))
elif style == "bootstrap":
form[0].insert(position, DIV(LABEL(a, _class='control-label'),
DIV(b, SPAN(c, _class='inline-help'),
_class='controls'),
_class='control-group', _id=_id))
else:
form[0].insert(position, TR(TD(LABEL(a), _class='w2p_fl'),
TD(b, _class='w2p_fw'),
TD(c, _class='w2p_fc'), _id=_id))
class Auth(object):
default_settings = dict(
hideerror=False,
password_min_length=4,
cas_maps=None,
reset_password_requires_verification=False,
registration_requires_verification=False,
registration_requires_approval=False,
login_after_registration=False,
login_after_password_change=True,
alternate_requires_registration=False,
create_user_groups="user_%(id)s",
everybody_group_id=None,
manager_actions={},
auth_manager_role=None,
two_factor_authentication_group = None,
login_captcha=None,
register_captcha=None,
pre_registration_div=None,
retrieve_username_captcha=None,
retrieve_password_captcha=None,
captcha=None,
prevent_open_redirect_attacks=True,
prevent_password_reset_attacks=True,
expiration=3600, # one hour
long_expiration=3600 * 30 * 24, # one month
remember_me_form=True,
allow_basic_login=False,
allow_basic_login_only=False,
on_failed_authentication=lambda x: redirect(x),
formstyle=None,
label_separator=None,
logging_enabled = True,
allow_delete_accounts=False,
password_field='password',
table_user_name='auth_user',
table_group_name='auth_group',
table_membership_name='auth_membership',
table_permission_name='auth_permission',
table_event_name='auth_event',
table_cas_name='auth_cas',
table_user=None,
table_group=None,
table_membership=None,
table_permission=None,
table_event=None,
table_cas=None,
showid=False,
use_username=False,
login_email_validate=True,
login_userfield=None,
multi_login=False,
logout_onlogout=None,
register_fields=None,
register_verify_password=True,
profile_fields=None,
email_case_sensitive=True,
username_case_sensitive=True,
update_fields=['email'],
ondelete="CASCADE",
client_side=True,
renew_session_onlogin=True,
renew_session_onlogout=True,
keep_session_onlogin=True,
keep_session_onlogout=False,
wiki=Settings(),
)
# ## these are messages that can be customized
default_messages = dict(
login_button='Log In',
register_button='Sign Up',
password_reset_button='Request reset password',
password_change_button='Change password',
profile_save_button='Apply changes',
submit_button='Submit',
verify_password='Verify Password',
delete_label='Check to delete',
function_disabled='Function disabled',
access_denied='Insufficient privileges',
registration_verifying='Registration needs verification',
registration_pending='Registration is pending approval',
email_taken='This email already has an account',
invalid_username='Invalid username',
username_taken='Username already taken',
login_disabled='Login disabled by administrator',
logged_in='Logged in',
email_sent='Email sent',
unable_to_send_email='Unable to send email',
email_verified='Email verified',
logged_out='Logged out',
registration_successful='Registration successful',
invalid_email='Invalid email',
unable_send_email='Unable to send email',
invalid_login='Invalid login',
invalid_user='Invalid user',
invalid_password='Invalid password',
is_empty="Cannot be empty",
mismatched_password="Password fields don't match",
verify_email='Welcome %(username)s! Click on the link %(link)s to verify your email',
verify_email_subject='Email verification',
username_sent='Your username was emailed to you',
new_password_sent='A new password was emailed to you',
password_changed='Password changed',
retrieve_username='Your username is: %(username)s',
retrieve_username_subject='Username retrieve',
retrieve_password='Your password is: %(password)s',
retrieve_password_subject='Password retrieve',
reset_password='Click on the link %(link)s to reset your password',
reset_password_subject='Password reset',
invalid_reset_password='Invalid reset password',
profile_updated='Profile updated',
new_password='New password',
old_password='Old password',
group_description='Group uniquely assigned to user %(id)s',
register_log='User %(id)s Registered',
login_log='User %(id)s Logged-in',
login_failed_log=None,
logout_log='User %(id)s Logged-out',
profile_log='User %(id)s Profile updated',
verify_email_log='User %(id)s Verification email sent',
retrieve_username_log='User %(id)s Username retrieved',
retrieve_password_log='User %(id)s Password retrieved',
reset_password_log='User %(id)s Password reset',
change_password_log='User %(id)s Password changed',
add_group_log='Group %(group_id)s created',
del_group_log='Group %(group_id)s deleted',
add_membership_log=None,
del_membership_log=None,
has_membership_log=None,
add_permission_log=None,
del_permission_log=None,
has_permission_log=None,
impersonate_log='User %(id)s is impersonating %(other_id)s',
label_first_name='First name',
label_last_name='Last name',
label_username='Username',
label_email='E-mail',
label_password='Password',
label_registration_key='Registration key',
label_reset_password_key='Reset Password key',
label_registration_id='Registration identifier',
label_role='Role',
label_description='Description',
label_user_id='User ID',
label_group_id='Group ID',
label_name='Name',
label_table_name='Object or table name',
label_record_id='Record ID',
label_time_stamp='Timestamp',
label_client_ip='Client IP',
label_origin='Origin',
label_remember_me="Remember me (for 30 days)",
verify_password_comment='please input your password again',
)
"""
Class for authentication, authorization, role based access control.
Includes:
- registration and profile
- login and logout
- username and password retrieval
- event logging
- role creation and assignment
- user defined group/role based permission
Args:
environment: is there for legacy but unused (awful)
db: has to be the database where to create tables for authentication
mailer: `Mail(...)` or None (no mailer) or True (make a mailer)
hmac_key: can be a hmac_key or hmac_key=Auth.get_or_create_key()
controller: (where is the user action?)
cas_provider: (delegate authentication to the URL, CAS2)
Authentication Example::
from gluon.contrib.utils import *
mail=Mail()
mail.settings.server='smtp.gmail.com:587'
mail.settings.sender='[email protected]'
mail.settings.login='username:password'
auth=Auth(db)
auth.settings.mailer=mail
# auth.settings....=...
auth.define_tables()
def authentication():
return dict(form=auth())
Exposes:
- `http://.../{application}/{controller}/authentication/login`
- `http://.../{application}/{controller}/authentication/logout`
- `http://.../{application}/{controller}/authentication/register`
- `http://.../{application}/{controller}/authentication/verify_email`
- `http://.../{application}/{controller}/authentication/retrieve_username`
- `http://.../{application}/{controller}/authentication/retrieve_password`
- `http://.../{application}/{controller}/authentication/reset_password`
- `http://.../{application}/{controller}/authentication/profile`
- `http://.../{application}/{controller}/authentication/change_password`
On registration a group with role=new_user.id is created
and user is given membership of this group.
You can create a group with::
group_id=auth.add_group('Manager', 'can access the manage action')
auth.add_permission(group_id, 'access to manage')
Here "access to manage" is just a user defined string.
You can give access to a user::
auth.add_membership(group_id, user_id)
If user id is omitted, the logged in user is assumed
Then you can decorate any action::
@auth.requires_permission('access to manage')
def manage():
return dict()
You can restrict a permission to a specific table::
auth.add_permission(group_id, 'edit', db.sometable)
@auth.requires_permission('edit', db.sometable)
Or to a specific record::
auth.add_permission(group_id, 'edit', db.sometable, 45)
@auth.requires_permission('edit', db.sometable, 45)
If authorization is not granted calls::
auth.settings.on_failed_authorization
Other options::
auth.settings.mailer=None
auth.settings.expiration=3600 # seconds
...
### these are messages that can be customized
...
"""
@staticmethod
def get_or_create_key(filename=None, alg='sha512'):
request = current.request
if not filename:
filename = os.path.join(request.folder, 'private', 'auth.key')
if os.path.exists(filename):
key = open(filename, 'r').read().strip()
else:
key = alg + ':' + web2py_uuid()
open(filename, 'w').write(key)
return key
def url(self, f=None, args=None, vars=None, scheme=False):
if args is None:
args = []
if vars is None:
vars = {}
return URL(c=self.settings.controller,
f=f, args=args, vars=vars, scheme=scheme)
def here(self):
return URL(args=current.request.args, vars=current.request.get_vars)
def __init__(self, environment=None, db=None, mailer=True,
hmac_key=None, controller='default', function='user',
cas_provider=None, signature=True, secure=False,
csrf_prevention=True, propagate_extension=None,
url_index=None):
## next two lines for backward compatibility
if not db and environment and isinstance(environment, DAL):
db = environment
self.db = db
self.environment = current
self.csrf_prevention = csrf_prevention
request = current.request
session = current.session
auth = session.auth
self.user_groups = auth and auth.user_groups or {}
if secure:
request.requires_https()
now = request.now
# if we have auth info
# if not expired it, used it
# if expired, clear the session
# else, only clear auth info in the session
if auth:
delta = datetime.timedelta(days=0, seconds=auth.expiration)
if auth.last_visit and auth.last_visit + delta > now:
self.user = auth.user
# this is a trick to speed up sessions to avoid many writes
if (now - auth.last_visit).seconds > (auth.expiration / 10):
auth.last_visit = request.now
else:
self.user = None
if session.auth:
del session.auth
session.renew(clear_session=True)
else:
self.user = None
if session.auth:
del session.auth
# ## what happens after login?
url_index = url_index or URL(controller, 'index')
url_login = URL(controller, function, args='login',
extension = propagate_extension)
# ## what happens after registration?
settings = self.settings = Settings()
settings.update(Auth.default_settings)
settings.update(
cas_domains=[request.env.http_host],
cas_provider=cas_provider,
cas_actions=dict(login='login',
validate='validate',
servicevalidate='serviceValidate',
proxyvalidate='proxyValidate',
logout='logout'),
extra_fields={},
actions_disabled=[],
controller=controller,
function=function,
login_url=url_login,
logged_url=URL(controller, function, args='profile'),
download_url=URL(controller, 'download'),
mailer=(mailer is True) and Mail() or mailer,
on_failed_authorization =
URL(controller, function, args='not_authorized'),
login_next = url_index,
login_onvalidation = [],
login_onaccept = [],
login_onfail = [],
login_methods = [self],
login_form = self,
logout_next = url_index,
logout_onlogout = None,
register_next = url_index,
register_onvalidation = [],
register_onaccept = [],
verify_email_next = url_login,
verify_email_onaccept = [],
profile_next = url_index,
profile_onvalidation = [],
profile_onaccept = [],
retrieve_username_next = url_index,
retrieve_password_next = url_index,
request_reset_password_next = url_login,
reset_password_next = url_index,
change_password_next = url_index,
change_password_onvalidation = [],
change_password_onaccept = [],
retrieve_password_onvalidation = [],
reset_password_onvalidation = [],
reset_password_onaccept = [],
hmac_key = hmac_key,
formstyle = current.response.formstyle,
label_separator = current.response.form_label_separator
)
settings.lock_keys = True
# ## these are messages that can be customized
messages = self.messages = Messages(current.T)
messages.update(Auth.default_messages)
messages.update(ajax_failed_authentication=
DIV(H4('NOT AUTHORIZED'),
'Please ',
A('login',
_href=self.settings.login_url +
('?_next=' + urllib.quote(current.request.env.http_web2py_component_location))
if current.request.env.http_web2py_component_location else ''),
' to view this content.',
_class='not-authorized alert alert-block'))
messages.lock_keys = True
# for "remember me" option
response = current.response
if auth and auth.remember_me:
# when user wants to be logged in for longer
response.session_cookie_expires = auth.expiration
if signature:
self.define_signature()
else:
self.signature = None
def get_vars_next(self):
next = current.request.vars._next
if isinstance(next, (list, tuple)):
next = next[0]
return next
def _get_user_id(self):
"""accessor for auth.user_id"""
return self.user and self.user.id or None
user_id = property(_get_user_id, doc="user.id or None")
def table_user(self):
return self.db[self.settings.table_user_name]
def table_group(self):
return self.db[self.settings.table_group_name]
def table_membership(self):
return self.db[self.settings.table_membership_name]
def table_permission(self):
return self.db[self.settings.table_permission_name]
def table_event(self):
return self.db[self.settings.table_event_name]
def table_cas(self):
return self.db[self.settings.table_cas_name]
def _HTTP(self, *a, **b):
"""
only used in lambda: self._HTTP(404)
"""
raise HTTP(*a, **b)
def __call__(self):
"""
Example:
Use as::
def authentication():
return dict(form=auth())
"""
request = current.request
args = request.args
if not args:
redirect(self.url(args='login', vars=request.vars))
elif args[0] in self.settings.actions_disabled:
raise HTTP(404)
if args[0] in ('login', 'logout', 'register', 'verify_email',
'retrieve_username', 'retrieve_password',
'reset_password', 'request_reset_password',
'change_password', 'profile', 'groups',
'impersonate', 'not_authorized'):
if len(request.args) >= 2 and args[0] == 'impersonate':
return getattr(self, args[0])(request.args[1])
else:
return getattr(self, args[0])()
elif args[0] == 'cas' and not self.settings.cas_provider:
if args(1) == self.settings.cas_actions['login']:
return self.cas_login(version=2)
elif args(1) == self.settings.cas_actions['validate']:
return self.cas_validate(version=1)
elif args(1) == self.settings.cas_actions['servicevalidate']:
return self.cas_validate(version=2, proxy=False)
elif args(1) == self.settings.cas_actions['proxyvalidate']:
return self.cas_validate(version=2, proxy=True)
elif args(1) == self.settings.cas_actions['logout']:
return self.logout(next=request.vars.service or DEFAULT)
else:
raise HTTP(404)
def navbar(self, prefix='Welcome', action=None,
separators=(' [ ', ' | ', ' ] '), user_identifier=DEFAULT,
referrer_actions=DEFAULT, mode='default'):
""" Navbar with support for more templates
This uses some code from the old navbar.
Args:
mode: one of 'asmenu', 'dropdown', 'bare' or the plain default;
see the options dict defined below
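A minimal usage sketch (in a layout template; the mode name comes
from the options dict below)::
{{=auth.navbar(mode='dropdown')}}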
"""
items = [] # Hold all menu items in a list
self.bar = '' # The final navbar markup, built below
T = current.T
referrer_actions = [] if not referrer_actions else referrer_actions
if not action:
action = self.url(self.settings.function)
request = current.request
if URL() == action:
next = ''
else:
next = '?_next=' + urllib.quote(URL(args=request.args,
vars=request.get_vars))
href = lambda function: '%s/%s%s' % (action, function, next
if referrer_actions is DEFAULT
or function in referrer_actions
else '')
if isinstance(prefix, str):
prefix = T(prefix)
if prefix:
prefix = prefix.strip() + ' '
def Anr(*a, **b):
b['_rel'] = 'nofollow'
return A(*a, **b)
if self.user_id: # User is logged in
logout_next = self.settings.logout_next
items.append({'name': T('Log Out'),
'href': '%s/logout?_next=%s' % (action,
urllib.quote(
logout_next)),
'icon': 'icon-off'})
if not 'profile' in self.settings.actions_disabled:
items.append({'name': T('Profile'), 'href': href('profile'),
'icon': 'icon-user'})
if not 'change_password' in self.settings.actions_disabled:
items.append({'name': T('Password'),
'href': href('change_password'),
'icon': 'icon-lock'})
if user_identifier is DEFAULT:
user_identifier = '%(first_name)s'
if callable(user_identifier):
user_identifier = user_identifier(self.user)
elif ((isinstance(user_identifier, str) or
type(user_identifier).__name__ == 'lazyT') and
re.search(r'%\(.+\)s', user_identifier)):
user_identifier = user_identifier % self.user
if not user_identifier:
user_identifier = ''
else: # User is not logged in
items.append({'name': T('Log In'), 'href': href('login'),
'icon': 'icon-off'})
if not 'register' in self.settings.actions_disabled:
items.append({'name': T('Sign Up'), 'href': href('register'),
'icon': 'icon-user'})
if not 'request_reset_password' in self.settings.actions_disabled:
items.append({'name': T('Lost password?'),
'href': href('request_reset_password'),
'icon': 'icon-lock'})
if (self.settings.use_username and not
'retrieve_username' in self.settings.actions_disabled):
items.append({'name': T('Forgot username?'),
'href': href('retrieve_username'),
'icon': 'icon-edit'})
def menu(): # For inclusion in MENU
self.bar = [(items[0]['name'], False, items[0]['href'], [])]
del items[0]
for item in items:
self.bar[0][3].append((item['name'], False, item['href']))
def bootstrap3(): # Default web2py scaffolding
def rename(icon): return icon+' '+icon.replace('icon', 'glyphicon')
self.bar = UL(LI(Anr(I(_class=rename('icon '+items[0]['icon'])),
' ' + items[0]['name'],
_href=items[0]['href'])), _class='dropdown-menu')
del items[0]
for item in items:
self.bar.insert(-1, LI(Anr(I(_class=rename('icon '+item['icon'])),
' ' + item['name'],
_href=item['href'])))
self.bar.insert(-1, LI('', _class='divider'))
if self.user_id:
self.bar = LI(Anr(prefix, user_identifier,
_href='#', _class="dropdown-toggle",
data={'toggle': 'dropdown'}),
self.bar, _class='dropdown')
else:
self.bar = LI(Anr(T('Log In'),
_href='#', _class="dropdown-toggle",
data={'toggle': 'dropdown'}), self.bar,
_class='dropdown')
def bare():
""" In order to do advanced customization we only need the
prefix, the user_identifier and the href attribute of items
Examples:
Use as::
# in module custom_layout.py
from gluon import *
def navbar(auth_navbar):
bar = auth_navbar
user = bar["user"]
if not user:
btn_login = A(current.T("Login"),
_href=bar["login"],
_class="btn btn-success",
_rel="nofollow")
btn_register = A(current.T("Sign up"),
_href=bar["register"],
_class="btn btn-primary",
_rel="nofollow")
return DIV(btn_register, btn_login, _class="btn-group")
else:
toggletext = "%s back %s" % (bar["prefix"], user)
toggle = A(toggletext,
_href="#",
_class="dropdown-toggle",
_rel="nofollow",
**{"_data-toggle": "dropdown"})
li_profile = LI(A(I(_class="icon-user"), ' ',
current.T("Account details"),
_href=bar["profile"], _rel="nofollow"))
li_custom = LI(A(I(_class="icon-book"), ' ',
current.T("My Agenda"),
_href="#", rel="nofollow"))
li_logout = LI(A(I(_class="icon-off"), ' ',
current.T("logout"),
_href=bar["logout"], _rel="nofollow"))
dropdown = UL(li_profile,
li_custom,
LI('', _class="divider"),
li_logout,
_class="dropdown-menu", _role="menu")
return LI(toggle, dropdown, _class="dropdown")
# in models db.py
import custom_layout as custom
# in layout.html
<ul id="navbar" class="nav pull-right">
{{='auth' in globals() and \
custom.navbar(auth.navbar(mode='bare')) or ''}}</ul>
"""
bare = {}
bare['prefix'] = prefix
bare['user'] = user_identifier if self.user_id else None
for i in items:
if i['name'] == T('Log In'):
k = 'login'
elif i['name'] == T('Sign Up'):
k = 'register'
elif i['name'] == T('Lost password?'):
k = 'request_reset_password'
elif i['name'] == T('Forgot username?'):
k = 'retrieve_username'
elif i['name'] == T('Log Out'):
k = 'logout'
elif i['name'] == T('Profile'):
k = 'profile'
elif i['name'] == T('Password'):
k = 'change_password'
bare[k] = i['href']
self.bar = bare
options = {'asmenu': menu,
'dropdown': bootstrap3,
'bare': bare
} # Define custom modes.
if mode in options and callable(options[mode]):
options[mode]()
else:
s1, s2, s3 = separators
if self.user_id:
self.bar = SPAN(prefix, user_identifier, s1,
Anr(items[0]['name'],
_href=items[0]['href']), s3,
_class='auth_navbar')
else:
self.bar = SPAN(s1, Anr(items[0]['name'],
_href=items[0]['href']), s3,
_class='auth_navbar')
for item in items[1:]:
self.bar.insert(-1, s2)
self.bar.insert(-1, Anr(item['name'], _href=item['href']))
return self.bar
def __get_migrate(self, tablename, migrate=True):
if type(migrate).__name__ == 'str':
return (migrate + tablename + '.table')
elif migrate == False:
return False
else:
return True
def enable_record_versioning(self,
tables,
archive_db=None,
archive_names='%(tablename)s_archive',
current_record='current_record',
current_record_label=None):
"""
Used to enable full record versioning (including auth tables)::
auth = Auth(db)
auth.define_tables(signature=True)
# define our own tables
db.define_table('mything',Field('name'),auth.signature)
auth.enable_record_versioning(tables=db)
tables can be the db (all table) or a list of tables.
Only tables with modified_by and modified_on fields (as created
by auth.signature) will have versioning. Old record versions will be
stored in an automatically defined table 'mything_archive'.
When you enable record versioning, records are never
deleted but marked with is_active=False.
enable_record_versioning enables a common_filter for
every table that filters out records with is_active = False
Note:
If you use auth.enable_record_versioning,
do not use auth.archive or you will end up with duplicates.
auth.archive does explicitly what enable_record_versioning
does automatically.
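A minimal sketch of reading old versions afterwards (assuming the
'mything' table defined above; the archive table name follows
archive_names, and some_id is a placeholder)::
archived = db(db.mything_archive.current_record == some_id).select()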
"""
current_record_label = current_record_label or current.T(
current_record.replace('_', ' ').title())
for table in tables:
fieldnames = table.fields()
if ('id' in fieldnames and
'modified_on' in fieldnames and
not current_record in fieldnames):
table._enable_record_versioning(
archive_db=archive_db,
archive_name=archive_names,
current_record=current_record,
current_record_label=current_record_label)
def define_signature(self):
db = self.db
settings = self.settings
request = current.request
T = current.T
reference_user = 'reference %s' % settings.table_user_name
def lazy_user(auth=self):
return auth.user_id
def represent(id, record=None, s=settings):
try:
user = s.table_user(id)
return '%s %s' % (user.get("first_name", user.get("email")),
user.get("last_name", ''))
except:
return id
ondelete = self.settings.ondelete
self.signature = Table(
self.db, 'auth_signature',
Field('is_active', 'boolean',
default=True,
readable=False, writable=False,
label=T('Is Active')),
Field('created_on', 'datetime',
default=request.now,
writable=False, readable=False,
label=T('Created On')),
Field('created_by',
reference_user,
default=lazy_user, represent=represent,
writable=False, readable=False,
label=T('Created By'), ondelete=ondelete),
Field('modified_on', 'datetime',
update=request.now, default=request.now,
writable=False, readable=False,
label=T('Modified On')),
Field('modified_by',
reference_user, represent=represent,
default=lazy_user, update=lazy_user,
writable=False, readable=False,
label=T('Modified By'), ondelete=ondelete))
def define_tables(self, username=None, signature=None,
migrate=None, fake_migrate=None):
"""
To be called unless tables are defined manually
Examples:
Use as::
# defines all needed tables and table files
# 'myprefix_auth_user.table', ...
auth.define_tables(migrate='myprefix_')
# defines all needed tables without migration/table files
auth.define_tables(migrate=False)
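Extra fields can be added before calling define_tables (a common
pattern; the 'phone' field here is just an example)::
auth.settings.extra_fields['auth_user'] = [Field('phone')]
auth.define_tables(username=True)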
"""
db = self.db
if migrate is None:
migrate = db._migrate
if fake_migrate is None:
fake_migrate = db._fake_migrate
settings = self.settings
if username is None:
username = settings.use_username
else:
settings.use_username = username
if not self.signature:
self.define_signature()
if signature == True:
signature_list = [self.signature]
elif not signature:
signature_list = []
elif isinstance(signature, Table):
signature_list = [signature]
else:
signature_list = signature
is_not_empty = IS_NOT_EMPTY(error_message=self.messages.is_empty)
is_crypted = CRYPT(key=settings.hmac_key,
min_length=settings.password_min_length)
is_unique_email = [
IS_EMAIL(error_message=self.messages.invalid_email),
IS_NOT_IN_DB(db, '%s.email' % settings.table_user_name,
error_message=self.messages.email_taken)]
if not settings.email_case_sensitive:
is_unique_email.insert(1, IS_LOWER())
if not settings.table_user_name in db.tables:
passfield = settings.password_field
extra_fields = settings.extra_fields.get(
settings.table_user_name, []) + signature_list
if username or settings.cas_provider:
is_unique_username = \
[IS_MATCH('[\w\.\-]+', strict=True,
error_message=self.messages.invalid_username),
IS_NOT_IN_DB(db, '%s.username' % settings.table_user_name,
error_message=self.messages.username_taken)]
if not settings.username_case_sensitive:
is_unique_username.insert(1, IS_LOWER())
db.define_table(
settings.table_user_name,
Field('first_name', length=128, default='',
label=self.messages.label_first_name,
requires=is_not_empty),
Field('last_name', length=128, default='',
label=self.messages.label_last_name,
requires=is_not_empty),
Field('email', length=512, default='',
label=self.messages.label_email,
requires=is_unique_email),
Field('username', length=128, default='',
label=self.messages.label_username,
requires=is_unique_username),
Field(passfield, 'password', length=512,
readable=False, label=self.messages.label_password,
requires=[is_crypted]),
Field('registration_key', length=512,
writable=False, readable=False, default='',
label=self.messages.label_registration_key),
Field('reset_password_key', length=512,
writable=False, readable=False, default='',
label=self.messages.label_reset_password_key),
Field('registration_id', length=512,
writable=False, readable=False, default='',
label=self.messages.label_registration_id),
*extra_fields,
**dict(
migrate=self.__get_migrate(settings.table_user_name,
migrate),
fake_migrate=fake_migrate,
format='%(username)s'))
else:
db.define_table(
settings.table_user_name,
Field('first_name', length=128, default='',
label=self.messages.label_first_name,
requires=is_not_empty),
Field('last_name', length=128, default='',
label=self.messages.label_last_name,
requires=is_not_empty),
Field('email', length=512, default='',
label=self.messages.label_email,
requires=is_unique_email),
Field(passfield, 'password', length=512,
readable=False, label=self.messages.label_password,
requires=[is_crypted]),
Field('registration_key', length=512,
writable=False, readable=False, default='',
label=self.messages.label_registration_key),
Field('reset_password_key', length=512,
writable=False, readable=False, default='',
label=self.messages.label_reset_password_key),
Field('registration_id', length=512,
writable=False, readable=False, default='',
label=self.messages.label_registration_id),
*extra_fields,
**dict(
migrate=self.__get_migrate(settings.table_user_name,
migrate),
fake_migrate=fake_migrate,
format='%(first_name)s %(last_name)s (%(id)s)'))
reference_table_user = 'reference %s' % settings.table_user_name
if not settings.table_group_name in db.tables:
extra_fields = settings.extra_fields.get(
settings.table_group_name, []) + signature_list
db.define_table(
settings.table_group_name,
Field('role', length=512, default='',
label=self.messages.label_role,
requires=IS_NOT_IN_DB(db, '%s.role' % settings.table_group_name)),
Field('description', 'text',
label=self.messages.label_description),
*extra_fields,
**dict(
migrate=self.__get_migrate(
settings.table_group_name, migrate),
fake_migrate=fake_migrate,
format='%(role)s (%(id)s)'))
reference_table_group = 'reference %s' % settings.table_group_name
if not settings.table_membership_name in db.tables:
extra_fields = settings.extra_fields.get(
settings.table_membership_name, []) + signature_list
db.define_table(
settings.table_membership_name,
Field('user_id', reference_table_user,
label=self.messages.label_user_id),
Field('group_id', reference_table_group,
label=self.messages.label_group_id),
*extra_fields,
**dict(
migrate=self.__get_migrate(
settings.table_membership_name, migrate),
fake_migrate=fake_migrate))
if not settings.table_permission_name in db.tables:
extra_fields = settings.extra_fields.get(
settings.table_permission_name, []) + signature_list
db.define_table(
settings.table_permission_name,
Field('group_id', reference_table_group,
label=self.messages.label_group_id),
Field('name', default='default', length=512,
label=self.messages.label_name,
requires=is_not_empty),
Field('table_name', length=512,
label=self.messages.label_table_name),
Field('record_id', 'integer', default=0,
label=self.messages.label_record_id,
requires=IS_INT_IN_RANGE(0, 10 ** 9)),
*extra_fields,
**dict(
migrate=self.__get_migrate(
settings.table_permission_name, migrate),
fake_migrate=fake_migrate))
if not settings.table_event_name in db.tables:
db.define_table(
settings.table_event_name,
Field('time_stamp', 'datetime',
default=current.request.now,
label=self.messages.label_time_stamp),
Field('client_ip',
default=current.request.client,
label=self.messages.label_client_ip),
Field('user_id', reference_table_user, default=None,
label=self.messages.label_user_id),
Field('origin', default='auth', length=512,
label=self.messages.label_origin,
requires=is_not_empty),
Field('description', 'text', default='',
label=self.messages.label_description,
requires=is_not_empty),
*settings.extra_fields.get(settings.table_event_name, []),
**dict(
migrate=self.__get_migrate(
settings.table_event_name, migrate),
fake_migrate=fake_migrate))
now = current.request.now
if settings.cas_domains:
if not settings.table_cas_name in db.tables:
db.define_table(
settings.table_cas_name,
Field('user_id', reference_table_user, default=None,
label=self.messages.label_user_id),
Field('created_on', 'datetime', default=now),
Field('service', requires=IS_URL()),
Field('ticket'),
Field('renew', 'boolean', default=False),
*settings.extra_fields.get(settings.table_cas_name, []),
**dict(
migrate=self.__get_migrate(
settings.table_cas_name, migrate),
fake_migrate=fake_migrate))
if not db._lazy_tables:
settings.table_user = db[settings.table_user_name]
settings.table_group = db[settings.table_group_name]
settings.table_membership = db[settings.table_membership_name]
settings.table_permission = db[settings.table_permission_name]
settings.table_event = db[settings.table_event_name]
if settings.cas_domains:
settings.table_cas = db[settings.table_cas_name]
if settings.cas_provider: # THIS IS NOT LAZY
settings.actions_disabled = \
['profile', 'register', 'change_password',
'request_reset_password', 'retrieve_username']
from gluon.contrib.login_methods.cas_auth import CasAuth
maps = settings.cas_maps
if not maps:
table_user = self.table_user()
maps = dict((name, lambda v, n=name: v.get(n, None)) for name in
table_user.fields if name != 'id'
and table_user[name].readable)
maps['registration_id'] = \
lambda v, p=settings.cas_provider: '%s/%s' % (p, v['user'])
actions = [settings.cas_actions['login'],
settings.cas_actions['servicevalidate'],
settings.cas_actions['logout']]
settings.login_form = CasAuth(
casversion=2,
urlbase=settings.cas_provider,
actions=actions,
maps=maps)
return self
def log_event(self, description, vars=None, origin='auth'):
"""
Examples:
Use as::
auth.log_event(description='this happened', origin='auth')
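The description can also be a template filled from vars (a small
sketch; the values are placeholders)::
auth.log_event(description='user %(id)s viewed page %(page)s',
vars=dict(id=auth.user_id, page='index'))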
"""
if not self.settings.logging_enabled or not description:
return
elif self.is_logged_in():
user_id = self.user.id
else:
user_id = None # user unknown
vars = vars or {}
# log messages should not be translated
if type(description).__name__ == 'lazyT':
description = description.m
self.table_event().insert(
description=str(description % vars),
origin=origin, user_id=user_id)
def get_or_create_user(self, keys, update_fields=['email'],
login=True, get=True):
"""
Used for alternate login methods:
If the user already exists, the fields listed in update_fields
are refreshed from keys; if the user does not yet exist, a new
record is created.
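A minimal sketch (keys and values are illustrative)::
user = auth.get_or_create_user(
dict(email='[email protected]', first_name='Jane'),
update_fields=['email'])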
"""
table_user = self.table_user()
user = None
checks = []
# make a guess about who this user is
for fieldname in ['registration_id', 'username', 'email']:
if fieldname in table_user.fields() and \
keys.get(fieldname, None):
checks.append(fieldname)
value = keys[fieldname]
user = table_user(**{fieldname: value})
if user:
break
if not checks:
return None
if not 'registration_id' in keys:
keys['registration_id'] = keys[checks[0]]
# if we think we found the user but registration_id does not match,
# make new user
if 'registration_id' in checks \
and user \
and user.registration_id \
and ('registration_id' not in keys or user.registration_id != str(keys['registration_id'])):
user = None # THINK MORE ABOUT THIS? DO WE TRUST OPENID PROVIDER?
if user:
if not get:
# added for register_bare to avoid overwriting users
return None
update_keys = dict(registration_id=keys['registration_id'])
for key in update_fields:
if key in keys:
update_keys[key] = keys[key]
user.update_record(**update_keys)
elif checks:
if not 'first_name' in keys and 'first_name' in table_user.fields:
guess = keys.get('email', 'anonymous').split('@')[0]
keys['first_name'] = keys.get('username', guess)
user_id = table_user.insert(**table_user._filter_fields(keys))
user = table_user[user_id]
if self.settings.create_user_groups:
group_id = self.add_group(
self.settings.create_user_groups % user)
self.add_membership(group_id, user_id)
if self.settings.everybody_group_id:
self.add_membership(self.settings.everybody_group_id, user_id)
if login:
self.user = user
return user
def basic(self, basic_auth_realm=False):
"""
Performs basic login.
Args:
basic_auth_realm: optional basic http authentication realm. Can be
a str, unicode, callable or boolean.
Reads current.request.env.http_authorization
and returns (basic_allowed, basic_accepted, user).
If basic_auth_realm is a callable, its return value
is used to set the basic authentication realm; if it's a string,
its content is used instead. Otherwise the basic authentication
realm is set to the application name.
If basic_auth_realm is None or False (the default), no
authentication challenge is sent.
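A minimal controller sketch of using basic auth for an API-style
action (the action name is illustrative)::
auth.settings.allow_basic_login = True
def api():
if not auth.basic()[2] and not auth.is_logged_in():
raise HTTP(401)
return dict(data='...')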
"""
if not self.settings.allow_basic_login:
return (False, False, False)
basic = current.request.env.http_authorization
if basic_auth_realm:
if callable(basic_auth_realm):
basic_auth_realm = basic_auth_realm()
# a callable may return a string, so fall through and handle it too
if isinstance(basic_auth_realm, (unicode, str)):
basic_realm = unicode(basic_auth_realm)
elif basic_auth_realm is True:
basic_realm = u'' + current.request.application
http_401 = HTTP(401, u'Not Authorized', **{'WWW-Authenticate': u'Basic realm="' + basic_realm + '"'})
if not basic or not basic[:6].lower() == 'basic ':
if basic_auth_realm:
raise http_401
return (True, False, False)
(username, sep, password) = base64.b64decode(basic[6:]).partition(':')
is_valid_user = sep and self.login_bare(username, password)
if not is_valid_user and basic_auth_realm:
raise http_401
return (True, True, is_valid_user)
def login_user(self, user):
"""
Logs in the `user = db.auth_user(id)`
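Sketch: log in a user fetched by id (this bypasses password checks;
user_id is a placeholder)::
auth.login_user(db.auth_user(user_id))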
"""
from gluon.settings import global_settings
if global_settings.web2py_runtime_gae:
user = Row(self.table_user()._filter_fields(user, id=True))
delattr(user, 'password')
else:
user = Row(user)
for key, value in user.items():
if callable(value) or key == 'password':
delattr(user, key)
if self.settings.renew_session_onlogin:
current.session.renew(clear_session=not self.settings.keep_session_onlogin)
current.session.auth = Storage(user=user,
last_visit=current.request.now,
expiration=self.settings.expiration,
hmac_key=web2py_uuid())
self.user = user
self.update_groups()
def _get_login_settings(self):
table_user = self.table_user()
# prefer an explicit login_userfield; otherwise pick username/email
userfield = self.settings.login_userfield or (
'username' if 'username' in table_user.fields else 'email')
passfield = self.settings.password_field
return Storage({"table_user": table_user,
"userfield": userfield,
"passfield": passfield})
def login_bare(self, username, password):
"""
Logs in the user specified by username (or email) and password
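A minimal sketch (credentials are placeholders)::
user = auth.login_bare('[email protected]', 'secret123')
if not user:
response.flash = 'invalid credentials'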
"""
settings = self._get_login_settings()
user = settings.table_user(**{settings.userfield: \
username})
if user and user.get(settings.passfield, False):
password = settings.table_user[
settings.passfield].validate(password)[0]
if ((user.registration_key is None or
not user.registration_key.strip()) and
password == user[settings.passfield]):
self.login_user(user)
return user
else:
# user not in database try other login methods
for login_method in self.settings.login_methods:
if login_method != self and login_method(username, password):
self.user = username
return username
return False
def register_bare(self, **fields):
"""
Registers a user as specified by username (or email)
and a raw password.
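A minimal sketch (field values are placeholders; the password
field name defaults to 'password')::
user = auth.register_bare(email='[email protected]',
password='secret123')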
"""
settings = self._get_login_settings()
if not fields.get(settings.passfield):
raise ValueError("register_bare: " +
"password not provided or invalid")
elif not fields.get(settings.userfield):
raise ValueError("register_bare: " +
"userfield not provided or invalid")
fields[settings.passfield] = settings.table_user[settings.passfield].validate(fields[settings.passfield])[0]
user = self.get_or_create_user(fields, login=False, get=False, update_fields=self.settings.update_fields)
if not user:
# get or create did not create a user (it ignores duplicate records)
return False
return user
def cas_login(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
version=2,
):
request = current.request
response = current.response
session = current.session
db, table = self.db, self.table_cas()
session._cas_service = request.vars.service or session._cas_service
if not request.env.http_host in self.settings.cas_domains or \
not session._cas_service:
raise HTTP(403, 'not authorized')
def allow_access(interactivelogin=False):
row = table(service=session._cas_service, user_id=self.user.id)
if row:
ticket = row.ticket
else:
ticket = 'ST-' + web2py_uuid()
table.insert(service=session._cas_service,
user_id=self.user.id,
ticket=ticket,
created_on=request.now,
renew=interactivelogin)
service = session._cas_service
query_sep = '&' if '?' in service else '?'
del session._cas_service
if 'warn' in request.vars and not interactivelogin:
response.headers[
'refresh'] = "5;URL=%s" % service + query_sep + "ticket=" + ticket
return A("Continue to %s" % service,
_href=service + query_sep + "ticket=" + ticket)
else:
redirect(service + query_sep + "ticket=" + ticket)
if self.is_logged_in() and not 'renew' in request.vars:
return allow_access()
elif not self.is_logged_in() and 'gateway' in request.vars:
redirect(service)
def cas_onaccept(form, onaccept=onaccept):
if not onaccept is DEFAULT:
onaccept(form)
return allow_access(interactivelogin=True)
return self.login(next, onvalidation, cas_onaccept, log)
def cas_validate(self, version=2, proxy=False):
request = current.request
db, table = self.db, self.table_cas()
current.response.headers['Content-Type'] = 'text'
ticket = request.vars.ticket
renew = 'renew' in request.vars
row = table(ticket=ticket)
success = False
if row:
userfield = self.settings.login_userfield or (
'username' if 'username' in table.fields else 'email')
# If ticket is a service Ticket and RENEW flag respected
if ticket[0:3] == 'ST-' and \
not ((row.renew and renew) ^ renew):
user = self.table_user()(row.user_id)
row.delete_record()
success = True
def build_response(body):
return '<?xml version="1.0" encoding="UTF-8"?>\n' +\
TAG['cas:serviceResponse'](
body, **{'_xmlns:cas': 'http://www.yale.edu/tp/cas'}).xml()
if success:
if version == 1:
message = 'yes\n%s' % user[userfield]
else: # assume version 2
username = user.get('username', user[userfield])
message = build_response(
TAG['cas:authenticationSuccess'](
TAG['cas:user'](username),
*[TAG['cas:' + field.name](user[field.name])
for field in self.table_user()
if field.readable]))
else:
if version == 1:
message = 'no\n'
elif row:
message = build_response(TAG['cas:authenticationFailure']())
else:
message = build_response(
TAG['cas:authenticationFailure'](
'Ticket %s not recognized' % ticket,
_code='INVALID TICKET'))
raise HTTP(200, message)
def _reset_two_factor_auth(self, session):
"""When two-step authentication is enabled, this function is used to
clear the session after successfully completing second challenge
or when the maximum number of allowed attempts has been used up.
"""
session.auth_two_factor_user = None
session.auth_two_factor = None
session.auth_two_factor_enabled = False
# Allow up to 4 attempts (the 1st one plus 3 more)
session.auth_two_factor_tries_left = 3
def login(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a login form
"""
table_user = self.table_user()
settings = self.settings
if 'username' in table_user.fields or \
not settings.login_email_validate:
tmpvalidator = IS_NOT_EMPTY(error_message=self.messages.is_empty)
if not settings.username_case_sensitive:
tmpvalidator = [IS_LOWER(), tmpvalidator]
else:
tmpvalidator = IS_EMAIL(error_message=self.messages.invalid_email)
if not settings.email_case_sensitive:
tmpvalidator = [IS_LOWER(), tmpvalidator]
request = current.request
response = current.response
session = current.session
passfield = settings.password_field
try:
table_user[passfield].requires[-1].min_length = 0
except:
pass
### use session for federated login
snext = self.get_vars_next()
if snext and self.settings.prevent_open_redirect_attacks:
items = snext.split('/')
if '//' in snext and items[2] != request.env.http_host:
snext = None
if snext:
session._auth_next = snext
elif session._auth_next:
snext = session._auth_next
### pass
if next is DEFAULT:
# important for security
next = settings.login_next
if callable(next):
next = next()
user_next = snext
if user_next:
external = user_next.split('://')
if external[0].lower() in ['http', 'https', 'ftp']:
host_next = user_next.split('//', 1)[-1].split('/')[0]
if host_next in settings.cas_domains:
next = user_next
else:
next = user_next
if onvalidation is DEFAULT:
onvalidation = settings.login_onvalidation
if onaccept is DEFAULT:
onaccept = settings.login_onaccept
if log is DEFAULT:
log = self.messages['login_log']
onfail = settings.login_onfail
user = None # default
#Setup the default field used for the form
multi_login = False
if self.settings.login_userfield:
username = self.settings.login_userfield
else:
if 'username' in table_user.fields:
username = 'username'
else:
username = 'email'
if self.settings.multi_login:
multi_login = True
old_requires = table_user[username].requires
table_user[username].requires = tmpvalidator
# If two-factor authentication is enabled, and the maximum
# number of tries allowed is used up, reset the session to
# pre-login state with two-factor auth
if session.auth_two_factor_enabled and session.auth_two_factor_tries_left < 1:
# Exceeded maximum allowed tries for this code. Require user to enter
# username and password again.
user = None
accepted_form = False
self._reset_two_factor_auth(session)
# Redirect to the default 'next' page without logging
# in. If that page requires login, user will be redirected
# back to the main login form
redirect(next, client_side=settings.client_side)
# Before showing the default login form, check whether
# we are already on the second step of two-step authentication.
# If we are, then skip this login form and use the form for the
# second challenge instead.
        # Note to devs: the code inside the if-block is unchanged from the
        # previous version of this file, apart from being indented to sit
        # inside the if-block.
if session.auth_two_factor_user is None:
if settings.remember_me_form:
extra_fields = [
Field('remember_me', 'boolean', default=False,
label = self.messages.label_remember_me)]
else:
extra_fields = []
# do we use our own login form, or from a central source?
if settings.login_form == self:
form = SQLFORM(
table_user,
fields=[username, passfield],
hidden=dict(_next=next),
showid=settings.showid,
submit_button=self.messages.login_button,
delete_label=self.messages.delete_label,
formstyle=settings.formstyle,
separator=settings.label_separator,
extra_fields = extra_fields,
)
captcha = settings.login_captcha or \
(settings.login_captcha != False and settings.captcha)
if captcha:
addrow(form, captcha.label, captcha, captcha.comment,
settings.formstyle, 'captcha__row')
accepted_form = False
if form.accepts(request, session if self.csrf_prevention else None,
formname='login', dbio=False,
onvalidation=onvalidation,
hideerror=settings.hideerror):
accepted_form = True
# check for username in db
entered_username = form.vars[username]
if multi_login and '@' in entered_username:
# if '@' in username check for email, not username
user = table_user(email = entered_username)
else:
user = table_user(**{username: entered_username})
if user:
# user in db, check if registration pending or disabled
temp_user = user
if temp_user.registration_key == 'pending':
response.flash = self.messages.registration_pending
return form
elif temp_user.registration_key in ('disabled', 'blocked'):
response.flash = self.messages.login_disabled
return form
elif (not temp_user.registration_key is None
and temp_user.registration_key.strip()):
response.flash = \
self.messages.registration_verifying
return form
                        # try alternate logins first, as these have the
                        # current version of the password
user = None
for login_method in settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if not self in settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(
form.vars, settings.update_fields)
break
if not user:
# alternates have failed, maybe because service inaccessible
if settings.login_methods[0] == self:
# try logging in locally using cached credentials
if form.vars.get(passfield, '') == temp_user[passfield]:
# success
user = temp_user
else:
# user not in db
if not settings.alternate_requires_registration:
# we're allowed to auto-register users from external systems
for login_method in settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if not self in settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(
form.vars, settings.update_fields)
break
if not user:
self.log_event(self.messages['login_failed_log'],
request.post_vars)
# invalid login
session.flash = self.messages.invalid_login
callback(onfail, None)
redirect(
self.url(args=request.args, vars=request.get_vars),
client_side=settings.client_side)
else: # use a central authentication server
cas = settings.login_form
cas_user = cas.get_user()
if cas_user:
cas_user[passfield] = None
user = self.get_or_create_user(
table_user._filter_fields(cas_user),
settings.update_fields)
elif hasattr(cas, 'login_form'):
return cas.login_form()
else:
# we need to pass through login again before going on
next = self.url(settings.function, args='login')
redirect(cas.login_url(next),
client_side=settings.client_side)
# Extra login logic for two-factor authentication
#################################################
# If the 'user' variable has a value, this means that the first
# authentication step was successful (i.e. user provided correct
# username and password at the first challenge).
# Check if this user is signed up for two-factor authentication
# Default rule is that the user must be part of a group that is called
# auth.settings.two_factor_authentication_group
if user and self.settings.two_factor_authentication_group:
role = self.settings.two_factor_authentication_group
session.auth_two_factor_enabled = self.has_membership(user_id=user.id, role=role)
# challenge
if session.auth_two_factor_enabled:
form = SQLFORM.factory(
Field('authentication_code',
required=True,
comment='This code was emailed to you and is required for login.'),
hidden=dict(_next=next),
formstyle=settings.formstyle,
separator=settings.label_separator
)
# accepted_form is used by some default web2py code later in the
# function that handles running specified functions before redirect
# Set it to False until the challenge form is accepted.
accepted_form = False
# Handle the case when a user has submitted the login/password
# form successfully, and the password has been validated, but
# the two-factor form has not been displayed or validated yet.
if session.auth_two_factor_user is None and user is not None:
session.auth_two_factor_user = user # store the validated user and associate with this session
session.auth_two_factor = random.randint(100000, 999999)
session.auth_two_factor_tries_left = 3 # Allow user to try up to 4 times
# TODO: Add some error checking to handle cases where email cannot be sent
self.settings.mailer.send(
to=user.email,
subject="Two-step Login Authentication Code",
message="Your temporary login code is {0}".format(session.auth_two_factor))
if form.accepts(request, session if self.csrf_prevention else None,
formname='login', dbio=False,
onvalidation=onvalidation,
hideerror=settings.hideerror):
accepted_form = True
if form.vars['authentication_code'] == str(session.auth_two_factor):
# Handle the case when the two-factor form has been successfully validated
# and the user was previously stored (the current user should be None because
# in this case, the previous username/password login form should not be displayed.
# This will allow the code after the 2-factor authentication block to proceed as
# normal.
if user is None or user == session.auth_two_factor_user:
user = session.auth_two_factor_user
# For security, because the username stored in the
# session somehow does not match the just validated
# user. Should not be possible without session stealing
# which is hard with SSL.
elif user != session.auth_two_factor_user:
user = None
# Either way, the user and code associated with this session should
# be removed. This handles cases where the session login may have
# expired but browser window is open, so the old session key and
                    # session username will still exist
self._reset_two_factor_auth(session)
else:
# TODO: Limit the number of retries allowed.
response.flash = 'Incorrect code. {0} more attempt(s) remaining.'.format(session.auth_two_factor_tries_left)
session.auth_two_factor_tries_left -= 1
return form
else:
return form
# End login logic for two-factor authentication
# process authenticated users
if user:
user = Row(table_user._filter_fields(user, id=True))
# process authenticated users
# user wants to be logged in for longer
self.login_user(user)
session.auth.expiration = \
request.post_vars.remember_me and \
settings.long_expiration or \
settings.expiration
session.auth.remember_me = 'remember_me' in request.post_vars
self.log_event(log, user)
session.flash = self.messages.logged_in
# how to continue
if settings.login_form == self:
if accepted_form:
callback(onaccept, form)
if next == session._auth_next:
session._auth_next = None
next = replace_id(next, form)
redirect(next, client_side=settings.client_side)
table_user[username].requires = old_requires
return form
elif user:
callback(onaccept, None)
if next == session._auth_next:
del session._auth_next
redirect(next, client_side=settings.client_side)
def logout(self, next=DEFAULT, onlogout=DEFAULT, log=DEFAULT):
"""
Logouts and redirects to login
"""
# Clear out 2-step authentication information if user logs
# out. This information is also cleared on successful login.
self._reset_two_factor_auth(current.session)
if next is DEFAULT:
next = self.get_vars_next() or self.settings.logout_next
if onlogout is DEFAULT:
onlogout = self.settings.logout_onlogout
if onlogout:
onlogout(self.user)
if log is DEFAULT:
log = self.messages['logout_log']
if self.user:
self.log_event(log, self.user)
if self.settings.login_form != self:
cas = self.settings.login_form
cas_user = cas.get_user()
if cas_user:
next = cas.logout_url(next)
current.session.auth = None
if self.settings.renew_session_onlogout:
current.session.renew(clear_session=not self.settings.keep_session_onlogout)
current.session.flash = self.messages.logged_out
if not next is None:
redirect(next)
def register(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a registration form
"""
table_user = self.table_user()
request = current.request
response = current.response
session = current.session
if self.is_logged_in():
redirect(self.settings.logged_url,
client_side=self.settings.client_side)
if next is DEFAULT:
next = self.get_vars_next() or self.settings.register_next
if onvalidation is DEFAULT:
onvalidation = self.settings.register_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.register_onaccept
if log is DEFAULT:
log = self.messages['register_log']
table_user = self.table_user()
if self.settings.login_userfield:
username = self.settings.login_userfield
elif 'username' in table_user.fields:
username = 'username'
else:
username = 'email'
# Ensure the username field is unique.
unique_validator = IS_NOT_IN_DB(self.db, table_user[username])
if not table_user[username].requires:
table_user[username].requires = unique_validator
elif isinstance(table_user[username].requires, (list, tuple)):
if not any([isinstance(validator, IS_NOT_IN_DB) for validator in
table_user[username].requires]):
if isinstance(table_user[username].requires, list):
table_user[username].requires.append(unique_validator)
else:
table_user[username].requires += (unique_validator, )
elif not isinstance(table_user[username].requires, IS_NOT_IN_DB):
table_user[username].requires = [table_user[username].requires,
unique_validator]
passfield = self.settings.password_field
formstyle = self.settings.formstyle
if self.settings.register_verify_password:
extra_fields = [
Field("password_two", "password", requires=IS_EQUAL_TO(
request.post_vars.get(passfield, None),
error_message=self.messages.mismatched_password),
label=current.T("Confirm Password"))]
else:
extra_fields = []
form = SQLFORM(table_user,
fields=self.settings.register_fields,
hidden=dict(_next=next),
showid=self.settings.showid,
submit_button=self.messages.register_button,
delete_label=self.messages.delete_label,
formstyle=formstyle,
separator=self.settings.label_separator,
extra_fields = extra_fields
)
captcha = self.settings.register_captcha or self.settings.captcha
if captcha:
addrow(form, captcha.label, captcha,
captcha.comment, self.settings.formstyle, 'captcha__row')
#Add a message if specified
if self.settings.pre_registration_div:
addrow(form, '',
DIV(_id="pre-reg", *self.settings.pre_registration_div),
'', formstyle, '')
table_user.registration_key.default = key = web2py_uuid()
if form.accepts(request, session if self.csrf_prevention else None,
formname='register',
onvalidation=onvalidation,
hideerror=self.settings.hideerror):
description = self.messages.group_description % form.vars
if self.settings.create_user_groups:
group_id = self.add_group(
self.settings.create_user_groups % form.vars, description)
self.add_membership(group_id, form.vars.id)
if self.settings.everybody_group_id:
self.add_membership(
self.settings.everybody_group_id, form.vars.id)
if self.settings.registration_requires_verification:
link = self.url(
self.settings.function, args=('verify_email', key), scheme=True)
d = dict(form.vars)
d.update(dict(key=key, link=link, username=form.vars[username]))
if not (self.settings.mailer and self.settings.mailer.send(
to=form.vars.email,
subject=self.messages.verify_email_subject,
message=self.messages.verify_email % d)):
self.db.rollback()
response.flash = self.messages.unable_send_email
return form
session.flash = self.messages.email_sent
if self.settings.registration_requires_approval and \
not self.settings.registration_requires_verification:
table_user[form.vars.id] = dict(registration_key='pending')
session.flash = self.messages.registration_pending
elif (not self.settings.registration_requires_verification or
self.settings.login_after_registration):
if not self.settings.registration_requires_verification:
table_user[form.vars.id] = dict(registration_key='')
session.flash = self.messages.registration_successful
user = table_user(**{username: form.vars[username]})
self.login_user(user)
session.flash = self.messages.logged_in
self.log_event(log, form.vars)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next, client_side=self.settings.client_side)
return form
def is_logged_in(self):
"""
Checks if the user is logged in and returns True/False.
If so user is in auth.user as well as in session.auth.user
"""
if self.user:
return True
return False
def verify_email(self,
next=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Action used to verify the registration email
"""
key = getarg(-1)
table_user = self.table_user()
user = table_user(registration_key=key)
if not user:
redirect(self.settings.login_url)
if self.settings.registration_requires_approval:
user.update_record(registration_key='pending')
current.session.flash = self.messages.registration_pending
else:
user.update_record(registration_key='')
current.session.flash = self.messages.email_verified
        # make sure session has same user.registration_key as db record
if current.session.auth and current.session.auth.user:
current.session.auth.user.registration_key = user.registration_key
if log is DEFAULT:
log = self.messages['verify_email_log']
if next is DEFAULT:
next = self.settings.verify_email_next
if onaccept is DEFAULT:
onaccept = self.settings.verify_email_onaccept
self.log_event(log, user)
callback(onaccept, user)
redirect(next)
def retrieve_username(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form to retrieve the user username
(only if there is a username field)
"""
table_user = self.table_user()
if not 'username' in table_user.fields:
raise HTTP(404)
request = current.request
response = current.response
session = current.session
captcha = self.settings.retrieve_username_captcha or \
(self.settings.retrieve_username_captcha != False and self.settings.captcha)
if not self.settings.mailer:
response.flash = self.messages.function_disabled
return ''
if next is DEFAULT:
next = self.get_vars_next() or self.settings.retrieve_username_next
if onvalidation is DEFAULT:
onvalidation = self.settings.retrieve_username_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.retrieve_username_onaccept
if log is DEFAULT:
log = self.messages['retrieve_username_log']
old_requires = table_user.email.requires
table_user.email.requires = [IS_IN_DB(self.db, table_user.email,
error_message=self.messages.invalid_email)]
form = SQLFORM(table_user,
fields=['email'],
hidden=dict(_next=next),
showid=self.settings.showid,
submit_button=self.messages.submit_button,
delete_label=self.messages.delete_label,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if captcha:
addrow(form, captcha.label, captcha,
captcha.comment, self.settings.formstyle, 'captcha__row')
if form.accepts(request, session if self.csrf_prevention else None,
formname='retrieve_username', dbio=False,
onvalidation=onvalidation, hideerror=self.settings.hideerror):
users = table_user._db(table_user.email==form.vars.email).select()
if not users:
current.session.flash = \
self.messages.invalid_email
redirect(self.url(args=request.args))
username = ', '.join(u.username for u in users)
self.settings.mailer.send(to=form.vars.email,
subject=self.messages.retrieve_username_subject,
message=self.messages.retrieve_username % dict(username=username))
session.flash = self.messages.email_sent
for user in users:
self.log_event(log, user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next)
table_user.email.requires = old_requires
return form
def random_password(self):
import string
import random
password = ''
specials = r'!#$*'
for i in range(0, 3):
password += random.choice(string.lowercase)
password += random.choice(string.uppercase)
password += random.choice(string.digits)
password += random.choice(specials)
return ''.join(random.sample(password, len(password)))
def reset_password_deprecated(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form to reset the user password (deprecated)
"""
table_user = self.table_user()
request = current.request
response = current.response
session = current.session
if not self.settings.mailer:
response.flash = self.messages.function_disabled
return ''
if next is DEFAULT:
next = self.get_vars_next() or self.settings.retrieve_password_next
if onvalidation is DEFAULT:
onvalidation = self.settings.retrieve_password_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.retrieve_password_onaccept
if log is DEFAULT:
log = self.messages['retrieve_password_log']
old_requires = table_user.email.requires
table_user.email.requires = [IS_IN_DB(self.db, table_user.email,
error_message=self.messages.invalid_email)]
form = SQLFORM(table_user,
fields=['email'],
hidden=dict(_next=next),
showid=self.settings.showid,
submit_button=self.messages.submit_button,
delete_label=self.messages.delete_label,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if form.accepts(request, session if self.csrf_prevention else None,
formname='retrieve_password', dbio=False,
onvalidation=onvalidation, hideerror=self.settings.hideerror):
user = table_user(email=form.vars.email)
if not user:
current.session.flash = \
self.messages.invalid_email
redirect(self.url(args=request.args))
elif user.registration_key in ('pending', 'disabled', 'blocked'):
current.session.flash = \
self.messages.registration_pending
redirect(self.url(args=request.args))
password = self.random_password()
passfield = self.settings.password_field
d = {
passfield: str(table_user[passfield].validate(password)[0]),
'registration_key': ''
}
user.update_record(**d)
if self.settings.mailer and \
self.settings.mailer.send(to=form.vars.email,
subject=self.messages.retrieve_password_subject,
message=self.messages.retrieve_password % dict(password=password)):
session.flash = self.messages.email_sent
else:
session.flash = self.messages.unable_to_send_email
self.log_event(log, user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next)
table_user.email.requires = old_requires
return form
def reset_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form to reset the user password
"""
table_user = self.table_user()
request = current.request
# response = current.response
session = current.session
if next is DEFAULT:
next = self.get_vars_next() or self.settings.reset_password_next
if self.settings.prevent_password_reset_attacks:
key = request.vars.key
if key:
session._reset_password_key = key
redirect(self.url(args='reset_password'))
else:
key = session._reset_password_key
else:
key = request.vars.key
try:
t0 = int(key.split('-')[0])
if time.time() - t0 > 60 * 60 * 24:
raise Exception
user = table_user(reset_password_key=key)
if not user:
raise Exception
except Exception:
session.flash = self.messages.invalid_reset_password
redirect(next, client_side=self.settings.client_side)
passfield = self.settings.password_field
form = SQLFORM.factory(
Field('new_password', 'password',
label=self.messages.new_password,
requires=self.table_user()[passfield].requires),
Field('new_password2', 'password',
label=self.messages.verify_password,
requires=[IS_EXPR(
'value==%s' % repr(request.vars.new_password),
self.messages.mismatched_password)]),
submit_button=self.messages.password_reset_button,
hidden=dict(_next=next),
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if form.accepts(request, session,
hideerror=self.settings.hideerror):
user.update_record(
**{passfield: str(form.vars.new_password),
'registration_key': '',
'reset_password_key': ''})
session.flash = self.messages.password_changed
if self.settings.login_after_password_change:
self.login_user(user)
redirect(next, client_side=self.settings.client_side)
return form
def request_reset_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form to reset the user password
"""
table_user = self.table_user()
request = current.request
response = current.response
session = current.session
captcha = self.settings.retrieve_password_captcha or \
(self.settings.retrieve_password_captcha != False and self.settings.captcha)
if next is DEFAULT:
next = self.get_vars_next() or self.settings.request_reset_password_next
if not self.settings.mailer:
response.flash = self.messages.function_disabled
return ''
if onvalidation is DEFAULT:
onvalidation = self.settings.reset_password_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.reset_password_onaccept
if log is DEFAULT:
log = self.messages['reset_password_log']
        userfield = self.settings.login_userfield or (
            'username' if 'username' in table_user.fields else 'email')
if userfield == 'email':
table_user.email.requires = [
IS_EMAIL(error_message=self.messages.invalid_email),
IS_IN_DB(self.db, table_user.email,
error_message=self.messages.invalid_email)]
if not self.settings.email_case_sensitive:
table_user.email.requires.insert(0, IS_LOWER())
else:
table_user.username.requires = [
IS_IN_DB(self.db, table_user.username,
error_message=self.messages.invalid_username)]
if not self.settings.username_case_sensitive:
table_user.username.requires.insert(0, IS_LOWER())
form = SQLFORM(table_user,
fields=[userfield],
hidden=dict(_next=next),
showid=self.settings.showid,
submit_button=self.messages.password_reset_button,
delete_label=self.messages.delete_label,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if captcha:
addrow(form, captcha.label, captcha,
captcha.comment, self.settings.formstyle, 'captcha__row')
if form.accepts(request, session if self.csrf_prevention else None,
formname='reset_password', dbio=False,
onvalidation=onvalidation,
hideerror=self.settings.hideerror):
user = table_user(**{userfield:form.vars.get(userfield)})
if not user:
session.flash = self.messages['invalid_%s' % userfield]
redirect(self.url(args=request.args),
client_side=self.settings.client_side)
elif user.registration_key in ('pending', 'disabled', 'blocked'):
session.flash = self.messages.registration_pending
redirect(self.url(args=request.args),
client_side=self.settings.client_side)
if self.email_reset_password(user):
session.flash = self.messages.email_sent
else:
session.flash = self.messages.unable_to_send_email
self.log_event(log, user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next, client_side=self.settings.client_side)
# old_requires = table_user.email.requires
return form
def email_reset_password(self, user):
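        # The key embeds a creation timestamp so that reset_password() can
        # reject links older than 24 hours (see the t0 check in that method).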
reset_password_key = str(int(time.time())) + '-' + web2py_uuid()
link = self.url(self.settings.function,
args=('reset_password',), vars={'key': reset_password_key},
scheme=True)
d = dict(user)
d.update(dict(key=reset_password_key, link=link))
if self.settings.mailer and self.settings.mailer.send(
to=user.email,
subject=self.messages.reset_password_subject,
message=self.messages.reset_password % d):
user.update_record(reset_password_key=reset_password_key)
return True
return False
def retrieve_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
if self.settings.reset_password_requires_verification:
return self.request_reset_password(next, onvalidation, onaccept, log)
else:
return self.reset_password_deprecated(next, onvalidation, onaccept, log)
def change_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form that lets the user change password
"""
if not self.is_logged_in():
redirect(self.settings.login_url,
client_side=self.settings.client_side)
db = self.db
table_user = self.table_user()
s = db(table_user.id == self.user.id)
request = current.request
session = current.session
if next is DEFAULT:
next = self.get_vars_next() or self.settings.change_password_next
if onvalidation is DEFAULT:
onvalidation = self.settings.change_password_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.change_password_onaccept
if log is DEFAULT:
log = self.messages['change_password_log']
passfield = self.settings.password_field
requires = table_user[passfield].requires
if not isinstance(requires, (list, tuple)):
requires = [requires]
requires = filter(lambda t: isinstance(t, CRYPT), requires)
if requires:
requires[0].min_length = 0
form = SQLFORM.factory(
Field('old_password', 'password', requires=requires,
label=self.messages.old_password),
Field('new_password', 'password',
label=self.messages.new_password,
requires=table_user[passfield].requires),
Field('new_password2', 'password',
label=self.messages.verify_password,
requires=[IS_EXPR(
'value==%s' % repr(request.vars.new_password),
self.messages.mismatched_password)]),
submit_button=self.messages.password_change_button,
hidden=dict(_next=next),
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if form.accepts(request, session,
formname='change_password',
onvalidation=onvalidation,
hideerror=self.settings.hideerror):
current_user = s.select(limitby=(0, 1), orderby_on_limitby=False).first()
if not form.vars['old_password'] == current_user[passfield]:
form.errors['old_password'] = self.messages.invalid_password
else:
d = {passfield: str(form.vars.new_password)}
s.update(**d)
session.flash = self.messages.password_changed
self.log_event(log, self.user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next, client_side=self.settings.client_side)
return form
def profile(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form that lets the user change his/her profile
"""
table_user = self.table_user()
if not self.is_logged_in():
redirect(self.settings.login_url,
client_side=self.settings.client_side)
passfield = self.settings.password_field
table_user[passfield].writable = False
request = current.request
session = current.session
if next is DEFAULT:
next = self.get_vars_next() or self.settings.profile_next
if onvalidation is DEFAULT:
onvalidation = self.settings.profile_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.profile_onaccept
if log is DEFAULT:
log = self.messages['profile_log']
form = SQLFORM(
table_user,
self.user.id,
fields=self.settings.profile_fields,
hidden=dict(_next=next),
showid=self.settings.showid,
submit_button=self.messages.profile_save_button,
delete_label=self.messages.delete_label,
upload=self.settings.download_url,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator,
deletable=self.settings.allow_delete_accounts,
)
if form.accepts(request, session,
formname='profile',
onvalidation=onvalidation,
hideerror=self.settings.hideerror):
self.user.update(table_user._filter_fields(form.vars))
session.flash = self.messages.profile_updated
self.log_event(log, self.user)
callback(onaccept, form)
if form.deleted:
return self.logout()
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next, client_side=self.settings.client_side)
return form
def run_login_onaccept(self):
onaccept = self.settings.login_onaccept
if onaccept:
form = Storage(dict(vars=self.user))
if not isinstance(onaccept, (list, tuple)):
onaccept = [onaccept]
for callback in onaccept:
callback(form)
def is_impersonating(self):
return self.is_logged_in() and 'impersonator' in current.session.auth
def impersonate(self, user_id=DEFAULT):
"""
        To use this, make a POST to
        `http://..../impersonate` with request.post_vars.user_id=<id>
Set request.post_vars.user_id to 0 to restore original user.
requires impersonator is logged in and::
has_permission('impersonate', 'auth_user', user_id)
"""
request = current.request
session = current.session
auth = session.auth
table_user = self.table_user()
if not self.is_logged_in():
raise HTTP(401, "Not Authorized")
current_id = auth.user.id
requested_id = user_id
if user_id is DEFAULT:
user_id = current.request.post_vars.user_id
if user_id and user_id != self.user.id and user_id != '0':
if not self.has_permission('impersonate',
self.table_user(),
user_id):
raise HTTP(403, "Forbidden")
user = table_user(user_id)
if not user:
raise HTTP(401, "Not Authorized")
auth.impersonator = pickle.dumps(session, pickle.HIGHEST_PROTOCOL)
auth.user.update(
table_user._filter_fields(user, True))
self.user = auth.user
self.update_groups()
log = self.messages['impersonate_log']
self.log_event(log, dict(id=current_id, other_id=auth.user.id))
self.run_login_onaccept()
elif user_id in (0, '0'):
if self.is_impersonating():
session.clear()
session.update(pickle.loads(auth.impersonator))
self.user = session.auth.user
self.update_groups()
self.run_login_onaccept()
return None
if requested_id is DEFAULT and not request.post_vars:
return SQLFORM.factory(Field('user_id', 'integer'))
return SQLFORM(table_user, user.id, readonly=True)
def update_groups(self):
if not self.user:
return
user_groups = self.user_groups = {}
if current.session.auth:
current.session.auth.user_groups = self.user_groups
table_group = self.table_group()
table_membership = self.table_membership()
memberships = self.db(
table_membership.user_id == self.user.id).select()
for membership in memberships:
group = table_group(membership.group_id)
if group:
user_groups[membership.group_id] = group.role
def groups(self):
"""
Displays the groups and their roles for the logged in user
"""
if not self.is_logged_in():
redirect(self.settings.login_url)
table_membership = self.table_membership()
memberships = self.db(
table_membership.user_id == self.user.id).select()
table = TABLE()
for membership in memberships:
table_group = self.table_group()
groups = self.db(table_group.id == membership.group_id).select()
if groups:
group = groups[0]
table.append(TR(H3(group.role, '(%s)' % group.id)))
table.append(TR(P(group.description)))
if not memberships:
return None
return table
def not_authorized(self):
"""
You can change the view for this page to make it look as you like
"""
if current.request.ajax:
raise HTTP(403, 'ACCESS DENIED')
return self.messages.access_denied
def requires(self, condition, requires_login=True, otherwise=None):
"""
        Decorator that prevents access to an action unless `condition` holds
        (and, when requires_login is True, unless the user is logged in)
"""
def decorator(action):
def f(*a, **b):
basic_allowed, basic_accepted, user = self.basic()
user = user or self.user
if requires_login:
if not user:
if current.request.ajax:
raise HTTP(401, self.messages.ajax_failed_authentication)
elif not otherwise is None:
if callable(otherwise):
return otherwise()
redirect(otherwise)
elif self.settings.allow_basic_login_only or \
basic_accepted or current.request.is_restful:
raise HTTP(403, "Not authorized")
else:
next = self.here()
current.session.flash = current.response.flash
return call_or_redirect(
self.settings.on_failed_authentication,
self.settings.login_url +
'?_next=' + urllib.quote(next))
if callable(condition):
flag = condition()
else:
flag = condition
if not flag:
current.session.flash = self.messages.access_denied
return call_or_redirect(
self.settings.on_failed_authorization)
return action(*a, **b)
f.__doc__ = action.__doc__
f.__name__ = action.__name__
f.__dict__.update(action.__dict__)
return f
return decorator
def requires_login(self, otherwise=None):
"""
Decorator that prevents access to action if not logged in
"""
return self.requires(True, otherwise=otherwise)
def requires_membership(self, role=None, group_id=None, otherwise=None):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of group_id.
If role is provided instead of group_id then the
group_id is calculated.
"""
def has_membership(self=self, group_id=group_id, role=role):
return self.has_membership(group_id=group_id, role=role)
return self.requires(has_membership, otherwise=otherwise)
def requires_permission(self, name, table_name='', record_id=0,
otherwise=None):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of any group (role) that
has 'name' access to 'table_name', 'record_id'.
"""
def has_permission(self=self, name=name, table_name=table_name, record_id=record_id):
return self.has_permission(name, table_name, record_id)
return self.requires(has_permission, otherwise=otherwise)
def requires_signature(self, otherwise=None, hash_vars=True):
"""
        Decorator that prevents access to an action unless the request URL
        carries a valid digital signature (as produced by
        URL(..., user_signature=True))
"""
def verify():
return URL.verify(current.request, user_signature=True, hash_vars=hash_vars)
return self.requires(verify, otherwise)
def add_group(self, role, description=''):
"""
Creates a group associated to a role
"""
group_id = self.table_group().insert(
role=role, description=description)
self.log_event(self.messages['add_group_log'],
dict(group_id=group_id, role=role))
return group_id
def del_group(self, group_id):
"""
Deletes a group
"""
self.db(self.table_group().id == group_id).delete()
self.db(self.table_membership().group_id == group_id).delete()
self.db(self.table_permission().group_id == group_id).delete()
if group_id in self.user_groups: del self.user_groups[group_id]
self.log_event(self.messages.del_group_log, dict(group_id=group_id))
def id_group(self, role):
"""
Returns the group_id of the group specified by the role
"""
rows = self.db(self.table_group().role == role).select()
if not rows:
return None
return rows[0].id
def user_group(self, user_id=None):
"""
Returns the group_id of the group uniquely associated to this user
i.e. `role=user:[user_id]`
"""
return self.id_group(self.user_group_role(user_id))
def user_group_role(self, user_id=None):
if not self.settings.create_user_groups:
return None
if user_id:
user = self.table_user()[user_id]
else:
user = self.user
return self.settings.create_user_groups % user
def has_membership(self, group_id=None, user_id=None, role=None):
"""
Checks if user is member of group_id or role
"""
group_id = group_id or self.id_group(role)
try:
group_id = int(group_id)
except:
group_id = self.id_group(group_id) # interpret group_id as a role
if not user_id and self.user:
user_id = self.user.id
membership = self.table_membership()
if group_id and user_id and self.db((membership.user_id == user_id)
& (membership.group_id == group_id)).select():
r = True
else:
r = False
self.log_event(self.messages['has_membership_log'],
dict(user_id=user_id, group_id=group_id, check=r))
return r
def add_membership(self, group_id=None, user_id=None, role=None):
"""
Gives user_id membership of group_id or role
        if user_id is None then user_id is that of the current logged in user
"""
group_id = group_id or self.id_group(role)
try:
group_id = int(group_id)
except:
group_id = self.id_group(group_id) # interpret group_id as a role
if not user_id and self.user:
user_id = self.user.id
membership = self.table_membership()
record = membership(user_id=user_id, group_id=group_id)
if record:
return record.id
else:
id = membership.insert(group_id=group_id, user_id=user_id)
if role:
self.user_groups[group_id] = role
else:
self.update_groups()
self.log_event(self.messages['add_membership_log'],
dict(user_id=user_id, group_id=group_id))
return id
def del_membership(self, group_id=None, user_id=None, role=None):
"""
        Revokes user_id's membership of group_id or role;
        if user_id is None then user_id is that of the current logged in user
"""
group_id = group_id or self.id_group(role)
if not user_id and self.user:
user_id = self.user.id
membership = self.table_membership()
self.log_event(self.messages['del_membership_log'],
dict(user_id=user_id, group_id=group_id))
ret = self.db(membership.user_id
== user_id)(membership.group_id
== group_id).delete()
if group_id in self.user_groups: del self.user_groups[group_id]
return ret
def has_permission(self,
name='any',
table_name='',
record_id=0,
user_id=None,
group_id=None,
):
"""
Checks if user_id or current logged in user is member of a group
that has 'name' permission on 'table_name' and 'record_id'
if group_id is passed, it checks whether the group has the permission
"""
if not group_id and self.settings.everybody_group_id and \
self.has_permission(
name, table_name, record_id, user_id=None,
group_id=self.settings.everybody_group_id):
return True
if not user_id and not group_id and self.user:
user_id = self.user.id
if user_id:
membership = self.table_membership()
rows = self.db(membership.user_id
== user_id).select(membership.group_id)
groups = set([row.group_id for row in rows])
if group_id and not group_id in groups:
return False
else:
groups = set([group_id])
permission = self.table_permission()
rows = self.db(permission.name == name)(permission.table_name
== str(table_name))(permission.record_id
== record_id).select(permission.group_id)
groups_required = set([row.group_id for row in rows])
if record_id:
rows = self.db(permission.name
== name)(permission.table_name
== str(table_name))(permission.record_id
== 0).select(permission.group_id)
groups_required = groups_required.union(set([row.group_id
for row in rows]))
if groups.intersection(groups_required):
r = True
else:
r = False
if user_id:
self.log_event(self.messages['has_permission_log'],
dict(user_id=user_id, name=name,
table_name=table_name, record_id=record_id))
return r
def add_permission(self,
group_id,
name='any',
table_name='',
record_id=0,
):
"""
Gives group_id 'name' access to 'table_name' and 'record_id'
"""
permission = self.table_permission()
if group_id == 0:
group_id = self.user_group()
record = self.db(permission.group_id == group_id)(permission.name == name)(permission.table_name == str(table_name))(
permission.record_id == long(record_id)).select(limitby=(0, 1), orderby_on_limitby=False).first()
if record:
id = record.id
else:
id = permission.insert(group_id=group_id, name=name,
table_name=str(table_name),
record_id=long(record_id))
self.log_event(self.messages['add_permission_log'],
dict(permission_id=id, group_id=group_id,
name=name, table_name=table_name,
record_id=record_id))
return id
def del_permission(self,
group_id,
name='any',
table_name='',
record_id=0,
):
"""
Revokes group_id 'name' access to 'table_name' and 'record_id'
"""
permission = self.table_permission()
self.log_event(self.messages['del_permission_log'],
dict(group_id=group_id, name=name,
table_name=table_name, record_id=record_id))
return self.db(permission.group_id == group_id)(permission.name
== name)(permission.table_name
== str(table_name))(permission.record_id
== long(record_id)).delete()
def accessible_query(self, name, table, user_id=None):
"""
Returns a query with all accessible records for user_id or
the current logged in user
        this method does not work on GAE because it uses JOIN and IN
Example:
Use as::
db(auth.accessible_query('read', db.mytable)).select(db.mytable.ALL)
"""
if not user_id:
user_id = self.user_id
db = self.db
if isinstance(table, str) and table in self.db.tables():
table = self.db[table]
elif isinstance(table, (Set, Query)):
# experimental: build a chained query for all tables
if isinstance(table, Set):
cquery = table.query
else:
cquery = table
tablenames = db._adapter.tables(cquery)
for tablename in tablenames:
cquery &= self.accessible_query(name, tablename,
user_id=user_id)
return cquery
if not isinstance(table, str) and\
self.has_permission(name, table, 0, user_id):
return table.id > 0
membership = self.table_membership()
permission = self.table_permission()
query = table.id.belongs(
db(membership.user_id == user_id)
(membership.group_id == permission.group_id)
(permission.name == name)
(permission.table_name == table)
._select(permission.record_id))
if self.settings.everybody_group_id:
query |= table.id.belongs(
db(permission.group_id == self.settings.everybody_group_id)
(permission.name == name)
(permission.table_name == table)
._select(permission.record_id))
return query
@staticmethod
def archive(form,
archive_table=None,
current_record='current_record',
archive_current=False,
fields=None):
"""
If you have a table (db.mytable) that needs full revision history you
can just do::
form=crud.update(db.mytable,myrecord,onaccept=auth.archive)
or::
form=SQLFORM(db.mytable,myrecord).process(onaccept=auth.archive)
crud.archive will define a new table "mytable_archive" and store
a copy of the current record (if archive_current=True)
or a copy of the previous record (if archive_current=False)
in the newly created table including a reference
to the current record.
        The fields argument allows specifying extra fields that need to be archived.
If you want to access such table you need to define it yourself
in a model::
db.define_table('mytable_archive',
Field('current_record',db.mytable),
db.mytable)
Notice such table includes all fields of db.mytable plus one: current_record.
crud.archive does not timestamp the stored record unless your original table
has a fields like::
db.define_table(...,
Field('saved_on','datetime',
default=request.now,update=request.now,writable=False),
Field('saved_by',auth.user,
default=auth.user_id,update=auth.user_id,writable=False),
there is nothing special about these fields since they are filled before
the record is archived.
If you want to change the archive table name and the name of the reference field
you can do, for example::
db.define_table('myhistory',
Field('parent_record',db.mytable),
db.mytable)
and use it as::
form=crud.update(db.mytable,myrecord,
onaccept=lambda form:crud.archive(form,
archive_table=db.myhistory,
current_record='parent_record'))
"""
if not archive_current and not form.record:
return None
table = form.table
if not archive_table:
archive_table_name = '%s_archive' % table
if not archive_table_name in table._db:
table._db.define_table(
archive_table_name,
Field(current_record, table),
*[field.clone(unique=False) for field in table])
archive_table = table._db[archive_table_name]
new_record = {current_record: form.vars.id}
for fieldname in archive_table.fields:
if not fieldname in ['id', current_record]:
if archive_current and fieldname in form.vars:
new_record[fieldname] = form.vars[fieldname]
elif form.record and fieldname in form.record:
new_record[fieldname] = form.record[fieldname]
if fields:
new_record.update(fields)
id = archive_table.insert(**new_record)
return id
def wiki(self,
slug=None,
env=None,
render='markmin',
manage_permissions=False,
force_prefix='',
restrict_search=False,
resolve=True,
extra=None,
menu_groups=None,
templates=None,
migrate=True,
controller=None,
function=None,
force_render=False,
groups=None):
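        # Usage sketch (assumes a scaffolding controller action; the slug is
        # normally taken from request.args by the underlying Wiki object):
        #
        #     def index():
        #         return auth.wiki()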
if controller and function:
resolve = False
if not hasattr(self, '_wiki'):
self._wiki = Wiki(self, render=render,
manage_permissions=manage_permissions,
force_prefix=force_prefix,
restrict_search=restrict_search,
env=env, extra=extra or {},
menu_groups=menu_groups,
templates=templates,
migrate=migrate,
controller=controller,
function=function,
groups=groups)
else:
self._wiki.env.update(env or {})
# if resolve is set to True, process request as wiki call
# resolve=False allows initial setup without wiki redirection
wiki = None
if resolve:
if slug:
wiki = self._wiki.read(slug, force_render)
if isinstance(wiki, dict) and wiki.has_key('content'): # FIXME: .has_key() is deprecated
# We don't want to return a dict object, just the wiki
wiki = wiki['content']
else:
wiki = self._wiki()
if isinstance(wiki, basestring):
wiki = XML(wiki)
return wiki
def wikimenu(self):
"""To be used in menu.py for app wide wiki menus"""
if (hasattr(self, "_wiki") and
self._wiki.settings.controller and
self._wiki.settings.function):
self._wiki.automenu()
class Crud(object):
def url(self, f=None, args=None, vars=None):
"""
This should point to the controller that exposes
download and crud
"""
if args is None:
args = []
if vars is None:
vars = {}
return URL(c=self.settings.controller, f=f, args=args, vars=vars)
def __init__(self, environment, db=None, controller='default'):
self.db = db
if not db and environment and isinstance(environment, DAL):
self.db = environment
elif not db:
raise SyntaxError("must pass db as first or second argument")
self.environment = current
settings = self.settings = Settings()
settings.auth = None
settings.logger = None
settings.create_next = None
settings.update_next = None
settings.controller = controller
settings.delete_next = self.url()
settings.download_url = self.url('download')
settings.create_onvalidation = StorageList()
settings.update_onvalidation = StorageList()
settings.delete_onvalidation = StorageList()
settings.create_onaccept = StorageList()
settings.update_onaccept = StorageList()
settings.update_ondelete = StorageList()
settings.delete_onaccept = StorageList()
settings.update_deletable = True
settings.showid = False
settings.keepvalues = False
settings.create_captcha = None
settings.update_captcha = None
settings.captcha = None
settings.formstyle = 'table3cols'
settings.label_separator = ': '
settings.hideerror = False
settings.detect_record_change = True
settings.hmac_key = None
settings.lock_keys = True
messages = self.messages = Messages(current.T)
messages.submit_button = 'Submit'
messages.delete_label = 'Check to delete'
messages.record_created = 'Record Created'
messages.record_updated = 'Record Updated'
messages.record_deleted = 'Record Deleted'
messages.update_log = 'Record %(id)s updated'
messages.create_log = 'Record %(id)s created'
messages.read_log = 'Record %(id)s read'
messages.delete_log = 'Record %(id)s deleted'
messages.lock_keys = True
def __call__(self):
args = current.request.args
if len(args) < 1:
raise HTTP(404)
elif args[0] == 'tables':
return self.tables()
elif len(args) > 1 and not args(1) in self.db.tables:
raise HTTP(404)
table = self.db[args(1)]
if args[0] == 'create':
return self.create(table)
elif args[0] == 'select':
return self.select(table, linkto=self.url(args='read'))
elif args[0] == 'search':
form, rows = self.search(table, linkto=self.url(args='read'))
return DIV(form, SQLTABLE(rows))
elif args[0] == 'read':
return self.read(table, args(2))
elif args[0] == 'update':
return self.update(table, args(2))
elif args[0] == 'delete':
return self.delete(table, args(2))
else:
raise HTTP(404)
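        # Usage sketch (assumes `crud = Crud(db)` in a model and that this
        # action lives in the controller named by crud.settings.controller):
        #
        #     def data():
        #         return dict(form=crud())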
def log_event(self, message, vars):
if self.settings.logger:
self.settings.logger.log_event(message, vars, origin='crud')
def has_permission(self, name, table, record=0):
if not self.settings.auth:
return True
try:
record_id = record.id
except:
record_id = record
return self.settings.auth.has_permission(name, str(table), record_id)
def tables(self):
return TABLE(*[TR(A(name,
_href=self.url(args=('select', name))))
for name in self.db.tables])
@staticmethod
def archive(form, archive_table=None, current_record='current_record'):
return Auth.archive(form, archive_table=archive_table,
current_record=current_record)
def update(self,
table,
record,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
ondelete=DEFAULT,
log=DEFAULT,
message=DEFAULT,
deletable=DEFAULT,
formname=DEFAULT,
**attributes
):
if not (isinstance(table, Table) or table in self.db.tables) \
or (isinstance(record, str) and not str(record).isdigit()):
raise HTTP(404)
if not isinstance(table, Table):
table = self.db[table]
try:
record_id = record.id
except:
record_id = record or 0
if record_id and not self.has_permission('update', table, record_id):
redirect(self.settings.auth.settings.on_failed_authorization)
if not record_id and not self.has_permission('create', table, record_id):
redirect(self.settings.auth.settings.on_failed_authorization)
request = current.request
response = current.response
session = current.session
if request.extension == 'json' and request.vars.json:
request.vars.update(json_parser.loads(request.vars.json))
if next is DEFAULT:
next = request.get_vars._next \
or request.post_vars._next \
or self.settings.update_next
if onvalidation is DEFAULT:
onvalidation = self.settings.update_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.update_onaccept
if ondelete is DEFAULT:
ondelete = self.settings.update_ondelete
if log is DEFAULT:
log = self.messages['update_log']
if deletable is DEFAULT:
deletable = self.settings.update_deletable
if message is DEFAULT:
message = self.messages.record_updated
if not 'hidden' in attributes:
attributes['hidden'] = {}
attributes['hidden']['_next'] = next
form = SQLFORM(
table,
record,
showid=self.settings.showid,
submit_button=self.messages.submit_button,
delete_label=self.messages.delete_label,
deletable=deletable,
upload=self.settings.download_url,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator,
**attributes # contains hidden
)
self.accepted = False
self.deleted = False
captcha = self.settings.update_captcha or self.settings.captcha
if record and captcha:
addrow(form, captcha.label, captcha, captcha.comment,
self.settings.formstyle, 'captcha__row')
captcha = self.settings.create_captcha or self.settings.captcha
if not record and captcha:
addrow(form, captcha.label, captcha, captcha.comment,
self.settings.formstyle, 'captcha__row')
if not request.extension in ('html', 'load'):
(_session, _formname) = (None, None)
else:
(_session, _formname) = (
session, '%s/%s' % (table._tablename, form.record_id))
if not formname is DEFAULT:
_formname = formname
keepvalues = self.settings.keepvalues
if request.vars.delete_this_record:
keepvalues = False
if isinstance(onvalidation, StorageList):
onvalidation = onvalidation.get(table._tablename, [])
if form.accepts(request, _session, formname=_formname,
onvalidation=onvalidation, keepvalues=keepvalues,
hideerror=self.settings.hideerror,
detect_record_change=self.settings.detect_record_change):
self.accepted = True
response.flash = message
if log:
self.log_event(log, form.vars)
if request.vars.delete_this_record:
self.deleted = True
message = self.messages.record_deleted
callback(ondelete, form, table._tablename)
response.flash = message
callback(onaccept, form, table._tablename)
if not request.extension in ('html', 'load'):
raise HTTP(200, 'RECORD CREATED/UPDATED')
if isinstance(next, (list, tuple)): # fix issue with 2.6
next = next[0]
if next: # Only redirect when explicit
next = replace_id(next, form)
session.flash = response.flash
redirect(next)
elif not request.extension in ('html', 'load'):
raise HTTP(401, serializers.json(dict(errors=form.errors)))
return form
def create(self,
table,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
message=DEFAULT,
formname=DEFAULT,
**attributes
):
if next is DEFAULT:
next = self.settings.create_next
if onvalidation is DEFAULT:
onvalidation = self.settings.create_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.create_onaccept
if log is DEFAULT:
log = self.messages['create_log']
if message is DEFAULT:
message = self.messages.record_created
return self.update(
table,
None,
next=next,
onvalidation=onvalidation,
onaccept=onaccept,
log=log,
message=message,
deletable=False,
formname=formname,
**attributes
)
def read(self, table, record):
if not (isinstance(table, Table) or table in self.db.tables) \
or (isinstance(record, str) and not str(record).isdigit()):
raise HTTP(404)
if not isinstance(table, Table):
table = self.db[table]
if not self.has_permission('read', table, record):
redirect(self.settings.auth.settings.on_failed_authorization)
form = SQLFORM(
table,
record,
readonly=True,
comments=False,
upload=self.settings.download_url,
showid=self.settings.showid,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if not current.request.extension in ('html', 'load'):
return table._filter_fields(form.record, id=True)
return form
def delete(self,
table,
record_id,
next=DEFAULT,
message=DEFAULT,
):
if not (isinstance(table, Table) or table in self.db.tables):
raise HTTP(404)
if not isinstance(table, Table):
table = self.db[table]
if not self.has_permission('delete', table, record_id):
redirect(self.settings.auth.settings.on_failed_authorization)
request = current.request
session = current.session
if next is DEFAULT:
next = request.get_vars._next \
or request.post_vars._next \
or self.settings.delete_next
if message is DEFAULT:
message = self.messages.record_deleted
record = table[record_id]
if record:
callback(self.settings.delete_onvalidation, record)
del table[record_id]
callback(self.settings.delete_onaccept, record, table._tablename)
session.flash = message
redirect(next)
def rows(
self,
table,
query=None,
fields=None,
orderby=None,
limitby=None,
):
if not (isinstance(table, Table) or table in self.db.tables):
raise HTTP(404)
if not self.has_permission('select', table):
redirect(self.settings.auth.settings.on_failed_authorization)
#if record_id and not self.has_permission('select', table):
# redirect(self.settings.auth.settings.on_failed_authorization)
if not isinstance(table, Table):
table = self.db[table]
if not query:
query = table.id > 0
if not fields:
fields = [field for field in table if field.readable]
else:
fields = [table[f] if isinstance(f, str) else f for f in fields]
rows = self.db(query).select(*fields, **dict(orderby=orderby,
limitby=limitby))
return rows
def select(self,
table,
query=None,
fields=None,
orderby=None,
limitby=None,
headers=None,
**attr
):
headers = headers or {}
rows = self.rows(table, query, fields, orderby, limitby)
if not rows:
return None # Nicer than an empty table.
if not 'upload' in attr:
attr['upload'] = self.url('download')
if not current.request.extension in ('html', 'load'):
return rows.as_list()
if not headers:
if isinstance(table, str):
table = self.db[table]
headers = dict((str(k), k.label) for k in table)
return SQLTABLE(rows, headers=headers, **attr)
def get_format(self, field):
rtable = field._db[field.type[10:]]
format = rtable.get('_format', None)
if format and isinstance(format, str):
return format[2:-2]
return field.name
def get_query(self, field, op, value, refsearch=False):
try:
if refsearch:
format = self.get_format(field)
if op == 'equals':
if not refsearch:
return field == value
else:
return lambda row: row[field.name][format] == value
elif op == 'not equal':
if not refsearch:
return field != value
else:
return lambda row: row[field.name][format] != value
elif op == 'greater than':
if not refsearch:
return field > value
else:
return lambda row: row[field.name][format] > value
elif op == 'less than':
if not refsearch:
return field < value
else:
return lambda row: row[field.name][format] < value
elif op == 'starts with':
if not refsearch:
return field.like(value + '%')
else:
return lambda row: str(row[field.name][format]).startswith(value)
elif op == 'ends with':
if not refsearch:
return field.like('%' + value)
else:
return lambda row: str(row[field.name][format]).endswith(value)
elif op == 'contains':
if not refsearch:
return field.like('%' + value + '%')
else:
return lambda row: value in row[field.name][format]
except:
return None
def search(self, *tables, **args):
"""
Creates a search form and its results for a table
Examples:
Use as::
form, results = crud.search(db.test,
queries = ['equals', 'not equal', 'contains'],
query_labels={'equals':'Equals',
'not equal':'Not equal'},
fields = ['id','children'],
field_labels = {
'id':'ID','children':'Children'},
zero='Please choose',
query = (db.test.id > 0)&(db.test.id != 3) )
"""
table = tables[0]
fields = args.get('fields', table.fields)
validate = args.get('validate', True)
request = current.request
db = self.db
if not (isinstance(table, Table) or table in db.tables):
raise HTTP(404)
attributes = {}
for key in ('orderby', 'groupby', 'left', 'distinct', 'limitby', 'cache'):
if key in args:
attributes[key] = args[key]
tbl = TABLE()
selected = []
refsearch = []
results = []
showall = args.get('showall', False)
if showall:
selected = fields
chkall = args.get('chkall', False)
if chkall:
for f in fields:
request.vars['chk%s' % f] = 'on'
ops = args.get('queries', [])
zero = args.get('zero', '')
if not ops:
ops = ['equals', 'not equal', 'greater than',
'less than', 'starts with',
'ends with', 'contains']
ops.insert(0, zero)
query_labels = args.get('query_labels', {})
query = args.get('query', table.id > 0)
field_labels = args.get('field_labels', {})
for field in fields:
field = table[field]
if not field.readable:
continue
fieldname = field.name
chkval = request.vars.get('chk' + fieldname, None)
txtval = request.vars.get('txt' + fieldname, None)
opval = request.vars.get('op' + fieldname, None)
row = TR(TD(INPUT(_type="checkbox", _name="chk" + fieldname,
_disabled=(field.type == 'id'),
value=(field.type == 'id' or chkval == 'on'))),
TD(field_labels.get(fieldname, field.label)),
TD(SELECT([OPTION(query_labels.get(op, op),
_value=op) for op in ops],
_name="op" + fieldname,
value=opval)),
TD(INPUT(_type="text", _name="txt" + fieldname,
_value=txtval, _id='txt' + fieldname,
_class=str(field.type))))
tbl.append(row)
if request.post_vars and (chkval or field.type == 'id'):
if txtval and opval != '':
if field.type[0:10] == 'reference ':
refsearch.append(self.get_query(field, opval, txtval, refsearch=True))
elif validate:
value, error = field.validate(txtval)
if not error:
### TODO deal with 'starts with', 'ends with', 'contains' on GAE
query &= self.get_query(field, opval, value)
else:
row[3].append(DIV(error, _class='error'))
else:
query &= self.get_query(field, opval, txtval)
selected.append(field)
form = FORM(tbl, INPUT(_type="submit"))
if selected:
try:
results = db(query).select(*selected, **attributes)
for r in refsearch:
results = results.find(r)
except: # hmmm, we should do better here
results = None
return form, results
urllib2.install_opener(urllib2.build_opener(urllib2.HTTPCookieProcessor()))
def fetch(url, data=None, headers=None,
cookie=Cookie.SimpleCookie(),
user_agent='Mozilla/5.0'):
headers = headers or {}
if not data is None:
data = urllib.urlencode(data)
if user_agent:
headers['User-agent'] = user_agent
headers['Cookie'] = ' '.join(
['%s=%s;' % (c.key, c.value) for c in cookie.values()])
try:
from google.appengine.api import urlfetch
except ImportError:
req = urllib2.Request(url, data, headers)
html = urllib2.urlopen(req).read()
else:
method = ((data is None) and urlfetch.GET) or urlfetch.POST
while url is not None:
response = urlfetch.fetch(url=url, payload=data,
method=method, headers=headers,
allow_truncated=False, follow_redirects=False,
deadline=10)
# next request will be a get, so no need to send the data again
data = None
method = urlfetch.GET
# load cookies from the response
cookie.load(response.headers.get('set-cookie', ''))
url = response.headers.get('location')
html = response.content
return html
regex_geocode = \
re.compile(r"""<geometry>[\W]*?<location>[\W]*?<lat>(?P<la>[^<]*)</lat>[\W]*?<lng>(?P<lo>[^<]*)</lng>[\W]*?</location>""")
def geocode(address):
try:
a = urllib.quote(address)
txt = fetch('http://maps.googleapis.com/maps/api/geocode/xml?sensor=false&address=%s'
% a)
item = regex_geocode.search(txt)
(la, lo) = (float(item.group('la')), float(item.group('lo')))
return (la, lo)
except:
return (0.0, 0.0)
def reverse_geocode(lat, lng, lang=None):
""" Try to get an approximate address for a given latitude, longitude. """
if not lang:
lang = current.T.accepted_language
try:
return json_parser.loads(fetch('http://maps.googleapis.com/maps/api/geocode/json?latlng=%(lat)s,%(lng)s&language=%(lang)s' % locals()))['results'][0]['formatted_address']
except:
return ''
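# Hedged usage sketch for the two helpers above (not from the original docs).
# Both need outbound network access to the Google geocoding API, which on
# current endpoints may also require an API key, so results are not guaranteed:
#
#     lat, lng = geocode('1600 Amphitheatre Parkway, Mountain View, CA')
#     address = reverse_geocode(lat, lng)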
def universal_caller(f, *a, **b):
c = f.func_code.co_argcount
n = f.func_code.co_varnames[:c]
defaults = f.func_defaults or []
pos_args = n[0:-len(defaults)]
named_args = n[-len(defaults):]
arg_dict = {}
# Fill the arg_dict with name and value for the submitted, positional values
for pos_index, pos_val in enumerate(a[:c]):
arg_dict[n[pos_index]] = pos_val # n[pos_index] is the name of the argument
    # There might be pos_args left that were sent as named values. Gather them as well.
    # If an argument is already populated with a value, we simply replace it.
for arg_name in pos_args[len(arg_dict):]:
if arg_name in b:
arg_dict[arg_name] = b[arg_name]
if len(arg_dict) >= len(pos_args):
        # All the positional arguments are found. The function may now be called.
# However, we need to update the arg_dict with the values from the named arguments as well.
for arg_name in named_args:
if arg_name in b:
arg_dict[arg_name] = b[arg_name]
return f(**arg_dict)
# Raise an error, the function cannot be called.
raise HTTP(404, "Object does not exist")
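def _example_universal_caller():
    # Illustrative sketch only; this helper is not part of the original API.
    # It shows how universal_caller fills the positional arguments of `f` from
    # *a and from matching keyword arguments in **b before calling it.
    def add(a, b, scale=1):
        return (a + b) * scale
    assert universal_caller(add, 3, b=4) == 7
    assert universal_caller(add, a=3, b=4, scale=2) == 14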
class Service(object):
def __init__(self, environment=None):
self.run_procedures = {}
self.csv_procedures = {}
self.xml_procedures = {}
self.rss_procedures = {}
self.json_procedures = {}
self.jsonrpc_procedures = {}
self.jsonrpc2_procedures = {}
self.xmlrpc_procedures = {}
self.amfrpc_procedures = {}
self.amfrpc3_procedures = {}
self.soap_procedures = {}
def run(self, f):
"""
Example:
Use as::
service = Service()
@service.run
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/run/myfunction?a=3&b=4
"""
self.run_procedures[f.__name__] = f
return f
def csv(self, f):
"""
Example:
Use as::
service = Service()
@service.csv
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/csv/myfunction?a=3&b=4
"""
self.run_procedures[f.__name__] = f
return f
def xml(self, f):
"""
Example:
Use as::
service = Service()
@service.xml
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/xml/myfunction?a=3&b=4
"""
self.run_procedures[f.__name__] = f
return f
def rss(self, f):
"""
Example:
Use as::
service = Service()
@service.rss
def myfunction():
return dict(title=..., link=..., description=...,
created_on=..., entries=[dict(title=..., link=...,
description=..., created_on=...])
def call():
return service()
Then call it with:
wget http://..../app/default/call/rss/myfunction
"""
self.rss_procedures[f.__name__] = f
return f
def json(self, f):
"""
Example:
Use as::
service = Service()
@service.json
def myfunction(a, b):
return [{a: b}]
def call():
return service()
            Then call it with::
wget http://..../app/default/call/json/myfunction?a=hello&b=world
"""
self.json_procedures[f.__name__] = f
return f
def jsonrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.jsonrpc
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget http://..../app/default/call/jsonrpc/myfunction?a=hello&b=world
"""
self.jsonrpc_procedures[f.__name__] = f
return f
def jsonrpc2(self, f):
"""
Example:
Use as::
service = Service()
@service.jsonrpc2
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget --post-data '{"jsonrpc": "2.0", "id": 1, "method": "myfunction", "params": {"a": 1, "b": 2}}' http://..../app/default/call/jsonrpc2
"""
self.jsonrpc2_procedures[f.__name__] = f
return f
def xmlrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.xmlrpc
def myfunction(a, b):
return a + b
def call():
return service()
            Then call it with:
wget http://..../app/default/call/xmlrpc/myfunction?a=hello&b=world
"""
self.xmlrpc_procedures[f.__name__] = f
return f
def amfrpc(self, f):
"""
Example:
Use as::
service = Service()
@service.amfrpc
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
wget http://..../app/default/call/amfrpc/myfunction?a=hello&b=world
"""
self.amfrpc_procedures[f.__name__] = f
return f
def amfrpc3(self, domain='default'):
"""
Example:
Use as::
service = Service()
@service.amfrpc3('domain')
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with:
wget http://..../app/default/call/amfrpc3/myfunction?a=hello&b=world
"""
if not isinstance(domain, str):
raise SyntaxError("AMF3 requires a domain for function")
def _amfrpc3(f):
if domain:
self.amfrpc3_procedures[domain + '.' + f.__name__] = f
else:
self.amfrpc3_procedures[f.__name__] = f
return f
return _amfrpc3
def soap(self, name=None, returns=None, args=None, doc=None):
"""
Example:
Use as::
service = Service()
@service.soap('MyFunction',returns={'result':int},args={'a':int,'b':int,})
def myfunction(a, b):
return a + b
def call():
return service()
Then call it with::
from gluon.contrib.pysimplesoap.client import SoapClient
client = SoapClient(wsdl="http://..../app/default/call/soap?WSDL")
response = client.MyFunction(a=1,b=2)
return response['result']
It also exposes online generated documentation and xml example messages
at `http://..../app/default/call/soap`
"""
def _soap(f):
self.soap_procedures[name or f.__name__] = f, returns, args, doc
return f
return _soap
def serve_run(self, args=None):
request = current.request
if not args:
args = request.args
if args and args[0] in self.run_procedures:
return str(universal_caller(self.run_procedures[args[0]],
*args[1:], **dict(request.vars)))
self.error()
def serve_csv(self, args=None):
request = current.request
response = current.response
response.headers['Content-Type'] = 'text/x-csv'
if not args:
args = request.args
def none_exception(value):
if isinstance(value, unicode):
return value.encode('utf8')
if hasattr(value, 'isoformat'):
return value.isoformat()[:19].replace('T', ' ')
if value is None:
return '<NULL>'
return value
if args and args[0] in self.run_procedures:
import types
r = universal_caller(self.run_procedures[args[0]],
*args[1:], **dict(request.vars))
s = cStringIO.StringIO()
if hasattr(r, 'export_to_csv_file'):
r.export_to_csv_file(s)
elif r and not isinstance(r, types.GeneratorType) and isinstance(r[0], (dict, Storage)):
import csv
writer = csv.writer(s)
writer.writerow(r[0].keys())
for line in r:
writer.writerow([none_exception(v)
for v in line.values()])
else:
import csv
writer = csv.writer(s)
for line in r:
writer.writerow(line)
return s.getvalue()
self.error()
def serve_xml(self, args=None):
request = current.request
response = current.response
response.headers['Content-Type'] = 'text/xml'
if not args:
args = request.args
if args and args[0] in self.run_procedures:
s = universal_caller(self.run_procedures[args[0]],
*args[1:], **dict(request.vars))
if hasattr(s, 'as_list'):
s = s.as_list()
return serializers.xml(s, quote=False)
self.error()
def serve_rss(self, args=None):
request = current.request
response = current.response
if not args:
args = request.args
if args and args[0] in self.rss_procedures:
feed = universal_caller(self.rss_procedures[args[0]],
*args[1:], **dict(request.vars))
else:
self.error()
response.headers['Content-Type'] = 'application/rss+xml'
return serializers.rss(feed)
def serve_json(self, args=None):
request = current.request
response = current.response
response.headers['Content-Type'] = 'application/json; charset=utf-8'
if not args:
args = request.args
d = dict(request.vars)
if args and args[0] in self.json_procedures:
s = universal_caller(self.json_procedures[args[0]], *args[1:], **d)
if hasattr(s, 'as_list'):
s = s.as_list()
return response.json(s)
self.error()
class JsonRpcException(Exception):
def __init__(self, code, info):
jrpc_error = Service.jsonrpc_errors.get(code)
if jrpc_error:
self.message, self.description = jrpc_error
self.code, self.info = code, info
# jsonrpc 2.0 error types. records the following structure {code: (message,meaning)}
jsonrpc_errors = {
-32700: ("Parse error. Invalid JSON was received by the server.", "An error occurred on the server while parsing the JSON text."),
-32600: ("Invalid Request", "The JSON sent is not a valid Request object."),
-32601: ("Method not found", "The method does not exist / is not available."),
-32602: ("Invalid params", "Invalid method parameter(s)."),
-32603: ("Internal error", "Internal JSON-RPC error."),
-32099: ("Server error", "Reserved for implementation-defined server-errors.")}
def serve_jsonrpc(self):
def return_response(id, result):
return serializers.json({'version': '1.1',
'id': id, 'result': result, 'error': None})
def return_error(id, code, message, data=None):
error = {'name': 'JSONRPCError',
'code': code, 'message': message}
if data is not None:
error['data'] = data
return serializers.json({'id': id,
'version': '1.1',
'error': error,
})
request = current.request
response = current.response
response.headers['Content-Type'] = 'application/json; charset=utf-8'
methods = self.jsonrpc_procedures
data = json_parser.loads(request.body.read())
jsonrpc_2 = data.get('jsonrpc')
if jsonrpc_2: #hand over to version 2 of the protocol
return self.serve_jsonrpc2(data)
id, method, params = data.get('id'), data.get('method'), data.get('params', [])
if id is None:
return return_error(0, 100, 'missing id')
if not method in methods:
return return_error(id, 100, 'method "%s" does not exist' % method)
try:
if isinstance(params, dict):
s = methods[method](**params)
else:
s = methods[method](*params)
if hasattr(s, 'as_list'):
s = s.as_list()
return return_response(id, s)
except Service.JsonRpcException, e:
return return_error(id, e.code, e.info)
except:
etype, eval, etb = sys.exc_info()
message = '%s: %s' % (etype.__name__, eval)
data = request.is_local and traceback.format_tb(etb)
logger.warning('jsonrpc exception %s\n%s' % (message, traceback.format_tb(etb)))
return return_error(id, 100, message, data)
def serve_jsonrpc2(self, data=None, batch_element=False):
def return_response(id, result):
if not must_respond:
return None
return serializers.json({'jsonrpc': '2.0',
'id': id, 'result': result})
def return_error(id, code, message=None, data=None):
error = {'code': code}
if Service.jsonrpc_errors.has_key(code):
error['message'] = Service.jsonrpc_errors[code][0]
error['data'] = Service.jsonrpc_errors[code][1]
if message is not None:
error['message'] = message
if data is not None:
error['data'] = data
return serializers.json({'jsonrpc': '2.0',
'id': id,
'error': error})
def validate(data):
"""
Validate request as defined in: http://www.jsonrpc.org/specification#request_object.
Args:
data(str): The json object.
Returns:
- True -- if successful
- False -- if no error should be reported (i.e. data is missing 'id' member)
Raises:
JsonRPCException
"""
iparms = set(data.keys())
mandatory_args = set(['jsonrpc', 'method'])
missing_args = mandatory_args - iparms
if missing_args:
raise Service.JsonRpcException(-32600, 'Missing arguments %s.' % list(missing_args))
if data['jsonrpc'] != '2.0':
raise Service.JsonRpcException(-32603, 'Unsupported jsonrpc version "%s"' % data['jsonrpc'])
if 'id' not in iparms:
return False
return True
request = current.request
response = current.response
if not data:
response.headers['Content-Type'] = 'application/json; charset=utf-8'
try:
data = json_parser.loads(request.body.read())
except ValueError: # decoding error in json lib
return return_error(None, -32700)
# Batch handling
if isinstance(data, list) and not batch_element:
retlist = []
for c in data:
retstr = self.serve_jsonrpc2(c, batch_element=True)
if retstr: # do not add empty responses
retlist.append(retstr)
if len(retlist) == 0: # return nothing
return ''
else:
return "[" + ','.join(retlist) + "]"
methods = self.jsonrpc2_procedures
methods.update(self.jsonrpc_procedures)
try:
must_respond = validate(data)
except Service.JsonRpcException, e:
return return_error(None, e.code, e.info)
id, method, params = data.get('id'), data['method'], data.get('params', '')
if not method in methods:
return return_error(id, -32601, data='Method "%s" does not exist' % method)
try:
if isinstance(params, dict):
s = methods[method](**params)
else:
s = methods[method](*params)
if hasattr(s, 'as_list'):
s = s.as_list()
if must_respond:
return return_response(id, s)
else:
return ''
except HTTP, e:
raise e
except Service.JsonRpcException, e:
return return_error(id, e.code, e.info)
except:
etype, eval, etb = sys.exc_info()
data = '%s: %s\n' % (etype.__name__, eval) + str(request.is_local and traceback.format_tb(etb))
logger.warning('%s: %s\n%s' % (etype.__name__, eval, traceback.format_tb(etb)))
return return_error(id, -32099, data=data)
def serve_xmlrpc(self):
request = current.request
response = current.response
services = self.xmlrpc_procedures.values()
return response.xmlrpc(request, services)
def serve_amfrpc(self, version=0):
try:
import pyamf
import pyamf.remoting.gateway
except:
return "pyamf not installed or not in Python sys.path"
request = current.request
response = current.response
if version == 3:
services = self.amfrpc3_procedures
base_gateway = pyamf.remoting.gateway.BaseGateway(services)
pyamf_request = pyamf.remoting.decode(request.body)
else:
services = self.amfrpc_procedures
base_gateway = pyamf.remoting.gateway.BaseGateway(services)
context = pyamf.get_context(pyamf.AMF0)
pyamf_request = pyamf.remoting.decode(request.body, context)
pyamf_response = pyamf.remoting.Envelope(pyamf_request.amfVersion)
for name, message in pyamf_request:
pyamf_response[name] = base_gateway.getProcessor(message)(message)
response.headers['Content-Type'] = pyamf.remoting.CONTENT_TYPE
if version == 3:
return pyamf.remoting.encode(pyamf_response).getvalue()
else:
return pyamf.remoting.encode(pyamf_response, context).getvalue()
def serve_soap(self, version="1.1"):
try:
from gluon.contrib.pysimplesoap.server import SoapDispatcher
except:
return "pysimplesoap not installed in contrib"
request = current.request
response = current.response
procedures = self.soap_procedures
location = "%s://%s%s" % (
request.env.wsgi_url_scheme,
request.env.http_host,
URL(r=request, f="call/soap", vars={}))
namespace = 'namespace' in response and response.namespace or location
documentation = response.description or ''
dispatcher = SoapDispatcher(
name=response.title,
location=location,
action=location, # SOAPAction
namespace=namespace,
prefix='pys',
documentation=documentation,
ns=True)
for method, (function, returns, args, doc) in procedures.iteritems():
dispatcher.register_function(method, function, returns, args, doc)
if request.env.request_method == 'POST':
fault = {}
# Process normal Soap Operation
response.headers['Content-Type'] = 'text/xml'
xml = dispatcher.dispatch(request.body.read(), fault=fault)
if fault:
# May want to consider populating a ticket here...
response.status = 500
# return the soap response
return xml
elif 'WSDL' in request.vars:
# Return Web Service Description
response.headers['Content-Type'] = 'text/xml'
return dispatcher.wsdl()
elif 'op' in request.vars:
# Return method help webpage
response.headers['Content-Type'] = 'text/html'
method = request.vars['op']
sample_req_xml, sample_res_xml, doc = dispatcher.help(method)
body = [H1("Welcome to Web2Py SOAP webservice gateway"),
A("See all webservice operations",
_href=URL(r=request, f="call/soap", vars={})),
H2(method),
P(doc),
UL(LI("Location: %s" % dispatcher.location),
LI("Namespace: %s" % dispatcher.namespace),
LI("SoapAction: %s" % dispatcher.action),
),
H3("Sample SOAP XML Request Message:"),
CODE(sample_req_xml, language="xml"),
H3("Sample SOAP XML Response Message:"),
CODE(sample_res_xml, language="xml"),
]
return {'body': body}
else:
# Return general help and method list webpage
response.headers['Content-Type'] = 'text/html'
body = [H1("Welcome to Web2Py SOAP webservice gateway"),
P(response.description),
P("The following operations are available"),
A("See WSDL for webservice description",
_href=URL(r=request, f="call/soap", vars={"WSDL":None})),
UL([LI(A("%s: %s" % (method, doc or ''),
_href=URL(r=request, f="call/soap", vars={'op': method})))
for method, doc in dispatcher.list_methods()]),
]
return {'body': body}
def __call__(self):
"""
Registers services with::
service = Service()
@service.run
@service.rss
@service.json
@service.jsonrpc
@service.xmlrpc
@service.amfrpc
@service.amfrpc3('domain')
@service.soap('Method', returns={'Result':int}, args={'a':int,'b':int,})
Exposes services with::
def call():
return service()
You can call services with::
http://..../app/default/call/run?[parameters]
http://..../app/default/call/rss?[parameters]
http://..../app/default/call/json?[parameters]
http://..../app/default/call/jsonrpc
http://..../app/default/call/xmlrpc
http://..../app/default/call/amfrpc
http://..../app/default/call/amfrpc3
http://..../app/default/call/soap
"""
request = current.request
if len(request.args) < 1:
raise HTTP(404, "Not Found")
arg0 = request.args(0)
if arg0 == 'run':
return self.serve_run(request.args[1:])
elif arg0 == 'rss':
return self.serve_rss(request.args[1:])
elif arg0 == 'csv':
return self.serve_csv(request.args[1:])
elif arg0 == 'xml':
return self.serve_xml(request.args[1:])
elif arg0 == 'json':
return self.serve_json(request.args[1:])
elif arg0 == 'jsonrpc':
return self.serve_jsonrpc()
elif arg0 == 'jsonrpc2':
return self.serve_jsonrpc2()
elif arg0 == 'xmlrpc':
return self.serve_xmlrpc()
elif arg0 == 'amfrpc':
return self.serve_amfrpc()
elif arg0 == 'amfrpc3':
return self.serve_amfrpc(3)
elif arg0 == 'soap':
return self.serve_soap()
else:
self.error()
def error(self):
raise HTTP(404, "Object does not exist")
def completion(callback):
"""
Executes a task on completion of the called action.
Example:
Use as::
from gluon.tools import completion
@completion(lambda d: logging.info(repr(d)))
def index():
return dict(message='hello')
        It logs the output of the function every time index() is called.
The argument of completion is executed in a new thread.
"""
def _completion(f):
def __completion(*a, **b):
d = None
try:
d = f(*a, **b)
return d
finally:
thread.start_new_thread(callback, (d,))
return __completion
return _completion
def prettydate(d, T=lambda x: x):
if isinstance(d, datetime.datetime):
dt = datetime.datetime.now() - d
elif isinstance(d, datetime.date):
dt = datetime.date.today() - d
elif not d:
return ''
else:
return '[invalid date]'
if dt.days < 0:
suffix = ' from now'
dt = -dt
else:
suffix = ' ago'
if dt.days >= 2 * 365:
return T('%d years' + suffix) % int(dt.days / 365)
elif dt.days >= 365:
return T('1 year' + suffix)
elif dt.days >= 60:
return T('%d months' + suffix) % int(dt.days / 30)
elif dt.days > 21:
return T('1 month' + suffix)
elif dt.days >= 14:
return T('%d weeks' + suffix) % int(dt.days / 7)
elif dt.days >= 7:
return T('1 week' + suffix)
elif dt.days > 1:
return T('%d days' + suffix) % dt.days
elif dt.days == 1:
return T('1 day' + suffix)
elif dt.seconds >= 2 * 60 * 60:
return T('%d hours' + suffix) % int(dt.seconds / 3600)
elif dt.seconds >= 60 * 60:
return T('1 hour' + suffix)
elif dt.seconds >= 2 * 60:
return T('%d minutes' + suffix) % int(dt.seconds / 60)
elif dt.seconds >= 60:
return T('1 minute' + suffix)
elif dt.seconds > 1:
return T('%d seconds' + suffix) % dt.seconds
elif dt.seconds == 1:
return T('1 second' + suffix)
else:
return T('now')
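def _example_prettydate():
    # Illustrative sketch only; this helper is not part of the original API.
    # prettydate turns a datetime/date into a human readable relative string
    # using the thresholds implemented above.
    import datetime
    now = datetime.datetime.now()
    print prettydate(now - datetime.timedelta(minutes=5))           # "5 minutes ago"
    print prettydate(now - datetime.timedelta(days=8))              # "1 week ago"
    print prettydate(now + datetime.timedelta(hours=3, seconds=5))  # "3 hours from now"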
def test_thread_separation():
def f():
c = PluginManager()
lock1.acquire()
lock2.acquire()
c.x = 7
lock1.release()
lock2.release()
lock1 = thread.allocate_lock()
lock2 = thread.allocate_lock()
lock1.acquire()
thread.start_new_thread(f, ())
a = PluginManager()
a.x = 5
lock1.release()
lock2.acquire()
return a.x
class PluginManager(object):
"""
Plugin Manager is similar to a storage object but it is a single level
singleton. This means that multiple instances within the same thread share
the same attributes.
Its constructor is also special. The first argument is the name of the
plugin you are defining.
The named arguments are parameters needed by the plugin with default values.
    If the parameters were previously defined, the old values are used.
Example:
in some general configuration file::
plugins = PluginManager()
plugins.me.param1=3
within the plugin model::
_ = PluginManager('me',param1=5,param2=6,param3=7)
where the plugin is used::
>>> print plugins.me.param1
3
>>> print plugins.me.param2
6
>>> plugins.me.param3 = 8
>>> print plugins.me.param3
8
Here are some tests::
>>> a=PluginManager()
>>> a.x=6
>>> b=PluginManager('check')
>>> print b.x
6
>>> b=PluginManager() # reset settings
>>> print b.x
<Storage {}>
>>> b.x=7
>>> print a.x
7
>>> a.y.z=8
>>> print b.y.z
8
>>> test_thread_separation()
5
>>> plugins=PluginManager('me',db='mydb')
>>> print plugins.me.db
mydb
>>> print 'me' in plugins
True
>>> print plugins.me.installed
True
"""
instances = {}
def __new__(cls, *a, **b):
id = thread.get_ident()
lock = thread.allocate_lock()
try:
lock.acquire()
try:
return cls.instances[id]
except KeyError:
instance = object.__new__(cls, *a, **b)
cls.instances[id] = instance
return instance
finally:
lock.release()
def __init__(self, plugin=None, **defaults):
if not plugin:
self.__dict__.clear()
settings = self.__getattr__(plugin)
settings.installed = True
settings.update(
(k, v) for k, v in defaults.items() if not k in settings)
def __getattr__(self, key):
if not key in self.__dict__:
self.__dict__[key] = Storage()
return self.__dict__[key]
def keys(self):
return self.__dict__.keys()
def __contains__(self, key):
return key in self.__dict__
class Expose(object):
def __init__(self, base=None, basename=None, extensions=None, allow_download=True):
"""
Examples:
Use as::
def static():
return dict(files=Expose())
or::
def static():
path = os.path.join(request.folder,'static','public')
return dict(files=Expose(path,basename='public'))
Args:
extensions: an optional list of file extensions for filtering
displayed files: e.g. `['.py', '.jpg']`
allow_download: whether to allow downloading selected files
"""
current.session.forget()
base = base or os.path.join(current.request.folder, 'static')
basename = basename or current.request.function
self.basename = basename
if current.request.raw_args:
self.args = [arg for arg in current.request.raw_args.split('/') if arg]
else:
self.args = [arg for arg in current.request.args if arg]
filename = os.path.join(base, *self.args)
if not os.path.exists(filename):
raise HTTP(404, "FILE NOT FOUND")
if not os.path.normpath(filename).startswith(base):
raise HTTP(401, "NOT AUTHORIZED")
if allow_download and not os.path.isdir(filename):
current.response.headers['Content-Type'] = contenttype(filename)
raise HTTP(200, open(filename, 'rb'), **current.response.headers)
self.path = path = os.path.join(filename, '*')
self.folders = [f[len(path) - 1:] for f in sorted(glob.glob(path))
if os.path.isdir(f) and not self.isprivate(f)]
self.filenames = [f[len(path) - 1:] for f in sorted(glob.glob(path))
if not os.path.isdir(f) and not self.isprivate(f)]
if 'README' in self.filenames:
readme = open(os.path.join(filename, 'README')).read()
self.paragraph = MARKMIN(readme)
else:
self.paragraph = None
if extensions:
self.filenames = [f for f in self.filenames
if os.path.splitext(f)[-1] in extensions]
def breadcrumbs(self, basename):
path = []
span = SPAN()
span.append(A(basename, _href=URL()))
for arg in self.args:
span.append('/')
path.append(arg)
span.append(A(arg, _href=URL(args='/'.join(path))))
return span
def table_folders(self):
if self.folders:
return SPAN(H3('Folders'), TABLE(
*[TR(TD(A(folder, _href=URL(args=self.args + [folder]))))
for folder in self.folders],
**dict(_class="table")))
return ''
@staticmethod
def isprivate(f):
return 'private' in f or f.startswith('.') or f.endswith('~')
@staticmethod
def isimage(f):
return os.path.splitext(f)[-1].lower() in (
'.png', '.jpg', '.jpeg', '.gif', '.tiff')
def table_files(self, width=160):
if self.filenames:
return SPAN(H3('Files'),
TABLE(*[TR(TD(A(f, _href=URL(args=self.args + [f]))),
TD(IMG(_src=URL(args=self.args + [f]),
_style='max-width:%spx' % width)
if width and self.isimage(f) else ''))
for f in self.filenames],
**dict(_class="table")))
return ''
def xml(self):
return DIV(
H2(self.breadcrumbs(self.basename)),
self.paragraph or '',
self.table_folders(),
self.table_files()).xml()
class Wiki(object):
everybody = 'everybody'
rows_page = 25
def markmin_base(self, body):
return MARKMIN(body, extra=self.settings.extra,
url=True, environment=self.env,
autolinks=lambda link: expand_one(link, {})).xml()
def render_tags(self, tags):
return DIV(
_class='w2p_wiki_tags',
*[A(t.strip(), _href=URL(args='_search', vars=dict(q=t)))
for t in tags or [] if t.strip()])
def markmin_render(self, page):
return self.markmin_base(page.body) + self.render_tags(page.tags).xml()
def html_render(self, page):
html = page.body
# @///function -> http://..../function
html = replace_at_urls(html, URL)
# http://...jpg -> <img src="http://...jpg/> or embed
html = replace_autolinks(html, lambda link: expand_one(link, {}))
# @{component:name} -> <script>embed component name</script>
html = replace_components(html, self.env)
html = html + self.render_tags(page.tags).xml()
return html
@staticmethod
def component(text):
"""
In wiki docs allows `@{component:controller/function/args}`
which renders as a `LOAD(..., ajax=True)`
"""
items = text.split('/')
controller, function, args = items[0], items[1], items[2:]
return LOAD(controller, function, args=args, ajax=True).xml()
def get_renderer(self):
if isinstance(self.settings.render, basestring):
r = getattr(self, "%s_render" % self.settings.render)
elif callable(self.settings.render):
r = self.settings.render
elif isinstance(self.settings.render, dict):
def custom_render(page):
if page.render:
if page.render in self.settings.render.keys():
my_render = self.settings.render[page.render]
else:
my_render = getattr(self, "%s_render" % page.render)
else:
my_render = self.markmin_render
return my_render(page)
r = custom_render
else:
raise ValueError(
"Invalid render type %s" % type(self.settings.render))
return r
def __init__(self, auth, env=None, render='markmin',
manage_permissions=False, force_prefix='',
restrict_search=False, extra=None,
menu_groups=None, templates=None, migrate=True,
controller=None, function=None, groups=None):
settings = self.settings = auth.settings.wiki
"""
Args:
render:
- "markmin"
- "html"
- `<function>` : Sets a custom render function
- `dict(html=<function>, markmin=...)`: dict(...) allows
multiple custom render functions
- "multiple" : Is the same as `{}`. It enables per-record
formats using builtins
"""
engines = set(['markmin', 'html'])
show_engine = False
if render == "multiple":
render = {}
if isinstance(render, dict):
[engines.add(key) for key in render]
show_engine = True
settings.render = render
perms = settings.manage_permissions = manage_permissions
settings.force_prefix = force_prefix
settings.restrict_search = restrict_search
settings.extra = extra or {}
settings.menu_groups = menu_groups
settings.templates = templates
settings.controller = controller
settings.function = function
settings.groups = auth.user_groups.values() \
if groups is None else groups
db = auth.db
self.env = env or {}
self.env['component'] = Wiki.component
self.auth = auth
self.wiki_menu_items = None
if self.auth.user:
self.settings.force_prefix = force_prefix % self.auth.user
else:
self.settings.force_prefix = force_prefix
self.host = current.request.env.http_host
table_definitions = [
('wiki_page', {
'args': [
Field('slug',
requires=[IS_SLUG(),
IS_NOT_IN_DB(db, 'wiki_page.slug')],
writable=False),
Field('title', length=255, unique=True),
Field('body', 'text', notnull=True),
Field('tags', 'list:string'),
Field('can_read', 'list:string',
writable=perms,
readable=perms,
default=[Wiki.everybody]),
Field('can_edit', 'list:string',
writable=perms, readable=perms,
default=[Wiki.everybody]),
Field('changelog'),
Field('html', 'text',
compute=self.get_renderer(),
readable=False, writable=False),
Field('render', default="markmin",
readable=show_engine,
writable=show_engine,
requires=IS_EMPTY_OR(
IS_IN_SET(engines))),
auth.signature],
'vars': {'format': '%(title)s', 'migrate': migrate}}),
('wiki_tag', {
'args': [
Field('name'),
Field('wiki_page', 'reference wiki_page'),
auth.signature],
'vars':{'format': '%(title)s', 'migrate': migrate}}),
('wiki_media', {
'args': [
Field('wiki_page', 'reference wiki_page'),
Field('title', required=True),
Field('filename', 'upload', required=True),
auth.signature],
'vars': {'format': '%(title)s', 'migrate': migrate}}),
]
# define only non-existent tables
for key, value in table_definitions:
args = []
if not key in db.tables():
# look for wiki_ extra fields in auth.settings
extra_fields = auth.settings.extra_fields
if extra_fields:
if key in extra_fields:
if extra_fields[key]:
for field in extra_fields[key]:
args.append(field)
args += value['args']
db.define_table(key, *args, **value['vars'])
if self.settings.templates is None and not \
self.settings.manage_permissions:
self.settings.templates = db.wiki_page.tags.contains('template') & \
db.wiki_page.can_read.contains('everybody')
def update_tags_insert(page, id, db=db):
for tag in page.tags or []:
tag = tag.strip().lower()
if tag:
db.wiki_tag.insert(name=tag, wiki_page=id)
def update_tags_update(dbset, page, db=db):
page = dbset.select(limitby=(0, 1)).first()
db(db.wiki_tag.wiki_page == page.id).delete()
for tag in page.tags or []:
tag = tag.strip().lower()
if tag:
db.wiki_tag.insert(name=tag, wiki_page=page.id)
db.wiki_page._after_insert.append(update_tags_insert)
db.wiki_page._after_update.append(update_tags_update)
if (auth.user and
check_credentials(current.request, gae_login=False) and
not 'wiki_editor' in auth.user_groups.values() and
self.settings.groups == auth.user_groups.values()):
group = db.auth_group(role='wiki_editor')
gid = group.id if group else db.auth_group.insert(
role='wiki_editor')
auth.add_membership(gid)
settings.lock_keys = True
# WIKI ACCESS POLICY
def not_authorized(self, page=None):
raise HTTP(401)
def can_read(self, page):
if 'everybody' in page.can_read or not \
self.settings.manage_permissions:
return True
elif self.auth.user:
groups = self.settings.groups
if ('wiki_editor' in groups or
set(groups).intersection(set(page.can_read + page.can_edit)) or
page.created_by == self.auth.user.id):
return True
return False
def can_edit(self, page=None):
if not self.auth.user:
redirect(self.auth.settings.login_url)
groups = self.settings.groups
return ('wiki_editor' in groups or
(page is None and 'wiki_author' in groups) or
not page is None and (
set(groups).intersection(set(page.can_edit)) or
page.created_by == self.auth.user.id))
def can_manage(self):
if not self.auth.user:
return False
groups = self.settings.groups
return 'wiki_editor' in groups
def can_search(self):
return True
def can_see_menu(self):
if self.auth.user:
if self.settings.menu_groups is None:
return True
else:
groups = self.settings.groups
if any(t in self.settings.menu_groups for t in groups):
return True
return False
### END POLICY
def automenu(self):
"""adds the menu if not present"""
if (not self.wiki_menu_items and
self.settings.controller and
self.settings.function):
self.wiki_menu_items = self.menu(self.settings.controller,
self.settings.function)
current.response.menu += self.wiki_menu_items
def __call__(self):
request = current.request
settings = self.settings
settings.controller = settings.controller or request.controller
settings.function = settings.function or request.function
self.automenu()
zero = request.args(0) or 'index'
if zero and zero.isdigit():
return self.media(int(zero))
elif not zero or not zero.startswith('_'):
return self.read(zero)
elif zero == '_edit':
return self.edit(request.args(1) or 'index', request.args(2) or 0)
elif zero == '_editmedia':
return self.editmedia(request.args(1) or 'index')
elif zero == '_create':
return self.create()
elif zero == '_pages':
return self.pages()
elif zero == '_search':
return self.search()
elif zero == '_recent':
ipage = int(request.vars.page or 0)
query = self.auth.db.wiki_page.created_by == request.args(
1, cast=int)
return self.search(query=query,
orderby=~self.auth.db.wiki_page.created_on,
limitby=(ipage * self.rows_page,
(ipage + 1) * self.rows_page),
)
elif zero == '_cloud':
return self.cloud()
elif zero == '_preview':
return self.preview(self.get_renderer())
def first_paragraph(self, page):
        if self.can_read(page):
mm = (page.body or '').replace('\r', '')
ps = [p for p in mm.split('\n\n')
if not p.startswith('#') and p.strip()]
if ps:
return ps[0]
return ''
def fix_hostname(self, body):
return (body or '').replace('://HOSTNAME', '://%s' % self.host)
def read(self, slug, force_render=False):
if slug in '_cloud':
return self.cloud()
elif slug in '_search':
return self.search()
page = self.auth.db.wiki_page(slug=slug)
if page and (not self.can_read(page)):
return self.not_authorized(page)
if current.request.extension == 'html':
if not page:
url = URL(args=('_create', slug))
return dict(content=A('Create page "%s"' % slug, _href=url, _class="btn"))
else:
html = page.html if not force_render else self.get_renderer()(page)
content = XML(self.fix_hostname(html))
return dict(title=page.title,
slug=page.slug,
page=page,
content=content,
tags=page.tags,
created_on=page.created_on,
modified_on=page.modified_on)
elif current.request.extension == 'load':
return self.fix_hostname(page.html) if page else ''
else:
if not page:
raise HTTP(404)
else:
return dict(title=page.title,
slug=page.slug,
page=page,
content=page.body,
tags=page.tags,
created_on=page.created_on,
modified_on=page.modified_on)
def edit(self, slug, from_template=0):
auth = self.auth
db = auth.db
page = db.wiki_page(slug=slug)
if not self.can_edit(page):
return self.not_authorized(page)
title_guess = ' '.join(c.capitalize() for c in slug.split('-'))
if not page:
if not (self.can_manage() or
slug.startswith(self.settings.force_prefix)):
current.session.flash = 'slug must have "%s" prefix' \
% self.settings.force_prefix
redirect(URL(args=('_create')))
db.wiki_page.can_read.default = [Wiki.everybody]
db.wiki_page.can_edit.default = [auth.user_group_role()]
db.wiki_page.title.default = title_guess
db.wiki_page.slug.default = slug
if slug == 'wiki-menu':
db.wiki_page.body.default = \
'- Menu Item > @////index\n- - Submenu > http://web2py.com'
else:
db.wiki_page.body.default = db(db.wiki_page.id == from_template).select(db.wiki_page.body)[0].body \
if int(from_template) > 0 else '## %s\n\npage content' % title_guess
vars = current.request.post_vars
if vars.body:
vars.body = vars.body.replace('://%s' % self.host, '://HOSTNAME')
form = SQLFORM(db.wiki_page, page, deletable=True,
formstyle='table2cols', showid=False).process()
if form.deleted:
current.session.flash = 'page deleted'
redirect(URL())
elif form.accepted:
current.session.flash = 'page created'
redirect(URL(args=slug))
script = """
jQuery(function() {
if (!jQuery('#wiki_page_body').length) return;
var pagecontent = jQuery('#wiki_page_body');
pagecontent.css('font-family',
'Monaco,Menlo,Consolas,"Courier New",monospace');
var prevbutton = jQuery('<button class="btn nopreview">Preview</button>');
var preview = jQuery('<div id="preview"></div>').hide();
var previewmedia = jQuery('<div id="previewmedia"></div>');
var form = pagecontent.closest('form');
preview.insertBefore(form);
prevbutton.insertBefore(form);
if(%(link_media)s) {
var mediabutton = jQuery('<button class="btn nopreview">Media</button>');
mediabutton.insertBefore(form);
previewmedia.insertBefore(form);
mediabutton.click(function() {
if (mediabutton.hasClass('nopreview')) {
web2py_component('%(urlmedia)s', 'previewmedia');
} else {
previewmedia.empty();
}
mediabutton.toggleClass('nopreview');
});
}
prevbutton.click(function(e) {
e.preventDefault();
if (prevbutton.hasClass('nopreview')) {
prevbutton.addClass('preview').removeClass(
'nopreview').html('Edit Source');
try{var wiki_render = jQuery('#wiki_page_render').val()}
catch(e){var wiki_render = null;}
web2py_ajax_page('post', \
'%(url)s', {body: jQuery('#wiki_page_body').val(), \
render: wiki_render}, 'preview');
form.fadeOut('fast', function() {preview.fadeIn()});
} else {
prevbutton.addClass(
'nopreview').removeClass('preview').html('Preview');
preview.fadeOut('fast', function() {form.fadeIn()});
}
})
})
""" % dict(url=URL(args=('_preview', slug)), link_media=('true' if page else 'false'),
urlmedia=URL(extension='load',
args=('_editmedia', slug),
vars=dict(embedded=1)))
return dict(content=TAG[''](form, SCRIPT(script)))
def editmedia(self, slug):
auth = self.auth
db = auth.db
page = db.wiki_page(slug=slug)
if not (page and self.can_edit(page)):
return self.not_authorized(page)
self.auth.db.wiki_media.id.represent = lambda id, row: \
id if not row.filename else \
SPAN('@////%i/%s.%s' % (id, IS_SLUG.urlify(row.title.split('.')[0]), row.filename.split('.')[-1]))
self.auth.db.wiki_media.wiki_page.default = page.id
self.auth.db.wiki_media.wiki_page.writable = False
links = []
csv = True
create = True
if current.request.vars.embedded:
script = "var c = jQuery('#wiki_page_body'); c.val(c.val() + jQuery('%s').text()); return false;"
fragment = self.auth.db.wiki_media.id.represent
csv = False
create = False
links= [
lambda row:
A('copy into source', _href='#', _onclick=script % (fragment(row.id, row)))
]
content = SQLFORM.grid(
self.auth.db.wiki_media.wiki_page == page.id,
orderby=self.auth.db.wiki_media.title,
links=links,
csv=csv,
create=create,
args=['_editmedia', slug],
user_signature=False)
return dict(content=content)
def create(self):
if not self.can_edit():
return self.not_authorized()
db = self.auth.db
slugs = db(db.wiki_page.id > 0).select(db.wiki_page.id, db.wiki_page.slug)
options = [OPTION(row.slug, _value=row.id) for row in slugs]
options.insert(0, OPTION('', _value=''))
fields = [Field("slug", default=current.request.args(1) or
self.settings.force_prefix,
requires=(IS_SLUG(), IS_NOT_IN_DB(db, db.wiki_page.slug))),]
if self.settings.templates:
fields.append(
Field("from_template", "reference wiki_page",
requires=IS_EMPTY_OR(
IS_IN_DB(db(self.settings.templates),
db.wiki_page._id,
'%(slug)s')),
comment=current.T(
"Choose Template or empty for new Page")))
form = SQLFORM.factory(*fields, **dict(_class="well"))
form.element("[type=submit]").attributes["_value"] = \
current.T("Create Page from Slug")
if form.process().accepted:
form.vars.from_template = 0 if not form.vars.from_template \
else form.vars.from_template
redirect(URL(args=('_edit', form.vars.slug, form.vars.from_template or 0))) # added param
return dict(content=form)
def pages(self):
if not self.can_manage():
return self.not_authorized()
self.auth.db.wiki_page.slug.represent = lambda slug, row: SPAN(
'@////%s' % slug)
self.auth.db.wiki_page.title.represent = lambda title, row: \
A(title, _href=URL(args=row.slug))
wiki_table = self.auth.db.wiki_page
content = SQLFORM.grid(
wiki_table,
fields=[wiki_table.slug,
wiki_table.title, wiki_table.tags,
wiki_table.can_read, wiki_table.can_edit],
links=[
lambda row:
A('edit', _href=URL(args=('_edit', row.slug)), _class='btn'),
lambda row:
A('media', _href=URL(args=('_editmedia', row.slug)), _class='btn')],
details=False, editable=False, deletable=False, create=False,
orderby=self.auth.db.wiki_page.title,
args=['_pages'],
user_signature=False)
return dict(content=content)
def media(self, id):
request, response, db = current.request, current.response, self.auth.db
media = db.wiki_media(id)
if media:
if self.settings.manage_permissions:
page = db.wiki_page(media.wiki_page)
if not self.can_read(page):
return self.not_authorized(page)
request.args = [media.filename]
m = response.download(request, db)
current.session.forget() # get rid of the cookie
response.headers['Last-Modified'] = \
request.utcnow.strftime("%a, %d %b %Y %H:%M:%S GMT")
if 'Content-Disposition' in response.headers:
del response.headers['Content-Disposition']
response.headers['Pragma'] = 'cache'
response.headers['Cache-Control'] = 'private'
return m
else:
raise HTTP(404)
def menu(self, controller='default', function='index'):
db = self.auth.db
request = current.request
menu_page = db.wiki_page(slug='wiki-menu')
menu = []
if menu_page:
tree = {'': menu}
regex = re.compile('[\r\n\t]*(?P<base>(\s*\-\s*)+)(?P<title>\w.*?)\s+\>\s+(?P<link>\S+)')
for match in regex.finditer(self.fix_hostname(menu_page.body)):
base = match.group('base').replace(' ', '')
title = match.group('title')
link = match.group('link')
title_page = None
if link.startswith('@'):
items = link[2:].split('/')
if len(items) > 3:
title_page = items[3]
link = URL(a=items[0] or None, c=items[1] or controller,
f=items[2] or function, args=items[3:])
parent = tree.get(base[1:], tree[''])
subtree = []
tree[base] = subtree
parent.append((current.T(title),
request.args(0) == title_page,
link, subtree))
if self.can_see_menu():
submenu = []
menu.append((current.T('[Wiki]'), None, None, submenu))
if URL() == URL(controller, function):
if not str(request.args(0)).startswith('_'):
slug = request.args(0) or 'index'
mode = 1
elif request.args(0) == '_edit':
slug = request.args(1) or 'index'
mode = 2
elif request.args(0) == '_editmedia':
slug = request.args(1) or 'index'
mode = 3
else:
mode = 0
if mode in (2, 3):
submenu.append((current.T('View Page'), None,
URL(controller, function, args=slug)))
if mode in (1, 3):
submenu.append((current.T('Edit Page'), None,
URL(controller, function, args=('_edit', slug))))
if mode in (1, 2):
submenu.append((current.T('Edit Page Media'), None,
URL(controller, function, args=('_editmedia', slug))))
submenu.append((current.T('Create New Page'), None,
URL(controller, function, args=('_create'))))
# Moved next if to inside self.auth.user check
if self.can_manage():
submenu.append((current.T('Manage Pages'), None,
URL(controller, function, args=('_pages'))))
submenu.append((current.T('Edit Menu'), None,
URL(controller, function, args=('_edit', 'wiki-menu'))))
# Also moved inside self.auth.user check
submenu.append((current.T('Search Pages'), None,
URL(controller, function, args=('_search'))))
return menu
def search(self, tags=None, query=None, cloud=True, preview=True,
limitby=(0, 100), orderby=None):
if not self.can_search():
return self.not_authorized()
request = current.request
content = CAT()
if tags is None and query is None:
form = FORM(INPUT(_name='q', requires=IS_NOT_EMPTY(),
value=request.vars.q),
INPUT(_type="submit", _value=current.T('Search')),
_method='GET')
content.append(DIV(form, _class='w2p_wiki_form'))
if request.vars.q:
tags = [v.strip() for v in request.vars.q.split(',')]
tags = [v.lower() for v in tags if v]
if tags or not query is None:
db = self.auth.db
count = db.wiki_tag.wiki_page.count()
fields = [db.wiki_page.id, db.wiki_page.slug,
db.wiki_page.title, db.wiki_page.tags,
db.wiki_page.can_read]
if preview:
fields.append(db.wiki_page.body)
if query is None:
query = (db.wiki_page.id == db.wiki_tag.wiki_page) &\
(db.wiki_tag.name.belongs(tags))
query = query | db.wiki_page.title.contains(request.vars.q)
            if self.settings.restrict_search and not self.can_manage():
query = query & (db.wiki_page.created_by == self.auth.user_id)
pages = db(query).select(count,
*fields, **dict(orderby=orderby or ~count,
groupby=reduce(lambda a, b: a | b, fields),
distinct=True,
limitby=limitby))
if request.extension in ('html', 'load'):
if not pages:
content.append(DIV(current.T("No results"),
_class='w2p_wiki_form'))
def link(t):
return A(t, _href=URL(args='_search', vars=dict(q=t)))
items = [DIV(H3(A(p.wiki_page.title, _href=URL(
args=p.wiki_page.slug))),
MARKMIN(self.first_paragraph(p.wiki_page))
if preview else '',
DIV(_class='w2p_wiki_tags',
*[link(t.strip()) for t in
p.wiki_page.tags or [] if t.strip()]),
_class='w2p_wiki_search_item')
for p in pages]
content.append(DIV(_class='w2p_wiki_pages', *items))
else:
cloud = False
content = [p.wiki_page.as_dict() for p in pages]
elif cloud:
content.append(self.cloud()['content'])
if request.extension == 'load':
return content
return dict(content=content)
def cloud(self):
db = self.auth.db
count = db.wiki_tag.wiki_page.count(distinct=True)
ids = db(db.wiki_tag).select(
db.wiki_tag.name, count,
distinct=True,
groupby=db.wiki_tag.name,
orderby=~count, limitby=(0, 20))
if ids:
a, b = ids[0](count), ids[-1](count)
def style(c):
STYLE = 'padding:0 0.2em;line-height:%.2fem;font-size:%.2fem'
size = (1.5 * (c - b) / max(a - b, 1) + 1.3)
return STYLE % (1.3, size)
items = []
for item in ids:
items.append(A(item.wiki_tag.name,
_style=style(item(count)),
_href=URL(args='_search',
vars=dict(q=item.wiki_tag.name))))
items.append(' ')
return dict(content=DIV(_class='w2p_cloud', *items))
def preview(self, render):
request = current.request
# FIXME: This is an ugly hack to ensure a default render
# engine if not specified (with multiple render engines)
if not "render" in request.post_vars:
request.post_vars.render = None
return render(request.post_vars)
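# Hedged usage sketch (assumes a standard web2py application where `auth` is a
# configured Auth instance in the models): the Wiki above dispatches on
# request.args via __call__, so a controller can expose it as, e.g.
#
#     def wiki():
#         return Wiki(auth, render='markmin')()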
class Config(object):
def __init__(
self,
filename,
section,
default_values={}
):
self.config = ConfigParser.ConfigParser(default_values)
self.config.read(filename)
if not self.config.has_section(section):
self.config.add_section(section)
self.section = section
self.filename = filename
def read(self):
if not(isinstance(current.session['settings_%s' % self.section], dict)):
settings = dict(self.config.items(self.section))
else:
settings = current.session['settings_%s' % self.section]
return settings
def save(self, options):
for option, value in options:
self.config.set(self.section, option, value)
try:
self.config.write(open(self.filename, 'w'))
result = True
except:
current.session['settings_%s' % self.section] = dict(self.config.items(self.section))
result = False
return result
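# Hedged usage sketch for Config (the file name, section and option below are
# hypothetical). read() caches the settings in the web2py session, so it must
# run inside a request environment:
#
#     cfg = Config('private/appconfig.ini', 'db',
#                  default_values={'uri': 'sqlite://storage.sqlite'})
#     settings = cfg.read()
#     cfg.save(settings.items())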
if __name__ == '__main__':
import doctest
doctest.testmod()
| 40.736364 | 178 | 0.531913 | [
"BSD-3-Clause"
] | kvk3008/project | gluon/tools.py | 250,936 | Python |
"""
WSGI config for car_selling_parts project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'car_selling_parts.settings')
application = get_wsgi_application()
| 24.176471 | 78 | 0.79562 | [
"MIT"
] | vis7/car_parts_selling | car_selling_parts/wsgi.py | 411 | Python |
'''
UnrealCV
========
Provides functions to interact with games built using Unreal Engine.
>>> import unrealcv
>>> (HOST, PORT) = ('localhost', 9000)
>>> client = unrealcv.Client((HOST, PORT))
'''
import sys, ctypes, struct, threading, socket, re, time, logging
try:
from Queue import Queue
except:
from queue import Queue # for Python 3
_L = logging.getLogger(__name__)
# _L.addHandler(logging.NullHandler()) # Let client to decide how to do logging
_L.handlers = []
h = logging.StreamHandler()
h.setFormatter(logging.Formatter('%(levelname)s:%(module)s:%(lineno)d:%(message)s'))
_L.addHandler(h)
_L.propagate = False
_L.setLevel(logging.INFO)
fmt = 'I'
class SocketMessage(object):
'''
    Define the format of a message. This class is defined similarly to the class FNFSMessageHeader in UnrealEngine4, but without the CRC check.
    The magic number comes from the Unreal implementation.
See https://github.com/EpicGames/UnrealEngine/blob/dff3c48be101bb9f84633a733ef79c91c38d9542/Engine/Source/Runtime/Sockets/Public/NetworkMessage.h
'''
magic = ctypes.c_uint32(0x9E2B83C1).value
def __init__(self, payload):
self.magic = SocketMessage.magic
self.payload_size = ctypes.c_uint32(len(payload)).value
@classmethod
def ReceivePayload(cls, socket):
'''
        Return only the payload, not the raw message; None if it failed.
        socket: a blocking socket to read data from.
'''
# rbufsize = -1 # From SocketServer.py
rbufsize = 0
rfile = socket.makefile('rb', rbufsize)
_L.debug('read raw_magic %s', threading.current_thread().name)
try:
raw_magic = rfile.read(4) # socket is disconnected or invalid
except Exception as e:
_L.debug('Fail to read raw_magic, %s', e)
raw_magic = None
_L.debug('read raw_magic %s done: %s', threading.current_thread().name, repr(raw_magic))
if not raw_magic: # nothing to read
# _L.debug('socket disconnect')
return None
# print 'Receive raw magic: %d, %s' % (len(raw_magic), raw_magic)
magic = struct.unpack(fmt, raw_magic)[0] # 'I' means unsigned int
# print 'Receive magic:', magic
if magic != cls.magic:
            _L.error('Error: received a malformed message; the message should start with a four-byte uint32 magic number')
return None
# The next time it will read four bytes again
_L.debug('read payload')
raw_payload_size = rfile.read(4)
# print 'Receive raw payload size: %d, %s' % (len(raw_payload_size), raw_payload_size)
payload_size = struct.unpack('I', raw_payload_size)[0]
_L.debug('Receive payload size %d', payload_size)
# if the message is incomplete, should wait until all the data received
payload = b""
remain_size = payload_size
while remain_size > 0:
data = rfile.read(remain_size)
if not data:
return None
payload += data
bytes_read = len(data) # len(data) is its string length, but we want length of bytes
# print 'bytes_read %d, remain_size %d, read_str %s' % (bytes_read, remain_size, data)
assert(bytes_read <= remain_size)
remain_size -= bytes_read
rfile.close()
return payload
@classmethod
def WrapAndSendPayload(cls, socket, payload):
'''
        Send the payload; return True on success, False on failure.
'''
try:
# From SocketServer.py
# wbufsize = 0, flush immediately
wbufsize = -1
# Convert
socket_message = SocketMessage(payload)
wfile = socket.makefile('wb', wbufsize)
# Write the message
wfile.write(struct.pack(fmt, socket_message.magic))
# Need to send the packed version
# print 'Sent ', socket_message.magic
wfile.write(struct.pack(fmt, socket_message.payload_size))
# print 'Sent ', socket_message.payload_size
wfile.write(payload)
# print 'Sent ', payload
wfile.flush()
wfile.close() # Close file object, not close the socket
return True
except Exception as e:
_L.error('Fail to send message %s', e)
return False
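def _example_wire_frame(payload=b'vget /camera/0/lit'):
    # Illustrative sketch only; this helper is not part of the original API.
    # Every message on the wire is framed as
    # <uint32 magic><uint32 payload_size><payload bytes>, which is exactly what
    # WrapAndSendPayload writes and ReceivePayload parses above.
    return struct.pack(fmt, SocketMessage.magic) + \
        struct.pack(fmt, len(payload)) + payload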
class BaseClient(object):
'''
    BaseClient sends messages out and receives messages in a separate thread.
After calling the `send` function, only True or False will be returned
to indicate whether the operation was successful.
If you are trying to send a request and get a response, consider using `Client` instead.
This class adds message framing on top of TCP
'''
def __init__(self, endpoint, raw_message_handler):
'''
Parameters:
endpoint: a tuple (ip, port)
message_handler: a function defined as `def message_handler(msg)` to handle incoming message, msg is a string
'''
self.endpoint = endpoint
self.raw_message_handler = raw_message_handler
self.socket = None # if socket == None, means client is not connected
self.wait_connected = threading.Event()
# Start a thread to get data from the socket
receiving_thread = threading.Thread(target = self.__receiving)
receiving_thread.setDaemon(1)
receiving_thread.start()
def connect(self, timeout = 1):
'''
Try to connect to server, return whether connection successful
'''
if self.isconnected():
return True
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(self.endpoint)
self.socket = s
_L.debug('BaseClient: wait for connection confirm')
self.wait_connected.clear()
isset = self.wait_connected.wait(timeout)
assert(isset != None) # in python prior to 2.7 wait will return None
if isset:
return True
else:
self.socket = None
_L.error('Socket is created, but can not get connection confirm from %s, timeout after %.2f seconds', self.endpoint, timeout)
return False
# only assign self.socket to connected socket
# so it is safe to use self.socket != None to check connection status
            # This does not necessarily mean the connection was successful; it might
            # have been closed by the server unless the server is explicitly told to
            # accept the new socket.
except Exception as e:
_L.error('Can not connect to %s', str(self.endpoint))
_L.error("Error %s", e)
self.socket = None
return False
def isconnected(self):
return self.socket is not None
def disconnect(self):
if self.isconnected():
_L.debug("BaseClient, request disconnect from server in %s", threading.current_thread().name)
self.socket.shutdown(socket.SHUT_RD)
# Because socket is on read in __receiving thread, need to call shutdown to force it to close
if self.socket: # This may also be set to None in the __receiving thread
self.socket.close()
self.socket = None
time.sleep(0.1) # TODO, this is tricky
def __receiving(self):
'''
        Receive packets and extract messages from them.
        Call self.message_handler when a message is received.
        Also check whether the client is still connected.
'''
_L.debug('BaseClient start receiving in %s', threading.current_thread().name)
while True:
if self.isconnected():
# Only this thread is allowed to read from socket, otherwise need lock to avoid competing
message = SocketMessage.ReceivePayload(self.socket)
_L.debug('Got server raw message %s', message)
if not message:
_L.debug('BaseClient: remote disconnected, no more message')
self.socket = None
continue
if message.startswith(b'connected'):
_L.info('Got connection confirm: %s', repr(message))
self.wait_connected.set()
# self.wait_connected.clear()
continue
if self.raw_message_handler:
self.raw_message_handler(message) # will block this thread
else:
_L.error('No message handler for raw message %s', message)
    def send(self, message):
        '''
        Send message out, return whether the message was successfully sent
        '''
        if self.isconnected():
            _L.debug('BaseClient: Send message %s', self.socket)
            SocketMessage.WrapAndSendPayload(self.socket, message)
            return True
        else:
            _L.error('Fail to send message, client is not connected')
            return False

class Client(object):
    '''
    Client can be used to send requests to a game and get responses.
    Currently only one client is allowed at a time;
    more clients will be rejected.
    '''
    def __raw_message_handler(self, raw_message):
        # print 'Waiting for message id %d' % self.message_id
        match = self.raw_message_regexp.match(raw_message)

        if match:
            [message_id, message_body] = (int(match.group(1)), match.group(2)) # TODO: handle multiline response
            message_body = raw_message[len(match.group(1))+1:]
            # Convert to utf-8 if it's not a byte array (as is the case for images)
            try:
                message_body = message_body.decode('utf-8')
            except UnicodeDecodeError:
                pass

            # print 'Received message id %s' % message_id
            if message_id == self.message_id:
                self.response = message_body
                self.wait_response.set()
            else:
                assert(False)
        else:
            if self.message_handler:
                def do_callback():
                    self.message_handler(raw_message)
                self.queue.put(do_callback)
            else:
                # Instead of just dropping this message, give a verbose notice
                _L.error('No message handler to handle message %s', raw_message)

    def __init__(self, endpoint, message_handler=None):
        self.raw_message_regexp = re.compile(b'(\d{1,8}):(.*)')
        self.message_client = BaseClient(endpoint, self.__raw_message_handler)
        self.message_handler = message_handler
        self.message_id = 0
        self.wait_response = threading.Event()
        self.response = ''

        self.isconnected = self.message_client.isconnected
        self.connect = self.message_client.connect
        self.disconnect = self.message_client.disconnect

        self.queue = Queue()
        self.main_thread = threading.Thread(target = self.worker)
        self.main_thread.setDaemon(1)
        self.main_thread.start()

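    # Note (added for clarity): the worker thread below drains self.queue so that
    # user message handlers and requests submitted from other threads all run on
    # this single thread, rather than on BaseClient's receiving thread.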
    def worker(self):
        while True:
            task = self.queue.get()
            task()
            self.queue.task_done()

    def request(self, message, timeout=5):
        # docstring in numpy style
        """
        Send a request to the server and wait until a response arrives or the timeout expires.

        Parameters
        ----------
        message : str
            command to control the game. More info can be seen from http://docs.unrealcv.org/en/master/reference/commands.html

        Returns
        -------
        str
            plain text message from server

        Examples
        --------
        >>> client = Client(('localhost', 9000))
        >>> client.connect()
        >>> response = client.request('vget /camera/0/view')
        """
        if sys.version_info[0] == 3:
            if not isinstance(message, bytes):
                message = message.encode("utf-8")

        def do_request():
            raw_message = b'%d:%s' % (self.message_id, message)
            _L.debug('Request: %s', raw_message.decode("utf-8"))
            if not self.message_client.send(raw_message):
                return None

        # A request can only be sent from the main thread; submitting requests from multiple threads at once is not supported
        if threading.current_thread().name == self.main_thread.name:
            do_request()
        else:
            self.queue.put(do_request)

        # Timeout is required
        # see: https://bugs.python.org/issue8844
        self.wait_response.clear() # This is important
        isset = self.wait_response.wait(timeout)
        self.message_id += 1 # Increment it only after the request/response cycle finished
        assert(isset != None) # only python prior to 2.7 will return None

        if isset:
            return self.response
        else:
            _L.error('Can not receive a response from server, timeout after %.2f seconds', timeout)
            return None

(HOST, PORT) = ('localhost', 9000)
client = Client((HOST, PORT), None)
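
# A sketch of typical interactive use of the module-level `client` created above
# (kept as comments so importing this file is unchanged; the command string is an
# assumption taken from the docstring example above, not verified here):
#
#   from unrealcv import client
#   if client.connect():
#       response = client.request('vget /camera/0/view')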
| 38.478006 | 149 | 0.604146 | [
"MIT"
] | AI-cecream/unrealcv | client/python/unrealcv/__init__.py | 13,121 | Python |