repo_name | path | copies | size | content | license
---|---|---|---|---|---|
rcbops/keystone-buildpackage | keystone/middleware/auth_token.py | 1 | 16245 | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
TOKEN-BASED AUTH MIDDLEWARE
This WSGI component performs multiple jobs:
* it verifies that incoming client requests have valid tokens by verifying
tokens with the auth service.
* it will reject unauthenticated requests UNLESS it is in 'delay_auth_decision'
mode, which means the final decision is delegated to the downstream WSGI
component (usually the OpenStack service)
* it will collect and forward identity information from a valid token,
such as the user name, tenant, and roles
Refer to: http://wiki.openstack.org/openstack-authn
HEADERS
-------
* Headers starting with HTTP_ are standard HTTP headers
* Headers starting with HTTP_X are extended HTTP headers
Coming in from initial call from client or customer
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
HTTP_X_AUTH_TOKEN
the client token being passed in
HTTP_X_STORAGE_TOKEN
the client token being passed in (legacy Rackspace use) to support
cloud files
Used for communication between components
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
www-authenticate
only used if this component is being used remotely
HTTP_AUTHORIZATION
basic auth password used to validate the connection
What we add to the request for use by the OpenStack service
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
HTTP_X_AUTHORIZATION
the client identity being passed in
"""
import eventlet
from eventlet import wsgi
import httplib
import json
import os
from paste.deploy import loadapp
from urlparse import urlparse
from webob.exc import HTTPUnauthorized
from webob import Request, Response
import keystone.tools.tracer # @UnusedImport # module runs on import
from keystone.common.bufferedhttp import http_connect_raw as http_connect
PROTOCOL_NAME = "Token Authentication"
class AuthProtocol(object):
"""Auth Middleware that handles authenticating client calls"""
def _init_protocol_common(self, app, conf):
""" Common initialization code"""
print "Starting the %s component" % PROTOCOL_NAME
self.conf = conf
self.app = app
# if app is set, then we are in a WSGI pipeline and requests get passed
# on to app. If it is not set, this component forwards requests itself.
# Where to find the OpenStack service (if not in local WSGI chain):
# these settings are only used if this component is acting as a proxy
# and the OpenStack service is running remotely
self.service_protocol = conf.get('service_protocol', 'https')
self.service_host = conf.get('service_host')
self.service_port = int(conf.get('service_port'))
self.service_url = '%s://%s:%s' % (self.service_protocol,
self.service_host,
self.service_port)
# used to verify this component with the OpenStack service or PAPIAuth
self.service_pass = conf.get('service_pass')
# delay_auth_decision means we still allow unauthenticated requests
# through and we let the downstream service make the final decision
self.delay_auth_decision = int(conf.get('delay_auth_decision', 0))
def _init_protocol(self, conf):
""" Protocol specific initialization """
# where to find the auth service (we use this to validate tokens)
self.auth_host = conf.get('auth_host')
self.auth_port = int(conf.get('auth_port'))
self.auth_protocol = conf.get('auth_protocol', 'https')
# where to tell clients to find the auth service (default to url
# constructed based on endpoint we have for the service to use)
self.auth_location = conf.get('auth_uri',
"%s://%s:%s" % (self.auth_protocol,
self.auth_host,
self.auth_port))
# Credentials used to verify this component with the Auth service since
# validating tokens is a privileged call
self.admin_token = conf.get('admin_token')
def __init__(self, app, conf):
""" Common initialization code """
#TODO(ziad): maybe we refactor this into a superclass
self._init_protocol_common(app, conf) # Applies to all protocols
self._init_protocol(conf) # Specific to this protocol
def __call__(self, env, start_response):
""" Handle incoming request. Authenticate. And send downstream. """
#Prep headers to forward request to local or remote downstream service
proxy_headers = env.copy()
# iterate over a snapshot of the keys: mutating the dict while
# iterating with iterkeys() would raise a RuntimeError
for header in list(proxy_headers.keys()):
    if header[0:5] == 'HTTP_':
        proxy_headers[header[5:]] = proxy_headers[header]
        del proxy_headers[header]
#Look for authentication claims
claims = self._get_claims(env)
if not claims:
#No claim(s) provided
if self.delay_auth_decision:
#Configured to allow downstream service to make final decision.
#So mark status as Invalid and forward the request downstream
self._decorate_request("X_IDENTITY_STATUS",
"Invalid", env, proxy_headers)
else:
#Respond to client as appropriate for this auth protocol
return self._reject_request(env, start_response)
else:
# this request is presenting claims. Let's validate them
valid = self._validate_claims(claims)
if not valid:
# Keystone rejected claim
if self.delay_auth_decision:
# Downstream service will receive call still and decide
self._decorate_request("X_IDENTITY_STATUS",
"Invalid", env, proxy_headers)
else:
#Respond to client as appropriate for this auth protocol
return self._reject_claims(env, start_response)
else:
self._decorate_request("X_IDENTITY_STATUS",
"Confirmed", env, proxy_headers)
#Collect information about valid claims
if valid:
claims = self._expound_claims(claims)
# Store authentication data
if claims:
self._decorate_request('X_AUTHORIZATION', "Proxy %s" %
claims['user'], env, proxy_headers)
# For legacy compatibility before we had ID and Name
self._decorate_request('X_TENANT',
claims['tenant'], env, proxy_headers)
# Services should use these
self._decorate_request('X_TENANT_NAME',
claims.get('tenant_name', claims['tenant']),
env, proxy_headers)
self._decorate_request('X_TENANT_ID',
claims['tenant'], env, proxy_headers)
self._decorate_request('X_USER',
claims['user'], env, proxy_headers)
if claims.get('roles'):
    roles = ','.join(claims['roles'])
    self._decorate_request('X_ROLE',
        roles, env, proxy_headers)
# NOTE(todd): unused
self.expanded = True
#Send request downstream
return self._forward_request(env, start_response, proxy_headers)
# NOTE(todd): unused
def get_admin_auth_token(self, username, password):
"""
This function gets an admin auth token to be used by this service to
validate a user's token. Validate_token is a priviledged call so
it needs to be authenticated by a service that is calling it
"""
headers = {"Content-type": "application/json",
"Accept": "application/json"}
params = {"passwordCredentials": {"username": username,
"password": password,
"tenantId": "1"}}
conn = httplib.HTTPConnection("%s:%s" \
% (self.auth_host, self.auth_port))
conn.request("POST", "/v2.0/tokens", json.dumps(params), \
headers=headers)
response = conn.getresponse()
data = response.read()
return data
def _get_claims(self, env):
"""Get claims from request"""
claims = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN'))
return claims
def _reject_request(self, env, start_response):
"""Redirect client to auth server"""
return HTTPUnauthorized("Authentication required",
[("WWW-Authenticate",
"Keystone uri='%s'" % self.auth_location)])(env,
start_response)
def _reject_claims(self, env, start_response):
"""Client sent bad claims"""
return HTTPUnauthorized()(env,
start_response)
def _validate_claims(self, claims):
"""Validate claims, and provide identity information isf applicable """
# Step 1: We need to auth with the keystone service, so get an
# admin token
#TODO(ziad): Need to properly implement this, where to store creds
# for now using token from ini
#auth = self.get_admin_auth_token("admin", "secrete", "1")
#admin_token = json.loads(auth)["auth"]["token"]["id"]
# Step 2: validate the user's token with the auth service
# since this is a privileged op, we need to auth ourselves
# by using an admin token
headers = {"Content-type": "application/json",
"Accept": "application/json",
"X-Auth-Token": self.admin_token}
##TODO(ziad): we need to figure out how to auth to keystone
# since validate_token is a privileged call
#Khaled's version uses creds to get a token
# "X-Auth-Token": admin_token}
# we're using a test token from the ini file for now
conn = http_connect(self.auth_host, self.auth_port, 'GET',
'/v2.0/tokens/%s' % claims, headers=headers)
resp = conn.getresponse()
# data = resp.read()
conn.close()
if not str(resp.status).startswith('20'):
# Keystone rejected claim
return False
else:
#TODO(Ziad): there is an optimization we can do here. We have just
#received data from Keystone that we can use instead of making
#another call in _expound_claims
return True
def _expound_claims(self, claims):
# Valid token. Get user data and put it in to the call
# so the downstream service can use it
headers = {"Content-type": "application/json",
"Accept": "application/json",
"X-Auth-Token": self.admin_token}
##TODO(ziad): we need to figure out how to auth to keystone
# since validate_token is a privileged call
#Khaled's version uses creds to get a token
# "X-Auth-Token": admin_token}
# we're using a test token from the ini file for now
conn = http_connect(self.auth_host, self.auth_port, 'GET',
'/v2.0/tokens/%s' % claims, headers=headers)
resp = conn.getresponse()
data = resp.read()
conn.close()
if not str(resp.status).startswith('20'):
raise LookupError('Unable to locate claims: %s' % resp.status)
token_info = json.loads(data)
roles = []
role_refs = token_info["access"]["user"]["roles"]
if role_refs is not None:
for role_ref in role_refs:
# Nova looks for the non case-sensitive role 'Admin'
# to determine admin-ness
roles.append(role_ref["name"])
try:
    tenant = token_info['access']['token']['tenant']['id']
    tenant_name = token_info['access']['token']['tenant']['name']
except (KeyError, TypeError):
    tenant = None
    tenant_name = None
if not tenant:
tenant = token_info['access']['user'].get('tenantId')
tenant_name = token_info['access']['user'].get('tenantName')
verified_claims = {'user': token_info['access']['user']['username'],
'tenant': tenant,
'roles': roles}
if tenant_name:
    # key must match the claims.get('tenant_name') lookup in __call__
    verified_claims['tenant_name'] = tenant_name
return verified_claims
def _decorate_request(self, index, value, env, proxy_headers):
"""Add headers to request"""
proxy_headers[index] = value
env["HTTP_%s" % index] = value
def _forward_request(self, env, start_response, proxy_headers):
"""Token/Auth processed & claims added to headers"""
self._decorate_request('AUTHORIZATION',
"Basic %s" % self.service_pass, env, proxy_headers)
#now decide how to pass on the call
if self.app:
# Pass to downstream WSGI component
return self.app(env, start_response)
#.custom_start_response)
else:
# We are forwarding to a remote service (no downstream WSGI app)
req = Request(proxy_headers)
parsed = urlparse(req.url)
conn = http_connect(self.service_host,
self.service_port,
req.method,
parsed.path,
proxy_headers,
ssl=(self.service_protocol == 'https'))
resp = conn.getresponse()
data = resp.read()
#TODO(ziad): use a more sophisticated proxy
# we are rewriting the headers now
if resp.status == 401 or resp.status == 305:
# Add our own headers to the list
headers = [("WWW_AUTHENTICATE",
"Keystone uri='%s'" % self.auth_location)]
return Response(status=resp.status, body=data,
headerlist=headers)(env,
start_response)
else:
return Response(status=resp.status, body=data)(env,
start_response)
def filter_factory(global_conf, **local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
def auth_filter(app):
return AuthProtocol(app, conf)
return auth_filter
def app_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
return AuthProtocol(None, conf)
if __name__ == "__main__":
app = loadapp("config:" + \
os.path.join(os.path.abspath(os.path.dirname(__file__)),
os.pardir,
os.pardir,
"examples/paste/auth_token.ini"),
global_conf={"log_name": "auth_token.log"})
wsgi.server(eventlet.listen(('', 8090)), app)
| apache-2.0 |
Prashant-Surya/anitya | tests/test_backend_npmjs.py | 4 | 5411 | # -*- coding: utf-8 -*-
#
# Copyright © 2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2, or (at your option) any later
# version. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission
# of Red Hat, Inc.
#
'''
anitya tests for the npmjs backend.
'''
import unittest
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(
os.path.abspath(__file__)), '..'))
import anitya.lib.backends.npmjs as backend
import anitya.lib.model as model
from anitya.lib.exceptions import AnityaPluginException
from tests import Modeltests, create_distro, skip_jenkins
BACKEND = 'npmjs'
class NpmjsBackendtests(Modeltests):
""" Drupal backend tests. """
@skip_jenkins
def setUp(self):
""" Set up the environnment, ran before every tests. """
super(NpmjsBackendtests, self).setUp()
create_distro(self.session)
self.create_project()
def create_project(self):
""" Create some basic projects to work with. """
project = model.Project(
name='request',
homepage='https://www.npmjs.org/package/request',
backend=BACKEND,
)
self.session.add(project)
self.session.commit()
project = model.Project(
name='foobarasd',
homepage='https://www.npmjs.org/package/foobarasd',
backend=BACKEND,
)
self.session.add(project)
self.session.commit()
project = model.Project(
name='colors',
homepage='https://www.npmjs.org/package/colors',
backend=BACKEND,
)
self.session.add(project)
self.session.commit()
def test_get_version(self):
""" Test the get_version function of the npmjs backend. """
pid = 1
project = model.Project.get(self.session, pid)
exp = '2.56.0'
obs = backend.NpmjsBackend.get_version(project)
self.assertEqual(obs, exp)
pid = 2
project = model.Project.get(self.session, pid)
self.assertRaises(
AnityaPluginException,
backend.NpmjsBackend.get_version,
project
)
pid = 3
project = model.Project.get(self.session, pid)
exp = '1.1.0'
obs = backend.NpmjsBackend.get_version(project)
self.assertEqual(obs, exp)
def test_get_versions(self):
""" Test the get_versions function of the npmjs backend. """
pid = 1
project = model.Project.get(self.session, pid)
exp = [
u'0.8.3', u'0.9.0', u'0.9.1', u'0.9.5', u'0.10.0',
u'1.0.0', u'1.1.0', u'1.1.1', u'1.2.0',
u'1.9.0', u'1.9.1', u'1.9.2', u'1.9.3', u'1.9.5', u'1.9.7',
u'1.9.8', u'1.9.9',
u'2.0.0', u'2.0.1', u'2.0.2', u'2.0.3', u'2.0.4', u'2.0.5',
u'2.1.0', u'2.1.1',
u'2.2.0', u'2.2.5', u'2.2.6', u'2.2.9',
u'2.9.0', u'2.9.1', u'2.9.2', u'2.9.3',
u'2.9.100', u'2.9.150', u'2.9.151', u'2.9.152', u'2.9.153',
u'2.9.200', u'2.9.201', u'2.9.202', u'2.9.203',
u'2.10.0',
u'2.11.0', u'2.11.1', u'2.11.2', u'2.11.3', u'2.11.4',
u'2.12.0',
u'2.14.0',
u'2.16.0', u'2.16.2', u'2.16.4', u'2.16.6',
u'2.18.0', u'2.19.0', u'2.20.0', u'2.21.0', u'2.22.0', u'2.23.0',
u'2.24.0', u'2.25.0', u'2.26.0', u'2.27.0', u'2.28.0', u'2.29.0',
u'2.30.0', u'2.31.0', u'2.32.0', u'2.33.0', u'2.34.0', u'2.35.0',
u'2.36.0', u'2.37.0', u'2.38.0', u'2.39.0', u'2.40.0', u'2.41.0',
u'2.42.0', u'2.43.0', u'2.44.0', u'2.45.0', u'2.46.0', u'2.47.0',
u'2.48.0', u'2.49.0', u'2.50.0', u'2.51.0', u'2.52.0', u'2.53.0',
u'2.54.0', u'2.55.0', u'2.56.0',
]
obs = backend.NpmjsBackend.get_ordered_versions(project)
self.assertEqual(obs, exp)
pid = 2
project = model.Project.get(self.session, pid)
self.assertRaises(
AnityaPluginException,
backend.NpmjsBackend.get_versions,
project
)
pid = 3
project = model.Project.get(self.session, pid)
exp = [
u'0.3.0', u'0.5.0', u'0.5.1',
u'0.6.0', u'0.6.0-1', u'0.6.1', u'0.6.2',
u'1.0.0', u'1.0.1', u'1.0.2', u'1.0.3',
u'1.1.0',
]
obs = backend.NpmjsBackend.get_ordered_versions(project)
self.assertEqual(obs, exp)
if __name__ == '__main__':
SUITE = unittest.TestLoader().loadTestsFromTestCase(NpmjsBackendtests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| gpl-2.0 |
briandalessandro/courses | deeplearning1/nbs/utils/utils.py | 8 | 7644 | from __future__ import division,print_function
import math, os, json, sys, re
import cPickle as pickle
from glob import glob
import numpy as np
from matplotlib import pyplot as plt
from operator import itemgetter, attrgetter, methodcaller
from collections import OrderedDict
import itertools
from itertools import chain
import pandas as pd
import PIL
from PIL import Image
from numpy.random import random, permutation, randn, normal, uniform, choice
from numpy import newaxis
import scipy
from scipy import misc, ndimage
from scipy.ndimage.interpolation import zoom
from scipy.ndimage import imread
from sklearn.metrics import confusion_matrix
import bcolz
from sklearn.preprocessing import OneHotEncoder
from sklearn.manifold import TSNE
from IPython.lib.display import FileLink
import theano
from theano import shared, tensor as T
from theano.tensor.nnet import conv2d, nnet
from theano.tensor.signal import pool
import keras
from keras import backend as K
from keras.utils.data_utils import get_file
from keras.utils import np_utils
from keras.utils.np_utils import to_categorical
from keras.models import Sequential, Model
from keras.layers import Input, Embedding, Reshape, merge, LSTM, Bidirectional
from keras.layers import TimeDistributed, Activation, SimpleRNN, GRU
from keras.layers.core import Flatten, Dense, Dropout, Lambda
from keras.regularizers import l2, activity_l2, l1, activity_l1
from keras.layers.normalization import BatchNormalization
from keras.optimizers import SGD, RMSprop, Adam
from keras.utils.layer_utils import layer_from_config
from keras.metrics import categorical_crossentropy, categorical_accuracy
from keras.layers.convolutional import *
from keras.preprocessing import image, sequence
from keras.preprocessing.text import Tokenizer
from vgg16 import *
from vgg16bn import *
np.set_printoptions(precision=4, linewidth=100)
to_bw = np.array([0.299, 0.587, 0.114])
def gray(img):
return np.rollaxis(img,0,3).dot(to_bw)
def to_plot(img):
return np.rollaxis(img, 0, 3).astype(np.uint8)
def plot(img):
plt.imshow(to_plot(img))
def floor(x):
return int(math.floor(x))
def ceil(x):
return int(math.ceil(x))
def plots(ims, figsize=(12,6), rows=1, interp=False, titles=None):
if type(ims[0]) is np.ndarray:
ims = np.array(ims).astype(np.uint8)
if (ims.shape[-1] != 3):
ims = ims.transpose((0,2,3,1))
f = plt.figure(figsize=figsize)
for i in range(len(ims)):
sp = f.add_subplot(rows, len(ims)//rows, i+1)
if titles is not None:
sp.set_title(titles[i], fontsize=18)
plt.imshow(ims[i], interpolation=None if interp else 'none')
def do_clip(arr, mx):
clipped = np.clip(arr, (1-mx)/1, mx)
return clipped/clipped.sum(axis=1)[:, np.newaxis]
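# Example usage (a sketch): clip over-confident softmax outputs before
# computing log loss, then rely on do_clip's row renormalization. `preds`
# is a hypothetical (n_samples, n_classes) array of probabilities.
#   clipped_preds = do_clip(preds, 0.93)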
def get_batches(dirname, gen=image.ImageDataGenerator(), shuffle=True, batch_size=4, class_mode='categorical',
target_size=(224,224)):
return gen.flow_from_directory(dirname, target_size=target_size,
class_mode=class_mode, shuffle=shuffle, batch_size=batch_size)
def onehot(x):
return to_categorical(x)
def wrap_config(layer):
return {'class_name': layer.__class__.__name__, 'config': layer.get_config()}
def copy_layer(layer): return layer_from_config(wrap_config(layer))
def copy_layers(layers): return [copy_layer(layer) for layer in layers]
def copy_weights(from_layers, to_layers):
for from_layer,to_layer in zip(from_layers, to_layers):
to_layer.set_weights(from_layer.get_weights())
def copy_model(m):
res = Sequential(copy_layers(m.layers))
copy_weights(m.layers, res.layers)
return res
def insert_layer(model, new_layer, index):
res = Sequential()
for i,layer in enumerate(model.layers):
if i==index: res.add(new_layer)
copied = layer_from_config(wrap_config(layer))
res.add(copied)
copied.set_weights(layer.get_weights())
return res
def adjust_dropout(weights, prev_p, new_p):
scal = (1-prev_p)/(1-new_p)
return [o*scal for o in weights]
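# Worked example (a sketch): weights trained with dropout p=0.5 that will be
# reused with p=0.0 get scaled by (1-0.5)/(1-0.0) = 0.5, since every unit is
# now always active. `fc_weights` below is a hypothetical list of arrays.
#   rescaled = adjust_dropout(fc_weights, 0.5, 0.0)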
def get_data(path, target_size=(224,224)):
batches = get_batches(path, shuffle=False, batch_size=1, class_mode=None, target_size=target_size)
return np.concatenate([batches.next() for i in range(batches.nb_sample)])
def plot_confusion_matrix(cm, classes, normalize=False, title='Confusion matrix', cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
(This function is copied from the scikit docs.)
"""
# normalize before plotting so the rendered image and the cell labels agree
if normalize:
    cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
    print(cm)
plt.figure()
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, cm[i, j], horizontalalignment="center", color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
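# Example usage (illustrative; `val_classes` and `preds` are hypothetical
# arrays of true labels and predicted probabilities):
#   cm = confusion_matrix(val_classes, np.argmax(preds, axis=1))
#   plot_confusion_matrix(cm, classes=['cat', 'dog'], normalize=True)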
def save_array(fname, arr):
c=bcolz.carray(arr, rootdir=fname, mode='w')
c.flush()
def load_array(fname):
return bcolz.open(fname)[:]
def mk_size(img, r2c):
r,c,_ = img.shape
curr_r2c = r/c
new_r, new_c = r,c
if r2c>curr_r2c:
new_r = floor(c*r2c)
else:
new_c = floor(r/r2c)
arr = np.zeros((new_r, new_c, 3), dtype=np.float32)
r2=(new_r-r)//2
c2=(new_c-c)//2
arr[floor(r2):floor(r2)+r,floor(c2):floor(c2)+c] = img
return arr
def mk_square(img):
x,y,_ = img.shape
maxs = max(img.shape[:2])
y2=(maxs-y)//2
x2=(maxs-x)//2
arr = np.zeros((maxs,maxs,3), dtype=np.float32)
arr[floor(x2):floor(x2)+x,floor(y2):floor(y2)+y] = img
return arr
def vgg_ft(out_dim):
vgg = Vgg16()
vgg.ft(out_dim)
model = vgg.model
return model
def vgg_ft_bn(out_dim):
vgg = Vgg16BN()
vgg.ft(out_dim)
model = vgg.model
return model
def get_classes(path):
batches = get_batches(path+'train', shuffle=False, batch_size=1)
val_batches = get_batches(path+'valid', shuffle=False, batch_size=1)
test_batches = get_batches(path+'test', shuffle=False, batch_size=1)
return (val_batches.classes, batches.classes, onehot(val_batches.classes), onehot(batches.classes),
val_batches.filenames, batches.filenames, test_batches.filenames)
def split_at(model, layer_type):
layers = model.layers
layer_idx = [index for index,layer in enumerate(layers)
if type(layer) is layer_type][-1]
return layers[:layer_idx+1], layers[layer_idx+1:]
class MixIterator(object):
def __init__(self, iters):
self.iters = iters
self.multi = type(iters) is list
if self.multi:
self.N = sum([it[0].N for it in self.iters])
else:
self.N = sum([it.N for it in self.iters])
def reset(self):
for it in self.iters: it.reset()
def __iter__(self):
return self
def next(self, *args, **kwargs):
if self.multi:
    # each element of self.iters is itself a list of iterators: pull one
    # batch from each and concatenate the inputs and labels across all
    nexts = [[next(it) for it in o] for o in self.iters]
    n0 = np.concatenate([n[0] for o in nexts for n in o])
    n1 = np.concatenate([n[1] for o in nexts for n in o])
    return (n0, n1)
else:
nexts = [next(it) for it in self.iters]
n0 = np.concatenate([n[0] for n in nexts])
n1 = np.concatenate([n[1] for n in nexts])
return (n0, n1)
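# Example usage (a sketch, Keras 1.x API to match this module): merge batches
# from a labeled training set and a pseudo-labeled set into one stream. The
# generator names below are hypothetical.
#   mi = MixIterator([train_batches, pseudo_batches])
#   model.fit_generator(mi, samples_per_epoch=mi.N, nb_epoch=8,
#                       validation_data=(val_data, val_labels))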
| apache-2.0 |
Zosoled/Cumulus | robot/Cumulus/resources/locators_45.py | 1 | 15089 | """Locators for Spring '19"""
npsp_lex_locators={
'breadcrumb': "//li[contains(@class, 'slds-breadcrumb__item')]/span[text()='{}']",
'breadcrumb-link':"//a[@title='{}' and contains(@class,'BreadCrumbItem')]",
'placeholder': "//*[contains(@placeholder,'{}')]",
'app_launcher':{
'select-option':'//span/mark[text()="{}"]',
},
'object_dd':'//h1[contains(@class,"slds-page-header__title")]//a',
"header_text": "//h1/div/span",
"record": {
'button': "//div[@class='actionsContainer']/button[@title='{}']",
'datepicker':"//div[contains(@class,'uiDatePickerGrid')]/table[@class='calGrid']//span[text()='{}']",
'month_pick':"//div[@class='dateBar']//a[@title='{}']",
'edit_button':'//*[@title="{}"]',
'edit_form': 'css: div.forcePageBlockItemEdit',
'list':"//div[contains(@class,'forcePageBlockSectionRow')]/div[contains(@class,'forcePageBlockItem')]/div[contains(@class,'slds-hint-parent')]/div[@class='slds-form-element__control']/div[.//span[text()='{}']][//div[contains(@class,'uiMenu')]//a[@class='select']]",
'dropdown':"//div[@class='select-options']/ul[@class='scrollable']/li[@class='uiMenuItem uiRadioMenuItem']/a[contains(text(),'{}')]",
'related': {
'button': "//article[contains(@class, 'forceRelatedListCardDesktop')][.//img][.//span[@title='{}']]//a[@title='{}']",
'check_occurrence':'//h2/a/span[@title="{}"]/following-sibling::span',
'drop-down':'//div[contains(@class, "slds-card")]/header[.//span[@title="{}"]]/parent::*/div/div/div/a[contains(@class, "slds-button")]',
'title':'//div[contains(@class, "slds-card")]/header[.//span[@title="{}"]]',
'viewall':'//a[.//span[text()="View All"]/span[text()="{}"]]',
'item':"//article[contains(@class, 'forceRelatedListCardDesktop')][.//img][.//span[@title='{}']]//h3//a",
'field_value': '//a[text()="{}"]/ancestor::li//div[contains(@class, "slds-item--detail")]//*[text()="{}"]',
'link':"//article[contains(@class, 'forceRelatedListCardDesktop')][.//img][.//span[@title='{}']]//table[contains(@class,'forceRecordLayout')]/tbody/tr[.//th/div/a[contains(@class,'textUnderline')]][.//td//span[text()='{}']]/th//a",
},
},
"alert": "//span[contains(@class,'toastMessage')]/a",
"alert-text":"//span[contains(@class,'toastMessage')]",
'popup': "//div[contains(@class, 'uiPopupTarget')][contains(@class, 'visible')]",
'test':'/html/body/div[6]/table/tbody/tr[23]/td[1]/a',
'frame_new':'//iframe[contains(@name, "{}") or contains(@title, "{}")]',
'frame':'//iframe[@title= "{}"]',
'frame_by_name': "//iframe[contains(@name, '${}')]",
'id':'//*[contains(@id,"{}")]',
'button':'//input[contains(@value,"{}")]',
'link-text':'//a[text()="{}"]',
'link-title':'//a[@title="{}"]',
'checkbox':'//div[contains(@class,"uiInputCheckbox")]/label/span[text()="{}"]/../following-sibling::input[@type="checkbox"]',
'table_checkbox':'//tbody/tr[./td[2]/a[text()="{}"]]/td/input[@type="checkbox"]',
'tab': "//div[@class='uiTabBar']/ul[@class='tabs__nav']/li[contains(@class,'uiTabItem')]/a[@class='tabHeader']/span[contains(text(), '{}')]",
'desktop_rendered': 'css: div.desktop.container.oneOne.oneAppLayoutHost[data-aura-rendered-by]',
'loading_box': 'css: div.auraLoadingBox.oneLoadingBox',
'spinner': 'css: div.slds-spinner',
'modal_field':"//div[contains(@class, 'lookupInput')][./label[contains(text(), '{}')]]/div//span[@class='lookupInput']/input",
'name':'//tbody/tr/th/span/a',
'select_name':'//tbody//a[text()= "{}"]',
'locate_dropdown':'//tbody/tr[{}]/td/span//div/a/lightning-icon',
'locating_delete_dropdown':'//tbody//a[text()= "{}"]/../../following-sibling::td/span//div/a/lightning-icon',
'related_name':'//tbody/tr/td/a[contains(@class,"forceOutputLookup")]',
'rel_loc_dd':'//tbody/tr[{}]/td[4]//lightning-primitive-icon',
'delete_icon':'//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]',
'aff_list':'//div[@role="tablist"]/following::div[@class = "container forceRelatedListSingleContainer"][7]/article/div[@class="slds-card__body"]/div/div/div/div/div/div/div/table/tbody/tr/td[1]',
'aff_status':'//table[contains(@class,"forceRecordLayout")]/tbody/tr[.//th/div/a[contains(@class,"textUnderline")]][.//td/a[@title="{}"]]/td[3]',
'aff_id':'//table[contains(@class,"forceRecordLayout")]/tbody/tr[.//th/div/a[contains(@class,"textUnderline")]][.//td/a[@title="{}"]]/th//a',
'click_aff_id':'//table[contains(@class,"forceRecordLayout")]/tbody/tr/th/div/a[text()="{}"]',
'check_status':'//div[contains(@class, "forcePageBlockItem")][.//span[text()="{}"]]//following-sibling::div[.//span[contains(@class, "test-id__field-value")]]/span',
'check_field':'//div[contains(@class, "forcePageBlockItem")][.//span[text()="{}"]]//following-sibling::div[.//span[contains(@class, "test-id__field-value")]]/span/div//a',
'account_list':'//tbody/tr/th[.//span[contains(@class, "slds-grid")]]/descendant::a[text()="{}"]',
'dd_options':'//*[@id="p3"]/option[text()="{}"]',
'related_list_items':'//div[@class = "forceRelatedListContainer"][.//a[contains(@class, "slds-card")]]//span[text() = "{}"]/ancestor::div[contains(@class, "slds-card")]/following-sibling::div[contains(@class, "slds-card")][.//div[contains(@class, "outputLookupContainer")]]//a[text()="{}"]',
'span_button':'//span[text()="{}"]',
'header_field_value':'//li[contains(@class, "slds-page-header__detail")][.//span[contains(@class, "slds-form-element__label")][@title="{}"]]//*[text()="{}"]',
'header_datepicker':'//li[contains(@class, "slds-page-header__detail")][.//p[contains(@class, "slds-text-heading--label")][@title="{}"]]//*[@class="uiOutputDate"]',
'select_one_record':"//tbody/tr[1]/th/span/a",
'click_search':'//div[@class="slds-form-element"][./label[text()="{}"]]/div/span/span/input[contains(@id,"inputX")]',
'field': "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//input",
'field_lookup_value': "//a[@role='option'][.//div[@title='{}']]",
'field-value':"//div[contains(@class,'slds-form-element')][./label[text()='{}']]/div/span",
'header':'//h1/div[contains(@title,"{}")]',
'check_related_list_item':'//div[@class = "forceRelatedListContainer"][.//a[contains(@class, "slds-card")]]//span[text() = "{}"]/ancestor::div[contains(@class, "slds-card")]/following-sibling::div[contains(@class, "slds-card")]//tbody//th//a[text()="{}"]',
'detail_page': {
'section_header':'//h3//span[text()="{}"]',
'address':'//h3[contains(@class, "slds-section__title")][.//span[contains(text(),"Address")]]/../..//div[contains(@class, "test-id")]/span[text()= "{}"]/../following-sibling::div//a/div[contains(@class, "slds")]',
'field':'//h3[contains(@class, "slds-section__title")][.//span[text()="{}"]]/../..//div[contains(@class, "test-id")]/span[text()= "{}"]/../following-sibling::div//span[text()="{}"]',
'verify_field_value':'//div[contains(@class, "forcePageBlockItem")]/div/div//span[text()="{}"]/../../div[2]/span/span[text() = "{}"]',
'edit_mode':{
'section_header':'//div[contains(@class,"forcePageBlockSectionEdit")]/h3//span[text()="{}"]',
},
},
'manage_hh_page':{
'address_link':'//h4[text()="{}"]',
'address':'//div[contains(@class, "uiInput")][.//label[contains(@class, "uiLabel")]/span[text()="{}"]]/',
'mhh_checkbox':'//*[@id="SortCanvas"]/li//a[text()="{}"]/ancestor::div[contains(@class, "slds-card__header")]/following-sibling::div[contains(@class,"slds-card__body")]//form//div//label/span[@id = "{}"]',
'button':'//*[text()="{}"]',
},
'opportunity':{
'contact_role':'//div[contains(@class,"listItemBody")][./h3//a[text()="{}"]]//parent::h3/following-sibling::ul/li/div[contains(@class,"forceListRecordItem")]/div[@title="Role:"]/following-sibling::div/span[text()="{}"]',
},
'object':{
'record':'//tbody//a[text()= "{}"]',
'button': "css: div.windowViewMode-normal ul.forceActionsContainer.oneActionsRibbon a[title='{}']",
'radio_button':"//div[contains(@class,'changeRecordTypeRightColumn')]/div/label[@class='slds-radio']/div[.//span[text()='{}']]/preceding::div[1]/span[@class='slds-radio--faux']",
},
'engagement_plan':{
'input_box':'//fieldset[./legend[text()="{}"]]/div[@class="slds-grid"]//div[@class="requiredInput"]/input',
'dropdown':'//div[contains(@class,"slds-p-top_small")]/label[text()="{}"]/following-sibling::div/select',
'checkbox':'//div[contains(@class,"slds-p-top_small")]/label[@class="slds-checkbox"][./span/following-sibling::{}[text()="{}"]/]',
'button':'//div[contains(@class,"slds-button-group")][.//span[text()="toTask {}"]]/button[contains(text(),"{}")]',
'check_eng_plan':'//h2/a/span[@title="{}"]//ancestor::div[@class = "slds-card__header slds-grid"]/following-sibling::div//tbody/tr/th/div/a',
'dd':'//h2/a/span[@title="{}"]//ancestor::div[@class = "slds-card__header slds-grid"]/following-sibling::div//tbody/tr/th/div/a/ancestor::th/following-sibling::td//lightning-primitive-icon',
'tasks':'//div[@class="slds-section__content"]/ul/li//a[text()="{}"]',
},
'levels':{
'id':'//input[contains(@id,"{}")]',
'select':'//select[contains(@id,"{}")]',
},
'payments':{
'date_loc':"//*[@id='pmtTable']/tbody/tr/td[3]/div//input",
'no_payments':'//tbody/tr[./th//a[contains(@title,"PMT")]]/td[3]',
'pays':'//tbody/tr[./th//a[contains(@title,"PMT")]]/td[.//span[text()="{}"]]',
'pay_amount':'//tbody/tr[{}]/td[3]/span/span[text()="{}"]',
'check_occurrence':'//h2/a/span[@title="{}"]/following-sibling::span',
'text':'//*[@id="j_id0:vfForm:j_id76:util_formfield:inputx:util_inputfield:inputX"]',
'field-value':"//div[contains(@class,'slds-form-element')][./span[text()='{}']]/following-sibling::div",
},
'gaus':{
'input_field':'//div[@class="slds-form-element"][./label[text()="{}"]]/div/input',
},
'npsp_settings':{
'panel_sub_link':'//ul/li/a[text()="{}"]',
'field_value':"//div[@class='slds-form-element'][./label[contains(text(),'{}')]]/div/span",
'side_panel':"//ul/div[contains(@id,'RecDonations')]/button[1]",
'list':"//div[contains(@class,'slds-form_horizontal')]/div[@class='slds-form-element']/label[text()='{}']/following-sibling::div/select",
'multi_list':'//div[contains(@class,"slds-form_horizontal")]/div[@class="slds-form-element"][./label[text()="{}"]]/div//select',
'list_val':'//div[@class="slds-form-element"][./label[text()="{}"]]/div/span[text()="{}"]',
'status':'//div[contains(@class,"slds-tile__title")][.//span[text()="{}"]]/div[contains(@class,"slds-col")]//span[text()="{}"]',
'button':'//form[.//h1[contains(text(),"{}")]]//input[contains(@value,"{}")]',
'completed':'//span[contains(@class, \'slds-theme_success\')]',
'batch-button':'//div[@id="{}"]//child::input[@value="{}"]'
},
'data_imports':{
'status':'//div[contains(@class,"slds-tile__title")][./p[text()="BDI_DataImport_BATCH"]]/div[contains(@class,"slds-col")]/span[text()="{}"]',
},
'bge':{
'checkbox':'//label/span[text()="{}"]//parent::label/span[@class="slds-checkbox_faux"]',
'field-duellist':'//label[text()="{}"]/following-sibling::lightning-dual-listbox//div[contains(@class,"slds-dueling-list__column")][./span[text()="{}"]]//div[contains(@class,"slds-dueling-list__options")]/ul/li//span[text()="{}"]',
'duellist':'//h3[./span[text()="{}"]]/following-sibling::div//div[contains(@class,"slds-dueling-list__column")][./span[text()="{}"]]//div[contains(@class,"slds-dueling-list__options")]/ul/li//span[text()="{}"]',
'duellist2':'//div/div[text()="{}"]/following-sibling::div//div[contains(@class,"slds-dueling-list__column")][./span[text()="{}"]]//div[contains(@class,"slds-dueling-list__options")]/ul/li//span[text()="{}"]',
'field-select-button':'//label[text()="{}"]/following-sibling::lightning-dual-listbox//div[contains(@class,"slds-dueling-list__column")]//button[@title="{}"]',
'select-button':'//h3[./span[text()="{}"]]/following-sibling::div//div[contains(@class,"slds-dueling-list__column")]//button[@title="{}"]',
'select-button2':'//div/div[text()="{}"]/following-sibling::div//div[contains(@class,"slds-dueling-list__column")]//button[@title="{}"]',
'title':'//p[text()="{}"]/following-sibling::h1',
'field-input':'//label[text()="{}"]/following-sibling::div/input',
'field-text':'//label[text()="{}"]/following-sibling::div/textarea',
'button':'//button[text()="{}"]',
'month':"//div[@class='slds-align-middle']//button[@title='{}']",
'date':"//div[contains(@class,'slds-datepicker')]/table[@class='slds-datepicker__month']//span[text()='{}']",
'card-header':'//article[./div[@class="slds-card__body"]//lightning-formatted-text[text()="{}"]]/header',
'edit_button':'//td[@data-label="{}"]//button',
'edit_field':'//lightning-primitive-datatable-iedit-panel//input',
'count':'//div[contains(@class,"BGE_DataImportBatchEntry")]//tbody/tr',
'value':'//td[@data-label="{}"]//a',
'name':'//div[contains(@class,"BGE_DataImportBatchEntry")]//tbody/tr/th//a',
'locate_dropdown':'//div[contains(@class,"BGE_DataImportBatchEntry")]//tbody/tr[{}]/td[6]//div//button[./span[text()="Show actions"]]/lightning-primitive-icon',
'gift-amount':'//div[./label[text()="{}"]]',
},
'bge-lists':{
'list1':"//div[./label[text()='{}']]/div//select",
'list2':"//div[contains(@class,'slds-grid')]/div[contains(@class,'slds-text-align_left')]/span[text()='{}']/../following-sibling::div//select",
'list3':"//div[./label/span[text()='{}']]/div//select",
},
'bge-duellist-btn':{
'select-button':'//h3[./span[text()="{}"]]/following-sibling::div//div[contains(@class,"slds-dueling-list__column")]//button[@title="{}"]',
'select-button2':'//div/div[text()="{}"]/following-sibling::div//div[contains(@class,"slds-dueling-list__column")]//button[@title="{}"]',
'field-select-button':'//label[text()="{}"]/following-sibling::lightning-dual-listbox//div[contains(@class,"slds-dueling-list__column")]//button[@title="{}"]',
},
'object_manager':{
'button':'//input[@title="{}"]',
}
}
extra_locators={
'related_list_items1':'//div[@class = "forceRelatedListContainer"][.//a[contains(@class, "slds-card")]]//span[text() = "Relationships"]/ancestor::div[contains(@class, "slds-card")]/following-sibling::div[contains(@class, "slds-card")]//tbody//td/span[text()="{}"]',
}
dnd={ ""
} | bsd-3-clause |
dhanunjaya/neutron | neutron/plugins/ml2/drivers/type_vxlan.py | 28 | 3577 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
import sqlalchemy as sa
from sqlalchemy import sql
from neutron.common import exceptions as n_exc
from neutron.db import model_base
from neutron.i18n import _LE
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2.drivers import type_tunnel
LOG = log.getLogger(__name__)
vxlan_opts = [
cfg.ListOpt('vni_ranges',
default=[],
help=_("Comma-separated list of <vni_min>:<vni_max> tuples "
"enumerating ranges of VXLAN VNI IDs that are "
"available for tenant network allocation")),
cfg.StrOpt('vxlan_group',
help=_("Multicast group for VXLAN. If unset, disables VXLAN "
"multicast mode.")),
]
cfg.CONF.register_opts(vxlan_opts, "ml2_type_vxlan")
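# Illustrative ml2_conf.ini values for this type driver (example values only,
# following the <vni_min>:<vni_max> format described in the option help):
#
#   [ml2_type_vxlan]
#   vni_ranges = 1001:2000,3001:4000
#   vxlan_group = 239.1.1.1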
class VxlanAllocation(model_base.BASEV2):
__tablename__ = 'ml2_vxlan_allocations'
vxlan_vni = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False, default=False,
server_default=sql.false(), index=True)
class VxlanEndpoints(model_base.BASEV2):
"""Represents tunnel endpoint in RPC mode."""
__tablename__ = 'ml2_vxlan_endpoints'
__table_args__ = (
sa.UniqueConstraint('host',
name='unique_ml2_vxlan_endpoints0host'),
model_base.BASEV2.__table_args__
)
ip_address = sa.Column(sa.String(64), primary_key=True)
udp_port = sa.Column(sa.Integer, nullable=False)
host = sa.Column(sa.String(255), nullable=True)
def __repr__(self):
return "<VxlanTunnelEndpoint(%s)>" % self.ip_address
class VxlanTypeDriver(type_tunnel.EndpointTunnelTypeDriver):
def __init__(self):
super(VxlanTypeDriver, self).__init__(
VxlanAllocation, VxlanEndpoints)
def get_type(self):
return p_const.TYPE_VXLAN
def initialize(self):
try:
self._initialize(cfg.CONF.ml2_type_vxlan.vni_ranges)
except n_exc.NetworkTunnelRangeError:
LOG.exception(_LE("Failed to parse vni_ranges. "
"Service terminated!"))
raise SystemExit()
def get_endpoints(self):
"""Get every vxlan endpoints from database."""
vxlan_endpoints = self._get_endpoints()
return [{'ip_address': vxlan_endpoint.ip_address,
'udp_port': vxlan_endpoint.udp_port,
'host': vxlan_endpoint.host}
for vxlan_endpoint in vxlan_endpoints]
def add_endpoint(self, ip, host, udp_port=p_const.VXLAN_UDP_PORT):
return self._add_endpoint(ip, host, udp_port=udp_port)
def get_mtu(self, physical_network=None):
mtu = super(VxlanTypeDriver, self).get_mtu()
return mtu - p_const.VXLAN_ENCAP_OVERHEAD if mtu else 0
| apache-2.0 |
albertomurillo/ansible | lib/ansible/modules/cloud/kubevirt/kubevirt_template.py | 16 | 14883 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: kubevirt_template
short_description: Manage KubeVirt templates
description:
- Use Openshift Python SDK to manage the state of KubeVirt templates.
version_added: "2.8"
author: KubeVirt Team (@kubevirt)
options:
name:
description:
- Name of the Template object.
required: true
type: str
namespace:
description:
- Namespace where the Template object exists.
required: true
type: str
objects:
description:
- List of any valid API objects, such as a I(DeploymentConfig), I(Service), etc. The object
will be created exactly as defined here, with any parameter values substituted in prior to creation.
The definition of these objects can reference parameters defined earlier.
- As part of the list the user can also pass the I(VirtualMachine) kind. When passing I(VirtualMachine)
the user must use the Ansible structure of the parameters, not the Kubernetes API structure. For more
information please take a look at the M(kubevirt_vm) module and at the EXAMPLES section, where you can see an example.
type: list
merge_type:
description:
- Whether to override the default patch merge approach with a specific type. By default, the strategic
merge will typically be used.
type: list
choices: [ json, merge, strategic-merge ]
display_name:
description:
- "A brief, user-friendly name, which can be employed by user interfaces."
type: str
description:
description:
- A description of the template.
- Include enough detail that the user will understand what is being deployed
and any caveats they need to know before deploying. It should also provide
links to additional information, such as a README file.
type: str
long_description:
description:
- "Additional template description. This may be displayed by the service catalog, for example."
type: str
provider_display_name:
description:
- "The name of the person or organization providing the template."
type: str
documentation_url:
description:
- "A URL referencing further documentation for the template."
type: str
support_url:
description:
- "A URL where support can be obtained for the template."
type: str
editable:
description:
- "Extension for hinting at which elements should be considered editable.
List of jsonpath selectors. The jsonpath root is the objects: element of the template."
- This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: list
default_disk:
description:
- "The goal of default disk is to define what kind of disk is supported by the OS mainly in
terms of bus (ide, scsi, sata, virtio, ...)"
- The C(default_disk) parameter define configuration overlay for disks that will be applied on top of disks
during virtual machine creation to define global compatibility and/or performance defaults defined here.
- This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: dict
default_volume:
description:
- "The goal of default volume is to be able to configure mostly performance parameters like
caches if those are exposed by the underlying volume implementation."
- The C(default_volume) parameter define configuration overlay for volumes that will be applied on top of volumes
during virtual machine creation to define global compatibility and/or performance defaults defined here.
- This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: dict
default_nic:
description:
- "The goal of default network is similar to I(default_disk) and should be used as a template
to ensure OS compatibility and performance."
- The C(default_nic) parameter define configuration overlay for nic that will be applied on top of nics
during virtual machine creation to define global compatibility and/or performance defaults defined here.
- This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: dict
default_network:
description:
- "The goal of default network is similar to I(default_volume) and should be used as a template
that specifies performance and connection parameters (L2 bridge for example)"
- The C(default_network) parameter define configuration overlay for networks that will be applied on top of networks
during virtual machine creation to define global compatibility and/or performance defaults defined here.
- This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: dict
icon_class:
description:
- "An icon to be displayed with your template in the web console. Choose from our existing logo
icons when possible. You can also use icons from FontAwesome. Alternatively, provide icons through
CSS customizations that can be added to an OpenShift Container Platform cluster that uses your template.
You must specify an icon class that exists, or it will prevent falling back to the generic icon."
type: str
parameters:
description:
- "Parameters allow a value to be supplied by the user or generated when the template is instantiated.
Then, that value is substituted wherever the parameter is referenced. References can be defined in any
field in the objects list field. This is useful for generating random passwords or allowing the user to
supply a host name or other user-specific value that is required to customize the template."
- "More information can be foud at: U(https://docs.openshift.com/container-platform/3.6/dev_guide/templates.html#writing-parameters)"
type: list
version:
description:
- Template structure version.
- This parameter can be used only when the kubevirt addon is installed on your openshift cluster.
type: str
extends_documentation_fragment:
- k8s_auth_options
- k8s_state_options
requirements:
- python >= 2.7
- openshift >= 0.8.2
'''
EXAMPLES = '''
- name: Create template 'mytemplate'
kubevirt_template:
state: present
name: myvmtemplate
namespace: templates
display_name: Generic cirros template
description: Basic cirros template
long_description: Verbose description of cirros template
provider_display_name: Just Be Cool, Inc.
documentation_url: http://theverycoolcompany.com
support_url: http://support.theverycoolcompany.com
icon_class: icon-linux
default_disk:
disk:
bus: virtio
default_nic:
model: virtio
default_network:
resource:
resourceName: bridge.network.kubevirt.io/cnvmgmt
default_volume:
containerDisk:
image: kubevirt/cirros-container-disk-demo:latest
objects:
- name: ${NAME}
kind: VirtualMachine
memory: ${MEMORY_SIZE}
state: present
namespace: vms
parameters:
- name: NAME
description: VM name
generate: expression
from: 'vm-[A-Za-z0-9]{8}'
- name: MEMORY_SIZE
description: Memory size
value: 1Gi
- name: Remove template 'myvmtemplate'
kubevirt_template:
state: absent
name: myvmtemplate
namespace: templates
'''
RETURN = '''
kubevirt_template:
description:
- The template dictionary specification returned by the API.
returned: success
type: complex
contains: {}
'''
import copy
import traceback
from ansible.module_utils.k8s.common import AUTH_ARG_SPEC
from ansible.module_utils.kubevirt import (
virtdict,
KubeVirtRawModule,
API_GROUP,
MAX_SUPPORTED_API_VERSION
)
TEMPLATE_ARG_SPEC = {
'name': {'required': True},
'namespace': {'required': True},
'state': {
'default': 'present',
'choices': ['present', 'absent'],
},
'force': {
'type': 'bool',
'default': False,
},
'merge_type': {
'type': 'list',
'choices': ['json', 'merge', 'strategic-merge']
},
'objects': {
'type': 'list',
},
'display_name': {
'type': 'str',
},
'description': {
'type': 'str',
},
'long_description': {
'type': 'str',
},
'provider_display_name': {
'type': 'str',
},
'documentation_url': {
'type': 'str',
},
'support_url': {
'type': 'str',
},
'icon_class': {
'type': 'str',
},
'version': {
'type': 'str',
},
'editable': {
'type': 'list',
},
'default_disk': {
'type': 'dict',
},
'default_volume': {
'type': 'dict',
},
'default_network': {
'type': 'dict',
},
'default_nic': {
'type': 'dict',
},
'parameters': {
'type': 'list',
},
}
class KubeVirtVMTemplate(KubeVirtRawModule):
@property
def argspec(self):
""" argspec property builder """
argument_spec = copy.deepcopy(AUTH_ARG_SPEC)
argument_spec.update(TEMPLATE_ARG_SPEC)
return argument_spec
def execute_module(self):
# Parse parameters specific for this module:
definition = virtdict()
# Execute the CRUD of VM template:
kind = 'Template'
template_api_version = 'template.openshift.io/v1'
# Fill in template parameters:
definition['parameters'] = self.params.get('parameters')
# Fill in the default Label
labels = definition['metadata']['labels']
labels['template.cnv.io/type'] = 'vm'
# Fill in Openshift/Kubevirt template annotations:
annotations = definition['metadata']['annotations']
if self.params.get('display_name'):
annotations['openshift.io/display-name'] = self.params.get('display_name')
if self.params.get('description'):
annotations['description'] = self.params.get('description')
if self.params.get('long_description'):
annotations['openshift.io/long-description'] = self.params.get('long_description')
if self.params.get('provider_display_name'):
annotations['openshift.io/provider-display-name'] = self.params.get('provider_display_name')
if self.params.get('documentation_url'):
annotations['openshift.io/documentation-url'] = self.params.get('documentation_url')
if self.params.get('support_url'):
annotations['openshift.io/support-url'] = self.params.get('support_url')
if self.params.get('icon_class'):
annotations['iconClass'] = self.params.get('icon_class')
if self.params.get('version'):
annotations['template.cnv.io/version'] = self.params.get('version')
# TODO: Make it more Ansiblish, so user don't have to specify API JSON path, but rather Ansible params:
if self.params.get('editable'):
annotations['template.cnv.io/editable'] = self.params.get('editable')
# Set defaults annotations:
if self.params.get('default_disk'):
annotations['defaults.template.cnv.io/disk'] = self.params.get('default_disk').get('name')
if self.params.get('default_volume'):
annotations['defaults.template.cnv.io/volume'] = self.params.get('default_volume').get('name')
if self.params.get('default_nic'):
annotations['defaults.template.cnv.io/nic'] = self.params.get('default_nic').get('name')
if self.params.get('default_network'):
annotations['defaults.template.cnv.io/network'] = self.params.get('default_network').get('name')
# Process objects:
self.client = self.get_api_client()
definition['objects'] = []
objects = self.params.get('objects') or []
for obj in objects:
if obj['kind'] != 'VirtualMachine':
definition['objects'].append(obj)
else:
vm_definition = virtdict()
# Set VM defaults:
if self.params.get('default_disk'):
vm_definition['spec']['template']['spec']['domain']['devices']['disks'] = [self.params.get('default_disk')]
if self.params.get('default_volume'):
vm_definition['spec']['template']['spec']['volumes'] = [self.params.get('default_volume')]
if self.params.get('default_nic'):
vm_definition['spec']['template']['spec']['domain']['devices']['interfaces'] = [self.params.get('default_nic')]
if self.params.get('default_network'):
vm_definition['spec']['template']['spec']['networks'] = [self.params.get('default_network')]
# Set kubevirt API version:
vm_definition['apiVersion'] = '%s/%s' % (API_GROUP, MAX_SUPPORTED_API_VERSION)
# Construct k8s vm API object:
vm_template = vm_definition['spec']['template']
dummy, vm_def = self.construct_vm_template_definition('VirtualMachine', vm_definition, vm_template, obj)
definition['objects'].append(vm_def)
# Create template:
resource = self.client.resources.get(api_version=template_api_version, kind=kind, name='templates')
definition = self.set_defaults(resource, definition)
result = self.perform_action(resource, definition)
# Return from the module:
self.exit_json(**{
'changed': result['changed'],
'kubevirt_template': result.pop('result'),
'result': result,
})
def main():
module = KubeVirtVMTemplate()
try:
module.execute_module()
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
if __name__ == '__main__':
main()
| gpl-3.0 |
waseem18/oh-mainline | vendor/packages/python-openid/examples/djopenid/consumer/views.py | 63 | 8152 |
from django import http
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
from openid.consumer import consumer
from openid.consumer.discover import DiscoveryFailure
from openid.extensions import ax, pape, sreg
from openid.yadis.constants import YADIS_HEADER_NAME, YADIS_CONTENT_TYPE
from openid.server.trustroot import RP_RETURN_TO_URL_TYPE
from djopenid import util
PAPE_POLICIES = [
'AUTH_PHISHING_RESISTANT',
'AUTH_MULTI_FACTOR',
'AUTH_MULTI_FACTOR_PHYSICAL',
]
# List of (name, uri) for use in generating the request form.
POLICY_PAIRS = [(p, getattr(pape, p))
for p in PAPE_POLICIES]
def getOpenIDStore():
"""
Return an OpenID store object fit for the currently-chosen
database backend, if any.
"""
return util.getOpenIDStore('/tmp/djopenid_c_store', 'c_')
def getConsumer(request):
"""
Get a Consumer object to perform OpenID authentication.
"""
return consumer.Consumer(request.session, getOpenIDStore())
def renderIndexPage(request, **template_args):
template_args['consumer_url'] = util.getViewURL(request, startOpenID)
template_args['pape_policies'] = POLICY_PAIRS
response = direct_to_template(
request, 'consumer/index.html', template_args)
response[YADIS_HEADER_NAME] = util.getViewURL(request, rpXRDS)
return response
def startOpenID(request):
"""
Start the OpenID authentication process. Renders an
authentication form and accepts its POST.
* Renders an error message if OpenID cannot be initiated
* Requests some Simple Registration data using the OpenID
library's Simple Registration machinery
* Generates the appropriate trust root and return URL values for
this application (tweak where appropriate)
* Generates the appropriate redirect based on the OpenID protocol
version.
"""
if request.POST:
# Start OpenID authentication.
openid_url = request.POST['openid_identifier']
c = getConsumer(request)
error = None
try:
auth_request = c.begin(openid_url)
except DiscoveryFailure, e:
# Some other protocol-level failure occurred.
error = "OpenID discovery error: %s" % (str(e),)
if error:
# Render the page with an error.
return renderIndexPage(request, error=error)
# Add Simple Registration request information. Some fields
# are optional, some are required. It's possible that the
# server doesn't support sreg or won't return any of the
# fields.
sreg_request = sreg.SRegRequest(optional=['email', 'nickname'],
required=['dob'])
auth_request.addExtension(sreg_request)
# Add Attribute Exchange request information.
ax_request = ax.FetchRequest()
# XXX - uses myOpenID-compatible schema values, which are
# not those listed at axschema.org.
ax_request.add(
ax.AttrInfo('http://schema.openid.net/namePerson',
required=True))
ax_request.add(
ax.AttrInfo('http://schema.openid.net/contact/web/default',
required=False, count=ax.UNLIMITED_VALUES))
auth_request.addExtension(ax_request)
# Add PAPE request information. We'll ask for
# phishing-resistant auth and display any policies we get in
# the response.
requested_policies = []
policy_prefix = 'policy_'
for k, v in request.POST.iteritems():
if k.startswith(policy_prefix):
policy_attr = k[len(policy_prefix):]
if policy_attr in PAPE_POLICIES:
requested_policies.append(getattr(pape, policy_attr))
if requested_policies:
pape_request = pape.Request(requested_policies)
auth_request.addExtension(pape_request)
# Compute the trust root and return URL values to build the
# redirect information.
trust_root = util.getViewURL(request, startOpenID)
return_to = util.getViewURL(request, finishOpenID)
# Send the browser to the server either by sending a redirect
# URL or by generating a POST form.
if auth_request.shouldSendRedirect():
url = auth_request.redirectURL(trust_root, return_to)
return HttpResponseRedirect(url)
else:
# Beware: this renders a template whose content is a form
# and some javascript to submit it upon page load. Non-JS
# users will have to click the form submit button to
# initiate OpenID authentication.
form_id = 'openid_message'
form_html = auth_request.formMarkup(trust_root, return_to,
False, {'id': form_id})
return direct_to_template(
request, 'consumer/request_form.html', {'html': form_html})
return renderIndexPage(request)
def finishOpenID(request):
"""
Finish the OpenID authentication process. Invoke the OpenID
library with the response from the OpenID server and render a page
detailing the result.
"""
result = {}
# Because the object containing the query parameters is a
# MultiValueDict and the OpenID library doesn't allow that, we'll
# convert it to a normal dict.
# OpenID 2 can send arguments as either POST body or GET query
# parameters.
request_args = util.normalDict(request.GET)
if request.method == 'POST':
request_args.update(util.normalDict(request.POST))
if request_args:
c = getConsumer(request)
# Get a response object indicating the result of the OpenID
# protocol.
return_to = util.getViewURL(request, finishOpenID)
response = c.complete(request_args, return_to)
# Get a Simple Registration response object if response
# information was included in the OpenID response.
sreg_response = {}
ax_items = {}
if response.status == consumer.SUCCESS:
sreg_response = sreg.SRegResponse.fromSuccessResponse(response)
ax_response = ax.FetchResponse.fromSuccessResponse(response)
if ax_response:
ax_items = {
'fullname': ax_response.get(
'http://schema.openid.net/namePerson'),
'web': ax_response.get(
'http://schema.openid.net/contact/web/default'),
}
# Get a PAPE response object if response information was
# included in the OpenID response.
pape_response = None
if response.status == consumer.SUCCESS:
pape_response = pape.Response.fromSuccessResponse(response)
if not pape_response.auth_policies:
pape_response = None
# Map different consumer status codes to template contexts.
results = {
consumer.CANCEL:
{'message': 'OpenID authentication cancelled.'},
consumer.FAILURE:
{'error': 'OpenID authentication failed.'},
consumer.SUCCESS:
{'url': response.getDisplayIdentifier(),
'sreg': sreg_response and sreg_response.items(),
'ax': ax_items.items(),
'pape': pape_response}
}
result = results[response.status]
if isinstance(response, consumer.FailureResponse):
# In a real application, this information should be
# written to a log for debugging/tracking OpenID
# authentication failures. In general, the messages are
# not user-friendly, but intended for developers.
result['failure_reason'] = response.message
return renderIndexPage(request, **result)
def rpXRDS(request):
"""
Return a relying party verification XRDS document
"""
return util.renderXRDS(
request,
[RP_RETURN_TO_URL_TYPE],
[util.getViewURL(request, finishOpenID)])
| agpl-3.0 |
auduny/home-assistant | homeassistant/components/demo/fan.py | 7 | 2725 | """Demo fan platform that has a fake fan."""
from homeassistant.const import STATE_OFF
from homeassistant.components.fan import (
SPEED_HIGH, SPEED_LOW, SPEED_MEDIUM, SUPPORT_DIRECTION, SUPPORT_OSCILLATE,
SUPPORT_SET_SPEED, FanEntity)
FULL_SUPPORT = SUPPORT_SET_SPEED | SUPPORT_OSCILLATE | SUPPORT_DIRECTION
LIMITED_SUPPORT = SUPPORT_SET_SPEED
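# Support flags are bitmasks, so e.g. FULL_SUPPORT & SUPPORT_DIRECTION is
# truthy while LIMITED_SUPPORT & SUPPORT_DIRECTION is 0; __init__ below uses
# exactly this test to decide which optional attributes to initialize.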
def setup_platform(hass, config, add_entities_callback, discovery_info=None):
"""Set up the demo fan platform."""
add_entities_callback([
DemoFan(hass, "Living Room Fan", FULL_SUPPORT),
DemoFan(hass, "Ceiling Fan", LIMITED_SUPPORT),
])
class DemoFan(FanEntity):
"""A demonstration fan component."""
def __init__(self, hass, name: str, supported_features: int) -> None:
"""Initialize the entity."""
self.hass = hass
self._supported_features = supported_features
self._speed = STATE_OFF
self.oscillating = None
self.direction = None
self._name = name
if supported_features & SUPPORT_OSCILLATE:
self.oscillating = False
if supported_features & SUPPORT_DIRECTION:
self.direction = "forward"
@property
def name(self) -> str:
"""Get entity name."""
return self._name
@property
def should_poll(self):
"""No polling needed for a demo fan."""
return False
@property
def speed(self) -> str:
"""Return the current speed."""
return self._speed
@property
def speed_list(self) -> list:
"""Get the list of available speeds."""
return [STATE_OFF, SPEED_LOW, SPEED_MEDIUM, SPEED_HIGH]
def turn_on(self, speed: str = None, **kwargs) -> None:
"""Turn on the entity."""
if speed is None:
speed = SPEED_MEDIUM
self.set_speed(speed)
def turn_off(self, **kwargs) -> None:
"""Turn off the entity."""
self.oscillate(False)
self.set_speed(STATE_OFF)
def set_speed(self, speed: str) -> None:
"""Set the speed of the fan."""
self._speed = speed
self.schedule_update_ha_state()
def set_direction(self, direction: str) -> None:
"""Set the direction of the fan."""
self.direction = direction
self.schedule_update_ha_state()
def oscillate(self, oscillating: bool) -> None:
"""Set oscillation."""
self.oscillating = oscillating
self.schedule_update_ha_state()
@property
def current_direction(self) -> str:
"""Fan direction."""
return self.direction
@property
def supported_features(self) -> int:
"""Flag supported features."""
return self._supported_features
| apache-2.0 |
Ballz0fSteel/Umeko | lib/pip/_vendor/packaging/version.py | 1151 | 11556 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
from ._structures import Infinity
__all__ = [
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]
_Version = collections.namedtuple(
"_Version",
["epoch", "release", "dev", "pre", "post", "local"],
)
def parse(version):
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version)
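# Illustrative doctest-style sketch of the dispatch above:
#   >>> parse("1.0")
#   <Version('1.0')>
#   >>> parse("not-a-version")  # fails PEP 440 parsing, falls back
#   <LegacyVersion('not-a-version')>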
class InvalidVersion(ValueError):
"""
An invalid version was found, users should refer to PEP 440.
"""
class _BaseVersion(object):
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, _BaseVersion):
return NotImplemented
return method(self._key, other._key)
class LegacyVersion(_BaseVersion):
def __init__(self, version):
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self):
return self._version
def __repr__(self):
return "<LegacyVersion({0})>".format(repr(str(self)))
@property
def public(self):
return self._version
@property
def base_version(self):
return self._version
@property
def local(self):
return None
@property
def is_prerelease(self):
return False
@property
def is_postrelease(self):
return False
_legacy_version_component_re = re.compile(
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)
_legacy_version_replacement_map = {
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}
def _parse_version_parts(s):
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
if not part or part == ".":
continue
if part[:1] in "0123456789":
# pad for numeric comparison
yield part.zfill(8)
else:
yield "*" + part
# ensure that alpha/beta/candidate are before final
yield "*final"
def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
    # greater than or equal to 0. This will effectively sort the LegacyVersion,
    # which uses the de facto standard originally implemented by setuptools,
    # before all PEP 440 versions.
epoch = -1
    # This scheme is taken from pkg_resources.parse_version of setuptools prior
    # to its adoption of the packaging library.
parts = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
if part < "*final":
while parts and parts[-1] == "*final-":
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == "00000000":
parts.pop()
parts.append(part)
parts = tuple(parts)
return epoch, parts
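# Illustrative sketch of the resulting key (the epoch is always -1 here):
#   >>> _legacy_cmpkey("1.0")
#   (-1, ('00000001', '*final'))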
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
(?P<post> # post release
(?:-(?P<post_n1>[0-9]+))
|
(?:
[-_\.]?
(?P<post_l>post|rev|r)
[-_\.]?
(?P<post_n2>[0-9]+)?
)
)?
(?P<dev> # dev release
[-_\.]?
(?P<dev_l>dev)
[-_\.]?
(?P<dev_n>[0-9]+)?
)?
)
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
class Version(_BaseVersion):
_regex = re.compile(
r"^\s*" + VERSION_PATTERN + r"\s*$",
re.VERBOSE | re.IGNORECASE,
)
def __init__(self, version):
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion("Invalid version: '{0}'".format(version))
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(
match.group("pre_l"),
match.group("pre_n"),
),
post=_parse_letter_version(
match.group("post_l"),
match.group("post_n1") or match.group("post_n2"),
),
dev=_parse_letter_version(
match.group("dev_l"),
match.group("dev_n"),
),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
def __repr__(self):
return "<Version({0})>".format(repr(str(self)))
def __str__(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
# Pre-release
if self._version.pre is not None:
parts.append("".join(str(x) for x in self._version.pre))
# Post-release
if self._version.post is not None:
parts.append(".post{0}".format(self._version.post[1]))
# Development release
if self._version.dev is not None:
parts.append(".dev{0}".format(self._version.dev[1]))
# Local version segment
if self._version.local is not None:
parts.append(
"+{0}".format(".".join(str(x) for x in self._version.local))
)
return "".join(parts)
@property
def public(self):
return str(self).split("+", 1)[0]
@property
def base_version(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
return "".join(parts)
@property
def local(self):
version_string = str(self)
if "+" in version_string:
return version_string.split("+", 1)[1]
@property
def is_prerelease(self):
return bool(self._version.dev or self._version.pre)
@property
def is_postrelease(self):
return bool(self._version.post)
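# Illustrative sketch of PEP 440 ordering as implemented by _cmpkey below:
#   >>> Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")
#   True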
def _parse_letter_version(letter, number):
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
if number is None:
number = 0
# We normalize any letters to their lower case form
letter = letter.lower()
# We consider some words to be alternate spellings of other words and
# in those cases we want to normalize the spellings to our preferred
# spelling.
if letter == "alpha":
letter = "a"
elif letter == "beta":
letter = "b"
elif letter in ["c", "pre", "preview"]:
letter = "rc"
elif letter in ["rev", "r"]:
letter = "post"
return letter, int(number)
if not letter and number:
# We assume if we are given a number, but we are not given a letter
# then this is using the implicit post release syntax (e.g. 1.0-1)
letter = "post"
return letter, int(number)
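# Illustrative sketch of the normalizations above:
#   >>> _parse_letter_version("alpha", None)
#   ('a', 0)
#   >>> _parse_letter_version(None, "1")   # implicit post release, e.g. 1.0-1
#   ('post', 1)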
_local_version_seperators = re.compile(r"[\._-]")
def _parse_local_version(local):
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_seperators.split(local)
)
def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the now
    # leading zeros until we come to something non-zero, then re-reverse the
    # rest back into the correct order, and use that tuple as our sorting key.
release = tuple(
reversed(list(
itertools.dropwhile(
lambda x: x == 0,
reversed(release),
)
))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
# We'll do this by abusing the pre segment, but we _only_ want to do this
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
pre = -Infinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
pre = Infinity
# Versions without a post segment should sort before those with one.
if post is None:
post = -Infinity
# Versions without a development segment should sort after those with one.
if dev is None:
dev = Infinity
if local is None:
# Versions without a local segment should sort before those with one.
local = -Infinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
# - Alpha numeric segments sort before numeric segments
# - Alpha numeric segments sort lexicographically
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
local = tuple(
(i, "") if isinstance(i, int) else (-Infinity, i)
for i in local
)
return epoch, release, pre, post, dev, local
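# Illustrative sketch of the dev-release trick described above: with only a
# dev segment the pre slot becomes -Infinity, sorting 1.0.dev0 before 1.0a0:
#   >>> _cmpkey(0, (1,), None, None, ("dev", 0), None)[2]
#   -Infinity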
| gpl-3.0 |
jamespcole/home-assistant | tests/helpers/test_template.py | 4 | 39409 | """Test Home Assistant template helper methods."""
import asyncio
from datetime import datetime
import unittest
import random
import math
import pytz
from unittest.mock import patch
from homeassistant.components import group
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import template
from homeassistant.util.unit_system import UnitSystem
from homeassistant.const import (
LENGTH_METERS,
TEMP_CELSIUS,
MASS_GRAMS,
PRESSURE_PA,
VOLUME_LITERS,
MATCH_ALL,
)
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant
import pytest
class TestHelpersTemplate(unittest.TestCase):
"""Test the Template."""
# pylint: disable=invalid-name
def setUp(self):
"""Set up the tests."""
self.hass = get_test_home_assistant()
self.hass.config.units = UnitSystem('custom', TEMP_CELSIUS,
LENGTH_METERS, VOLUME_LITERS,
MASS_GRAMS, PRESSURE_PA)
# pylint: disable=invalid-name
def tearDown(self):
"""Stop down stuff we started."""
self.hass.stop()
def test_referring_states_by_entity_id(self):
"""Test referring states by entity id."""
self.hass.states.set('test.object', 'happy')
assert 'happy' == \
template.Template(
'{{ states.test.object.state }}', self.hass).render()
def test_iterating_all_states(self):
"""Test iterating all states."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.temperature', 10)
assert '10happy' == \
template.Template(
'{% for state in states %}{{ state.state }}{% endfor %}',
self.hass).render()
def test_iterating_domain_states(self):
"""Test iterating domain states."""
self.hass.states.set('test.object', 'happy')
self.hass.states.set('sensor.back_door', 'open')
self.hass.states.set('sensor.temperature', 10)
assert 'open10' == \
template.Template("""
{% for state in states.sensor %}{{ state.state }}{% endfor %}
""", self.hass).render()
def test_float(self):
"""Test float."""
self.hass.states.set('sensor.temperature', '12')
assert '12.0' == \
template.Template(
'{{ float(states.sensor.temperature.state) }}',
self.hass).render()
assert 'True' == \
template.Template(
'{{ float(states.sensor.temperature.state) > 11 }}',
self.hass).render()
def test_rounding_value(self):
"""Test rounding value."""
self.hass.states.set('sensor.temperature', 12.78)
assert '12.8' == \
template.Template(
'{{ states.sensor.temperature.state | round(1) }}',
self.hass).render()
assert '128' == \
template.Template(
'{{ states.sensor.temperature.state | multiply(10) | round }}',
self.hass).render()
assert '12.7' == \
template.Template(
'{{ states.sensor.temperature.state | round(1, "floor") }}',
self.hass).render()
assert '12.8' == \
template.Template(
'{{ states.sensor.temperature.state | round(1, "ceil") }}',
self.hass).render()
def test_rounding_value_get_original_value_on_error(self):
"""Test rounding value get original value on error."""
assert 'None' == \
template.Template('{{ None | round }}', self.hass).render()
assert 'no_number' == \
template.Template(
'{{ "no_number" | round }}', self.hass).render()
def test_multiply(self):
"""Test multiply."""
tests = {
None: 'None',
10: '100',
'"abcd"': 'abcd'
}
for inp, out in tests.items():
assert out == \
template.Template('{{ %s | multiply(10) | round }}' % inp,
self.hass).render()
def test_logarithm(self):
"""Test logarithm."""
tests = [
(4, 2, '2.0'),
(1000, 10, '3.0'),
(math.e, '', '1.0'),
('"invalid"', '_', 'invalid'),
(10, '"invalid"', '10.0'),
]
for value, base, expected in tests:
assert expected == \
template.Template(
'{{ %s | log(%s) | round(1) }}' % (value, base),
self.hass).render()
assert expected == \
template.Template(
'{{ log(%s, %s) | round(1) }}' % (value, base),
self.hass).render()
def test_sine(self):
"""Test sine."""
tests = [
(0, '0.0'),
(math.pi / 2, '1.0'),
(math.pi, '0.0'),
(math.pi * 1.5, '-1.0'),
(math.pi / 10, '0.309')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | sin | round(3) }}' % value,
self.hass).render()
def test_cos(self):
"""Test cosine."""
tests = [
(0, '1.0'),
(math.pi / 2, '0.0'),
(math.pi, '-1.0'),
(math.pi * 1.5, '-0.0'),
(math.pi / 10, '0.951')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | cos | round(3) }}' % value,
self.hass).render()
def test_tan(self):
"""Test tangent."""
tests = [
(0, '0.0'),
(math.pi, '-0.0'),
(math.pi / 180 * 45, '1.0'),
(math.pi / 180 * 90, '1.633123935319537e+16'),
(math.pi / 180 * 135, '-1.0')
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | tan | round(3) }}' % value,
self.hass).render()
def test_sqrt(self):
"""Test square root."""
tests = [
(0, '0.0'),
(1, '1.0'),
(2, '1.414'),
(10, '3.162'),
(100, '10.0'),
]
for value, expected in tests:
assert expected == \
template.Template(
'{{ %s | sqrt | round(3) }}' % value,
self.hass).render()
def test_strptime(self):
"""Test the parse timestamp method."""
tests = [
('2016-10-19 15:22:05.588122 UTC',
'%Y-%m-%d %H:%M:%S.%f %Z', None),
('2016-10-19 15:22:05.588122+0100',
'%Y-%m-%d %H:%M:%S.%f%z', None),
('2016-10-19 15:22:05.588122',
'%Y-%m-%d %H:%M:%S.%f', None),
('2016-10-19', '%Y-%m-%d', None),
('2016', '%Y', None),
('15:22:05', '%H:%M:%S', None),
('1469119144', '%Y', '1469119144'),
('invalid', '%Y', 'invalid')
]
for inp, fmt, expected in tests:
if expected is None:
expected = datetime.strptime(inp, fmt)
temp = '{{ strptime(\'%s\', \'%s\') }}' % (inp, fmt)
assert str(expected) == \
template.Template(temp, self.hass).render()
def test_timestamp_custom(self):
"""Test the timestamps to custom filter."""
now = dt_util.utcnow()
tests = [
(None, None, None, 'None'),
(1469119144, None, True, '2016-07-21 16:39:04'),
(1469119144, '%Y', True, '2016'),
(1469119144, 'invalid', True, 'invalid'),
(dt_util.as_timestamp(now), None, False,
now.strftime('%Y-%m-%d %H:%M:%S'))
]
for inp, fmt, local, out in tests:
            if fmt and local is not None:
                fil = 'timestamp_custom(\'{0}\', {1})'.format(fmt, local)
            elif fmt:
                fil = 'timestamp_custom(\'{}\')'.format(fmt)
            else:
                fil = 'timestamp_custom'
assert out == template.Template(
'{{ %s | %s }}' % (inp, fil), self.hass).render()
def test_timestamp_local(self):
"""Test the timestamps to local filter."""
tests = {
None: 'None',
1469119144: '2016-07-21 16:39:04',
}
for inp, out in tests.items():
assert out == \
template.Template('{{ %s | timestamp_local }}' % inp,
self.hass).render()
def test_min(self):
"""Test the min filter."""
assert '1' == \
template.Template('{{ [1, 2, 3] | min }}',
self.hass).render()
def test_max(self):
"""Test the max filter."""
assert '3' == \
template.Template('{{ [1, 2, 3] | max }}',
self.hass).render()
def test_base64_encode(self):
"""Test the base64_encode filter."""
self.assertEqual(
'aG9tZWFzc2lzdGFudA==',
template.Template('{{ "homeassistant" | base64_encode }}',
self.hass).render())
def test_base64_decode(self):
"""Test the base64_decode filter."""
self.assertEqual(
'homeassistant',
template.Template('{{ "aG9tZWFzc2lzdGFudA==" | base64_decode }}',
self.hass).render())
def test_ordinal(self):
"""Test the ordinal filter."""
tests = [
(1, '1st'),
(2, '2nd'),
(3, '3rd'),
(4, '4th'),
(5, '5th'),
]
for value, expected in tests:
self.assertEqual(
expected,
template.Template(
'{{ %s | ordinal }}' % value,
self.hass).render())
def test_timestamp_utc(self):
"""Test the timestamps to local filter."""
now = dt_util.utcnow()
tests = {
None: 'None',
1469119144: '2016-07-21 16:39:04',
dt_util.as_timestamp(now):
now.strftime('%Y-%m-%d %H:%M:%S')
}
for inp, out in tests.items():
assert out == \
template.Template('{{ %s | timestamp_utc }}' % inp,
self.hass).render()
def test_as_timestamp(self):
"""Test the as_timestamp function."""
assert "None" == \
template.Template(
'{{ as_timestamp("invalid") }}', self.hass).render()
self.hass.mock = None
assert "None" == \
template.Template('{{ as_timestamp(states.mock) }}',
self.hass).render()
tpl = '{{ as_timestamp(strptime("2024-02-03T09:10:24+0000", ' \
'"%Y-%m-%dT%H:%M:%S%z")) }}'
assert "1706951424.0" == \
template.Template(tpl, self.hass).render()
@patch.object(random, 'choice')
def test_random_every_time(self, test_choice):
"""Ensure the random filter runs every time, not just once."""
tpl = template.Template('{{ [1,2] | random }}', self.hass)
test_choice.return_value = 'foo'
assert 'foo' == tpl.render()
test_choice.return_value = 'bar'
assert 'bar' == tpl.render()
def test_passing_vars_as_keywords(self):
"""Test passing variables as keywords."""
assert '127' == \
template.Template('{{ hello }}', self.hass).render(hello=127)
def test_passing_vars_as_vars(self):
"""Test passing variables as variables."""
assert '127' == \
template.Template('{{ hello }}', self.hass).render({'hello': 127})
def test_passing_vars_as_list(self):
"""Test passing variables as list."""
assert "['foo', 'bar']" == \
template.render_complex(template.Template('{{ hello }}',
self.hass), {'hello': ['foo', 'bar']})
def test_passing_vars_as_list_element(self):
"""Test passing variables as list."""
assert 'bar' == \
template.render_complex(template.Template('{{ hello[1] }}',
self.hass),
{'hello': ['foo', 'bar']})
def test_passing_vars_as_dict_element(self):
"""Test passing variables as list."""
assert 'bar' == \
template.render_complex(template.Template('{{ hello.foo }}',
self.hass),
{'hello': {'foo': 'bar'}})
def test_passing_vars_as_dict(self):
"""Test passing variables as list."""
assert "{'foo': 'bar'}" == \
template.render_complex(template.Template('{{ hello }}',
self.hass), {'hello': {'foo': 'bar'}})
def test_render_with_possible_json_value_with_valid_json(self):
"""Render with possible JSON value with valid JSON."""
tpl = template.Template('{{ value_json.hello }}', self.hass)
assert 'world' == \
tpl.render_with_possible_json_value('{"hello": "world"}')
def test_render_with_possible_json_value_with_invalid_json(self):
"""Render with possible JSON value with invalid JSON."""
tpl = template.Template('{{ value_json }}', self.hass)
assert '' == \
tpl.render_with_possible_json_value('{ I AM NOT JSON }')
def test_render_with_possible_json_value_with_template_error_value(self):
"""Render with possible JSON value with template error value."""
tpl = template.Template('{{ non_existing.variable }}', self.hass)
assert '-' == \
tpl.render_with_possible_json_value('hello', '-')
def test_render_with_possible_json_value_with_missing_json_value(self):
"""Render with possible JSON value with unknown JSON object."""
tpl = template.Template('{{ value_json.goodbye }}', self.hass)
assert '' == \
tpl.render_with_possible_json_value('{"hello": "world"}')
def test_render_with_possible_json_value_valid_with_is_defined(self):
"""Render with possible JSON value with known JSON object."""
tpl = template.Template('{{ value_json.hello|is_defined }}', self.hass)
assert 'world' == \
tpl.render_with_possible_json_value('{"hello": "world"}')
def test_render_with_possible_json_value_undefined_json(self):
"""Render with possible JSON value with unknown JSON object."""
tpl = template.Template('{{ value_json.bye|is_defined }}', self.hass)
assert '{"hello": "world"}' == \
tpl.render_with_possible_json_value('{"hello": "world"}')
def test_render_with_possible_json_value_undefined_json_error_value(self):
"""Render with possible JSON value with unknown JSON object."""
tpl = template.Template('{{ value_json.bye|is_defined }}', self.hass)
assert '' == \
tpl.render_with_possible_json_value('{"hello": "world"}', '')
def test_render_with_possible_json_value_non_string_value(self):
"""Render with possible JSON value with non-string value."""
tpl = template.Template("""
{{ strptime(value~'+0000', '%Y-%m-%d %H:%M:%S%z') }}
""", self.hass)
value = datetime(2019, 1, 18, 12, 13, 14)
expected = str(pytz.utc.localize(value))
assert expected == \
tpl.render_with_possible_json_value(value)
def test_raise_exception_on_error(self):
"""Test raising an exception on error."""
with pytest.raises(TemplateError):
template.Template('{{ invalid_syntax').ensure_valid()
def test_if_state_exists(self):
"""Test if state exists works."""
self.hass.states.set('test.object', 'available')
tpl = template.Template(
'{% if states.test.object %}exists{% else %}not exists{% endif %}',
self.hass)
assert 'exists' == tpl.render()
def test_is_state(self):
"""Test is_state method."""
self.hass.states.set('test.object', 'available')
tpl = template.Template("""
{% if is_state("test.object", "available") %}yes{% else %}no{% endif %}
""", self.hass)
assert 'yes' == tpl.render()
tpl = template.Template("""
{{ is_state("test.noobject", "available") }}
""", self.hass)
assert 'False' == tpl.render()
def test_is_state_attr(self):
"""Test is_state_attr method."""
self.hass.states.set('test.object', 'available', {'mode': 'on'})
tpl = template.Template("""
{% if is_state_attr("test.object", "mode", "on") %}yes{% else %}no{% endif %}
""", self.hass)
assert 'yes' == tpl.render()
tpl = template.Template("""
{{ is_state_attr("test.noobject", "mode", "on") }}
""", self.hass)
assert 'False' == tpl.render()
def test_state_attr(self):
"""Test state_attr method."""
self.hass.states.set('test.object', 'available', {'mode': 'on'})
tpl = template.Template("""
{% if state_attr("test.object", "mode") == "on" %}yes{% else %}no{% endif %}
""", self.hass)
assert 'yes' == tpl.render()
tpl = template.Template("""
{{ state_attr("test.noobject", "mode") == None }}
""", self.hass)
assert 'True' == tpl.render()
def test_states_function(self):
"""Test using states as a function."""
self.hass.states.set('test.object', 'available')
tpl = template.Template('{{ states("test.object") }}', self.hass)
assert 'available' == tpl.render()
tpl2 = template.Template('{{ states("test.object2") }}', self.hass)
assert 'unknown' == tpl2.render()
@patch('homeassistant.helpers.template.TemplateEnvironment.'
'is_safe_callable', return_value=True)
def test_now(self, mock_is_safe):
"""Test now method."""
now = dt_util.now()
with patch.dict(template.ENV.globals, {'now': lambda: now}):
assert now.isoformat() == \
template.Template('{{ now().isoformat() }}',
self.hass).render()
@patch('homeassistant.helpers.template.TemplateEnvironment.'
'is_safe_callable', return_value=True)
def test_utcnow(self, mock_is_safe):
"""Test utcnow method."""
now = dt_util.utcnow()
with patch.dict(template.ENV.globals, {'utcnow': lambda: now}):
assert now.isoformat() == \
template.Template('{{ utcnow().isoformat() }}',
self.hass).render()
def test_regex_match(self):
"""Test regex_match method."""
tpl = template.Template(r"""
{{ '123-456-7890' | regex_match('(\\d{3})-(\\d{3})-(\\d{4})') }}
""", self.hass)
assert 'True' == tpl.render()
tpl = template.Template("""
{{ 'home assistant test' | regex_match('Home', True) }}
""", self.hass)
assert 'True' == tpl.render()
tpl = template.Template("""
{{ 'Another home assistant test' | regex_match('home') }}
""", self.hass)
assert 'False' == tpl.render()
def test_regex_search(self):
"""Test regex_search method."""
tpl = template.Template(r"""
{{ '123-456-7890' | regex_search('(\\d{3})-(\\d{3})-(\\d{4})') }}
""", self.hass)
assert 'True' == tpl.render()
tpl = template.Template("""
{{ 'home assistant test' | regex_search('Home', True) }}
""", self.hass)
assert 'True' == tpl.render()
tpl = template.Template("""
{{ 'Another home assistant test' | regex_search('home') }}
""", self.hass)
assert 'True' == tpl.render()
def test_regex_replace(self):
"""Test regex_replace method."""
tpl = template.Template(r"""
{{ 'Hello World' | regex_replace('(Hello\\s)',) }}
""", self.hass)
assert 'World' == tpl.render()
def test_regex_findall_index(self):
"""Test regex_findall_index method."""
tpl = template.Template("""
{{ 'Flight from JFK to LHR' | regex_findall_index('([A-Z]{3})', 0) }}
""", self.hass)
assert 'JFK' == tpl.render()
tpl = template.Template("""
{{ 'Flight from JFK to LHR' | regex_findall_index('([A-Z]{3})', 1) }}
""", self.hass)
assert 'LHR' == tpl.render()
def test_bitwise_and(self):
"""Test bitwise_and method."""
tpl = template.Template("""
{{ 8 | bitwise_and(8) }}
""", self.hass)
assert str(8 & 8) == tpl.render()
tpl = template.Template("""
{{ 10 | bitwise_and(2) }}
""", self.hass)
assert str(10 & 2) == tpl.render()
tpl = template.Template("""
{{ 8 | bitwise_and(2) }}
""", self.hass)
assert str(8 & 2) == tpl.render()
def test_bitwise_or(self):
"""Test bitwise_or method."""
tpl = template.Template("""
{{ 8 | bitwise_or(8) }}
""", self.hass)
assert str(8 | 8) == tpl.render()
tpl = template.Template("""
{{ 10 | bitwise_or(2) }}
""", self.hass)
assert str(10 | 2) == tpl.render()
tpl = template.Template("""
{{ 8 | bitwise_or(2) }}
""", self.hass)
assert str(8 | 2) == tpl.render()
def test_distance_function_with_1_state(self):
"""Test distance function with 1 state."""
self.hass.states.set('test.object', 'happy', {
'latitude': 32.87336,
'longitude': -117.22943,
})
tpl = template.Template('{{ distance(states.test.object) | round }}',
self.hass)
assert '187' == tpl.render()
def test_distance_function_with_2_states(self):
"""Test distance function with 2 states."""
self.hass.states.set('test.object', 'happy', {
'latitude': 32.87336,
'longitude': -117.22943,
})
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
tpl = template.Template(
'{{ distance(states.test.object, states.test.object_2) | round }}',
self.hass)
assert '187' == tpl.render()
def test_distance_function_with_1_coord(self):
"""Test distance function with 1 coord."""
tpl = template.Template(
'{{ distance("32.87336", "-117.22943") | round }}', self.hass)
assert '187' == \
tpl.render()
def test_distance_function_with_2_coords(self):
"""Test distance function with 2 coords."""
assert '187' == \
template.Template(
'{{ distance("32.87336", "-117.22943", %s, %s) | round }}'
% (self.hass.config.latitude, self.hass.config.longitude),
self.hass).render()
def test_distance_function_with_1_state_1_coord(self):
"""Test distance function with 1 state 1 coord."""
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
tpl = template.Template(
'{{ distance("32.87336", "-117.22943", states.test.object_2) '
'| round }}', self.hass)
assert '187' == tpl.render()
tpl2 = template.Template(
'{{ distance(states.test.object_2, "32.87336", "-117.22943") '
'| round }}', self.hass)
assert '187' == tpl2.render()
def test_distance_function_return_None_if_invalid_state(self):
"""Test distance function return None if invalid state."""
self.hass.states.set('test.object_2', 'happy', {
'latitude': 10,
})
tpl = template.Template('{{ distance(states.test.object_2) | round }}',
self.hass)
assert 'None' == \
tpl.render()
def test_distance_function_return_None_if_invalid_coord(self):
"""Test distance function return None if invalid coord."""
assert 'None' == \
template.Template(
'{{ distance("123", "abc") }}', self.hass).render()
assert 'None' == \
template.Template('{{ distance("123") }}', self.hass).render()
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
tpl = template.Template('{{ distance("123", states.test_object_2) }}',
self.hass)
assert 'None' == \
tpl.render()
def test_distance_function_with_2_entity_ids(self):
"""Test distance function with 2 entity ids."""
self.hass.states.set('test.object', 'happy', {
'latitude': 32.87336,
'longitude': -117.22943,
})
self.hass.states.set('test.object_2', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
tpl = template.Template(
'{{ distance("test.object", "test.object_2") | round }}',
self.hass)
assert '187' == tpl.render()
def test_distance_function_with_1_entity_1_coord(self):
"""Test distance function with 1 entity_id and 1 coord."""
self.hass.states.set('test.object', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
tpl = template.Template(
'{{ distance("test.object", "32.87336", "-117.22943") | round }}',
self.hass)
assert '187' == tpl.render()
def test_closest_function_home_vs_domain(self):
"""Test closest function home vs domain."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('not_test_domain.but_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
assert 'test_domain.object' == \
template.Template('{{ closest(states.test_domain).entity_id }}',
self.hass).render()
def test_closest_function_home_vs_all_states(self):
"""Test closest function home vs all states."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('test_domain_2.and_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
assert 'test_domain_2.and_closer' == \
template.Template('{{ closest(states).entity_id }}',
self.hass).render()
def test_closest_function_home_vs_group_entity_id(self):
"""Test closest function home vs group entity id."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('not_in_group.but_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
group.Group.create_group(
self.hass, 'location group', ['test_domain.object'])
assert 'test_domain.object' == \
template.Template(
'{{ closest("group.location_group").entity_id }}',
self.hass).render()
def test_closest_function_home_vs_group_state(self):
"""Test closest function home vs group state."""
self.hass.states.set('test_domain.object', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('not_in_group.but_closer', 'happy', {
'latitude': self.hass.config.latitude,
'longitude': self.hass.config.longitude,
})
group.Group.create_group(
self.hass, 'location group', ['test_domain.object'])
assert 'test_domain.object' == \
template.Template(
'{{ closest(states.group.location_group).entity_id }}',
self.hass).render()
def test_closest_function_to_coord(self):
"""Test closest function to coord."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('test_domain.closest_zone', 'happy', {
'latitude': self.hass.config.latitude + 0.2,
'longitude': self.hass.config.longitude + 0.2,
})
self.hass.states.set('zone.far_away', 'zoning', {
'latitude': self.hass.config.latitude + 0.3,
'longitude': self.hass.config.longitude + 0.3,
})
tpl = template.Template(
'{{ closest("%s", %s, states.test_domain).entity_id }}'
% (self.hass.config.latitude + 0.3,
self.hass.config.longitude + 0.3), self.hass)
assert 'test_domain.closest_zone' == \
tpl.render()
def test_closest_function_to_entity_id(self):
"""Test closest function to entity id."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('test_domain.closest_zone', 'happy', {
'latitude': self.hass.config.latitude + 0.2,
'longitude': self.hass.config.longitude + 0.2,
})
self.hass.states.set('zone.far_away', 'zoning', {
'latitude': self.hass.config.latitude + 0.3,
'longitude': self.hass.config.longitude + 0.3,
})
assert 'test_domain.closest_zone' == \
template.Template(
'{{ closest("zone.far_away", '
'states.test_domain).entity_id }}', self.hass).render()
def test_closest_function_to_state(self):
"""Test closest function to state."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
self.hass.states.set('test_domain.closest_zone', 'happy', {
'latitude': self.hass.config.latitude + 0.2,
'longitude': self.hass.config.longitude + 0.2,
})
self.hass.states.set('zone.far_away', 'zoning', {
'latitude': self.hass.config.latitude + 0.3,
'longitude': self.hass.config.longitude + 0.3,
})
assert 'test_domain.closest_zone' == \
template.Template(
'{{ closest(states.zone.far_away, '
'states.test_domain).entity_id }}', self.hass).render()
def test_closest_function_invalid_state(self):
"""Test closest function invalid state."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
for state in ('states.zone.non_existing', '"zone.non_existing"'):
assert 'None' == \
template.Template('{{ closest(%s, states) }}' % state,
self.hass).render()
def test_closest_function_state_with_invalid_location(self):
"""Test closest function state with invalid location."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': 'invalid latitude',
'longitude': self.hass.config.longitude + 0.1,
})
assert 'None' == \
template.Template(
'{{ closest(states.test_domain.closest_home, '
'states) }}', self.hass).render()
def test_closest_function_invalid_coordinates(self):
"""Test closest function invalid coordinates."""
self.hass.states.set('test_domain.closest_home', 'happy', {
'latitude': self.hass.config.latitude + 0.1,
'longitude': self.hass.config.longitude + 0.1,
})
assert 'None' == \
template.Template('{{ closest("invalid", "coord", states) }}',
self.hass).render()
def test_closest_function_no_location_states(self):
"""Test closest function without location states."""
assert '' == \
template.Template('{{ closest(states).entity_id }}',
self.hass).render()
def test_extract_entities_none_exclude_stuff(self):
"""Test extract entities function with none or exclude stuff."""
assert [] == template.extract_entities(None)
assert [] == template.extract_entities("mdi:water")
assert MATCH_ALL == \
template.extract_entities(
'{{ closest(states.zone.far_away, '
'states.test_domain).entity_id }}')
assert MATCH_ALL == \
template.extract_entities(
'{{ distance("123", states.test_object_2) }}')
def test_extract_entities_no_match_entities(self):
"""Test extract entities function with none entities stuff."""
assert MATCH_ALL == \
template.extract_entities(
"{{ value_json.tst | timestamp_custom('%Y' True) }}")
assert MATCH_ALL == \
template.extract_entities("""
{% for state in states.sensor %}
{{ state.entity_id }}={{ state.state }},d
{% endfor %}
""")
def test_extract_entities_match_entities(self):
"""Test extract entities function with entities stuff."""
assert ['device_tracker.phone_1'] == \
template.extract_entities("""
{% if is_state('device_tracker.phone_1', 'home') %}
Ha, Hercules is home!
{% else %}
Hercules is at {{ states('device_tracker.phone_1') }}.
{% endif %}
""")
assert ['binary_sensor.garage_door'] == \
template.extract_entities("""
{{ as_timestamp(states.binary_sensor.garage_door.last_changed) }}
""")
assert ['binary_sensor.garage_door'] == \
template.extract_entities("""
{{ states("binary_sensor.garage_door") }}
""")
assert ['device_tracker.phone_2'] == \
template.extract_entities("""
{{ is_state_attr('device_tracker.phone_2', 'battery', 40) }}
""")
assert sorted([
'device_tracker.phone_1',
'device_tracker.phone_2',
]) == \
sorted(template.extract_entities("""
{% if is_state('device_tracker.phone_1', 'home') %}
Ha, Hercules is home!
{% elif states.device_tracker.phone_2.attributes.battery < 40 %}
Hercules you power goes done!.
{% endif %}
"""))
assert sorted([
'sensor.pick_humidity',
'sensor.pick_temperature',
]) == \
sorted(template.extract_entities("""
{{
states.sensor.pick_temperature.state ~ „°C (“ ~
states.sensor.pick_humidity.state ~ „ %“
}}
"""))
assert sorted([
'sensor.luftfeuchtigkeit_mean',
'input_number.luftfeuchtigkeit',
]) == \
sorted(template.extract_entities(
"{% if (states('sensor.luftfeuchtigkeit_mean') | int)"
" > (states('input_number.luftfeuchtigkeit') | int +1.5)"
" %}true{% endif %}"
))
def test_extract_entities_with_variables(self):
"""Test extract entities function with variables and entities stuff."""
assert ['input_boolean.switch'] == \
template.extract_entities(
"{{ is_state('input_boolean.switch', 'off') }}", {})
assert ['trigger.entity_id'] == \
template.extract_entities(
"{{ is_state(trigger.entity_id, 'off') }}", {})
assert MATCH_ALL == \
template.extract_entities(
"{{ is_state(data, 'off') }}", {})
assert ['input_boolean.switch'] == \
template.extract_entities(
"{{ is_state(data, 'off') }}",
{'data': 'input_boolean.switch'})
assert ['input_boolean.switch'] == \
template.extract_entities(
"{{ is_state(trigger.entity_id, 'off') }}",
{'trigger': {'entity_id': 'input_boolean.switch'}})
assert MATCH_ALL == \
template.extract_entities(
"{{ is_state('media_player.' ~ where , 'playing') }}",
{'where': 'livingroom'})
def test_jinja_namespace(self):
"""Test Jinja's namespace command can be used."""
test_template = template.Template(
(
"{% set ns = namespace(a_key='') %}"
"{% set ns.a_key = states.sensor.dummy.state %}"
"{{ ns.a_key }}"
),
self.hass
)
self.hass.states.set('sensor.dummy', 'a value')
assert 'a value' == test_template.render()
self.hass.states.set('sensor.dummy', 'another value')
assert 'another value' == test_template.render()
@asyncio.coroutine
def test_state_with_unit(hass):
"""Test the state_with_unit property helper."""
hass.states.async_set('sensor.test', '23', {
'unit_of_measurement': 'beers',
})
hass.states.async_set('sensor.test2', 'wow')
tpl = template.Template(
'{{ states.sensor.test.state_with_unit }}', hass)
assert tpl.async_render() == '23 beers'
tpl = template.Template(
'{{ states.sensor.test2.state_with_unit }}', hass)
assert tpl.async_render() == 'wow'
tpl = template.Template(
'{% for state in states %}{{ state.state_with_unit }} {% endfor %}',
hass)
assert tpl.async_render() == '23 beers wow'
tpl = template.Template('{{ states.sensor.non_existing.state_with_unit }}',
hass)
assert tpl.async_render() == ''
@asyncio.coroutine
def test_length_of_states(hass):
"""Test fetching the length of states."""
hass.states.async_set('sensor.test', '23')
hass.states.async_set('sensor.test2', 'wow')
hass.states.async_set('climate.test2', 'cooling')
tpl = template.Template('{{ states | length }}', hass)
assert tpl.async_render() == '3'
tpl = template.Template('{{ states.sensor | length }}', hass)
assert tpl.async_render() == '2'
| apache-2.0 |
t-stark/ec2cli | bash/iam_identities.py | 2 | 2228 | #!/usr/bin/env python3
"""
Summary.
Prints IAM usernames from the local awscli configuration.
Usernames may be omitted from the output simply by
listing them, separated by spaces, after the call:
$ python3 iam_identities.py default
will print all IAM usernames in the local configuration
except the default user (username "default").
"""
import os
import sys
import inspect
from pyaws.utils import stdout_message
try:
from configparser import ConfigParser
except Exception:
print('unable to import configParser library. Exit')
sys.exit(1)
# --- declarations --------------------------------------------------------------------------------
def print_array(content, args):
    """Print each section name in content, skipping any names in args."""
    for x in content.sections():
        if x not in args:
            print(x + ' ', end='')
def shared_credentials_location():
"""
Summary:
Discover alterate location for awscli shared credentials file
Returns:
TYPE: str, Full path of shared credentials file, if exists
"""
if 'AWS_SHARED_CREDENTIALS_FILE' in os.environ:
return os.environ['AWS_SHARED_CREDENTIALS_FILE']
return ''
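# Illustrative usage (the path is hypothetical):
#   $ AWS_SHARED_CREDENTIALS_FILE=/tmp/alt-credentials python3 iam_identities.py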
def awscli_profiles():
"""Returns IAM usernames from local awscli configuration"""
if os.path.isfile(config_file):
config.read(config_file)
else:
stdout_message(
message='awscli configuration file not found on local filesystem. Exit',
prefix='WARN'
)
sys.exit(1)
for profile in config.sections():
if 'role_arn' in config[profile].keys():
config.pop(profile)
return config
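# Illustrative credentials file (hypothetical) showing the filtering above;
# the 'deploy' profile is dropped because it defines role_arn:
#   [default]
#   aws_access_key_id = AKIA...
#   [deploy]
#   role_arn = arn:aws:iam::123456789012:role/deploy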
def print_profiles(config, args):
"""Execution when no parameters provided"""
try:
print_array(config, args)
except OSError as e:
        print('{}: OSError: {}'.format(inspect.stack()[0][3], e))
return False
return True
# --- main --------------------------------------------------------------------------------
# globals
home = os.environ.get('HOME')
config_file = shared_credentials_location() or home + '/.aws/credentials'
config = ConfigParser()
modified_config = awscli_profiles()
sys.exit(print_profiles(modified_config, sys.argv[1:]))
| gpl-2.0 |
Eficent/odoomrp-wip | procurement_plan_mrp/wizard/wiz_change_procurement_date.py | 11 | 1038 | # -*- coding: utf-8 -*-
# (c) 2015 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, api
class WizChangeProcurementDate(models.TransientModel):
_inherit = 'wiz.change.procurement.date'
def _take_procurements_to_treat(self, procurement_ids):
procs = super(WizChangeProcurementDate,
self)._take_procurements_to_treat(procurement_ids)
procs = procs.filtered(lambda x: x.level == 0)
procs = procs.filtered(lambda x: not x.production_id or (
x.production_id and x.production_id.state == 'draft'))
return procs
@api.multi
def change_scheduled_date(self):
super(WizChangeProcurementDate, self).change_scheduled_date()
        route_id = self.env.ref('mrp.route_warehouse0_manufacture').id
        procurements = self.procurements.filtered(
            lambda x: route_id in x.product_id.route_ids.ids)
procurements._change_date_planned_from_plan_for_mo(self.days)
| agpl-3.0 |
le9i0nx/ansible | test/units/modules/network/ios/test_ios_system.py | 57 | 5380 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.ios import ios_system
from units.modules.utils import set_module_args
from .ios_module import TestIosModule, load_fixture
class TestIosSystemModule(TestIosModule):
module = ios_system
def setUp(self):
super(TestIosSystemModule, self).setUp()
self.mock_get_config = patch('ansible.modules.network.ios.ios_system.get_config')
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch('ansible.modules.network.ios.ios_system.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestIosSystemModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None):
self.get_config.return_value = load_fixture('ios_system_config.cfg')
self.load_config.return_value = None
def test_ios_system_hostname_changed(self):
set_module_args(dict(hostname='foo'))
commands = ['hostname foo']
self.execute_module(changed=True, commands=commands)
def test_ios_system_domain_name(self):
set_module_args(dict(domain_name=['test.com']))
commands = ['ip domain name test.com',
'no ip domain name eng.example.net',
'no ip domain name vrf management eng.example.net']
self.execute_module(changed=True, commands=commands)
def test_ios_system_domain_name_complex(self):
set_module_args(dict(domain_name=[{'name': 'test.com', 'vrf': 'test'},
{'name': 'eng.example.net'}]))
commands = ['ip domain name vrf test test.com',
'no ip domain name vrf management eng.example.net']
self.execute_module(changed=True, commands=commands)
def test_ios_system_domain_search(self):
set_module_args(dict(domain_search=['ansible.com', 'redhat.com']))
commands = ['no ip domain list vrf management example.net',
'no ip domain list example.net',
'no ip domain list example.com',
'ip domain list ansible.com',
'ip domain list redhat.com']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_system_domain_search_complex(self):
set_module_args(dict(domain_search=[{'name': 'ansible.com', 'vrf': 'test'}]))
commands = ['no ip domain list vrf management example.net',
'no ip domain list example.net',
'no ip domain list example.com',
'ip domain list vrf test ansible.com']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_system_lookup_source(self):
set_module_args(dict(lookup_source='Ethernet1'))
commands = ['ip domain lookup source-interface Ethernet1']
self.execute_module(changed=True, commands=commands)
def test_ios_system_name_servers(self):
name_servers = ['8.8.8.8', '8.8.4.4']
set_module_args(dict(name_servers=name_servers))
commands = ['no ip name-server vrf management 8.8.8.8',
'ip name-server 8.8.4.4']
self.execute_module(changed=True, commands=commands, sort=False)
    def test_ios_system_name_servers_complex(self):
name_servers = dict(server='8.8.8.8', vrf='test')
set_module_args(dict(name_servers=name_servers))
commands = ['no name-server 8.8.8.8',
'no name-server vrf management 8.8.8.8',
'ip name-server vrf test 8.8.8.8']
self.execute_module(changed=True, commands=commands, sort=False)
def test_ios_system_state_absent(self):
set_module_args(dict(state='absent'))
commands = ['no hostname',
'no ip domain lookup source-interface GigabitEthernet0/0',
'no ip domain list vrf management', 'no ip domain list',
'no ip domain name vrf management', 'no ip domain name',
'no ip name-server vrf management', 'no ip name-server']
self.execute_module(changed=True, commands=commands)
def test_ios_system_no_change(self):
set_module_args(dict(hostname='ios01'))
self.execute_module(commands=[])
def test_ios_system_missing_vrf(self):
name_servers = dict(server='8.8.8.8', vrf='missing')
set_module_args(dict(name_servers=name_servers))
self.execute_module(failed=True)
| gpl-3.0 |
ncdesouza/bookworm | env/lib/python2.7/site-packages/jinja2/testsuite/security.py | 415 | 6204 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.security
~~~~~~~~~~~~~~~~~~~~~~~~~
Checks the sandbox and other security features.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Environment
from jinja2.sandbox import SandboxedEnvironment, \
ImmutableSandboxedEnvironment, unsafe
from jinja2 import Markup, escape
from jinja2.exceptions import SecurityError, TemplateSyntaxError, \
TemplateRuntimeError
from jinja2._compat import text_type
class PrivateStuff(object):
def bar(self):
return 23
@unsafe
def foo(self):
return 42
def __repr__(self):
return 'PrivateStuff'
class PublicStuff(object):
bar = lambda self: 23
_foo = lambda self: 42
def __repr__(self):
return 'PublicStuff'
class SandboxTestCase(JinjaTestCase):
def test_unsafe(self):
env = SandboxedEnvironment()
self.assert_raises(SecurityError, env.from_string("{{ foo.foo() }}").render,
foo=PrivateStuff())
self.assert_equal(env.from_string("{{ foo.bar() }}").render(foo=PrivateStuff()), '23')
self.assert_raises(SecurityError, env.from_string("{{ foo._foo() }}").render,
foo=PublicStuff())
self.assert_equal(env.from_string("{{ foo.bar() }}").render(foo=PublicStuff()), '23')
self.assert_equal(env.from_string("{{ foo.__class__ }}").render(foo=42), '')
self.assert_equal(env.from_string("{{ foo.func_code }}").render(foo=lambda:None), '')
# security error comes from __class__ already.
self.assert_raises(SecurityError, env.from_string(
"{{ foo.__class__.__subclasses__() }}").render, foo=42)
def test_immutable_environment(self):
env = ImmutableSandboxedEnvironment()
self.assert_raises(SecurityError, env.from_string(
'{{ [].append(23) }}').render)
self.assert_raises(SecurityError, env.from_string(
'{{ {1:2}.clear() }}').render)
def test_restricted(self):
env = SandboxedEnvironment()
self.assert_raises(TemplateSyntaxError, env.from_string,
"{% for item.attribute in seq %}...{% endfor %}")
self.assert_raises(TemplateSyntaxError, env.from_string,
"{% for foo, bar.baz in seq %}...{% endfor %}")
def test_markup_operations(self):
# adding two strings should escape the unsafe one
unsafe = '<script type="application/x-some-script">alert("foo");</script>'
safe = Markup('<em>username</em>')
assert unsafe + safe == text_type(escape(unsafe)) + text_type(safe)
# string interpolations are safe to use too
        assert Markup('<em>%s</em>') % '<bad user>' == \
               '<em>&lt;bad user&gt;</em>'
        assert Markup('<em>%(username)s</em>') % {
            'username': '<bad user>'
        } == '<em>&lt;bad user&gt;</em>'
# an escaped object is markup too
assert type(Markup('foo') + 'bar') is Markup
# and it implements __html__ by returning itself
x = Markup("foo")
assert x.__html__() is x
# it also knows how to treat __html__ objects
class Foo(object):
def __html__(self):
return '<em>awesome</em>'
def __unicode__(self):
return 'awesome'
assert Markup(Foo()) == '<em>awesome</em>'
assert Markup('<strong>%s</strong>') % Foo() == \
'<strong><em>awesome</em></strong>'
# escaping and unescaping
assert escape('"<>&\'') == '"<>&''
assert Markup("<em>Foo & Bar</em>").striptags() == "Foo & Bar"
assert Markup("<test>").unescape() == "<test>"
def test_template_data(self):
env = Environment(autoescape=True)
t = env.from_string('{% macro say_hello(name) %}'
'<p>Hello {{ name }}!</p>{% endmacro %}'
'{{ say_hello("<blink>foo</blink>") }}')
        escaped_out = '<p>Hello &lt;blink&gt;foo&lt;/blink&gt;!</p>'
assert t.render() == escaped_out
assert text_type(t.module) == escaped_out
assert escape(t.module) == escaped_out
assert t.module.say_hello('<blink>foo</blink>') == escaped_out
assert escape(t.module.say_hello('<blink>foo</blink>')) == escaped_out
def test_attr_filter(self):
env = SandboxedEnvironment()
tmpl = env.from_string('{{ cls|attr("__subclasses__")() }}')
self.assert_raises(SecurityError, tmpl.render, cls=int)
def test_binary_operator_intercepting(self):
def disable_op(left, right):
raise TemplateRuntimeError('that operator so does not work')
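        # Note: overriding an entry in env.binop_table alone does not change
        # behaviour; the sandbox only routes an operator through the table
        # once it is listed in env.intercepted_binops, which the second
        # render below relies on.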
for expr, ctx, rv in ('1 + 2', {}, '3'), ('a + 2', {'a': 2}, '4'):
env = SandboxedEnvironment()
env.binop_table['+'] = disable_op
t = env.from_string('{{ %s }}' % expr)
assert t.render(ctx) == rv
env.intercepted_binops = frozenset(['+'])
t = env.from_string('{{ %s }}' % expr)
try:
t.render(ctx)
except TemplateRuntimeError as e:
pass
else:
self.fail('expected runtime error')
def test_unary_operator_intercepting(self):
def disable_op(arg):
raise TemplateRuntimeError('that operator so does not work')
for expr, ctx, rv in ('-1', {}, '-1'), ('-a', {'a': 2}, '-2'):
env = SandboxedEnvironment()
env.unop_table['-'] = disable_op
t = env.from_string('{{ %s }}' % expr)
assert t.render(ctx) == rv
env.intercepted_unops = frozenset(['-'])
t = env.from_string('{{ %s }}' % expr)
try:
t.render(ctx)
except TemplateRuntimeError as e:
pass
else:
self.fail('expected runtime error')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(SandboxTestCase))
return suite
| gpl-3.0 |
credativUK/OCB | addons/product/_common.py | 111 | 1418 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
import math
def rounding(f, r):
# TODO for trunk: log deprecation warning
# _logger.warning("Deprecated rounding method, please use tools.float_round to round floats.")
return tools.float_round(f, precision_rounding=r)
# TODO for trunk: add rounding method parameter to tools.float_round and use this method as hook
def ceiling(f, r):
if not r:
return f
return math.ceil(f / r) * r
| agpl-3.0 |
scripnichenko/glance | glance/tests/functional/v1/test_multiprocessing.py | 10 | 2590 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import httplib2
import psutil
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
from glance.tests import functional
from glance.tests.utils import execute
class TestMultiprocessing(functional.FunctionalTest):
"""Functional tests for the bin/glance CLI tool"""
def setUp(self):
self.workers = 2
super(TestMultiprocessing, self).setUp()
def test_multiprocessing(self):
"""Spin up the api servers with multiprocessing on"""
self.cleanup()
self.start_servers(**self.__dict__.copy())
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(200, response.status)
self.assertEqual('{"images": []}', content)
self.stop_servers()
def _get_children(self):
api_pid = self.api_server.process_pid
process = psutil.Process(api_pid)
children = process.get_children()
pids = [str(child.pid) for child in children]
return pids
def test_interrupt_avoids_respawn_storm(self):
"""
Ensure an interrupt signal does not cause a respawn storm.
See bug #978130
"""
self.start_servers(**self.__dict__.copy())
children = self._get_children()
cmd = "kill -INT %s" % ' '.join(children)
execute(cmd, raise_error=True)
for _ in range(9):
            # NOTE: this check is timing-sensitive rather than strictly
            # synchronized; a 0.05s sleep fails intermittently, while 0.10s
            # has proven reliable in practice.
time.sleep(0.10)
# ensure number of children hasn't grown
self.assertTrue(len(children) >= len(self._get_children()))
for child in self._get_children():
# ensure no new children spawned
self.assertIn(child, children, child)
self.stop_servers()
| apache-2.0 |
memo/tensorflow | tensorflow/examples/adding_an_op/zero_out_op_3.py | 190 | 1053 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ZeroOut op Python library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import tensorflow as tf
_zero_out_module = tf.load_op_library(
os.path.join(tf.resource_loader.get_data_files_path(),
'zero_out_op_kernel_3.so'))
zero_out = _zero_out_module.zero_out
| apache-2.0 |
iuliat/nova | nova/tests/unit/scheduler/filters/test_aggregate_image_properties_isolation_filters.py | 56 | 5302 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.scheduler.filters import aggregate_image_properties_isolation as aipi
from nova import test
from nova.tests.unit.scheduler import fakes
@mock.patch('nova.scheduler.filters.utils.aggregate_metadata_get_by_host')
class TestAggImagePropsIsolationFilter(test.NoDBTestCase):
def setUp(self):
super(TestAggImagePropsIsolationFilter, self).setUp()
self.filt_cls = aipi.AggregateImagePropertiesIsolation()
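    # The filter passes a host only when every aggregate-metadata key that
    # also appears among the image properties carries a matching value; a
    # comma-separated aggregate value lists accepted alternatives, and when a
    # namespace is configured only keys under that prefix are compared.
    # The cases below exercise each of these branches.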
def test_aggregate_image_properties_isolation_passes(self, agg_mock):
agg_mock.return_value = {'foo': 'bar'}
filter_properties = {'context': mock.sentinel.ctx,
'request_spec': {
'image': {
'properties': {'foo': 'bar'}}}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_aggregate_image_properties_isolation_passes_comma(self, agg_mock):
agg_mock.return_value = {'foo': 'bar,bar2'}
filter_properties = {'context': mock.sentinel.ctx,
'request_spec': {
'image': {
'properties': {'foo': 'bar'}}}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_aggregate_image_properties_isolation_multi_props_passes(self,
agg_mock):
agg_mock.return_value = {'foo': 'bar', 'foo2': 'bar2'}
filter_properties = {'context': mock.sentinel.ctx,
'request_spec': {
'image': {
'properties': {'foo': 'bar',
'foo2': 'bar2'}}}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_aggregate_image_properties_isolation_props_with_meta_passes(self,
agg_mock):
agg_mock.return_value = {'foo': 'bar'}
filter_properties = {'context': mock.sentinel.ctx,
'request_spec': {
'image': {
'properties': {}}}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_aggregate_image_properties_isolation_props_imgprops_passes(self,
agg_mock):
agg_mock.return_value = {}
filter_properties = {'context': mock.sentinel.ctx,
'request_spec': {
'image': {
'properties': {'foo': 'bar'}}}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
def test_aggregate_image_properties_isolation_props_not_match_fails(self,
agg_mock):
agg_mock.return_value = {'foo': 'bar'}
filter_properties = {'context': mock.sentinel.ctx,
'request_spec': {
'image': {
'properties': {'foo': 'no-bar'}}}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_aggregate_image_properties_isolation_props_not_match2_fails(self,
agg_mock):
agg_mock.return_value = {'foo': 'bar', 'foo2': 'bar2'}
filter_properties = {'context': mock.sentinel.ctx,
'request_spec': {
'image': {
'properties': {'foo': 'bar',
'foo2': 'bar3'}}}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertFalse(self.filt_cls.host_passes(host, filter_properties))
def test_aggregate_image_properties_isolation_props_namespace(self,
agg_mock):
self.flags(aggregate_image_properties_isolation_namespace="np")
agg_mock.return_value = {'np.foo': 'bar', 'foo2': 'bar2'}
filter_properties = {'context': mock.sentinel.ctx,
'request_spec': {
'image': {
'properties': {'np.foo': 'bar',
'foo2': 'bar3'}}}}
host = fakes.FakeHostState('host1', 'compute', {})
self.assertTrue(self.filt_cls.host_passes(host, filter_properties))
| apache-2.0 |
relman/sevpn-mgmt-py | tests/cedar/test_session.py | 1 | 6285 | # -*- coding: utf-8 -*-
import mock
import socket
import ssl
import unittest
from SevpnMgmtPy.cedar import Session, Watermark
class TestSession(unittest.TestCase):
def test_start_rpc_session(self):
ba = bytearray('\xab\xcd\xef')
hello_pack = mock.MagicMock()
hello_pack.get_value = mock.MagicMock(return_value=ba)
sess = Session('host', 'port')
sess.connect_to_server = mock.MagicMock()
sess.upload_signature = mock.MagicMock()
sess.http_recv_pack = mock.MagicMock(return_value=hello_pack)
sess.start_rpc_session()
sess.connect_to_server.assert_called_once()
sess.upload_signature.assert_called_once()
sess.http_recv_pack.assert_called_once()
hello_pack.get_value.assert_called_once_with('random', bytearray())
self.assertEqual(sess.rpc_random, ba)
def test_connect_to_server(self):
sess = Session('host', 'port')
sess.set_sock_timeout = mock.MagicMock()
with mock.patch('socket.socket') as mock_socket, mock.patch('ssl.wrap_socket') as mock_wrap_sock:
ssl_sock = mock.MagicMock()
mock_wrap_sock.return_value = ssl_sock
ssl_sock.connect = mock.MagicMock()
sess.connect_to_server()
mock_socket.assert_called_once_with(socket.AF_INET, socket.SOCK_STREAM)
sess.set_sock_timeout.assert_called_with(mock_socket.return_value, sess.CONNECTING_TIMEOUT)
mock_wrap_sock.assert_called_once_with(mock_socket.return_value, ssl_version=ssl.PROTOCOL_TLSv1)
ssl_sock.connect.assert_called_once_with((sess.host, sess.port))
self.assertEqual(sess.sock, ssl_sock)
def test_set_sock_timeout(self):
timeout = 100
sock = mock.MagicMock()
sock.settimeout = mock.MagicMock()
sess = Session('host', 'port')
sess.set_sock_timeout(sock, timeout)
sock.settimeout.assert_called_once_with(timeout)
def test_get_host_http_header(self):
host = 'https://example.com'
port = '8080'
sess = Session(host, port)
result = sess.get_host_http_header()
self.assertIsNotNone(result)
self.assertEqual(result, host + ':' + port)
def test_upload_signature(self):
host, port = 'example.com', 80
head = bytearray(
"POST /vpnsvc/connect.cgi HTTP/1.1\r\n"
"Host: {0}\r\n"
"Content-Type: image/jpeg\r\n"
"Connection: Keep-Alive\r\n"
"Content-Length: 1411\r\n"
"\r\n".format(
host + ':' + str(port)
))
body = bytearray(Watermark.watermark)
sess = Session(host, port)
sess.sock = mock.MagicMock()
sess.sock.sendall = mock.MagicMock()
sess.upload_signature()
sess.sock.sendall.assert_called_once_with(head + body)
def test_http_recv_pack_exception(self):
sock = mock.MagicMock()
sock.recv = mock.MagicMock(return_value=bytearray())
sess = Session('host', 'port')
sess.sock = sock
with self.assertRaises(Exception):
sess.http_recv_pack()
def test_http_recv_pack_ok(self):
data = bytearray('header\r\n\r\nbody')
sock = mock.MagicMock()
sock.recv = mock.MagicMock(return_value=data)
sess = Session('host', 'port')
sess.sock = sock
with mock.patch('SevpnMgmtPy.cedar.session.Pack') as mock_pack, \
mock.patch('SevpnMgmtPy.cedar.session.Buf') as mock_buf:
mock_pack.return_value.read_pack = mock.MagicMock()
pack = sess.http_recv_pack()
mock_pack.assert_called_once()
mock_buf.assert_called_once()
mock_pack.return_value.read_pack.assert_called_with(mock_buf.return_value)
self.assertEqual(pack, mock_pack.return_value)
def test_http_date(self):
sess = Session('host', 'port')
m1 = mock.MagicMock()
m2 = mock.MagicMock()
with mock.patch.dict('sys.modules', {'time': m1, 'wsgiref.handlers': m2}):
date = sess.http_date()
m1.mktime.assert_called_once()
m2.format_date_time.assert_called_once_with(m1.mktime.return_value)
self.assertEqual(date, m2.format_date_time.return_value)
def test_http_send_pack(self):
storage = bytearray('\xff\xee\xdd')
pack = mock.MagicMock()
pack.create_dummy_value = mock.MagicMock()
pack.to_buf = mock.MagicMock()
sess = Session('host', 'port')
sess.sock = mock.MagicMock()
sess.sock.sendall = mock.MagicMock()
with mock.patch('SevpnMgmtPy.cedar.session.Buf') as mock_buf:
mock_buf.return_value.storage = storage
sess.http_send_pack(pack)
pack.create_dummy_value.assert_called_once()
mock_buf.assert_called_once()
pack.to_buf.assert_called_once_with(mock_buf.return_value)
sess.sock.sendall.assert_called_once()
def test_send_raw(self):
pack = mock.MagicMock()
pack.to_buf = mock.MagicMock()
sess = Session('host', 'port')
sess.sock = mock.MagicMock()
sess.sock.send = mock.MagicMock()
with mock.patch('SevpnMgmtPy.cedar.session.Buf') as mock_buf:
storage = bytearray('\xfa\xce\xde')
mock_buf.return_value.storage = storage
sess.send_raw(pack)
pack.to_buf.assert_called_with(mock_buf.return_value)
sess.sock.send.assert_has_calls(calls=[mock.call(mock_buf.int_to_bytes.return_value), mock.call(storage)],
any_order=True)
def test_recv_raw(self):
sess = Session('host', 'port')
sess.sock = mock.MagicMock()
with mock.patch('SevpnMgmtPy.cedar.session.Buf') as mock_buf:
result = sess.recv_raw()
mock_buf.bytes_to_int.assert_called_once()
sess.sock.recv.assert_has_calls(calls=[mock.call(4), mock.call(mock_buf.bytes_to_int.return_value)])
self.assertEqual(result, sess.sock.recv.return_value)
| mit |
DarkCascade/get2gether | node_modules/meanio/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py | 912 | 3325 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypd output module
This module produces gyp input as its output. Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from. Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd file.
This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd. It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.
It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files, such as "included_files" and "*_excluded", will be present.
Output will also be stripped of comments. This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.
The specific formatting of the output generated by this module is subject
to change.
"""
import gyp.common
import errno
import os
import pprint
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_DIRNAME',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
# gypd doesn't define a default value for OS like many other generator
# modules. Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}
# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True
# TODO(mark): This always uses <, which isn't right. The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase. Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
output_files = {}
for qualified_target in target_list:
[input_file, target] = \
gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
if input_file[-4:] != '.gyp':
continue
input_file_stem = input_file[:-4]
output_file = input_file_stem + params['options'].suffix + '.gypd'
if not output_file in output_files:
output_files[output_file] = input_file
for output_file, input_file in output_files.iteritems():
output = open(output_file, 'w')
pprint.pprint(data[input_file], output)
output.close()
| mit |
Islandman93/reinforcepy | reinforcepy/learners/dqn/asynchronous/recurrent_thread_learner.py | 1 | 2073 | import numpy as np
from .q_thread_learner import QThreadLearner
class RecurrentThreadLearner(QThreadLearner):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.lstm_state_for_training = self.network.get_lstm_state()
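        # A snapshot of the LSTM state is kept so that each gradient step can
        # replay the accumulated minibatch from the state the network was in
        # at the start of the rollout, not its current (post-rollout) state.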
def reset(self):
super().reset()
self.network.reset_lstm_state()
self.lstm_state_for_training = self.network.get_lstm_state()
def update(self, state, action, reward, state_tp1, terminal):
self.frame_buffer.add_state_to_buffer(state)
# quit update if testing
if self.testing:
return
# clip reward
if self.reward_clip_vals is not None:
reward = np.clip(reward, *self.reward_clip_vals)
# accumulate minibatch_vars
self.minibatch_accumulate(self.frame_buffer.get_buffer(), action,
reward, self.frame_buffer.get_buffer_with(state_tp1), terminal)
# increment counters
self.step_count += 1
self.global_dict['counter'] += 1
# check perform gradient step
if self.step_count % self.async_update_step == 0 or terminal:
summaries = self.global_dict['write_summaries_this_step']
if summaries:
self.global_dict['write_summaries_this_step'] = False
summary = self.network.train_step(*self.get_minibatch_vars(), lstm_state=self.lstm_state_for_training,
global_step=self.global_dict['counter'], summaries=True)
self.global_dict['summary_writer'].add_summary(summary, global_step=self.global_dict['counter'])
else:
self.network.train_step(*self.get_minibatch_vars(), lstm_state=self.lstm_state_for_training,
global_step=self.global_dict['counter'], summaries=False)
self.reset_minibatch()
self.lstm_state_for_training = self.network.get_lstm_state()
# anneal action handler
self.anneal_random_policy()
| gpl-3.0 |
sesamesushi/satisrevude | models/vfs.py | 5 | 3683 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Virtual file system for managing files locally or in the cloud."""
__author__ = 'Pavel Simakov ([email protected])'
import os
import jinja2
class AbstractReadOnlyFileSystem(object):
"""A generic ro file system interface that forwards to an implementation."""
def __init__(self, impl):
self._impl = impl
def isfile(self, filename):
"""Checks if file exists, similar to os.path.isfile(...)."""
return self._impl.isfile(filename)
def open(self, filename):
"""Returns a stream with the file content, similar to open(...)."""
return self._impl.open(filename)
def list(self, dir_name):
"""Lists all files in a directory."""
return self._impl.list(dir_name)
    def get_jinja_environ(self, dir_names):
        """Configures jinja environment loaders for this file system."""
        return self._impl.get_jinja_environ(dir_names)
class LocalReadOnlyFileSystem(object):
"""A ro file system serving only local files."""
def __init__(self, logical_home_folder=None, physical_home_folder=None):
"""Create a new instance of the object.
Args:
logical_home_folder: A logical home dir of all files (/a/b/c/...).
physical_home_folder: A physical location on the file system (/x/y).
Returns:
A new instance of the object.
"""
self._logical_home_folder = logical_home_folder
self._physical_home_folder = physical_home_folder
def _logical_to_physical(self, filename):
if not (self._logical_home_folder and self._physical_home_folder):
return filename
return os.path.join(
self._physical_home_folder,
os.path.relpath(filename, self._logical_home_folder))
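    # Illustrative mapping (paths are made up): with logical home '/course'
    # and physical home '/home/app/data',
    # _logical_to_physical('/course/assets/logo.png') resolves to
    # '/home/app/data/assets/logo.png'; _physical_to_logical() is the inverse.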
def _physical_to_logical(self, filename):
if not (self._logical_home_folder and self._physical_home_folder):
return filename
return os.path.join(
self._logical_home_folder,
os.path.relpath(filename, self._physical_home_folder))
def isfile(self, filename):
return os.path.isfile(self._logical_to_physical(filename))
def open(self, filename):
return open(self._logical_to_physical(filename), 'rb')
def list(self, root_dir):
"""Lists all files in a directory."""
files = []
for dirname, unused_dirnames, filenames in os.walk(
self._logical_to_physical(root_dir)):
for filename in filenames:
files.append(
self._physical_to_logical(os.path.join(dirname, filename)))
return sorted(files)
def get_jinja_environ(self, dir_names):
physical_dir_names = []
for dir_name in dir_names:
physical_dir_names.append(self._logical_to_physical(dir_name))
return jinja2.Environment(
extensions=['jinja2.ext.i18n'],
loader=jinja2.FileSystemLoader(physical_dir_names))
def run_all_unit_tests():
"""Runs all unit tests in the project."""
if __name__ == '__main__':
run_all_unit_tests()
| apache-2.0 |
FireWRT/OpenWrt-Firefly-Libraries | staging_dir/host/lib/python3.4/modulefinder.py | 77 | 23421 | """Find modules used by a script, using introspection."""
import dis
import importlib._bootstrap
import importlib.machinery
import marshal
import os
import sys
import types
import struct
import warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore', PendingDeprecationWarning)
import imp
# XXX Clean up once str8's cstor matches bytes.
LOAD_CONST = bytes([dis.opname.index('LOAD_CONST')])
IMPORT_NAME = bytes([dis.opname.index('IMPORT_NAME')])
STORE_NAME = bytes([dis.opname.index('STORE_NAME')])
STORE_GLOBAL = bytes([dis.opname.index('STORE_GLOBAL')])
STORE_OPS = [STORE_NAME, STORE_GLOBAL]
HAVE_ARGUMENT = bytes([dis.HAVE_ARGUMENT])
# Modulefinder does a good job of simulating Python's import machinery, but
# it cannot handle __path__ modifications that packages make at runtime.
# Therefore there is a mechanism whereby you can register extra paths in
# this map for a package, and it will be honored.
# Note this is a mapping whose values are lists of paths.
packagePathMap = {}
# A Public interface
def AddPackagePath(packagename, path):
packagePathMap.setdefault(packagename, []).append(path)
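# Illustrative use (package name and path are made up): if a 'plugins'
# package extends its __path__ at runtime, register the extra directory so
# ModuleFinder can still locate its submodules:
#   AddPackagePath('plugins', '/opt/app/extra_plugins')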
replacePackageMap = {}
# This ReplacePackage mechanism allows modulefinder to work around
# situations in which a package injects itself under the name
# of another package into sys.modules at runtime by calling
# ReplacePackage("real_package_name", "faked_package_name")
# before running ModuleFinder.
def ReplacePackage(oldname, newname):
replacePackageMap[oldname] = newname
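# Illustrative use (the historical _xmlplus case): a package that installs
# itself into sys.modules under another package's name is declared with:
#   ReplacePackage('_xmlplus', 'xml')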
class Module:
def __init__(self, name, file=None, path=None):
self.__name__ = name
self.__file__ = file
self.__path__ = path
self.__code__ = None
# The set of global names that are assigned to in the module.
# This includes those names imported through starimports of
# Python modules.
self.globalnames = {}
# The set of starimports this module did that could not be
# resolved, ie. a starimport from a non-Python module.
self.starimports = {}
def __repr__(self):
s = "Module(%r" % (self.__name__,)
if self.__file__ is not None:
s = s + ", %r" % (self.__file__,)
if self.__path__ is not None:
s = s + ", %r" % (self.__path__,)
s = s + ")"
return s
class ModuleFinder:
def __init__(self, path=None, debug=0, excludes=[], replace_paths=[]):
if path is None:
path = sys.path
self.path = path
self.modules = {}
self.badmodules = {}
self.debug = debug
self.indent = 0
self.excludes = excludes
self.replace_paths = replace_paths
self.processed_paths = [] # Used in debugging only
def msg(self, level, str, *args):
if level <= self.debug:
for i in range(self.indent):
print(" ", end=' ')
print(str, end=' ')
for arg in args:
print(repr(arg), end=' ')
print()
def msgin(self, *args):
level = args[0]
if level <= self.debug:
self.indent = self.indent + 1
self.msg(*args)
def msgout(self, *args):
level = args[0]
if level <= self.debug:
self.indent = self.indent - 1
self.msg(*args)
def run_script(self, pathname):
self.msg(2, "run_script", pathname)
with open(pathname) as fp:
stuff = ("", "r", imp.PY_SOURCE)
self.load_module('__main__', fp, pathname, stuff)
def load_file(self, pathname):
dir, name = os.path.split(pathname)
name, ext = os.path.splitext(name)
with open(pathname) as fp:
stuff = (ext, "r", imp.PY_SOURCE)
self.load_module(name, fp, pathname, stuff)
def import_hook(self, name, caller=None, fromlist=None, level=-1):
self.msg(3, "import_hook", name, caller, fromlist, level)
parent = self.determine_parent(caller, level=level)
q, tail = self.find_head_package(parent, name)
m = self.load_tail(q, tail)
if not fromlist:
return q
if m.__path__:
self.ensure_fromlist(m, fromlist)
return None
def determine_parent(self, caller, level=-1):
self.msgin(4, "determine_parent", caller, level)
if not caller or level == 0:
self.msgout(4, "determine_parent -> None")
return None
pname = caller.__name__
if level >= 1: # relative import
if caller.__path__:
level -= 1
if level == 0:
parent = self.modules[pname]
assert parent is caller
self.msgout(4, "determine_parent ->", parent)
return parent
if pname.count(".") < level:
raise ImportError("relative importpath too deep")
pname = ".".join(pname.split(".")[:-level])
parent = self.modules[pname]
self.msgout(4, "determine_parent ->", parent)
return parent
if caller.__path__:
parent = self.modules[pname]
assert caller is parent
self.msgout(4, "determine_parent ->", parent)
return parent
if '.' in pname:
i = pname.rfind('.')
pname = pname[:i]
parent = self.modules[pname]
assert parent.__name__ == pname
self.msgout(4, "determine_parent ->", parent)
return parent
self.msgout(4, "determine_parent -> None")
return None
def find_head_package(self, parent, name):
self.msgin(4, "find_head_package", parent, name)
if '.' in name:
i = name.find('.')
head = name[:i]
tail = name[i+1:]
else:
head = name
tail = ""
if parent:
qname = "%s.%s" % (parent.__name__, head)
else:
qname = head
q = self.import_module(head, qname, parent)
if q:
self.msgout(4, "find_head_package ->", (q, tail))
return q, tail
if parent:
qname = head
parent = None
q = self.import_module(head, qname, parent)
if q:
self.msgout(4, "find_head_package ->", (q, tail))
return q, tail
self.msgout(4, "raise ImportError: No module named", qname)
raise ImportError("No module named " + qname)
def load_tail(self, q, tail):
self.msgin(4, "load_tail", q, tail)
m = q
while tail:
i = tail.find('.')
if i < 0: i = len(tail)
head, tail = tail[:i], tail[i+1:]
mname = "%s.%s" % (m.__name__, head)
m = self.import_module(head, mname, m)
if not m:
self.msgout(4, "raise ImportError: No module named", mname)
raise ImportError("No module named " + mname)
self.msgout(4, "load_tail ->", m)
return m
def ensure_fromlist(self, m, fromlist, recursive=0):
self.msg(4, "ensure_fromlist", m, fromlist, recursive)
for sub in fromlist:
if sub == "*":
if not recursive:
all = self.find_all_submodules(m)
if all:
self.ensure_fromlist(m, all, 1)
elif not hasattr(m, sub):
subname = "%s.%s" % (m.__name__, sub)
submod = self.import_module(sub, subname, m)
if not submod:
raise ImportError("No module named " + subname)
def find_all_submodules(self, m):
if not m.__path__:
return
modules = {}
# 'suffixes' used to be a list hardcoded to [".py", ".pyc", ".pyo"].
# But we must also collect Python extension modules - although
# we cannot separate normal dlls from Python extensions.
suffixes = []
suffixes += importlib.machinery.EXTENSION_SUFFIXES[:]
suffixes += importlib.machinery.SOURCE_SUFFIXES[:]
suffixes += importlib.machinery.BYTECODE_SUFFIXES[:]
for dir in m.__path__:
try:
names = os.listdir(dir)
except OSError:
self.msg(2, "can't list directory", dir)
continue
for name in names:
mod = None
for suff in suffixes:
n = len(suff)
if name[-n:] == suff:
mod = name[:-n]
break
if mod and mod != "__init__":
modules[mod] = mod
return modules.keys()
def import_module(self, partname, fqname, parent):
self.msgin(3, "import_module", partname, fqname, parent)
try:
m = self.modules[fqname]
except KeyError:
pass
else:
self.msgout(3, "import_module ->", m)
return m
if fqname in self.badmodules:
self.msgout(3, "import_module -> None")
return None
if parent and parent.__path__ is None:
self.msgout(3, "import_module -> None")
return None
try:
fp, pathname, stuff = self.find_module(partname,
parent and parent.__path__, parent)
except ImportError:
self.msgout(3, "import_module ->", None)
return None
try:
m = self.load_module(fqname, fp, pathname, stuff)
finally:
if fp:
fp.close()
if parent:
setattr(parent, partname, m)
self.msgout(3, "import_module ->", m)
return m
def load_module(self, fqname, fp, pathname, file_info):
suffix, mode, type = file_info
self.msgin(2, "load_module", fqname, fp and "fp", pathname)
if type == imp.PKG_DIRECTORY:
m = self.load_package(fqname, pathname)
self.msgout(2, "load_module ->", m)
return m
if type == imp.PY_SOURCE:
co = compile(fp.read()+'\n', pathname, 'exec')
elif type == imp.PY_COMPILED:
try:
marshal_data = importlib._bootstrap._validate_bytecode_header(fp.read())
except ImportError as exc:
self.msgout(2, "raise ImportError: " + str(exc), pathname)
raise
co = marshal.loads(marshal_data)
else:
co = None
m = self.add_module(fqname)
m.__file__ = pathname
if co:
if self.replace_paths:
co = self.replace_paths_in_code(co)
m.__code__ = co
self.scan_code(co, m)
self.msgout(2, "load_module ->", m)
return m
def _add_badmodule(self, name, caller):
if name not in self.badmodules:
self.badmodules[name] = {}
if caller:
self.badmodules[name][caller.__name__] = 1
else:
self.badmodules[name]["-"] = 1
def _safe_import_hook(self, name, caller, fromlist, level=-1):
# wrapper for self.import_hook() that won't raise ImportError
if name in self.badmodules:
self._add_badmodule(name, caller)
return
try:
self.import_hook(name, caller, level=level)
except ImportError as msg:
self.msg(2, "ImportError:", str(msg))
self._add_badmodule(name, caller)
else:
if fromlist:
for sub in fromlist:
if sub in self.badmodules:
self._add_badmodule(sub, caller)
continue
try:
self.import_hook(name, caller, [sub], level=level)
except ImportError as msg:
self.msg(2, "ImportError:", str(msg))
fullname = name + "." + sub
self._add_badmodule(fullname, caller)
def scan_opcodes_25(self, co,
unpack = struct.unpack):
# Scan the code, and yield 'interesting' opcode combinations
# Python 2.5 version (has absolute and relative imports)
code = co.co_code
names = co.co_names
consts = co.co_consts
LOAD_LOAD_AND_IMPORT = LOAD_CONST + LOAD_CONST + IMPORT_NAME
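        # The three-instruction pattern matched below is what an import
        # statement compiles to; e.g. (illustrative) "from . import x" yields
        #   LOAD_CONST 1        (relative level)
        #   LOAD_CONST ('x',)   (fromlist)
        #   IMPORT_NAME ''      (module name)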
while code:
c = bytes([code[0]])
if c in STORE_OPS:
oparg, = unpack('<H', code[1:3])
yield "store", (names[oparg],)
code = code[3:]
continue
if code[:9:3] == LOAD_LOAD_AND_IMPORT:
oparg_1, oparg_2, oparg_3 = unpack('<xHxHxH', code[:9])
level = consts[oparg_1]
if level == 0: # absolute import
yield "absolute_import", (consts[oparg_2], names[oparg_3])
else: # relative import
yield "relative_import", (level, consts[oparg_2], names[oparg_3])
code = code[9:]
continue
if c >= HAVE_ARGUMENT:
code = code[3:]
else:
code = code[1:]
def scan_code(self, co, m):
code = co.co_code
scanner = self.scan_opcodes_25
for what, args in scanner(co):
if what == "store":
name, = args
m.globalnames[name] = 1
elif what == "absolute_import":
fromlist, name = args
have_star = 0
if fromlist is not None:
if "*" in fromlist:
have_star = 1
fromlist = [f for f in fromlist if f != "*"]
self._safe_import_hook(name, m, fromlist, level=0)
if have_star:
# We've encountered an "import *". If it is a Python module,
# the code has already been parsed and we can suck out the
# global names.
mm = None
if m.__path__:
# At this point we don't know whether 'name' is a
# submodule of 'm' or a global module. Let's just try
# the full name first.
mm = self.modules.get(m.__name__ + "." + name)
if mm is None:
mm = self.modules.get(name)
if mm is not None:
m.globalnames.update(mm.globalnames)
m.starimports.update(mm.starimports)
if mm.__code__ is None:
m.starimports[name] = 1
else:
m.starimports[name] = 1
elif what == "relative_import":
level, fromlist, name = args
if name:
self._safe_import_hook(name, m, fromlist, level=level)
else:
parent = self.determine_parent(m, level=level)
self._safe_import_hook(parent.__name__, None, fromlist, level=0)
else:
# We don't expect anything else from the generator.
raise RuntimeError(what)
for c in co.co_consts:
if isinstance(c, type(co)):
self.scan_code(c, m)
def load_package(self, fqname, pathname):
self.msgin(2, "load_package", fqname, pathname)
newname = replacePackageMap.get(fqname)
if newname:
fqname = newname
m = self.add_module(fqname)
m.__file__ = pathname
m.__path__ = [pathname]
# As per comment at top of file, simulate runtime __path__ additions.
m.__path__ = m.__path__ + packagePathMap.get(fqname, [])
fp, buf, stuff = self.find_module("__init__", m.__path__)
try:
self.load_module(fqname, fp, buf, stuff)
self.msgout(2, "load_package ->", m)
return m
finally:
if fp:
fp.close()
def add_module(self, fqname):
if fqname in self.modules:
return self.modules[fqname]
self.modules[fqname] = m = Module(fqname)
return m
def find_module(self, name, path, parent=None):
if parent is not None:
# assert path is not None
fullname = parent.__name__+'.'+name
else:
fullname = name
if fullname in self.excludes:
self.msgout(3, "find_module -> Excluded", fullname)
raise ImportError(name)
if path is None:
if name in sys.builtin_module_names:
return (None, None, ("", "", imp.C_BUILTIN))
path = self.path
return imp.find_module(name, path)
def report(self):
"""Print a report to stdout, listing the found modules with their
paths, as well as modules that are missing, or seem to be missing.
"""
print()
print(" %-25s %s" % ("Name", "File"))
print(" %-25s %s" % ("----", "----"))
# Print modules found
keys = sorted(self.modules.keys())
for key in keys:
m = self.modules[key]
if m.__path__:
print("P", end=' ')
else:
print("m", end=' ')
print("%-25s" % key, m.__file__ or "")
# Print missing modules
missing, maybe = self.any_missing_maybe()
if missing:
print()
print("Missing modules:")
for name in missing:
mods = sorted(self.badmodules[name].keys())
print("?", name, "imported from", ', '.join(mods))
# Print modules that may be missing, but then again, maybe not...
if maybe:
print()
print("Submodules that appear to be missing, but could also be", end=' ')
print("global names in the parent package:")
for name in maybe:
mods = sorted(self.badmodules[name].keys())
print("?", name, "imported from", ', '.join(mods))
def any_missing(self):
"""Return a list of modules that appear to be missing. Use
any_missing_maybe() if you want to know which modules are
certain to be missing, and which *may* be missing.
"""
missing, maybe = self.any_missing_maybe()
return missing + maybe
def any_missing_maybe(self):
"""Return two lists, one with modules that are certainly missing
and one with modules that *may* be missing. The latter names could
either be submodules *or* just global names in the package.
The reason it can't always be determined is that it's impossible to
tell which names are imported when "from module import *" is done
with an extension module, short of actually importing it.
"""
missing = []
maybe = []
for name in self.badmodules:
if name in self.excludes:
continue
i = name.rfind(".")
if i < 0:
missing.append(name)
continue
subname = name[i+1:]
pkgname = name[:i]
pkg = self.modules.get(pkgname)
if pkg is not None:
if pkgname in self.badmodules[name]:
# The package tried to import this module itself and
# failed. It's definitely missing.
missing.append(name)
elif subname in pkg.globalnames:
# It's a global in the package: definitely not missing.
pass
elif pkg.starimports:
# It could be missing, but the package did an "import *"
# from a non-Python module, so we simply can't be sure.
maybe.append(name)
else:
# It's not a global in the package, the package didn't
# do funny star imports, it's very likely to be missing.
# The symbol could be inserted into the package from the
# outside, but since that's not good style we simply list
# it missing.
missing.append(name)
else:
missing.append(name)
missing.sort()
maybe.sort()
return missing, maybe
def replace_paths_in_code(self, co):
new_filename = original_filename = os.path.normpath(co.co_filename)
for f, r in self.replace_paths:
if original_filename.startswith(f):
new_filename = r + original_filename[len(f):]
break
if self.debug and original_filename not in self.processed_paths:
if new_filename != original_filename:
self.msgout(2, "co_filename %r changed to %r" \
% (original_filename,new_filename,))
else:
self.msgout(2, "co_filename %r remains unchanged" \
% (original_filename,))
self.processed_paths.append(original_filename)
consts = list(co.co_consts)
for i in range(len(consts)):
if isinstance(consts[i], type(co)):
consts[i] = self.replace_paths_in_code(consts[i])
return types.CodeType(co.co_argcount, co.co_kwonlyargcount,
co.co_nlocals, co.co_stacksize, co.co_flags,
co.co_code, tuple(consts), co.co_names,
co.co_varnames, new_filename, co.co_name,
co.co_firstlineno, co.co_lnotab, co.co_freevars,
co.co_cellvars)
def test():
# Parse command line
import getopt
try:
opts, args = getopt.getopt(sys.argv[1:], "dmp:qx:")
except getopt.error as msg:
print(msg)
return
# Process options
debug = 1
domods = 0
addpath = []
exclude = []
for o, a in opts:
if o == '-d':
debug = debug + 1
if o == '-m':
domods = 1
if o == '-p':
addpath = addpath + a.split(os.pathsep)
if o == '-q':
debug = 0
if o == '-x':
exclude.append(a)
# Provide default arguments
if not args:
script = "hello.py"
else:
script = args[0]
# Set the path based on sys.path and the script directory
path = sys.path[:]
path[0] = os.path.dirname(script)
path = addpath + path
if debug > 1:
print("path:")
for item in path:
print(" ", repr(item))
# Create the module finder and turn its crank
mf = ModuleFinder(path, debug, exclude)
for arg in args[1:]:
if arg == '-m':
domods = 1
continue
if domods:
if arg[-2:] == '.*':
mf.import_hook(arg[:-2], None, ["*"])
else:
mf.import_hook(arg)
else:
mf.load_file(arg)
mf.run_script(script)
mf.report()
return mf # for -i debugging
if __name__ == '__main__':
try:
mf = test()
except KeyboardInterrupt:
print("\n[interrupted]")
| gpl-2.0 |
RAPD/RAPD | src/plugins/subcontractors/xdsme/new/xdsme-0.4.9/XOconv/pycgtypes/mat4.py | 12 | 23700 | ######################################################################
# mat4 - Matrix class (4x4 matrix)
#
# Copyright (C) 2002, Matthias Baas ([email protected])
#
# You may distribute under the terms of the BSD license, as
# specified in the file license.txt.
####################################################################
import types, math, copy
from vec3 import vec3 as _vec3
from vec4 import vec4 as _vec4
from mat3 import mat3 as _mat3
# [ 0 1 2 3 ]
# [ 4 5 6 7 ]
# [ 8 9 10 11 ]
# [ 12 13 14 15 ]
# mat4
class mat4:
"""Matrix class (4x4).
This class represents a 4x4 matrix that can be used to store
affine transformations.
"""
def __init__(self, *args):
"Constructor"
# No arguments
if len(args)==0:
self.mlist = 16*[0.0]
# 1 argument (list, scalar or mat4)
elif len(args)==1:
T = type(args[0])
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.mlist = [args[0],0.0,0.0,0.0,
0.0,args[0],0.0,0.0,
0.0,0.0,args[0],0.0,
0.0,0.0,0.0,args[0]]
# mat4
elif isinstance(args[0], mat4):
self.mlist = copy.copy(args[0].mlist)
# String
elif T==types.StringType:
                s=args[0].replace(","," ").replace("  "," ").strip().split(" ")
self.mlist=map(lambda x: float(x), s)
else:
self.mlist = list(args[0])
# 4 arguments (sequences)
elif len(args)==4:
a,b,c,d=args
self.mlist = [a[0], b[0], c[0], d[0],
a[1], b[1], c[1], d[1],
a[2], b[2], c[2], d[2],
a[3], b[3], c[3], d[3]]
# 16 arguments
elif len(args)==16:
self.mlist = list(args)
else:
raise TypeError,"mat4() arg can't be converted to mat4"
# Check if there are really 16 elements in the list
if len(self.mlist)!=16:
raise TypeError, "mat4(): Wrong number of matrix elements ("+`len(self.mlist)`+" instead of 16)"
def __repr__(self):
return 'mat4('+`self.mlist`[1:-1]+')'
def __str__(self):
fmt="%9.4f"
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return ('['+fmt%m11+', '+fmt%m12+', '+fmt%m13+', '+fmt%m14+']\n'+
'['+fmt%m21+', '+fmt%m22+', '+fmt%m23+', '+fmt%m24+']\n'+
'['+fmt%m31+', '+fmt%m32+', '+fmt%m33+', '+fmt%m34+']\n'+
'['+fmt%m41+', '+fmt%m42+', '+fmt%m43+', '+fmt%m44+']')
def __eq__(self, other):
"""== operator"""
if isinstance(other, mat4):
return self.mlist==other.mlist
else:
return 0
def __ne__(self, other):
"""!= operator"""
if isinstance(other, mat4):
return self.mlist!=other.mlist
else:
return 1
def __add__(self, other):
"""Matrix addition.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M+M
        [   2.0000,    4.0000,    6.0000,    8.0000]
        [  10.0000,   12.0000,   14.0000,   16.0000]
        [  18.0000,   20.0000,   22.0000,   24.0000]
        [  26.0000,   28.0000,   30.0000,   32.0000]
"""
if isinstance(other, mat4):
return mat4(map(lambda x,y: x+y, self.mlist, other.mlist))
else:
raise TypeError, "unsupported operand type for +"
def __sub__(self, other):
"""Matrix subtraction.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M-M
        [   0.0000,    0.0000,    0.0000,    0.0000]
        [   0.0000,    0.0000,    0.0000,    0.0000]
        [   0.0000,    0.0000,    0.0000,    0.0000]
        [   0.0000,    0.0000,    0.0000,    0.0000]
"""
if isinstance(other, mat4):
return mat4(map(lambda x,y: x-y, self.mlist, other.mlist))
else:
raise TypeError, "unsupported operand type for -"
def __mul__(self, other):
"""Multiplication.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M*2.0
        [   2.0000,    4.0000,    6.0000,    8.0000]
        [  10.0000,   12.0000,   14.0000,   16.0000]
        [  18.0000,   20.0000,   22.0000,   24.0000]
        [  26.0000,   28.0000,   30.0000,   32.0000]
        >>> print 2.0*M
        [   2.0000,    4.0000,    6.0000,    8.0000]
        [  10.0000,   12.0000,   14.0000,   16.0000]
        [  18.0000,   20.0000,   22.0000,   24.0000]
        [  26.0000,   28.0000,   30.0000,   32.0000]
        >>> print M*M
        [  90.0000,  100.0000,  110.0000,  120.0000]
        [ 202.0000,  228.0000,  254.0000,  280.0000]
        [ 314.0000,  356.0000,  398.0000,  440.0000]
        [ 426.0000,  484.0000,  542.0000,  600.0000]
>>> print M*_vec3(1,2,3)
(0.1765, 0.4510, 0.7255)
>>> print _vec3(1,2,3)*M
(0.7083, 0.8056, 0.9028)
"""
T = type(other)
# mat4*scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return mat4(map(lambda x,other=other: x*other, self.mlist))
# mat4*vec3
if isinstance(other, _vec3):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
w = float(m41*other.x + m42*other.y + m43*other.z + m44)
return _vec3(m11*other.x + m12*other.y + m13*other.z + m14,
m21*other.x + m22*other.y + m23*other.z + m24,
m31*other.x + m32*other.y + m33*other.z + m34)/w
# mat4*vec4
if isinstance(other, _vec4):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return _vec4(m11*other.x + m12*other.y + m13*other.z + m14*other.w,
m21*other.x + m22*other.y + m23*other.z + m24*other.w,
m31*other.x + m32*other.y + m33*other.z + m34*other.w,
m41*other.x + m42*other.y + m43*other.z + m44*other.w)
# mat4*mat4
if isinstance(other, mat4):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
n11,n12,n13,n14,n21,n22,n23,n24,n31,n32,n33,n34,n41,n42,n43,n44 = other.mlist
return mat4( m11*n11+m12*n21+m13*n31+m14*n41,
m11*n12+m12*n22+m13*n32+m14*n42,
m11*n13+m12*n23+m13*n33+m14*n43,
m11*n14+m12*n24+m13*n34+m14*n44,
m21*n11+m22*n21+m23*n31+m24*n41,
m21*n12+m22*n22+m23*n32+m24*n42,
m21*n13+m22*n23+m23*n33+m24*n43,
m21*n14+m22*n24+m23*n34+m24*n44,
m31*n11+m32*n21+m33*n31+m34*n41,
m31*n12+m32*n22+m33*n32+m34*n42,
m31*n13+m32*n23+m33*n33+m34*n43,
m31*n14+m32*n24+m33*n34+m34*n44,
m41*n11+m42*n21+m43*n31+m44*n41,
m41*n12+m42*n22+m43*n32+m44*n42,
m41*n13+m42*n23+m43*n33+m44*n43,
m41*n14+m42*n24+m43*n34+m44*n44)
# unsupported
else:
raise TypeError, "unsupported operand type for *"
def __rmul__(self, other):
T = type(other)
# scalar*mat4
if T==types.FloatType or T==types.IntType or T==types.LongType:
return mat4(map(lambda x,other=other: other*x, self.mlist))
# vec4*mat4
if isinstance(other, _vec4):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return _vec4(other.x*m11 + other.y*m21 + other.z*m31 + other.w*m41,
other.x*m12 + other.y*m22 + other.z*m32 + other.w*m42,
other.x*m13 + other.y*m23 + other.z*m33 + other.w*m43,
other.x*m14 + other.y*m24 + other.z*m34 + other.w*m44)
# vec3*mat4
if isinstance(other, _vec3):
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
w = float(other.x*m14 + other.y*m24 + other.z*m34 + m44)
return _vec3(other.x*m11 + other.y*m21 + other.z*m31 + m41,
other.x*m12 + other.y*m22 + other.z*m32 + m42,
other.x*m13 + other.y*m23 + other.z*m33 + m43)/w
# mat4*mat4
if isinstance(other, mat4):
return self.__mul__(other)
# unsupported
else:
raise TypeError, "unsupported operand type for *"
def __div__(self, other):
"""Division
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M/2.0
        [   0.5000,    1.0000,    1.5000,    2.0000]
        [   2.5000,    3.0000,    3.5000,    4.0000]
        [   4.5000,    5.0000,    5.5000,    6.0000]
        [   6.5000,    7.0000,    7.5000,    8.0000]
"""
T = type(other)
# mat4/scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return mat4(map(lambda x,other=other: x/other, self.mlist))
# unsupported
else:
raise TypeError, "unsupported operand type for /"
def __mod__(self, other):
"""Modulo.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M%5.0
        [   1.0000,    2.0000,    3.0000,    4.0000]
        [   0.0000,    1.0000,    2.0000,    3.0000]
        [   4.0000,    0.0000,    1.0000,    2.0000]
        [   3.0000,    4.0000,    0.0000,    1.0000]
"""
T = type(other)
# mat4%scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return mat4(map(lambda x,other=other: x%other, self.mlist))
# unsupported
else:
raise TypeError, "unsupported operand type for %"
def __neg__(self):
"""Negation.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print -M
        [  -1.0000,   -2.0000,   -3.0000,   -4.0000]
        [  -5.0000,   -6.0000,   -7.0000,   -8.0000]
        [  -9.0000,  -10.0000,  -11.0000,  -12.0000]
        [ -13.0000,  -14.0000,  -15.0000,  -16.0000]
"""
return mat4(map(lambda x: -x, self.mlist))
def __pos__(self):
"""
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print +M
        [   1.0000,    2.0000,    3.0000,    4.0000]
        [   5.0000,    6.0000,    7.0000,    8.0000]
        [   9.0000,   10.0000,   11.0000,   12.0000]
        [  13.0000,   14.0000,   15.0000,   16.0000]
"""
return mat4(map(lambda x: +x, self.mlist))
def __len__(self):
return 4
def __getitem__(self, key):
if type(key)==types.IntType:
if key<0 or key>3:
raise IndexError,"index out of range"
m=self.mlist
if key==0: return [m[0],m[4],m[8],m[12]]
elif key==1: return [m[1],m[5],m[9],m[13]]
elif key==2: return [m[2],m[6],m[10],m[14]]
elif key==3: return [m[3],m[7],m[11],m[15]]
elif type(key)==types.TupleType:
i,j=key
if i<0 or i>3 or j<0 or j>3:
raise IndexError, "index out of range"
return self.mlist[i*4+j]
else:
raise TypeError,"index must be integer or 2-tuple"
def __setitem__(self, key, value):
if type(key)==types.IntType:
if key<0 or key>3:
raise IndexError,"index out of range"
m=self.mlist
if key==0: m[0],m[4],m[8],m[12]=value
elif key==1: m[1],m[5],m[9],m[13]=value
elif key==2: m[2],m[6],m[10],m[14]=value
elif key==3: m[3],m[7],m[11],m[15]=value
elif type(key)==types.TupleType:
i,j=key
if i<0 or i>3 or j<0 or j>3:
raise IndexError, "index out of range"
self.mlist[i*4+j] = value
else:
raise TypeError,"index must be integer or 2-tuple"
def getRow(self, idx):
"""Return row (as vec4)."""
m=self.mlist
if idx==0: return _vec4(m[0], m[1], m[2], m[3])
elif idx==1: return _vec4(m[4], m[5], m[6], m[7])
elif idx==2: return _vec4(m[8], m[9], m[10], m[11])
elif idx==3: return _vec4(m[12], m[13], m[14], m[15])
else:
raise IndexError,"index out of range"
def setRow(self, idx, value):
"""Set row."""
m=self.mlist
if idx==0: m[0],m[1],m[2],m[3] = value
elif idx==1: m[4],m[5],m[6],m[7] = value
elif idx==2: m[8],m[9],m[10],m[11] = value
elif idx==3: m[12],m[13],m[14],m[15] = value
else:
raise IndexError,"index out of range"
def getColumn(self, idx):
"""Return column (as vec4)."""
m=self.mlist
if idx==0: return _vec4(m[0], m[4], m[8], m[12])
elif idx==1: return _vec4(m[1], m[5], m[9], m[13])
elif idx==2: return _vec4(m[2], m[6], m[10], m[14])
elif idx==3: return _vec4(m[3], m[7], m[11], m[15])
else:
raise IndexError,"index out of range"
def setColumn(self, idx, value):
"""Set column."""
m=self.mlist
if idx==0: m[0],m[4],m[8],m[12] = value
elif idx==1: m[1],m[5],m[9],m[13] = value
elif idx==2: m[2],m[6],m[10],m[14] = value
elif idx==3: m[3],m[7],m[11],m[15] = value
else:
raise IndexError,"index out of range"
def toList(self, rowmajor=0):
"""Return a list containing the matrix elements.
By default the list is in column-major order (which can directly be
used in OpenGL or RenderMan). If you set the optional argument
rowmajor to 1, you'll get the list in row-major order.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M.toList()
[1, 5, 9, 13, 2, 6, 10, 14, 3, 7, 11, 15, 4, 8, 12, 16]
>>> print M.toList(rowmajor=1)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]
"""
if rowmajor:
return copy.copy(self.mlist)
else:
return self.transpose().mlist
def identity(self):
"""Return identity matrix.
>>> print mat4().identity()
        [   1.0000,    0.0000,    0.0000,    0.0000]
        [   0.0000,    1.0000,    0.0000,    0.0000]
        [   0.0000,    0.0000,    1.0000,    0.0000]
        [   0.0000,    0.0000,    0.0000,    1.0000]
"""
return mat4(1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0)
def transpose(self):
"""Transpose matrix.
>>> M=mat4(1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16)
>>> print M.transpose()
        [   1.0000,    5.0000,    9.0000,   13.0000]
        [   2.0000,    6.0000,   10.0000,   14.0000]
        [   3.0000,    7.0000,   11.0000,   15.0000]
        [   4.0000,    8.0000,   12.0000,   16.0000]
"""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return mat4(m11,m21,m31,m41,
m12,m22,m32,m42,
m13,m23,m33,m43,
m14,m24,m34,m44)
def determinant(self):
"""Return determinant.
>>> M=mat4(2.0,0,0,0, 0,2.0,0,0, 0,0,2.0,0, 0,0,0,2.0)
>>> print M.determinant()
16.0
"""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return m11*m22*m33*m44 \
-m11*m22*m34*m43 \
+m11*m23*m34*m42 \
-m11*m23*m32*m44 \
+m11*m24*m32*m43 \
-m11*m24*m33*m42 \
-m12*m23*m34*m41 \
+m12*m23*m31*m44 \
-m12*m24*m31*m43 \
+m12*m24*m33*m41 \
-m12*m21*m33*m44 \
+m12*m21*m34*m43 \
+m13*m24*m31*m42 \
-m13*m24*m32*m41 \
+m13*m21*m32*m44 \
-m13*m21*m34*m42 \
+m13*m22*m34*m41 \
-m13*m22*m31*m44 \
-m14*m21*m32*m43 \
+m14*m21*m33*m42 \
-m14*m22*m33*m41 \
+m14*m22*m31*m43 \
-m14*m23*m31*m42 \
+m14*m23*m32*m41
def _submat(self, i,j):
M=_mat3()
for k in range(3):
for l in range(3):
t=(k,l)
if k>=i:
t=(k+1,t[1])
if l>=j:
t=(t[0],l+1)
M[k,l] = self[t]
return M
def inverse(self):
"""Return inverse matrix.
>>> M=mat4(0,-2.0,0,0, 2.0,0,0,0, 0,0,2,0, 0,0,0,2)
>>> print M.inverse()
        [   0.0000,    0.5000,    0.0000,    0.0000]
        [  -0.5000,    0.0000,    0.0000,    0.0000]
        [   0.0000,    0.0000,    0.5000,    0.0000]
        [   0.0000,    0.0000,    0.0000,    0.5000]
"""
Mi=mat4()
d=self.determinant()
for i in range(4):
for j in range(4):
sign=1-((i+j)%2)*2
m3=self._submat(i,j)
Mi[j,i]=sign*m3.determinant()/d
return Mi
def translation(self, t):
"""Return translation matrix."""
return mat4(1.0, 0.0, 0.0, t.x,
0.0, 1.0, 0.0, t.y,
0.0, 0.0, 1.0, t.z,
0.0, 0.0, 0.0, 1.0)
def scaling(self, s):
"""Return scaling matrix."""
return mat4(s.x, 0.0, 0.0, 0.0,
0.0, s.y, 0.0, 0.0,
0.0, 0.0, s.z, 0.0,
0.0, 0.0, 0.0, 1.0)
def rotation(self, angle, axis):
"""Return rotation matrix.
angle must be given in radians. axis should be of type vec3.
"""
sqr_a = axis.x*axis.x
sqr_b = axis.y*axis.y
sqr_c = axis.z*axis.z
len2 = sqr_a+sqr_b+sqr_c
k2 = math.cos(angle)
k1 = (1.0-k2)/len2
k3 = math.sin(angle)/math.sqrt(len2)
k1ab = k1*axis.x*axis.y
k1ac = k1*axis.x*axis.z
k1bc = k1*axis.y*axis.z
k3a = k3*axis.x
k3b = k3*axis.y
k3c = k3*axis.z
return mat4( k1*sqr_a+k2, k1ab-k3c, k1ac+k3b, 0.0,
k1ab+k3c, k1*sqr_b+k2, k1bc-k3a, 0.0,
k1ac-k3b, k1bc+k3a, k1*sqr_c+k2, 0.0,
0.0, 0.0, 0.0, 1.0)
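    # Illustrative use of rotation() (values rounded; _vec3 is the vec3 type
    # imported above): a 90-degree turn about z maps the x axis onto y:
    #   >>> R = mat4().rotation(math.pi/2, _vec3(0, 0, 1))
    #   >>> print R * _vec3(1, 0, 0)
    #   (0.0000, 1.0000, 0.0000)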
def translate(self, t):
"""Concatenate a translation."""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
self.mlist[3] = m11*t.x + m12*t.y + m13*t.z + m14
self.mlist[7] = m21*t.x + m22*t.y + m23*t.z + m24
self.mlist[11] = m31*t.x + m32*t.y + m33*t.z + m34
self.mlist[15] = m41*t.x + m42*t.y + m43*t.z + m44
return self
def scale(self, s):
"""Concatenate a scaling."""
self.mlist[0] *= s.x
self.mlist[1] *= s.y
self.mlist[2] *= s.z
self.mlist[4] *= s.x
self.mlist[5] *= s.y
self.mlist[6] *= s.z
self.mlist[8] *= s.x
self.mlist[9] *= s.y
self.mlist[10] *= s.z
self.mlist[12] *= s.x
self.mlist[13] *= s.y
self.mlist[14] *= s.z
return self
def rotate(self, angle, axis):
"""Concatenate a rotation.
angle must be given in radians. axis should be of type vec3.
"""
R=self.rotation(angle, axis)
self.mlist = (self*R).mlist
return self
def frustum(self, left, right, bottom, top, near, far):
"""equivalent to the OpenGL command glFrustum()"""
return mat4( (2.0*near)/(right-left), 0.0, float(right+left)/(right-left), 0.0,
0.0, (2.0*near)/(top-bottom), float(top+bottom)/(top-bottom), 0.0,
0.0, 0.0, -float(far+near)/(far-near), -(2.0*far*near)/(far-near),
0.0, 0.0, -1.0, 0.0)
def perspective(self, fovy, aspect, near, far):
"""von Mesa ubernommen (glu.c)"""
top = near * math.tan(fovy * math.pi / 360.0)
bottom = -top
left = bottom * aspect
right = top * aspect
return self.frustum(left, right, bottom, top, near, far)
def lookAt(self, pos, target, up=_vec3(0,0,1)):
"""Look from pos to target.
The resulting transformation moves the origin to pos and
rotates so that The z-axis points to target. The y-axis is
as close as possible to the up vector.
"""
dir = (target - pos).normalize()
up = up.normalize()
up -= (up * dir) * dir
try:
up = up.normalize()
except:
# We're looking along the up direction, so choose
# an arbitrary direction that is perpendicular to dir
# as new up.
up = dir.ortho()
right = up.cross(dir).normalize()
self.mlist=[right.x, up.x, dir.x, pos.x,
right.y, up.y, dir.y, pos.y,
right.z, up.z, dir.z, pos.z,
0.0, 0.0, 0.0, 1.0]
return self
def ortho(self):
"""Return a matrix with orthogonal base vectors.
Makes the x-, y- and z-axis orthogonal.
The fourth column and row remain untouched.
"""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
x = _vec3(m11, m21, m31)
y = _vec3(m12, m22, m32)
z = _vec3(m13, m23, m33)
xl = x.length()
xl*=xl
y = y - ((x*y)/xl)*x
z = z - ((x*z)/xl)*x
yl = y.length()
yl*=yl
z = z - ((y*z)/yl)*y
return mat4( x.x, y.x, z.x, m14,
x.y, y.y, z.y, m24,
x.z, y.z, z.z, m34,
m41, m42, m43, m44)
def decompose(self):
"""Decomposes the matrix into a translation, rotation and scaling part.
Returns a tuple (translation, rotation, scaling). The
translation and scaling parts are given as vec3's, the rotation
is still given as a mat4.
"""
dummy = self.ortho()
dummy.setRow(3,_vec4(0.0, 0.0, 0.0, 1.0))
x = dummy.getColumn(0)
y = dummy.getColumn(1)
z = dummy.getColumn(2)
xl = x.length()
yl = y.length()
zl = z.length()
scale = _vec3(xl,yl,zl)
x/=xl
y/=yl
z/=zl
dummy.setColumn(0,x)
dummy.setColumn(1,y)
dummy.setColumn(2,z)
if dummy.determinant()<0.0:
dummy.setColumn(0,-x)
scale.x=-scale.x
return (_vec3(self.mlist[3], self.mlist[7], self.mlist[11]),
dummy,
scale)
def getMat3(self):
"""Convert to mat3 by discarding 4th row and column.
"""
m11,m12,m13,m14,m21,m22,m23,m24,m31,m32,m33,m34,m41,m42,m43,m44 = self.mlist
return _mat3(m11,m12,m13,
m21,m22,m23,
m31,m32,m33)
######################################################################
def _test():
import doctest, mat4
failed, total = doctest.testmod(mat4)
print "%d/%d failed" % (failed, total)
if __name__=="__main__":
_test()
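# --- Hedged usage sketch (not part of the original module) ---
# A minimal demonstration of the transformation helpers above. The 16-argument
# mat4 constructor and the _vec3 alias are used exactly as elsewhere in this
# file; the demo is guarded so importing the module has no side effects.
def _demo_transforms():
    identity = mat4(1.0, 0.0, 0.0, 0.0,
                    0.0, 1.0, 0.0, 0.0,
                    0.0, 0.0, 1.0, 0.0,
                    0.0, 0.0, 0.0, 1.0)
    # A pure translation decomposes into its offset, an (identity) rotation
    # matrix and unit scaling.
    T = identity.translation(_vec3(1.0, 2.0, 3.0))
    translation_part, rotation_part, scale_part = T.decompose()
    print "translation:", translation_part   # expected: (1, 2, 3)
    print "scale:", scale_part                # expected: (1, 1, 1)
if __name__=="__main__":
    _demo_transforms()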
| agpl-3.0 |
rotofly/odoo | addons/mail/mail_followers.py | 15 | 12402 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import threading
from openerp.osv import osv, fields
from openerp import tools, SUPERUSER_ID
from openerp.tools.translate import _
from openerp.tools.mail import plaintext2html
class mail_followers(osv.Model):
""" mail_followers holds the data related to the follow mechanism inside
OpenERP. Partners can choose to follow documents (records) of any kind
that inherit from mail.thread. Following a document allows receiving
notifications for new messages.
A subscription is characterized by:
:param: res_model: model of the followed objects
:param: res_id: ID of resource (may be 0 to follow every record of the model)
"""
_name = 'mail.followers'
_rec_name = 'partner_id'
_log_access = False
_description = 'Document Followers'
_columns = {
'res_model': fields.char('Related Document Model',
required=True, select=1,
help='Model of the followed resource'),
'res_id': fields.integer('Related Document ID', select=1,
help='Id of the followed resource'),
'partner_id': fields.many2one('res.partner', string='Related Partner',
ondelete='cascade', required=True, select=1),
'subtype_ids': fields.many2many('mail.message.subtype', string='Subtype',
help="Message subtypes followed, meaning subtypes that will be pushed onto the user's Wall."),
}
#
# Modifying followers change access rights to individual documents. As the
# cache may contain accessible/inaccessible data, one has to refresh it.
#
def create(self, cr, uid, vals, context=None):
res = super(mail_followers, self).create(cr, uid, vals, context=context)
self.invalidate_cache(cr, uid, context=context)
return res
def write(self, cr, uid, ids, vals, context=None):
res = super(mail_followers, self).write(cr, uid, ids, vals, context=context)
self.invalidate_cache(cr, uid, context=context)
return res
def unlink(self, cr, uid, ids, context=None):
res = super(mail_followers, self).unlink(cr, uid, ids, context=context)
self.invalidate_cache(cr, uid, context=context)
return res
_sql_constraints = [('mail_followers_res_partner_res_model_id_uniq','unique(res_model,res_id,partner_id)','Error: a partner cannot follow the same object twice.')]
class mail_notification(osv.Model):
""" Class holding notifications pushed to partners. Followers and partners
added in 'contacts to notify' receive notifications. """
_name = 'mail.notification'
_rec_name = 'partner_id'
_log_access = False
_description = 'Notifications'
_columns = {
'partner_id': fields.many2one('res.partner', string='Contact',
ondelete='cascade', required=True, select=1),
'is_read': fields.boolean('Read', select=1, oldname='read'),
'starred': fields.boolean('Starred', select=1,
help='Starred message that goes into the todo mailbox'),
'message_id': fields.many2one('mail.message', string='Message',
ondelete='cascade', required=True, select=1),
}
_defaults = {
'is_read': False,
'starred': False,
}
def init(self, cr):
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('mail_notification_partner_id_read_starred_message_id',))
if not cr.fetchone():
cr.execute('CREATE INDEX mail_notification_partner_id_read_starred_message_id ON mail_notification (partner_id, is_read, starred, message_id)')
def get_partners_to_email(self, cr, uid, ids, message, context=None):
""" Return the list of partners to notify, based on their preferences.
:param browse_record message: mail.message to notify
:param list partners_to_notify: optional list of partner ids restricting
the notifications to process
"""
notify_pids = []
for notification in self.browse(cr, uid, ids, context=context):
if notification.is_read:
continue
partner = notification.partner_id
# Do not send to partners without email address defined
if not partner.email:
continue
# Do not send to partners having the same email address as the author (can cause loops or bounce effects due to a messy database)
if message.author_id and message.author_id.email == partner.email:
continue
# Partner does not want to receive any emails or is opt-out
if partner.notify_email == 'none':
continue
notify_pids.append(partner.id)
return notify_pids
def get_signature_footer(self, cr, uid, user_id, res_model=None, res_id=None, context=None, user_signature=True):
""" Format a standard footer for notification emails (such as pushed messages
notification or invite emails).
Format:
<p>--<br />
Administrator
</p>
<div>
<small>Sent by <a ...>Your Company</a> using <a ...>Odoo</a>.</small>
</div>
"""
footer = ""
if not user_id:
return footer
# add user signature
user = self.pool.get("res.users").browse(cr, SUPERUSER_ID, [user_id], context=context)[0]
if user_signature:
if user.signature:
signature = user.signature
else:
signature = "--<br />%s" % user.name
footer = tools.append_content_to_html(footer, signature, plaintext=False)
# add company signature
if user.company_id.website:
website_url = ('http://%s' % user.company_id.website) if not user.company_id.website.lower().startswith(('http:', 'https:')) \
else user.company_id.website
company = "<a style='color:inherit' href='%s'>%s</a>" % (website_url, user.company_id.name)
else:
company = user.company_id.name
sent_by = _('Sent by %(company)s using %(odoo)s')
signature_company = '<br /><small>%s</small>' % (sent_by % {
'company': company,
'odoo': "<a style='color:inherit' href='https://www.odoo.com/'>Odoo</a>"
})
footer = tools.append_content_to_html(footer, signature_company, plaintext=False, container_tag='div')
return footer
def update_message_notification(self, cr, uid, ids, message_id, partner_ids, context=None):
existing_pids = set()
new_pids = set()
new_notif_ids = []
for notification in self.browse(cr, uid, ids, context=context):
existing_pids.add(notification.partner_id.id)
# update existing notifications
self.write(cr, uid, ids, {'is_read': False}, context=context)
# create new notifications
new_pids = set(partner_ids) - existing_pids
for new_pid in new_pids:
new_notif_ids.append(self.create(cr, uid, {'message_id': message_id, 'partner_id': new_pid, 'is_read': False}, context=context))
return new_notif_ids
def _notify_email(self, cr, uid, ids, message_id, force_send=False, user_signature=True, context=None):
message = self.pool['mail.message'].browse(cr, SUPERUSER_ID, message_id, context=context)
# compute partners
email_pids = self.get_partners_to_email(cr, uid, ids, message, context=None)
if not email_pids:
return True
# compute email body (signature, company data)
body_html = message.body
# add user signature except for mail groups, where users are usually adding their own signatures already
user_id = message.author_id and message.author_id.user_ids and message.author_id.user_ids[0] and message.author_id.user_ids[0].id or None
signature_company = self.get_signature_footer(cr, uid, user_id, res_model=message.model, res_id=message.res_id, context=context, user_signature=(user_signature and message.model != 'mail.group'))
if signature_company:
body_html = tools.append_content_to_html(body_html, signature_company, plaintext=False, container_tag='div')
# compute email references
references = message.parent_id.message_id if message.parent_id else False
# custom values
custom_values = dict()
if message.model and message.res_id and self.pool.get(message.model) and hasattr(self.pool[message.model], 'message_get_email_values'):
custom_values = self.pool[message.model].message_get_email_values(cr, uid, message.res_id, message, context=context)
# create email values
max_recipients = 50
chunks = [email_pids[x:x + max_recipients] for x in xrange(0, len(email_pids), max_recipients)]
email_ids = []
for chunk in chunks:
mail_values = {
'mail_message_id': message.id,
'auto_delete': (context or {}).get('mail_auto_delete', True),
'body_html': body_html,
'recipient_ids': [(4, id) for id in chunk],
'references': references,
}
mail_values.update(custom_values)
email_ids.append(self.pool.get('mail.mail').create(cr, uid, mail_values, context=context))
# NOTE:
# 1. for more than 50 followers, use the queue system
# 2. do not send emails immediately if the registry is not loaded,
# to prevent sending email during a simple update of the database
# using the command-line.
if force_send and len(chunks) < 2 and \
(not self.pool._init or
getattr(threading.currentThread(), 'testing', False)):
self.pool.get('mail.mail').send(cr, uid, email_ids, context=context)
return True
def _notify(self, cr, uid, message_id, partners_to_notify=None, context=None,
force_send=False, user_signature=True):
""" Send by email the notification depending on the user preferences
:param list partners_to_notify: optional list of partner ids restricting
the notifications to process
:param bool force_send: if True, the generated mail.mail is
immediately sent after being created, as if the scheduler
was executed for this message only.
:param bool user_signature: if True, the generated mail.mail body is
the body of the related mail.message with the author's signature
"""
notif_ids = self.search(cr, SUPERUSER_ID, [('message_id', '=', message_id), ('partner_id', 'in', partners_to_notify)], context=context)
# update or create notifications
new_notif_ids = self.update_message_notification(cr, SUPERUSER_ID, notif_ids, message_id, partners_to_notify, context=context)
# mail_notify_noemail (do not send email) or no partner_ids: do not send, return
if context and context.get('mail_notify_noemail'):
return True
# browse as SUPERUSER_ID because of access to res_partner not necessarily allowed
self._notify_email(cr, SUPERUSER_ID, new_notif_ids, message_id, force_send, user_signature, context=context)
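# --- Hedged usage sketch (not part of the original module) ---
# _notify() is normally reached indirectly through mail.thread.message_post();
# a direct call looks roughly like the lines below, where message_id and the
# follower partner ids are illustrative values.
#
#     self.pool['mail.notification']._notify(
#         cr, uid, message_id,
#         partners_to_notify=[partner.id for partner in followers],
#         force_send=True, user_signature=True, context=context)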
| agpl-3.0 |
hunter007/wechatpy | wechatpy/pay/api/order.py | 2 | 6074 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import time
import random
from datetime import datetime, timedelta
from wechatpy.utils import timezone
from wechatpy.pay.utils import get_external_ip
from wechatpy.pay.base import BaseWeChatPayAPI
from wechatpy.utils import random_string, to_text, json
from wechatpy.pay.utils import calculate_signature
class WeChatOrder(BaseWeChatPayAPI):
def create(self, trade_type, body, total_fee, notify_url, client_ip=None,
user_id=None, out_trade_no=None, detail=None, attach=None,
fee_type='CNY', time_start=None, time_expire=None, goods_tag=None,
product_id=None, device_info=None, limit_pay=None, scene_info=None):
"""
Unified order API (create a prepaid order).
:param trade_type: Transaction type, one of: JSAPI, NATIVE, APP, WAP, MWEB
:param body: Product description
:param total_fee: Total amount, in fen (1/100 CNY)
:param notify_url: Callback URL that receives WeChat Pay's asynchronous payment notifications
:param client_ip: Optional. For APP and web payments, the client's IP; for Native payments, the IP of the machine calling the WeChat Pay API
:param user_id: Optional. The user's unique identifier under the merchant appid. Required when trade_type=JSAPI
:param out_trade_no: Optional. Merchant order number; auto-generated by default
:param detail: Optional. Product details
:param attach: Optional. Extra data returned unchanged by the query API and in payment notifications; mainly used by the merchant to carry custom order data
:param fee_type: Optional. Three-letter currency code per ISO 4217; defaults to CNY
:param time_start: Optional. Order creation time; defaults to the current time
:param time_expire: Optional. Order expiration time; defaults to two hours after creation
:param goods_tag: Optional. Goods tag, used by the voucher / instant-discount features
:param product_id: Optional. Required when trade_type=NATIVE; the merchant-defined product ID embedded in the QR code
:param device_info: Optional. Terminal device number (store number or cashier device ID); pass "WEB" for PC web or Official Account payments
:param limit_pay: Optional. Payment restriction; no_credit forbids paying with a credit card
:param scene_info: Optional. Scene information reported with the payment
:type scene_info: dict
:return: The result data returned by the API
"""
now = datetime.fromtimestamp(time.time(), tz=timezone('Asia/Shanghai'))
hours_later = now + timedelta(hours=2)
if time_start is None:
time_start = now
if time_expire is None:
time_expire = hours_later
if not out_trade_no:
out_trade_no = '{0}{1}{2}'.format(
self.mch_id,
now.strftime('%Y%m%d%H%M%S'),
random.randint(1000, 10000)
)
if scene_info is not None:
scene_info = json.dumps(scene_info, ensure_ascii=False)
data = {
'appid': self.appid,
'device_info': device_info,
'body': body,
'detail': detail,
'attach': attach,
'out_trade_no': out_trade_no,
'fee_type': fee_type,
'total_fee': total_fee,
'spbill_create_ip': client_ip or get_external_ip(),
'time_start': time_start.strftime('%Y%m%d%H%M%S'),
'time_expire': time_expire.strftime('%Y%m%d%H%M%S'),
'goods_tag': goods_tag,
'notify_url': notify_url,
'trade_type': trade_type,
'limit_pay': limit_pay,
'product_id': product_id,
'openid': user_id,
'scene_info': scene_info,
}
return self._post('pay/unifiedorder', data=data)
def query(self, transaction_id=None, out_trade_no=None):
"""
Query an order.
:param transaction_id: WeChat's transaction number; takes precedence when given
:param out_trade_no: The merchant system's internal order number; required when transaction_id is not provided
:return: The result data returned by the API
"""
data = {
'appid': self.appid,
'transaction_id': transaction_id,
'out_trade_no': out_trade_no,
}
return self._post('pay/orderquery', data=data)
def close(self, out_trade_no):
"""
Close an order.
:param out_trade_no: The merchant system's internal order number
:return: The result data returned by the API
"""
data = {
'appid': self.appid,
'out_trade_no': out_trade_no,
}
return self._post('pay/closeorder', data=data)
def get_appapi_params(self, prepay_id, timestamp=None, nonce_str=None):
"""
Get the APP payment parameters.
:param prepay_id: The prepay_id returned by the unified order API
:param timestamp: Optional. Timestamp; defaults to the current timestamp
:param nonce_str: Optional. Random string; auto-generated by default
:return: The signed parameter dict
"""
data = {
'appid': self.appid,
'partnerid': self.mch_id,
'prepayid': prepay_id,
'package': 'Sign=WXPay',
'timestamp': timestamp or to_text(int(time.time())),
'noncestr': nonce_str or random_string(32)
}
sign = calculate_signature(data, self._client.api_key)
data['sign'] = sign
return data
def reverse(self, transaction_id=None, out_trade_no=None):
"""
Reverse (cancel) an order.
:param transaction_id: Optional. WeChat's transaction number; takes precedence when given
:param out_trade_no: Optional. The merchant system's internal order number.
    Provide either transaction_id or out_trade_no; if both are
    present, transaction_id takes precedence
:return: The result data returned by the API
"""
data = {
'appid': self.appid,
'transaction_id': transaction_id,
'out_trade_no': out_trade_no,
}
return self._post('secapi/pay/reverse', data=data)
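# --- Hedged usage sketch (not part of the original module) ---
# How the unified-order flow above is typically driven. The WeChatPay client
# is the package's public entry point; the credentials, URLs and product data
# below are placeholder assumptions.
def _example_jsapi_order(pay_client, openid):
    # pay_client is assumed to be a configured wechatpy.pay.WeChatPay
    # instance, whose `order` attribute is the WeChatOrder API above.
    result = pay_client.order.create(
        trade_type='JSAPI',
        body='Test product',
        total_fee=100,  # amounts are in fen, so this is 1.00 CNY
        notify_url='https://example.com/wechat/notify',
        user_id=openid,  # required because trade_type is JSAPI
    )
    # Sign the returned prepay_id for use by the client side.
    return pay_client.order.get_appapi_params(result['prepay_id'])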
| mit |
ruchikd/Algorithms | Python/RankingSystem/rankingSystem.py | 1 | 1182 | class Students:
    def __init__(self, name, percentage):
self.name = name
self.percentage = percentage
def getPercentile(percentageList):
    # Note: despite its name, this prints competition ranks for a list of
    # (percentage, name) tuples.
    percentageList = sorted(percentageList, reverse=True)
    print percentageList
    rank = []
    for x in range(len(percentageList)):
        # The first entry always ranks 1; comparing index 0 against index -1
        # would wrap around to the last element and crash on an empty rank
        # list when the two values tie.
        if x == 0 or percentageList[x][0] != percentageList[x-1][0]:
            rank.append(x+1)
        else:
            rank.append(rank[x-1])
    for x in range(len(percentageList)):
        print percentageList[x][1], "is having", percentageList[x][0], "percentage with rank as", rank[x]
def getRanking(percentageList):
    percentageList = sorted(percentageList, key=lambda t: t[1], reverse=True)
    rank = []
    for i in range(len(percentageList)):
        # Same guard as above: the first entry has nothing before it.
        if i == 0 or percentageList[i][1] != percentageList[i-1][1]:
            rank.append(i+1)
        else:
            rank.append(rank[i-1])
    for i in range(len(percentageList)):
        print percentageList[i][0], "is ranked", rank[i]
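# --- Worked example (not part of the original script) ---
# With the x == 0 / i == 0 guards above, tied values share a rank and the
# next distinct value skips ahead (standard "1224" competition ranking).
# For percentageList1 used in main() below, sorted descending:
#   99.5 -> rank 1;  99, 99 -> rank 2;  96, 96, 96 -> rank 4;  94 -> rank 7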
def main():
percentageList = [('a', 99), ('b', 99.5), ('c', 96), ('d', 99), ('e', 96), ('f', 96), ('g', 94)]
#getRanking(percentageList)
percentageList1 = [(99, 'a'), (99.5, 'b'), (96, 'c'), (99, 'd'), (96, 'e'), (96, 'f'), (94, 'g')]
getPercentile(percentageList1)
if __name__ == '__main__':
    main()
| mit |
Thraxis/SickRage | lib/github/Milestone.py | 72 | 9198 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 AKFish <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# Copyright 2013 martinqt <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import datetime
import github.GithubObject
import github.PaginatedList
import github.NamedUser
import github.Label
class Milestone(github.GithubObject.CompletableGithubObject):
"""
This class represents Milestones. The reference can be found here http://developer.github.com/v3/issues/milestones/
"""
@property
def closed_issues(self):
"""
:type: integer
"""
self._completeIfNotSet(self._closed_issues)
return self._closed_issues.value
@property
def created_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._created_at)
return self._created_at.value
@property
def creator(self):
"""
:type: :class:`github.NamedUser.NamedUser`
"""
self._completeIfNotSet(self._creator)
return self._creator.value
@property
def description(self):
"""
:type: string
"""
self._completeIfNotSet(self._description)
return self._description.value
@property
def due_on(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._due_on)
return self._due_on.value
@property
def id(self):
"""
:type: integer
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def labels_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._labels_url)
return self._labels_url.value
@property
def number(self):
"""
:type: integer
"""
self._completeIfNotSet(self._number)
return self._number.value
@property
def open_issues(self):
"""
:type: integer
"""
self._completeIfNotSet(self._open_issues)
return self._open_issues.value
@property
def state(self):
"""
:type: string
"""
self._completeIfNotSet(self._state)
return self._state.value
@property
def title(self):
"""
:type: string
"""
self._completeIfNotSet(self._title)
return self._title.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
def delete(self):
"""
:calls: `DELETE /repos/:owner/:repo/milestones/:number <http://developer.github.com/v3/issues/milestones>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck(
"DELETE",
self.url
)
def edit(self, title, state=github.GithubObject.NotSet, description=github.GithubObject.NotSet, due_on=github.GithubObject.NotSet):
"""
:calls: `PATCH /repos/:owner/:repo/milestones/:number <http://developer.github.com/v3/issues/milestones>`_
:param title: string
:param state: string
:param description: string
:param due_on: date
:rtype: None
"""
assert isinstance(title, (str, unicode)), title
assert state is github.GithubObject.NotSet or isinstance(state, (str, unicode)), state
assert description is github.GithubObject.NotSet or isinstance(description, (str, unicode)), description
assert due_on is github.GithubObject.NotSet or isinstance(due_on, datetime.date), due_on
post_parameters = {
"title": title,
}
if state is not github.GithubObject.NotSet:
post_parameters["state"] = state
if description is not github.GithubObject.NotSet:
post_parameters["description"] = description
if due_on is not github.GithubObject.NotSet:
post_parameters["due_on"] = due_on.strftime("%Y-%m-%d")
headers, data = self._requester.requestJsonAndCheck(
"PATCH",
self.url,
input=post_parameters
)
self._useAttributes(data)
def get_labels(self):
"""
:calls: `GET /repos/:owner/:repo/milestones/:number/labels <http://developer.github.com/v3/issues/labels>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Label.Label`
"""
return github.PaginatedList.PaginatedList(
github.Label.Label,
self._requester,
self.url + "/labels",
None
)
@property
def _identity(self):
return self.number
def _initAttributes(self):
self._closed_issues = github.GithubObject.NotSet
self._created_at = github.GithubObject.NotSet
self._creator = github.GithubObject.NotSet
self._description = github.GithubObject.NotSet
self._due_on = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._labels_url = github.GithubObject.NotSet
self._number = github.GithubObject.NotSet
self._open_issues = github.GithubObject.NotSet
self._state = github.GithubObject.NotSet
self._title = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "closed_issues" in attributes: # pragma no branch
self._closed_issues = self._makeIntAttribute(attributes["closed_issues"])
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "creator" in attributes: # pragma no branch
self._creator = self._makeClassAttribute(github.NamedUser.NamedUser, attributes["creator"])
if "description" in attributes: # pragma no branch
self._description = self._makeStringAttribute(attributes["description"])
if "due_on" in attributes: # pragma no branch
self._due_on = self._makeDatetimeAttribute(attributes["due_on"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "labels_url" in attributes: # pragma no branch
self._labels_url = self._makeStringAttribute(attributes["labels_url"])
if "number" in attributes: # pragma no branch
self._number = self._makeIntAttribute(attributes["number"])
if "open_issues" in attributes: # pragma no branch
self._open_issues = self._makeIntAttribute(attributes["open_issues"])
if "state" in attributes: # pragma no branch
self._state = self._makeStringAttribute(attributes["state"])
if "title" in attributes: # pragma no branch
self._title = self._makeStringAttribute(attributes["title"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
| gpl-3.0 |
DinoCow/airflow | tests/providers/apache/pinot/hooks/test_pinot.py | 3 | 9346 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import io
import os
import subprocess
import unittest
from unittest import mock
from airflow.exceptions import AirflowException
from airflow.providers.apache.pinot.hooks.pinot import PinotAdminHook, PinotDbApiHook
class TestPinotAdminHook(unittest.TestCase):
def setUp(self):
super().setUp()
self.conn = conn = mock.MagicMock()
self.conn.host = 'host'
self.conn.port = '1000'
self.conn.extra_dejson = {'cmd_path': './pinot-admin.sh'}
class PinotAdminHookTest(PinotAdminHook):
def get_connection(self, conn_id):
return conn
self.db_hook = PinotAdminHookTest()
@mock.patch('airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook.run_cli')
def test_add_schema(self, mock_run_cli):
params = ["schema_file", False]
self.db_hook.add_schema(*params)
mock_run_cli.assert_called_once_with(
[
'AddSchema',
'-controllerHost',
self.conn.host,
'-controllerPort',
self.conn.port,
'-schemaFile',
params[0],
]
)
@mock.patch('airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook.run_cli')
def test_add_table(self, mock_run_cli):
params = ["config_file", False]
self.db_hook.add_table(*params)
mock_run_cli.assert_called_once_with(
[
'AddTable',
'-controllerHost',
self.conn.host,
'-controllerPort',
self.conn.port,
'-filePath',
params[0],
]
)
@mock.patch('airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook.run_cli')
def test_create_segment(self, mock_run_cli):
params = {
"generator_config_file": "a",
"data_dir": "b",
"segment_format": "c",
"out_dir": "d",
"overwrite": True,
"table_name": "e",
"segment_name": "f",
"time_column_name": "g",
"schema_file": "h",
"reader_config_file": "i",
"enable_star_tree_index": False,
"star_tree_index_spec_file": "j",
"hll_size": 9,
"hll_columns": "k",
"hll_suffix": "l",
"num_threads": 8,
"post_creation_verification": True,
"retry": 7,
}
self.db_hook.create_segment(**params)
mock_run_cli.assert_called_once_with(
[
'CreateSegment',
'-generatorConfigFile',
params["generator_config_file"],
'-dataDir',
params["data_dir"],
'-format',
params["segment_format"],
'-outDir',
params["out_dir"],
'-overwrite',
params["overwrite"],
'-tableName',
params["table_name"],
'-segmentName',
params["segment_name"],
'-timeColumnName',
params["time_column_name"],
'-schemaFile',
params["schema_file"],
'-readerConfigFile',
params["reader_config_file"],
'-starTreeIndexSpecFile',
params["star_tree_index_spec_file"],
'-hllSize',
params["hll_size"],
'-hllColumns',
params["hll_columns"],
'-hllSuffix',
params["hll_suffix"],
'-numThreads',
params["num_threads"],
'-postCreationVerification',
params["post_creation_verification"],
'-retry',
params["retry"],
]
)
@mock.patch('airflow.providers.apache.pinot.hooks.pinot.PinotAdminHook.run_cli')
def test_upload_segment(self, mock_run_cli):
params = ["segment_dir", False]
self.db_hook.upload_segment(*params)
mock_run_cli.assert_called_once_with(
[
'UploadSegment',
'-controllerHost',
self.conn.host,
'-controllerPort',
self.conn.port,
'-segmentDir',
params[0],
]
)
@mock.patch('subprocess.Popen')
def test_run_cli_success(self, mock_popen):
mock_proc = mock.MagicMock()
mock_proc.returncode = 0
mock_proc.stdout = io.BytesIO(b'')
mock_popen.return_value = mock_proc
params = ["foo", "bar", "baz"]
self.db_hook.run_cli(params)
params.insert(0, self.conn.extra_dejson.get('cmd_path'))
mock_popen.assert_called_once_with(
params, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, close_fds=True, env=None
)
@mock.patch('subprocess.Popen')
def test_run_cli_failure_error_message(self, mock_popen):
msg = b"Exception caught"
mock_proc = mock.MagicMock()
mock_proc.returncode = 0
mock_proc.stdout = io.BytesIO(msg)
mock_popen.return_value = mock_proc
params = ["foo", "bar", "baz"]
with self.assertRaises(AirflowException, msg=msg):
self.db_hook.run_cli(params)
params.insert(0, self.conn.extra_dejson.get('cmd_path'))
mock_popen.assert_called_once_with(
params, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, close_fds=True, env=None
)
@mock.patch('subprocess.Popen')
def test_run_cli_failure_status_code(self, mock_popen):
mock_proc = mock.MagicMock()
mock_proc.returncode = 1
mock_proc.stdout = io.BytesIO(b'')
mock_popen.return_value = mock_proc
self.db_hook.pinot_admin_system_exit = True
params = ["foo", "bar", "baz"]
with self.assertRaises(AirflowException):
self.db_hook.run_cli(params)
params.insert(0, self.conn.extra_dejson.get('cmd_path'))
env = os.environ.copy()
env.update({"JAVA_OPTS": "-Dpinot.admin.system.exit=true "})
mock_popen.assert_called_once_with(
params, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, close_fds=True, env=env
)
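# --- Hedged usage sketch (not part of the original tests) ---
# What the hook under test looks like from DAG/task code; the connection id
# below is assumed to be the hook's conventional default, and the schema path
# is a placeholder.
#
#     from airflow.providers.apache.pinot.hooks.pinot import PinotAdminHook
#     hook = PinotAdminHook(conn_id='pinot_admin_default')
#     hook.add_schema('/path/to/schema.json')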
class TestPinotDbApiHook(unittest.TestCase):
def setUp(self):
super().setUp()
self.conn = conn = mock.MagicMock()
self.conn.host = 'host'
self.conn.port = '1000'
self.conn.conn_type = 'http'
self.conn.extra_dejson = {'endpoint': 'query/sql'}
self.cur = mock.MagicMock()
self.conn.cursor.return_value = self.cur
self.conn.__enter__.return_value = self.cur
self.conn.__exit__.return_value = None
class TestPinotDBApiHook(PinotDbApiHook):
def get_conn(self):
return conn
def get_connection(self, conn_id):
return conn
self.db_hook = TestPinotDBApiHook
def test_get_uri(self):
"""
Test on getting a pinot connection uri
"""
db_hook = self.db_hook()
self.assertEqual(db_hook.get_uri(), 'http://host:1000/query/sql')
def test_get_conn(self):
"""
Test on getting a pinot connection
"""
conn = self.db_hook().get_conn()
self.assertEqual(conn.host, 'host')
self.assertEqual(conn.port, '1000')
self.assertEqual(conn.conn_type, 'http')
self.assertEqual(conn.extra_dejson.get('endpoint'), 'query/sql')
def test_get_records(self):
statement = 'SQL'
result_sets = [('row1',), ('row2',)]
self.cur.fetchall.return_value = result_sets
self.assertEqual(result_sets, self.db_hook().get_records(statement))
def test_get_first(self):
statement = 'SQL'
result_sets = [('row1',), ('row2',)]
self.cur.fetchone.return_value = result_sets[0]
self.assertEqual(result_sets[0], self.db_hook().get_first(statement))
def test_get_pandas_df(self):
statement = 'SQL'
column = 'col'
result_sets = [('row1',), ('row2',)]
self.cur.description = [(column,)]
self.cur.fetchall.return_value = result_sets
df = self.db_hook().get_pandas_df(statement)
self.assertEqual(column, df.columns[0])
for i in range(len(result_sets)): # pylint: disable=consider-using-enumerate
self.assertEqual(result_sets[i][0], df.values.tolist()[i][0])
| apache-2.0 |
cloudbase/nova-virtualbox | nova/tests/unit/virt/xenapi/image/test_vdi_through_dev.py | 70 | 6931 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import tarfile
import eventlet
from nova.image import glance
from nova import test
from nova.virt.xenapi.client import session as xenapi_session
from nova.virt.xenapi.image import vdi_through_dev
@contextlib.contextmanager
def fake_context(result=None):
yield result
class TestDelegatingToCommand(test.NoDBTestCase):
def test_upload_image_is_delegated_to_command(self):
command = self.mox.CreateMock(vdi_through_dev.UploadToGlanceAsRawTgz)
self.mox.StubOutWithMock(vdi_through_dev, 'UploadToGlanceAsRawTgz')
vdi_through_dev.UploadToGlanceAsRawTgz(
'ctx', 'session', 'instance', 'image_id', 'vdis').AndReturn(
command)
command.upload_image().AndReturn('result')
self.mox.ReplayAll()
store = vdi_through_dev.VdiThroughDevStore()
result = store.upload_image(
'ctx', 'session', 'instance', 'image_id', 'vdis')
self.assertEqual('result', result)
class TestUploadToGlanceAsRawTgz(test.NoDBTestCase):
def test_upload_image(self):
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', 'session', 'instance', 'id', ['vdi0', 'vdi1'])
self.mox.StubOutWithMock(store, '_perform_upload')
self.mox.StubOutWithMock(store, '_get_vdi_ref')
self.mox.StubOutWithMock(vdi_through_dev, 'glance')
self.mox.StubOutWithMock(vdi_through_dev, 'vm_utils')
self.mox.StubOutWithMock(vdi_through_dev, 'utils')
store._get_vdi_ref().AndReturn('vdi_ref')
vdi_through_dev.vm_utils.vdi_attached_here(
'session', 'vdi_ref', read_only=True).AndReturn(
fake_context('dev'))
vdi_through_dev.utils.make_dev_path('dev').AndReturn('devpath')
vdi_through_dev.utils.temporary_chown('devpath').AndReturn(
fake_context())
store._perform_upload('devpath')
self.mox.ReplayAll()
store.upload_image()
def test__perform_upload(self):
producer = self.mox.CreateMock(vdi_through_dev.TarGzProducer)
consumer = self.mox.CreateMock(glance.UpdateGlanceImage)
pool = self.mox.CreateMock(eventlet.GreenPool)
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', 'session', 'instance', 'id', ['vdi0', 'vdi1'])
self.mox.StubOutWithMock(store, '_create_pipe')
self.mox.StubOutWithMock(store, '_get_virtual_size')
self.mox.StubOutWithMock(producer, 'get_metadata')
self.mox.StubOutWithMock(vdi_through_dev, 'TarGzProducer')
self.mox.StubOutWithMock(glance, 'UpdateGlanceImage')
self.mox.StubOutWithMock(vdi_through_dev, 'eventlet')
producer.get_metadata().AndReturn('metadata')
store._get_virtual_size().AndReturn('324')
store._create_pipe().AndReturn(('readfile', 'writefile'))
vdi_through_dev.TarGzProducer(
'devpath', 'writefile', '324', 'disk.raw').AndReturn(
producer)
glance.UpdateGlanceImage('context', 'id', 'metadata',
'readfile').AndReturn(consumer)
vdi_through_dev.eventlet.GreenPool().AndReturn(pool)
pool.spawn(producer.start)
pool.spawn(consumer.start)
pool.waitall()
self.mox.ReplayAll()
store._perform_upload('devpath')
def test__get_vdi_ref(self):
session = self.mox.CreateMock(xenapi_session.XenAPISession)
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', session, 'instance', 'id', ['vdi0', 'vdi1'])
session.call_xenapi('VDI.get_by_uuid', 'vdi0').AndReturn('vdi_ref')
self.mox.ReplayAll()
self.assertEqual('vdi_ref', store._get_vdi_ref())
def test__get_virtual_size(self):
session = self.mox.CreateMock(xenapi_session.XenAPISession)
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', session, 'instance', 'id', ['vdi0', 'vdi1'])
self.mox.StubOutWithMock(store, '_get_vdi_ref')
store._get_vdi_ref().AndReturn('vdi_ref')
session.call_xenapi('VDI.get_virtual_size', 'vdi_ref')
self.mox.ReplayAll()
store._get_virtual_size()
def test__create_pipe(self):
store = vdi_through_dev.UploadToGlanceAsRawTgz(
'context', 'session', 'instance', 'id', ['vdi0', 'vdi1'])
self.mox.StubOutWithMock(vdi_through_dev, 'os')
self.mox.StubOutWithMock(vdi_through_dev, 'greenio')
vdi_through_dev.os.pipe().AndReturn(('rpipe', 'wpipe'))
vdi_through_dev.greenio.GreenPipe('rpipe', 'rb', 0).AndReturn('rfile')
vdi_through_dev.greenio.GreenPipe('wpipe', 'wb', 0).AndReturn('wfile')
self.mox.ReplayAll()
result = store._create_pipe()
self.assertEqual(('rfile', 'wfile'), result)
class TestTarGzProducer(test.NoDBTestCase):
def test_constructor(self):
producer = vdi_through_dev.TarGzProducer('devpath', 'writefile',
'100', 'fname')
self.assertEqual('devpath', producer.fpath)
self.assertEqual('writefile', producer.output)
self.assertEqual('100', producer.size)
self.assertEqual('fname', producer.fname)
def test_start(self):
outf = self.mox.CreateMock(file)
producer = vdi_through_dev.TarGzProducer('fpath', outf,
'100', 'fname')
tfile = self.mox.CreateMock(tarfile.TarFile)
tinfo = self.mox.CreateMock(tarfile.TarInfo)
inf = self.mox.CreateMock(file)
self.mox.StubOutWithMock(vdi_through_dev, 'tarfile')
self.mox.StubOutWithMock(producer, '_open_file')
vdi_through_dev.tarfile.TarInfo(name='fname').AndReturn(tinfo)
vdi_through_dev.tarfile.open(fileobj=outf, mode='w|gz').AndReturn(
fake_context(tfile))
producer._open_file('fpath', 'rb').AndReturn(fake_context(inf))
tfile.addfile(tinfo, fileobj=inf)
outf.close()
self.mox.ReplayAll()
producer.start()
self.assertEqual(100, tinfo.size)
def test_get_metadata(self):
producer = vdi_through_dev.TarGzProducer('devpath', 'writefile',
'100', 'fname')
self.assertEqual({
'disk_format': 'raw',
'container_format': 'tgz'},
producer.get_metadata())
| apache-2.0 |
Awesomeomics/webserver | env/lib/python2.7/site-packages/requests/packages/urllib3/connection.py | 67 | 8298 | import datetime
import sys
import socket
from socket import timeout as SocketTimeout
import warnings
try: # Python 3
from http.client import HTTPConnection as _HTTPConnection, HTTPException
except ImportError:
from httplib import HTTPConnection as _HTTPConnection, HTTPException
class DummyConnection(object):
"Used to detect a failed ConnectionCls import."
pass
try: # Compiled with SSL?
HTTPSConnection = DummyConnection
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
from .exceptions import (
ConnectTimeoutError,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import match_hostname
from .packages import six
from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
assert_fingerprint,
)
from .util import connection
port_by_scheme = {
'http': 80,
'https': 443,
}
RECENT_DATE = datetime.date(2014, 1, 1)
class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- ``source_address``: Set the source address for the current connection.
.. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
you might pass::
HTTPConnection.default_socket_options + [
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
]
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
default_port = port_by_scheme['http']
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
#: Whether this connection verifies the host's certificate.
is_verified = False
def __init__(self, *args, **kw):
if six.PY3: # Python 3
kw.pop('strict', None)
# Pre-set source_address in case we have an older Python like 2.6.
self.source_address = kw.get('source_address')
if sys.version_info < (2, 7): # Python 2.6
# _HTTPConnection on Python 2.6 will balk at this keyword arg, but
# not newer versions. We can still use it when creating a
# connection though, so we pop it *after* we have saved it as
# self.source_address.
kw.pop('source_address', None)
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
self.socket_options = kw.pop('socket_options', self.default_socket_options)
# Superclass also sets self.source_address in Python 2.7+.
_HTTPConnection.__init__(self, *args, **kw)
def _new_conn(self):
""" Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
extra_kw['source_address'] = self.source_address
if self.socket_options:
extra_kw['socket_options'] = self.socket_options
try:
conn = connection.create_connection(
(self.host, self.port), self.timeout, **extra_kw)
except SocketTimeout:
raise ConnectTimeoutError(
self, "Connection to %s timed out. (connect timeout=%s)" %
(self.host, self.timeout))
return conn
def _prepare_conn(self, conn):
self.sock = conn
# the _tunnel_host attribute was added in python 2.6.3 (via
# http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
# not have them.
if getattr(self, '_tunnel_host', None):
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
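# --- Hedged usage sketch (not part of the original module) ---
# Extending the default socket options documented on HTTPConnection above:
# keep Nagle's algorithm disabled and additionally enable TCP keep-alive.
# The host and port are illustrative.
def _example_keepalive_connection(host='example.com', port=80):
    return HTTPConnection(
        host, port,
        socket_options=HTTPConnection.default_socket_options + [
            (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
        ])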
class HTTPSConnection(HTTPConnection):
default_port = port_by_scheme['https']
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):
HTTPConnection.__init__(self, host, port, strict=strict,
timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
self._protocol = 'https'
def connect(self):
conn = self._new_conn()
self._prepare_conn(conn)
self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
class VerifiedHTTPSConnection(HTTPSConnection):
"""
Based on httplib.HTTPSConnection but wraps the socket with
SSL certification.
"""
cert_reqs = None
ca_certs = None
ssl_version = None
assert_fingerprint = None
def set_cert(self, key_file=None, cert_file=None,
cert_reqs=None, ca_certs=None,
assert_hostname=None, assert_fingerprint=None):
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.ca_certs = ca_certs
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
def connect(self):
# Add certificate verification
conn = self._new_conn()
resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
resolved_ssl_version = resolve_ssl_version(self.ssl_version)
hostname = self.host
if getattr(self, '_tunnel_host', None):
# _tunnel_host was added in Python 2.6.3
# (See: http://hg.python.org/cpython/rev/0f57b30a152f)
self.sock = conn
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
# Mark this connection as not reusable
self.auto_open = 0
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
warnings.warn((
'System time is way off (before {0}). This will probably '
'lead to SSL verification errors').format(RECENT_DATE),
SystemTimeWarning
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
server_hostname=hostname,
ssl_version=resolved_ssl_version)
if self.assert_fingerprint:
assert_fingerprint(self.sock.getpeercert(binary_form=True),
self.assert_fingerprint)
elif resolved_cert_reqs != ssl.CERT_NONE \
and self.assert_hostname is not False:
match_hostname(self.sock.getpeercert(),
self.assert_hostname or hostname)
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED
or self.assert_fingerprint is not None)
if ssl:
# Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection
| mit |
sam-m888/gramps | gramps/plugins/export/export.gpr.py | 1 | 6420 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Benny Malengier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from gramps.gen.plug._pluginreg import newplugin, STABLE, EXPORT
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
MODULE_VERSION="5.1"
#------------------------------------------------------------------------
#
# Comma _Separated Values Spreadsheet (CSV)
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_csv'
plg.name = _("Comma Separated Values Spreadsheet (CSV)")
plg.name_accell = _("Comma _Separated Values Spreadsheet (CSV)")
plg.description = _("CSV is a common spreadsheet format.")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportcsv.py'
plg.ptype = EXPORT
plg.export_function = 'exportData'
plg.export_options = 'CSVWriterOptionBox'
plg.export_options_title = _('CSV spreadsheet options')
plg.extension = "csv"
#------------------------------------------------------------------------
#
# Web Family Tree export
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_webfamtree'
plg.name = _('Web Family Tree')
plg.name_accell = _('_Web Family Tree')
plg.description = _("Web Family Tree format")
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportftree.py'
plg.ptype = EXPORT
plg.export_function = 'writeData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('Web Family Tree export options')
plg.extension = "wft"
#------------------------------------------------------------------------
#
# GEDCOM
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_ged'
plg.name = _('GEDCOM')
plg.name_accell = _('GE_DCOM')
plg.description = _('GEDCOM is used to transfer data between genealogy programs. '
'Most genealogy software will accept a GEDCOM file as input.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportgedcom.py'
plg.ptype = EXPORT
plg.export_function = 'export_data'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('GEDCOM export options')
plg.extension = "ged"
#------------------------------------------------------------------------
#
# Geneweb
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_geneweb'
plg.name = _('GeneWeb')
plg.name_accell = _('_GeneWeb')
plg.description = _('GeneWeb is a web-based genealogy program.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportgeneweb.py'
plg.ptype = EXPORT
plg.export_function = 'exportData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('GeneWeb export options')
plg.extension = "gw"
#------------------------------------------------------------------------
#
# Gramps package (portable XML)
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_gpkg'
plg.name = _('Gramps XML Package (family tree and media)')
plg.name_accell = _('Gra_mps XML Package (family tree and media)')
plg.description = _('Gramps package is an archived XML family tree together '
'with the media object files.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportpkg.py'
plg.ptype = EXPORT
plg.export_function = 'writeData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('Gramps package export options')
plg.extension = "gpkg"
#------------------------------------------------------------------------
#
# Gramps XML database
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_gramps'
plg.name = _('Gramps XML (family tree)')
plg.name_accell = _('Gramps _XML (family tree)')
plg.description = _('Gramps XML export is a complete archived XML backup of a'
' Gramps family tree without the media object files.'
' Suitable for backup purposes.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportxml.py'
plg.ptype = EXPORT
plg.export_function = 'export_data'
plg.export_options = 'WriterOptionBoxWithCompression'
plg.export_options_title = _('Gramps XML export options')
plg.extension = "gramps"
#------------------------------------------------------------------------
#
# vCalendar
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_vcal'
plg.name = _('vCalendar')
plg.name_accell = _('vC_alendar')
plg.description = _('vCalendar is used in many calendaring and PIM applications.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportvcalendar.py'
plg.ptype = EXPORT
plg.export_function = 'exportData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('vCalendar export options')
plg.extension = "vcs"
#------------------------------------------------------------------------
#
# vCard
#
#------------------------------------------------------------------------
plg = newplugin()
plg.id = 'ex_vcard'
plg.name = _('vCard')
plg.name_accell = _('_vCard')
plg.description = _('vCard is used in many address book and PIM applications.')
plg.version = '1.0'
plg.gramps_target_version = MODULE_VERSION
plg.status = STABLE
plg.fname = 'exportvcard.py'
plg.ptype = EXPORT
plg.export_function = 'exportData'
plg.export_options = 'WriterOptionBox'
plg.export_options_title = _('vCard export options')
plg.extension = "vcf"
| gpl-2.0 |
40223110/2015cd_midterm- | static/Brython3.1.1-20150328-091302/Lib/unittest/test/test_case.py | 738 | 51689 | import difflib
import pprint
import pickle
import re
import sys
import warnings
import weakref
import inspect
from copy import deepcopy
from test import support
import unittest
from .support import (
TestEquality, TestHashing, LoggingResult,
ResultWithNoStartTestRunStopTestRun
)
class Test(object):
"Keep these TestCase classes out of the main namespace"
class Foo(unittest.TestCase):
def runTest(self): pass
def test1(self): pass
class Bar(Foo):
def test2(self): pass
class LoggingTestCase(unittest.TestCase):
"""A test case which logs its calls."""
def __init__(self, events):
super(Test.LoggingTestCase, self).__init__('test')
self.events = events
def setUp(self):
self.events.append('setUp')
def test(self):
self.events.append('test')
def tearDown(self):
self.events.append('tearDown')
class Test_TestCase(unittest.TestCase, TestEquality, TestHashing):
### Set up attributes used by inherited tests
################################################################
# Used by TestHashing.test_hash and TestEquality.test_eq
eq_pairs = [(Test.Foo('test1'), Test.Foo('test1'))]
# Used by TestEquality.test_ne
ne_pairs = [(Test.Foo('test1'), Test.Foo('runTest')),
(Test.Foo('test1'), Test.Bar('test1')),
(Test.Foo('test1'), Test.Bar('test2'))]
################################################################
### /Set up attributes used by inherited tests
# "class TestCase([methodName])"
# ...
# "Each instance of TestCase will run a single test method: the
# method named methodName."
# ...
# "methodName defaults to "runTest"."
#
# Make sure it really is optional, and that it defaults to the proper
# thing.
def test_init__no_test_name(self):
class Test(unittest.TestCase):
def runTest(self): raise MyException()
def test(self): pass
self.assertEqual(Test().id()[-13:], '.Test.runTest')
# test that TestCase can be instantiated with no args
# primarily for use at the interactive interpreter
test = unittest.TestCase()
test.assertEqual(3, 3)
with test.assertRaises(test.failureException):
test.assertEqual(3, 2)
with self.assertRaises(AttributeError):
test.run()
# "class TestCase([methodName])"
# ...
# "Each instance of TestCase will run a single test method: the
# method named methodName."
def test_init__test_name__valid(self):
class Test(unittest.TestCase):
def runTest(self): raise MyException()
def test(self): pass
self.assertEqual(Test('test').id()[-10:], '.Test.test')
# "class TestCase([methodName])"
# ...
# "Each instance of TestCase will run a single test method: the
# method named methodName."
def test_init__test_name__invalid(self):
class Test(unittest.TestCase):
def runTest(self): raise MyException()
def test(self): pass
try:
Test('testfoo')
except ValueError:
pass
else:
self.fail("Failed to raise ValueError")
# "Return the number of tests represented by the this test object. For
# TestCase instances, this will always be 1"
def test_countTestCases(self):
class Foo(unittest.TestCase):
def test(self): pass
self.assertEqual(Foo('test').countTestCases(), 1)
# "Return the default type of test result object to be used to run this
# test. For TestCase instances, this will always be
# unittest.TestResult; subclasses of TestCase should
# override this as necessary."
def test_defaultTestResult(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
result = Foo().defaultTestResult()
self.assertEqual(type(result), unittest.TestResult)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if setUp() raises
# an exception.
def test_run_call_order__error_in_setUp(self):
events = []
result = LoggingResult(events)
class Foo(Test.LoggingTestCase):
def setUp(self):
super(Foo, self).setUp()
raise RuntimeError('raised by Foo.setUp')
Foo(events).run(result)
expected = ['startTest', 'setUp', 'addError', 'stopTest']
self.assertEqual(events, expected)
# "With a temporary result stopTestRun is called when setUp errors.
def test_run_call_order__error_in_setUp_default_result(self):
events = []
class Foo(Test.LoggingTestCase):
def defaultTestResult(self):
return LoggingResult(self.events)
def setUp(self):
super(Foo, self).setUp()
raise RuntimeError('raised by Foo.setUp')
Foo(events).run()
expected = ['startTestRun', 'startTest', 'setUp', 'addError',
'stopTest', 'stopTestRun']
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test raises
# an error (as opposed to a failure).
def test_run_call_order__error_in_test(self):
events = []
result = LoggingResult(events)
class Foo(Test.LoggingTestCase):
def test(self):
super(Foo, self).test()
raise RuntimeError('raised by Foo.test')
expected = ['startTest', 'setUp', 'test', 'tearDown',
'addError', 'stopTest']
Foo(events).run(result)
self.assertEqual(events, expected)
# "With a default result, an error in the test still results in stopTestRun
# being called."
def test_run_call_order__error_in_test_default_result(self):
events = []
class Foo(Test.LoggingTestCase):
def defaultTestResult(self):
return LoggingResult(self.events)
def test(self):
super(Foo, self).test()
raise RuntimeError('raised by Foo.test')
expected = ['startTestRun', 'startTest', 'setUp', 'test',
'tearDown', 'addError', 'stopTest', 'stopTestRun']
Foo(events).run()
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test signals
# a failure (as opposed to an error).
def test_run_call_order__failure_in_test(self):
events = []
result = LoggingResult(events)
class Foo(Test.LoggingTestCase):
def test(self):
super(Foo, self).test()
self.fail('raised by Foo.test')
expected = ['startTest', 'setUp', 'test', 'tearDown',
'addFailure', 'stopTest']
Foo(events).run(result)
self.assertEqual(events, expected)
# "When a test fails with a default result stopTestRun is still called."
def test_run_call_order__failure_in_test_default_result(self):
class Foo(Test.LoggingTestCase):
def defaultTestResult(self):
return LoggingResult(self.events)
def test(self):
super(Foo, self).test()
self.fail('raised by Foo.test')
expected = ['startTestRun', 'startTest', 'setUp', 'test',
'tearDown', 'addFailure', 'stopTest', 'stopTestRun']
events = []
Foo(events).run()
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if tearDown() raises
# an exception.
def test_run_call_order__error_in_tearDown(self):
events = []
result = LoggingResult(events)
class Foo(Test.LoggingTestCase):
def tearDown(self):
super(Foo, self).tearDown()
raise RuntimeError('raised by Foo.tearDown')
Foo(events).run(result)
expected = ['startTest', 'setUp', 'test', 'tearDown', 'addError',
'stopTest']
self.assertEqual(events, expected)
# "When tearDown errors with a default result stopTestRun is still called."
def test_run_call_order__error_in_tearDown_default_result(self):
class Foo(Test.LoggingTestCase):
def defaultTestResult(self):
return LoggingResult(self.events)
def tearDown(self):
super(Foo, self).tearDown()
raise RuntimeError('raised by Foo.tearDown')
events = []
Foo(events).run()
expected = ['startTestRun', 'startTest', 'setUp', 'test', 'tearDown',
'addError', 'stopTest', 'stopTestRun']
self.assertEqual(events, expected)
# "TestCase.run() still works when the defaultTestResult is a TestResult
# that does not support startTestRun and stopTestRun.
def test_run_call_order_default_result(self):
class Foo(unittest.TestCase):
def defaultTestResult(self):
return ResultWithNoStartTestRunStopTestRun()
def test(self):
pass
Foo('test').run()
# "This class attribute gives the exception raised by the test() method.
# If a test framework needs to use a specialized exception, possibly to
# carry additional information, it must subclass this exception in
# order to ``play fair'' with the framework. The initial value of this
# attribute is AssertionError"
def test_failureException__default(self):
class Foo(unittest.TestCase):
def test(self):
pass
self.assertTrue(Foo('test').failureException is AssertionError)
# "This class attribute gives the exception raised by the test() method.
# If a test framework needs to use a specialized exception, possibly to
# carry additional information, it must subclass this exception in
# order to ``play fair'' with the framework."
#
# Make sure TestCase.run() respects the designated failureException
def test_failureException__subclassing__explicit_raise(self):
events = []
result = LoggingResult(events)
class Foo(unittest.TestCase):
def test(self):
raise RuntimeError()
failureException = RuntimeError
self.assertTrue(Foo('test').failureException is RuntimeError)
Foo('test').run(result)
expected = ['startTest', 'addFailure', 'stopTest']
self.assertEqual(events, expected)
# "This class attribute gives the exception raised by the test() method.
# If a test framework needs to use a specialized exception, possibly to
# carry additional information, it must subclass this exception in
# order to ``play fair'' with the framework."
#
# Make sure TestCase.run() respects the designated failureException
def test_failureException__subclassing__implicit_raise(self):
events = []
result = LoggingResult(events)
class Foo(unittest.TestCase):
def test(self):
self.fail("foo")
failureException = RuntimeError
self.assertTrue(Foo('test').failureException is RuntimeError)
Foo('test').run(result)
expected = ['startTest', 'addFailure', 'stopTest']
self.assertEqual(events, expected)
# "The default implementation does nothing."
def test_setUp(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
# ... and nothing should happen
Foo().setUp()
# "The default implementation does nothing."
def test_tearDown(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
# ... and nothing should happen
Foo().tearDown()
# "Return a string identifying the specific test case."
#
# Because of the vague nature of the docs, I'm not going to lock this
# test down too much. Really all that can be asserted is that the id()
    # will be a string (either 8-bit or unicode -- again, because the docs
# just say "string")
def test_id(self):
class Foo(unittest.TestCase):
def runTest(self):
pass
self.assertIsInstance(Foo().id(), str)
# "If result is omitted or None, a temporary result object is created,
# used, and is made available to the caller. As TestCase owns the
# temporary result startTestRun and stopTestRun are called.
def test_run__uses_defaultTestResult(self):
events = []
defaultResult = LoggingResult(events)
class Foo(unittest.TestCase):
def test(self):
events.append('test')
def defaultTestResult(self):
return defaultResult
# Make run() find a result object on its own
result = Foo('test').run()
self.assertIs(result, defaultResult)
expected = ['startTestRun', 'startTest', 'test', 'addSuccess',
'stopTest', 'stopTestRun']
self.assertEqual(events, expected)
# "The result object is returned to run's caller"
def test_run__returns_given_result(self):
class Foo(unittest.TestCase):
def test(self):
pass
result = unittest.TestResult()
retval = Foo('test').run(result)
self.assertIs(retval, result)
# "The same effect [as method run] may be had by simply calling the
# TestCase instance."
def test_call__invoking_an_instance_delegates_to_run(self):
resultIn = unittest.TestResult()
resultOut = unittest.TestResult()
class Foo(unittest.TestCase):
def test(self):
pass
def run(self, result):
self.assertIs(result, resultIn)
return resultOut
retval = Foo('test')(resultIn)
self.assertIs(retval, resultOut)
def testShortDescriptionWithoutDocstring(self):
self.assertIsNone(self.shortDescription())
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def testShortDescriptionWithOneLineDocstring(self):
"""Tests shortDescription() for a method with a docstring."""
self.assertEqual(
self.shortDescription(),
'Tests shortDescription() for a method with a docstring.')
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def testShortDescriptionWithMultiLineDocstring(self):
"""Tests shortDescription() for a method with a longer docstring.
This method ensures that only the first line of a docstring is
        used in the short description, no matter how long the
whole thing is.
"""
self.assertEqual(
self.shortDescription(),
'Tests shortDescription() for a method with a longer '
'docstring.')
def testAddTypeEqualityFunc(self):
class SadSnake(object):
"""Dummy class for test_addTypeEqualityFunc."""
s1, s2 = SadSnake(), SadSnake()
self.assertFalse(s1 == s2)
def AllSnakesCreatedEqual(a, b, msg=None):
return type(a) == type(b) == SadSnake
self.addTypeEqualityFunc(SadSnake, AllSnakesCreatedEqual)
self.assertEqual(s1, s2)
        # No, this doesn't clean up and remove the SadSnake equality func
        # from this TestCase instance, but since it's a local, nothing else
        # will ever notice that.
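        # Sketch of the expected contract (an assumption read off this test):
        # the registered function is called as func(first, second, msg=None)
        # and signals failure by raising self.failureException; its return
        # value is ignored, so AllSnakesCreatedEqual above -- which never
        # raises -- makes every SadSnake pair compare equal.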
def testAssertIs(self):
thing = object()
self.assertIs(thing, thing)
self.assertRaises(self.failureException, self.assertIs, thing, object())
def testAssertIsNot(self):
thing = object()
self.assertIsNot(thing, object())
self.assertRaises(self.failureException, self.assertIsNot, thing, thing)
def testAssertIsInstance(self):
thing = []
self.assertIsInstance(thing, list)
self.assertRaises(self.failureException, self.assertIsInstance,
thing, dict)
def testAssertNotIsInstance(self):
thing = []
self.assertNotIsInstance(thing, dict)
self.assertRaises(self.failureException, self.assertNotIsInstance,
thing, list)
def testAssertIn(self):
animals = {'monkey': 'banana', 'cow': 'grass', 'seal': 'fish'}
self.assertIn('a', 'abc')
self.assertIn(2, [1, 2, 3])
self.assertIn('monkey', animals)
self.assertNotIn('d', 'abc')
self.assertNotIn(0, [1, 2, 3])
self.assertNotIn('otter', animals)
self.assertRaises(self.failureException, self.assertIn, 'x', 'abc')
self.assertRaises(self.failureException, self.assertIn, 4, [1, 2, 3])
self.assertRaises(self.failureException, self.assertIn, 'elephant',
animals)
self.assertRaises(self.failureException, self.assertNotIn, 'c', 'abc')
self.assertRaises(self.failureException, self.assertNotIn, 1, [1, 2, 3])
self.assertRaises(self.failureException, self.assertNotIn, 'cow',
animals)
def testAssertDictContainsSubset(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.assertDictContainsSubset({}, {})
self.assertDictContainsSubset({}, {'a': 1})
self.assertDictContainsSubset({'a': 1}, {'a': 1})
self.assertDictContainsSubset({'a': 1}, {'a': 1, 'b': 2})
self.assertDictContainsSubset({'a': 1, 'b': 2}, {'a': 1, 'b': 2})
with self.assertRaises(self.failureException):
self.assertDictContainsSubset({1: "one"}, {})
with self.assertRaises(self.failureException):
self.assertDictContainsSubset({'a': 2}, {'a': 1})
with self.assertRaises(self.failureException):
self.assertDictContainsSubset({'c': 1}, {'a': 1})
with self.assertRaises(self.failureException):
self.assertDictContainsSubset({'a': 1, 'c': 1}, {'a': 1})
with self.assertRaises(self.failureException):
self.assertDictContainsSubset({'a': 1, 'c': 1}, {'a': 1})
one = ''.join(chr(i) for i in range(255))
# this used to cause a UnicodeDecodeError constructing the failure msg
with self.assertRaises(self.failureException):
self.assertDictContainsSubset({'foo': one}, {'foo': '\uFFFD'})
def testAssertEqual(self):
equal_pairs = [
((), ()),
({}, {}),
([], []),
(set(), set()),
(frozenset(), frozenset())]
for a, b in equal_pairs:
# This mess of try excepts is to test the assertEqual behavior
# itself.
try:
self.assertEqual(a, b)
except self.failureException:
self.fail('assertEqual(%r, %r) failed' % (a, b))
try:
self.assertEqual(a, b, msg='foo')
except self.failureException:
self.fail('assertEqual(%r, %r) with msg= failed' % (a, b))
try:
self.assertEqual(a, b, 'foo')
except self.failureException:
self.fail('assertEqual(%r, %r) with third parameter failed' %
(a, b))
unequal_pairs = [
((), []),
({}, set()),
(set([4,1]), frozenset([4,2])),
(frozenset([4,5]), set([2,3])),
(set([3,4]), set([5,4]))]
for a, b in unequal_pairs:
self.assertRaises(self.failureException, self.assertEqual, a, b)
self.assertRaises(self.failureException, self.assertEqual, a, b,
'foo')
self.assertRaises(self.failureException, self.assertEqual, a, b,
msg='foo')
def testEquality(self):
self.assertListEqual([], [])
self.assertTupleEqual((), ())
self.assertSequenceEqual([], ())
a = [0, 'a', []]
b = []
self.assertRaises(unittest.TestCase.failureException,
self.assertListEqual, a, b)
self.assertRaises(unittest.TestCase.failureException,
self.assertListEqual, tuple(a), tuple(b))
self.assertRaises(unittest.TestCase.failureException,
self.assertSequenceEqual, a, tuple(b))
b.extend(a)
self.assertListEqual(a, b)
self.assertTupleEqual(tuple(a), tuple(b))
self.assertSequenceEqual(a, tuple(b))
self.assertSequenceEqual(tuple(a), b)
self.assertRaises(self.failureException, self.assertListEqual,
a, tuple(b))
self.assertRaises(self.failureException, self.assertTupleEqual,
tuple(a), b)
self.assertRaises(self.failureException, self.assertListEqual, None, b)
self.assertRaises(self.failureException, self.assertTupleEqual, None,
tuple(b))
self.assertRaises(self.failureException, self.assertSequenceEqual,
None, tuple(b))
self.assertRaises(self.failureException, self.assertListEqual, 1, 1)
self.assertRaises(self.failureException, self.assertTupleEqual, 1, 1)
self.assertRaises(self.failureException, self.assertSequenceEqual,
1, 1)
self.assertDictEqual({}, {})
c = { 'x': 1 }
d = {}
self.assertRaises(unittest.TestCase.failureException,
self.assertDictEqual, c, d)
d.update(c)
self.assertDictEqual(c, d)
d['x'] = 0
self.assertRaises(unittest.TestCase.failureException,
self.assertDictEqual, c, d, 'These are unequal')
self.assertRaises(self.failureException, self.assertDictEqual, None, d)
self.assertRaises(self.failureException, self.assertDictEqual, [], d)
self.assertRaises(self.failureException, self.assertDictEqual, 1, 1)
def testAssertSequenceEqualMaxDiff(self):
self.assertEqual(self.maxDiff, 80*8)
seq1 = 'a' + 'x' * 80**2
seq2 = 'b' + 'x' * 80**2
diff = '\n'.join(difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
# the +1 is the leading \n added by assertSequenceEqual
omitted = unittest.case.DIFF_OMITTED % (len(diff) + 1,)
self.maxDiff = len(diff)//2
try:
self.assertSequenceEqual(seq1, seq2)
except self.failureException as e:
msg = e.args[0]
else:
self.fail('assertSequenceEqual did not fail.')
self.assertTrue(len(msg) < len(diff))
self.assertIn(omitted, msg)
self.maxDiff = len(diff) * 2
try:
self.assertSequenceEqual(seq1, seq2)
except self.failureException as e:
msg = e.args[0]
else:
self.fail('assertSequenceEqual did not fail.')
self.assertTrue(len(msg) > len(diff))
self.assertNotIn(omitted, msg)
self.maxDiff = None
try:
self.assertSequenceEqual(seq1, seq2)
except self.failureException as e:
msg = e.args[0]
else:
self.fail('assertSequenceEqual did not fail.')
self.assertTrue(len(msg) > len(diff))
self.assertNotIn(omitted, msg)
def testTruncateMessage(self):
self.maxDiff = 1
message = self._truncateMessage('foo', 'bar')
omitted = unittest.case.DIFF_OMITTED % len('bar')
self.assertEqual(message, 'foo' + omitted)
self.maxDiff = None
message = self._truncateMessage('foo', 'bar')
self.assertEqual(message, 'foobar')
self.maxDiff = 4
message = self._truncateMessage('foo', 'bar')
self.assertEqual(message, 'foobar')
def testAssertDictEqualTruncates(self):
test = unittest.TestCase('assertEqual')
def truncate(msg, diff):
return 'foo'
test._truncateMessage = truncate
try:
test.assertDictEqual({}, {1: 0})
except self.failureException as e:
self.assertEqual(str(e), 'foo')
else:
self.fail('assertDictEqual did not fail')
def testAssertMultiLineEqualTruncates(self):
test = unittest.TestCase('assertEqual')
def truncate(msg, diff):
return 'foo'
test._truncateMessage = truncate
try:
test.assertMultiLineEqual('foo', 'bar')
except self.failureException as e:
self.assertEqual(str(e), 'foo')
else:
self.fail('assertMultiLineEqual did not fail')
def testAssertEqual_diffThreshold(self):
# check threshold value
self.assertEqual(self._diffThreshold, 2**16)
        # disable maxDiff to get diff markers
self.maxDiff = None
# set a lower threshold value and add a cleanup to restore it
old_threshold = self._diffThreshold
self._diffThreshold = 2**8
self.addCleanup(lambda: setattr(self, '_diffThreshold', old_threshold))
# under the threshold: diff marker (^) in error message
s = 'x' * (2**7)
with self.assertRaises(self.failureException) as cm:
self.assertEqual(s + 'a', s + 'b')
self.assertIn('^', str(cm.exception))
self.assertEqual(s + 'a', s + 'a')
# over the threshold: diff not used and marker (^) not in error message
s = 'x' * (2**9)
# if the path that uses difflib is taken, _truncateMessage will be
# called -- replace it with explodingTruncation to verify that this
# doesn't happen
def explodingTruncation(message, diff):
raise SystemError('this should not be raised')
old_truncate = self._truncateMessage
self._truncateMessage = explodingTruncation
self.addCleanup(lambda: setattr(self, '_truncateMessage', old_truncate))
s1, s2 = s + 'a', s + 'b'
with self.assertRaises(self.failureException) as cm:
self.assertEqual(s1, s2)
self.assertNotIn('^', str(cm.exception))
self.assertEqual(str(cm.exception), '%r != %r' % (s1, s2))
self.assertEqual(s + 'a', s + 'a')
def testAssertCountEqual(self):
a = object()
self.assertCountEqual([1, 2, 3], [3, 2, 1])
self.assertCountEqual(['foo', 'bar', 'baz'], ['bar', 'baz', 'foo'])
self.assertCountEqual([a, a, 2, 2, 3], (a, 2, 3, a, 2))
self.assertCountEqual([1, "2", "a", "a"], ["a", "2", True, "a"])
self.assertRaises(self.failureException, self.assertCountEqual,
[1, 2] + [3] * 100, [1] * 100 + [2, 3])
self.assertRaises(self.failureException, self.assertCountEqual,
[1, "2", "a", "a"], ["a", "2", True, 1])
self.assertRaises(self.failureException, self.assertCountEqual,
[10], [10, 11])
self.assertRaises(self.failureException, self.assertCountEqual,
[10, 11], [10])
self.assertRaises(self.failureException, self.assertCountEqual,
[10, 11, 10], [10, 11])
# Test that sequences of unhashable objects can be tested for sameness:
self.assertCountEqual([[1, 2], [3, 4], 0], [False, [3, 4], [1, 2]])
# Test that iterator of unhashable objects can be tested for sameness:
self.assertCountEqual(iter([1, 2, [], 3, 4]),
iter([1, 2, [], 3, 4]))
# hashable types, but not orderable
self.assertRaises(self.failureException, self.assertCountEqual,
[], [divmod, 'x', 1, 5j, 2j, frozenset()])
# comparing dicts
self.assertCountEqual([{'a': 1}, {'b': 2}], [{'b': 2}, {'a': 1}])
        # comparing heterogeneous non-hashable sequences
self.assertCountEqual([1, 'x', divmod, []], [divmod, [], 'x', 1])
self.assertRaises(self.failureException, self.assertCountEqual,
[], [divmod, [], 'x', 1, 5j, 2j, set()])
self.assertRaises(self.failureException, self.assertCountEqual,
[[1]], [[2]])
# Same elements, but not same sequence length
self.assertRaises(self.failureException, self.assertCountEqual,
[1, 1, 2], [2, 1])
self.assertRaises(self.failureException, self.assertCountEqual,
[1, 1, "2", "a", "a"], ["2", "2", True, "a"])
self.assertRaises(self.failureException, self.assertCountEqual,
[1, {'b': 2}, None, True], [{'b': 2}, True, None])
# Same elements which don't reliably compare, in
# different order, see issue 10242
a = [{2,4}, {1,2}]
b = a[::-1]
self.assertCountEqual(a, b)
# test utility functions supporting assertCountEqual()
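        # Reading the assertions below, each diff entry appears to be a tuple
        # of (count in the first argument, count in the second, element):
        # 'a' occurs three times in 'aaabccd' but only once in 'abbbcce',
        # hence (3, 1, 'a').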
diffs = set(unittest.util._count_diff_all_purpose('aaabccd', 'abbbcce'))
expected = {(3,1,'a'), (1,3,'b'), (1,0,'d'), (0,1,'e')}
self.assertEqual(diffs, expected)
diffs = unittest.util._count_diff_all_purpose([[]], [])
self.assertEqual(diffs, [(1, 0, [])])
diffs = set(unittest.util._count_diff_hashable('aaabccd', 'abbbcce'))
expected = {(3,1,'a'), (1,3,'b'), (1,0,'d'), (0,1,'e')}
self.assertEqual(diffs, expected)
def testAssertSetEqual(self):
set1 = set()
set2 = set()
self.assertSetEqual(set1, set2)
self.assertRaises(self.failureException, self.assertSetEqual, None, set2)
self.assertRaises(self.failureException, self.assertSetEqual, [], set2)
self.assertRaises(self.failureException, self.assertSetEqual, set1, None)
self.assertRaises(self.failureException, self.assertSetEqual, set1, [])
set1 = set(['a'])
set2 = set()
self.assertRaises(self.failureException, self.assertSetEqual, set1, set2)
set1 = set(['a'])
set2 = set(['a'])
self.assertSetEqual(set1, set2)
set1 = set(['a'])
set2 = set(['a', 'b'])
self.assertRaises(self.failureException, self.assertSetEqual, set1, set2)
set1 = set(['a'])
set2 = frozenset(['a', 'b'])
self.assertRaises(self.failureException, self.assertSetEqual, set1, set2)
set1 = set(['a', 'b'])
set2 = frozenset(['a', 'b'])
self.assertSetEqual(set1, set2)
set1 = set()
set2 = "foo"
self.assertRaises(self.failureException, self.assertSetEqual, set1, set2)
self.assertRaises(self.failureException, self.assertSetEqual, set2, set1)
# make sure any string formatting is tuple-safe
set1 = set([(0, 1), (2, 3)])
set2 = set([(4, 5)])
self.assertRaises(self.failureException, self.assertSetEqual, set1, set2)
def testInequality(self):
# Try ints
self.assertGreater(2, 1)
self.assertGreaterEqual(2, 1)
self.assertGreaterEqual(1, 1)
self.assertLess(1, 2)
self.assertLessEqual(1, 2)
self.assertLessEqual(1, 1)
self.assertRaises(self.failureException, self.assertGreater, 1, 2)
self.assertRaises(self.failureException, self.assertGreater, 1, 1)
self.assertRaises(self.failureException, self.assertGreaterEqual, 1, 2)
self.assertRaises(self.failureException, self.assertLess, 2, 1)
self.assertRaises(self.failureException, self.assertLess, 1, 1)
self.assertRaises(self.failureException, self.assertLessEqual, 2, 1)
# Try Floats
self.assertGreater(1.1, 1.0)
self.assertGreaterEqual(1.1, 1.0)
self.assertGreaterEqual(1.0, 1.0)
self.assertLess(1.0, 1.1)
self.assertLessEqual(1.0, 1.1)
self.assertLessEqual(1.0, 1.0)
self.assertRaises(self.failureException, self.assertGreater, 1.0, 1.1)
self.assertRaises(self.failureException, self.assertGreater, 1.0, 1.0)
self.assertRaises(self.failureException, self.assertGreaterEqual, 1.0, 1.1)
self.assertRaises(self.failureException, self.assertLess, 1.1, 1.0)
self.assertRaises(self.failureException, self.assertLess, 1.0, 1.0)
self.assertRaises(self.failureException, self.assertLessEqual, 1.1, 1.0)
# Try Strings
self.assertGreater('bug', 'ant')
self.assertGreaterEqual('bug', 'ant')
self.assertGreaterEqual('ant', 'ant')
self.assertLess('ant', 'bug')
self.assertLessEqual('ant', 'bug')
self.assertLessEqual('ant', 'ant')
self.assertRaises(self.failureException, self.assertGreater, 'ant', 'bug')
self.assertRaises(self.failureException, self.assertGreater, 'ant', 'ant')
self.assertRaises(self.failureException, self.assertGreaterEqual, 'ant', 'bug')
self.assertRaises(self.failureException, self.assertLess, 'bug', 'ant')
self.assertRaises(self.failureException, self.assertLess, 'ant', 'ant')
self.assertRaises(self.failureException, self.assertLessEqual, 'bug', 'ant')
# Try bytes
self.assertGreater(b'bug', b'ant')
self.assertGreaterEqual(b'bug', b'ant')
self.assertGreaterEqual(b'ant', b'ant')
self.assertLess(b'ant', b'bug')
self.assertLessEqual(b'ant', b'bug')
self.assertLessEqual(b'ant', b'ant')
self.assertRaises(self.failureException, self.assertGreater, b'ant', b'bug')
self.assertRaises(self.failureException, self.assertGreater, b'ant', b'ant')
self.assertRaises(self.failureException, self.assertGreaterEqual, b'ant',
b'bug')
self.assertRaises(self.failureException, self.assertLess, b'bug', b'ant')
self.assertRaises(self.failureException, self.assertLess, b'ant', b'ant')
self.assertRaises(self.failureException, self.assertLessEqual, b'bug', b'ant')
def testAssertMultiLineEqual(self):
sample_text = """\
http://www.python.org/doc/2.3/lib/module-unittest.html
test case
A test case is the smallest unit of testing. [...]
"""
revised_sample_text = """\
http://www.python.org/doc/2.4.1/lib/module-unittest.html
test case
A test case is the smallest unit of testing. [...] You may provide your
own implementation that does not subclass from TestCase, of course.
"""
sample_text_error = """\
- http://www.python.org/doc/2.3/lib/module-unittest.html
? ^
+ http://www.python.org/doc/2.4.1/lib/module-unittest.html
? ^^^
test case
- A test case is the smallest unit of testing. [...]
+ A test case is the smallest unit of testing. [...] You may provide your
? +++++++++++++++++++++
+ own implementation that does not subclass from TestCase, of course.
"""
self.maxDiff = None
try:
self.assertMultiLineEqual(sample_text, revised_sample_text)
except self.failureException as e:
# need to remove the first line of the error message
error = str(e).split('\n', 1)[1]
# no fair testing ourself with ourself, and assertEqual is used for strings
# so can't use assertEqual either. Just use assertTrue.
self.assertTrue(sample_text_error == error)
    def testAssertEqualSingleLine(self):
sample_text = "laden swallows fly slowly"
revised_sample_text = "unladen swallows fly quickly"
sample_text_error = """\
- laden swallows fly slowly
? ^^^^
+ unladen swallows fly quickly
? ++ ^^^^^
"""
try:
self.assertEqual(sample_text, revised_sample_text)
except self.failureException as e:
error = str(e).split('\n', 1)[1]
self.assertTrue(sample_text_error == error)
def testAssertIsNone(self):
self.assertIsNone(None)
self.assertRaises(self.failureException, self.assertIsNone, False)
self.assertIsNotNone('DjZoPloGears on Rails')
self.assertRaises(self.failureException, self.assertIsNotNone, None)
def testAssertRegex(self):
self.assertRegex('asdfabasdf', r'ab+')
self.assertRaises(self.failureException, self.assertRegex,
'saaas', r'aaaa')
def testAssertRaisesRegex(self):
class ExceptionMock(Exception):
pass
def Stub():
raise ExceptionMock('We expect')
self.assertRaisesRegex(ExceptionMock, re.compile('expect$'), Stub)
self.assertRaisesRegex(ExceptionMock, 'expect$', Stub)
def testAssertNotRaisesRegex(self):
self.assertRaisesRegex(
self.failureException, '^Exception not raised by <lambda>$',
self.assertRaisesRegex, Exception, re.compile('x'),
lambda: None)
self.assertRaisesRegex(
self.failureException, '^Exception not raised by <lambda>$',
self.assertRaisesRegex, Exception, 'x',
lambda: None)
def testAssertRaisesRegexMismatch(self):
def Stub():
raise Exception('Unexpected')
self.assertRaisesRegex(
self.failureException,
r'"\^Expected\$" does not match "Unexpected"',
self.assertRaisesRegex, Exception, '^Expected$',
Stub)
self.assertRaisesRegex(
self.failureException,
r'"\^Expected\$" does not match "Unexpected"',
self.assertRaisesRegex, Exception,
re.compile('^Expected$'), Stub)
def testAssertRaisesExcValue(self):
class ExceptionMock(Exception):
pass
def Stub(foo):
raise ExceptionMock(foo)
v = "particular value"
ctx = self.assertRaises(ExceptionMock)
with ctx:
Stub(v)
e = ctx.exception
self.assertIsInstance(e, ExceptionMock)
self.assertEqual(e.args[0], v)
def testAssertWarnsCallable(self):
def _runtime_warn():
warnings.warn("foo", RuntimeWarning)
# Success when the right warning is triggered, even several times
self.assertWarns(RuntimeWarning, _runtime_warn)
self.assertWarns(RuntimeWarning, _runtime_warn)
# A tuple of warning classes is accepted
self.assertWarns((DeprecationWarning, RuntimeWarning), _runtime_warn)
# *args and **kwargs also work
self.assertWarns(RuntimeWarning,
warnings.warn, "foo", category=RuntimeWarning)
# Failure when no warning is triggered
with self.assertRaises(self.failureException):
self.assertWarns(RuntimeWarning, lambda: 0)
# Failure when another warning is triggered
with warnings.catch_warnings():
# Force default filter (in case tests are run with -We)
warnings.simplefilter("default", RuntimeWarning)
with self.assertRaises(self.failureException):
self.assertWarns(DeprecationWarning, _runtime_warn)
# Filters for other warnings are not modified
with warnings.catch_warnings():
warnings.simplefilter("error", RuntimeWarning)
with self.assertRaises(RuntimeWarning):
self.assertWarns(DeprecationWarning, _runtime_warn)
def testAssertWarnsContext(self):
        # Believe it or not, it is preferable to duplicate all tests above,
# to make sure the __warningregistry__ $@ is circumvented correctly.
def _runtime_warn():
warnings.warn("foo", RuntimeWarning)
_runtime_warn_lineno = inspect.getsourcelines(_runtime_warn)[1]
with self.assertWarns(RuntimeWarning) as cm:
_runtime_warn()
# A tuple of warning classes is accepted
with self.assertWarns((DeprecationWarning, RuntimeWarning)) as cm:
_runtime_warn()
# The context manager exposes various useful attributes
self.assertIsInstance(cm.warning, RuntimeWarning)
self.assertEqual(cm.warning.args[0], "foo")
self.assertIn("test_case.py", cm.filename)
self.assertEqual(cm.lineno, _runtime_warn_lineno + 1)
# Same with several warnings
with self.assertWarns(RuntimeWarning):
_runtime_warn()
_runtime_warn()
with self.assertWarns(RuntimeWarning):
warnings.warn("foo", category=RuntimeWarning)
# Failure when no warning is triggered
with self.assertRaises(self.failureException):
with self.assertWarns(RuntimeWarning):
pass
# Failure when another warning is triggered
with warnings.catch_warnings():
# Force default filter (in case tests are run with -We)
warnings.simplefilter("default", RuntimeWarning)
with self.assertRaises(self.failureException):
with self.assertWarns(DeprecationWarning):
_runtime_warn()
# Filters for other warnings are not modified
with warnings.catch_warnings():
warnings.simplefilter("error", RuntimeWarning)
with self.assertRaises(RuntimeWarning):
with self.assertWarns(DeprecationWarning):
_runtime_warn()
def testAssertWarnsRegexCallable(self):
def _runtime_warn(msg):
warnings.warn(msg, RuntimeWarning)
self.assertWarnsRegex(RuntimeWarning, "o+",
_runtime_warn, "foox")
# Failure when no warning is triggered
with self.assertRaises(self.failureException):
self.assertWarnsRegex(RuntimeWarning, "o+",
lambda: 0)
# Failure when another warning is triggered
with warnings.catch_warnings():
# Force default filter (in case tests are run with -We)
warnings.simplefilter("default", RuntimeWarning)
with self.assertRaises(self.failureException):
self.assertWarnsRegex(DeprecationWarning, "o+",
_runtime_warn, "foox")
# Failure when message doesn't match
with self.assertRaises(self.failureException):
self.assertWarnsRegex(RuntimeWarning, "o+",
_runtime_warn, "barz")
# A little trickier: we ask RuntimeWarnings to be raised, and then
# check for some of them. It is implementation-defined whether
# non-matching RuntimeWarnings are simply re-raised, or produce a
# failureException.
with warnings.catch_warnings():
warnings.simplefilter("error", RuntimeWarning)
with self.assertRaises((RuntimeWarning, self.failureException)):
self.assertWarnsRegex(RuntimeWarning, "o+",
_runtime_warn, "barz")
def testAssertWarnsRegexContext(self):
# Same as above, but with assertWarnsRegex as a context manager
def _runtime_warn(msg):
warnings.warn(msg, RuntimeWarning)
_runtime_warn_lineno = inspect.getsourcelines(_runtime_warn)[1]
with self.assertWarnsRegex(RuntimeWarning, "o+") as cm:
_runtime_warn("foox")
self.assertIsInstance(cm.warning, RuntimeWarning)
self.assertEqual(cm.warning.args[0], "foox")
self.assertIn("test_case.py", cm.filename)
self.assertEqual(cm.lineno, _runtime_warn_lineno + 1)
# Failure when no warning is triggered
with self.assertRaises(self.failureException):
with self.assertWarnsRegex(RuntimeWarning, "o+"):
pass
# Failure when another warning is triggered
with warnings.catch_warnings():
# Force default filter (in case tests are run with -We)
warnings.simplefilter("default", RuntimeWarning)
with self.assertRaises(self.failureException):
with self.assertWarnsRegex(DeprecationWarning, "o+"):
_runtime_warn("foox")
# Failure when message doesn't match
with self.assertRaises(self.failureException):
with self.assertWarnsRegex(RuntimeWarning, "o+"):
_runtime_warn("barz")
# A little trickier: we ask RuntimeWarnings to be raised, and then
# check for some of them. It is implementation-defined whether
# non-matching RuntimeWarnings are simply re-raised, or produce a
# failureException.
with warnings.catch_warnings():
warnings.simplefilter("error", RuntimeWarning)
with self.assertRaises((RuntimeWarning, self.failureException)):
with self.assertWarnsRegex(RuntimeWarning, "o+"):
_runtime_warn("barz")
def testDeprecatedMethodNames(self):
"""
Test that the deprecated methods raise a DeprecationWarning. See #9424.
"""
old = (
(self.failIfEqual, (3, 5)),
(self.assertNotEquals, (3, 5)),
(self.failUnlessEqual, (3, 3)),
(self.assertEquals, (3, 3)),
(self.failUnlessAlmostEqual, (2.0, 2.0)),
(self.assertAlmostEquals, (2.0, 2.0)),
(self.failIfAlmostEqual, (3.0, 5.0)),
(self.assertNotAlmostEquals, (3.0, 5.0)),
(self.failUnless, (True,)),
(self.assert_, (True,)),
(self.failUnlessRaises, (TypeError, lambda _: 3.14 + 'spam')),
(self.failIf, (False,)),
(self.assertDictContainsSubset, (dict(a=1, b=2), dict(a=1, b=2, c=3))),
(self.assertRaisesRegexp, (KeyError, 'foo', lambda: {}['foo'])),
(self.assertRegexpMatches, ('bar', 'bar')),
)
for meth, args in old:
with self.assertWarns(DeprecationWarning):
meth(*args)
# disable this test for now. When the version where the fail* methods will
# be removed is decided, re-enable it and update the version
def _testDeprecatedFailMethods(self):
"""Test that the deprecated fail* methods get removed in 3.x"""
if sys.version_info[:2] < (3, 3):
return
deprecated_names = [
'failIfEqual', 'failUnlessEqual', 'failUnlessAlmostEqual',
'failIfAlmostEqual', 'failUnless', 'failUnlessRaises', 'failIf',
'assertDictContainsSubset',
]
for deprecated_name in deprecated_names:
with self.assertRaises(AttributeError):
getattr(self, deprecated_name) # remove these in 3.x
def testDeepcopy(self):
# Issue: 5660
class TestableTest(unittest.TestCase):
def testNothing(self):
pass
test = TestableTest('testNothing')
# This shouldn't blow up
deepcopy(test)
def testPickle(self):
# Issue 10326
# Can't use TestCase classes defined in Test class as
# pickle does not work with inner classes
test = unittest.TestCase('run')
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
# blew up prior to fix
pickled_test = pickle.dumps(test, protocol=protocol)
unpickled_test = pickle.loads(pickled_test)
self.assertEqual(test, unpickled_test)
# exercise the TestCase instance in a way that will invoke
# the type equality lookup mechanism
unpickled_test.assertEqual(set(), set())
def testKeyboardInterrupt(self):
def _raise(self=None):
raise KeyboardInterrupt
def nothing(self):
pass
class Test1(unittest.TestCase):
test_something = _raise
class Test2(unittest.TestCase):
setUp = _raise
test_something = nothing
class Test3(unittest.TestCase):
test_something = nothing
tearDown = _raise
class Test4(unittest.TestCase):
def test_something(self):
self.addCleanup(_raise)
for klass in (Test1, Test2, Test3, Test4):
with self.assertRaises(KeyboardInterrupt):
klass('test_something').run()
def testSkippingEverywhere(self):
def _skip(self=None):
raise unittest.SkipTest('some reason')
def nothing(self):
pass
class Test1(unittest.TestCase):
test_something = _skip
class Test2(unittest.TestCase):
setUp = _skip
test_something = nothing
class Test3(unittest.TestCase):
test_something = nothing
tearDown = _skip
class Test4(unittest.TestCase):
def test_something(self):
self.addCleanup(_skip)
for klass in (Test1, Test2, Test3, Test4):
result = unittest.TestResult()
klass('test_something').run(result)
self.assertEqual(len(result.skipped), 1)
self.assertEqual(result.testsRun, 1)
def testSystemExit(self):
def _raise(self=None):
raise SystemExit
def nothing(self):
pass
class Test1(unittest.TestCase):
test_something = _raise
class Test2(unittest.TestCase):
setUp = _raise
test_something = nothing
class Test3(unittest.TestCase):
test_something = nothing
tearDown = _raise
class Test4(unittest.TestCase):
def test_something(self):
self.addCleanup(_raise)
for klass in (Test1, Test2, Test3, Test4):
result = unittest.TestResult()
klass('test_something').run(result)
self.assertEqual(len(result.errors), 1)
self.assertEqual(result.testsRun, 1)
@support.cpython_only
def testNoCycles(self):
case = unittest.TestCase()
wr = weakref.ref(case)
with support.disable_gc():
del case
self.assertFalse(wr())
| gpl-3.0 |
badreddinetahir/pwn_plug_sources | src/fasttrack/bin/menu/servicesmenu.py | 16 | 1851 | #!/usr/bin/env python
import os,time,re
definepath=os.getcwd()
# define metasploit path
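# Scan the fasttrack config for a line of the form METASPLOIT_PATH=<path>
# and keep everything after the '=' as the Metasploit install directory.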
meta_path=file("%s/config/fasttrack_config" % (definepath),"r").readlines()
for line in meta_path:
line=line.rstrip()
match=re.search("METASPLOIT_PATH",line)
if match:
line=line.replace("METASPLOIT_PATH=","")
metapath=line
try:
import psyco
psyco.full()
except ImportError:
pass
while True:
servmen=raw_input("""Which service do you want to start:
1. VNC
2. TFTP
3. SSH
4. Apache
5. Metasploit Web
6. ProFTPD
(q)uit
Enter number: """)
if servmen == '1' :
vnc=os.system("vncserver;netstat -ant |grep 5901")
print "VNC Server Started..."
if servmen == '2' :
tftp=os.system("atftpd --daemon --port 69 /tmp/;netstat -anu |grep 69")
print "TFTP Server Started..."
if servmen == '3' :
ssh=os.system("sshd-generate;/usr/sbin/sshd;netstat -ant |grep 22")
print "SSH Server Started..."
if servmen == '4' :
apache=os.system("apachectl start;netstat -ant |grep 80")
print "Apache Server Started..."
if servmen == '5' :
metasplt1=os.system("%s/msfweb &" % (metapath))
time.sleep(10)
print 'Sleeping 10 seconds...waiting for metasploit...then launching FireFox'
metasplt2=os.system("firefox http://127.0.0.1:55555 &")
if servmen == '6' :
print "Make sure you installed it from the Updates Menu first!!"
time.sleep(2)
print 'Starting FTP Service'
ftpstart=os.system('proftpd &')
print 'FTP Service Started. Returning to menu.'
if servmen == 'q' :
print "Returning to previous menu..."
break
| gpl-3.0 |
1200wd/1200wd_addons | delivery_transsmart_address_consolidation/models.py | 1 | 2895 | # -*- coding: utf-8 -*-
##############################################################################
#
#    Delivery Transsmart Integration - Address Consolidation
# Copyright (C) 2016 1200 Web Development (<http://1200wd.com/>)
# (C) 2015 ONESTEiN BV (<http://www.onestein.nl>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
import logging
_logger = logging.getLogger(__name__)
class stock_picking(models.Model):
_inherit = 'stock.picking'
def _transsmart_document_from_stock_picking(self):
"""Use address consolidation fields for stock picking.
"""
document = super(stock_picking, self)._transsmart_document_from_stock_picking()
document.update({
"AddressName": self.partner_id.name or '',
"AddressStreet": self.shipping_partner_street or '',
"AddressStreet2": self.shipping_partner_street2 or '',
"AddressStreetNo": ".",
"AddressZipcode": self.shipping_partner_zip or '',
"AddressCity": self.shipping_partner_city or '',
"AddressState": self.shipping_partner_state_id.name or '',
"AddressCountry": self.shipping_partner_country_id.code or '',
})
if self.group_id:
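            # The originating sale order is linked through the procurement
            # group; when one is found, mirror its consolidated invoice
            # address onto the Transsmart document as well.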
related_sale = self.env['sale.order'].search([('procurement_group_id','=',self.group_id.id)])
if related_sale:
document.update({
"AddressNameInvoice": related_sale.partner_invoice_id.name or '',
"AddressStreetInvoice": related_sale.invoice_partner_street or '',
"AddressStreet2Invoice": related_sale.invoice_partner_street2 or '',
"AddressStreetNoInvoice": "-",
"AddressZipcodeInvoice": related_sale.invoice_partner_zip or '',
"AddressCityInvoice": related_sale.invoice_partner_city or '',
"AddressStateInvoice": related_sale.shipping_partner_state_id.name or '',
"AddressCountryInvoice": related_sale.invoice_partner_country_id.code or '',
})
return document
| agpl-3.0 |
geodynamics/gale | config/scons/scons-local-1.2.0/SCons/Scanner/D.py | 12 | 2440 | """SCons.Scanner.D
Scanner for the Digital Mars "D" programming language.
Coded by Andy Friesen
17 Nov 2003
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Scanner/D.py 3842 2008/12/20 22:59:52 scons"
import re
import string
import SCons.Scanner
def DScanner():
"""Return a prototype Scanner instance for scanning D source files"""
ds = D()
return ds
class D(SCons.Scanner.Classic):
def __init__ (self):
SCons.Scanner.Classic.__init__ (self,
name = "DScanner",
suffixes = '$DSUFFIXES',
path_variable = 'DPATH',
                                        regex = r'import\s+(?:[a-zA-Z0-9_.]+)\s*(?:,\s*(?:[a-zA-Z0-9_.]+)\s*)*;')
        self.cre2 = re.compile (r'(?:import\s)?\s*([a-zA-Z0-9_.]+)\s*(?:,|;)', re.M)
def find_include(self, include, source_dir, path):
# translate dots (package separators) to slashes
inc = string.replace(include, '.', '/')
i = SCons.Node.FS.find_file(inc + '.d', (source_dir,) + path)
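        # fall back to a .di interface file when no .d source module is
        # found on the search path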
if i is None:
i = SCons.Node.FS.find_file (inc + '.di', (source_dir,) + path)
return i, include
def find_include_names(self, node):
includes = []
for i in self.cre.findall(node.get_contents()):
includes = includes + self.cre2.findall(i)
return includes
| gpl-2.0 |
insionng/httpie | httpie/models.py | 48 | 3986 | from httpie.compat import urlsplit, str
class HTTPMessage(object):
"""Abstract class for HTTP messages."""
def __init__(self, orig):
self._orig = orig
def iter_body(self, chunk_size):
"""Return an iterator over the body."""
raise NotImplementedError()
def iter_lines(self, chunk_size):
"""Return an iterator over the body yielding (`line`, `line_feed`)."""
raise NotImplementedError()
@property
def headers(self):
"""Return a `str` with the message's headers."""
raise NotImplementedError()
@property
def encoding(self):
"""Return a `str` with the message's encoding, if known."""
raise NotImplementedError()
@property
def body(self):
"""Return a `bytes` with the message's body."""
raise NotImplementedError()
@property
def content_type(self):
"""Return the message content type."""
ct = self._orig.headers.get('Content-Type', '')
if not isinstance(ct, str):
ct = ct.decode('utf8')
return ct
class HTTPResponse(HTTPMessage):
"""A :class:`requests.models.Response` wrapper."""
def iter_body(self, chunk_size=1):
return self._orig.iter_content(chunk_size=chunk_size)
def iter_lines(self, chunk_size):
return ((line, b'\n') for line in self._orig.iter_lines(chunk_size))
#noinspection PyProtectedMember
@property
def headers(self):
original = self._orig.raw._original_response
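        # http.client/httplib store the HTTP version as an int (e.g. 11 for
        # HTTP/1.1); map it back to the dotted form used on the status line.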
version = {
9: '0.9',
10: '1.0',
11: '1.1',
20: '2',
}[original.version]
status_line = 'HTTP/{version} {status} {reason}'.format(
version=version,
status=original.status,
reason=original.reason
)
headers = [status_line]
try:
# `original.msg` is a `http.client.HTTPMessage` on Python 3
# `_headers` is a 2-tuple
headers.extend(
'%s: %s' % header for header in original.msg._headers)
except AttributeError:
# and a `httplib.HTTPMessage` on Python 2.x
# `headers` is a list of `name: val<CRLF>`.
headers.extend(h.strip() for h in original.msg.headers)
return '\r\n'.join(headers)
@property
def encoding(self):
return self._orig.encoding or 'utf8'
@property
def body(self):
# Only now the response body is fetched.
# Shouldn't be touched unless the body is actually needed.
return self._orig.content
class HTTPRequest(HTTPMessage):
"""A :class:`requests.models.Request` wrapper."""
def iter_body(self, chunk_size):
yield self.body
def iter_lines(self, chunk_size):
yield self.body, b''
@property
def headers(self):
url = urlsplit(self._orig.url)
request_line = '{method} {path}{query} HTTP/1.1'.format(
method=self._orig.method,
path=url.path or '/',
query='?' + url.query if url.query else ''
)
headers = dict(self._orig.headers)
if 'Host' not in self._orig.headers:
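            # netloc may carry userinfo ('user:pass@host'); keep only the
            # host portion for the Host header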
headers['Host'] = url.netloc.split('@')[-1]
headers = [
'%s: %s' % (
name,
value if isinstance(value, str) else value.decode('utf8')
)
for name, value in headers.items()
]
headers.insert(0, request_line)
headers = '\r\n'.join(headers).strip()
if isinstance(headers, bytes):
# Python < 3
headers = headers.decode('utf8')
return headers
@property
def encoding(self):
return 'utf8'
@property
def body(self):
body = self._orig.body
if isinstance(body, str):
# Happens with JSON/form request data parsed from the command line.
body = body.encode('utf8')
return body or b''
| bsd-3-clause |
open-craft/XBlock | xblock/test/test_fields.py | 1 | 17132 | """
Tests for classes extending Field.
"""
# Allow accessing protected members for testing purposes
# pylint: disable=W0212
from mock import MagicMock, Mock
import unittest
import datetime as dt
import pytz
import warnings
from contextlib import contextmanager
from xblock.core import XBlock, Scope
from xblock.field_data import DictFieldData
from xblock.fields import (
Any, Boolean, Dict, Field, Float,
Integer, List, String, DateTime, Reference, ReferenceList, Sentinel
)
from xblock.test.tools import assert_equals, assert_not_equals, assert_not_in
from xblock.fields import scope_key, ScopeIds
class FieldTest(unittest.TestCase):
""" Base test class for Fields. """
def field_totest(self):
"""Child classes should override this with the type of field
the test is testing."""
return None
def set_and_get_field(self, arg, enforce_type):
"""
Set the field to arg in a Block, get it and return it
"""
class TestBlock(XBlock):
"""
Block for testing
"""
field_x = self.field_totest(enforce_type=enforce_type)
block = TestBlock(MagicMock(), DictFieldData({}), Mock())
block.field_x = arg
return block.field_x
@contextmanager
def assertDeprecationWarning(self, count=1):
"""Asserts that the contained code raises `count` deprecation warnings"""
with warnings.catch_warnings(record=True) as caught:
warnings.simplefilter("always", DeprecationWarning)
yield
self.assertEquals(count, sum(
1 for warning in caught
if issubclass(warning.category, DeprecationWarning)
))
def assertJSONOrSetEquals(self, expected, arg):
"""
Asserts the result of field.from_json and of setting field.
"""
# from_json(arg) -> expected
self.assertEqual(expected, self.field_totest().from_json(arg))
# set+get with enforce_type arg -> expected
self.assertEqual(expected, self.set_and_get_field(arg, True))
# set+get without enforce_type arg -> arg
# provoking a warning unless arg == expected
count = 0 if arg == expected else 1
with self.assertDeprecationWarning(count):
self.assertEqual(arg, self.set_and_get_field(arg, False))
def assertToJSONEquals(self, expected, arg):
"""
Assert that serialization of `arg` to JSON equals `expected`.
"""
self.assertEqual(expected, self.field_totest().to_json(arg))
def assertJSONOrSetValueError(self, arg):
"""
Asserts that field.from_json or setting the field throws a ValueError
for the supplied value.
"""
# from_json and set+get with enforce_type -> ValueError
with self.assertRaises(ValueError):
self.field_totest().from_json(arg)
with self.assertRaises(ValueError):
self.set_and_get_field(arg, True)
# set+get without enforce_type -> warning
with self.assertDeprecationWarning():
self.set_and_get_field(arg, False)
def assertJSONOrSetTypeError(self, arg):
"""
Asserts that field.from_json or setting the field throws a TypeError
for the supplied value.
"""
# from_json and set+get with enforce_type -> TypeError
with self.assertRaises(TypeError):
self.field_totest().from_json(arg)
with self.assertRaises(TypeError):
self.set_and_get_field(arg, True)
# set+get without enforce_type -> warning
with self.assertDeprecationWarning():
self.set_and_get_field(arg, False)
class IntegerTest(FieldTest):
"""
Tests the Integer Field.
"""
field_totest = Integer
def test_integer(self):
self.assertJSONOrSetEquals(5, '5')
self.assertJSONOrSetEquals(0, '0')
self.assertJSONOrSetEquals(-1023, '-1023')
self.assertJSONOrSetEquals(7, 7)
self.assertJSONOrSetEquals(0, False)
self.assertJSONOrSetEquals(1, True)
def test_float_converts(self):
self.assertJSONOrSetEquals(1, 1.023)
self.assertJSONOrSetEquals(-3, -3.8)
def test_none(self):
self.assertJSONOrSetEquals(None, None)
self.assertJSONOrSetEquals(None, '')
def test_error(self):
self.assertJSONOrSetValueError('abc')
self.assertJSONOrSetValueError('[1]')
self.assertJSONOrSetValueError('1.023')
self.assertJSONOrSetTypeError([])
self.assertJSONOrSetTypeError({})
class FloatTest(FieldTest):
"""
Tests the Float Field.
"""
field_totest = Float
def test_float(self):
self.assertJSONOrSetEquals(.23, '.23')
self.assertJSONOrSetEquals(5, '5')
self.assertJSONOrSetEquals(0, '0.0')
self.assertJSONOrSetEquals(-1023.22, '-1023.22')
self.assertJSONOrSetEquals(0, 0.0)
self.assertJSONOrSetEquals(4, 4)
self.assertJSONOrSetEquals(-0.23, -0.23)
self.assertJSONOrSetEquals(0, False)
self.assertJSONOrSetEquals(1, True)
def test_none(self):
self.assertJSONOrSetEquals(None, None)
self.assertJSONOrSetEquals(None, '')
def test_error(self):
self.assertJSONOrSetValueError('abc')
self.assertJSONOrSetValueError('[1]')
self.assertJSONOrSetTypeError([])
self.assertJSONOrSetTypeError({})
class BooleanTest(FieldTest):
"""
Tests the Boolean Field.
"""
field_totest = Boolean
def test_false(self):
self.assertJSONOrSetEquals(False, "false")
self.assertJSONOrSetEquals(False, "False")
self.assertJSONOrSetEquals(False, "")
self.assertJSONOrSetEquals(False, "any other string")
self.assertJSONOrSetEquals(False, False)
def test_true(self):
self.assertJSONOrSetEquals(True, "true")
self.assertJSONOrSetEquals(True, "TruE")
self.assertJSONOrSetEquals(True, True)
def test_none(self):
self.assertJSONOrSetEquals(False, None)
def test_everything_converts_to_bool(self):
self.assertJSONOrSetEquals(True, 123)
self.assertJSONOrSetEquals(True, ['a'])
self.assertJSONOrSetEquals(False, [])
class StringTest(FieldTest):
"""
Tests the String Field.
"""
field_totest = String
def test_json_equals(self):
self.assertJSONOrSetEquals("false", "false")
self.assertJSONOrSetEquals("abba", "abba")
self.assertJSONOrSetEquals('"abba"', '"abba"')
self.assertJSONOrSetEquals('', '')
def test_none(self):
self.assertJSONOrSetEquals(None, None)
def test_error(self):
self.assertJSONOrSetTypeError(['a'])
self.assertJSONOrSetTypeError(1.023)
self.assertJSONOrSetTypeError(3)
self.assertJSONOrSetTypeError([1])
self.assertJSONOrSetTypeError([])
self.assertJSONOrSetTypeError({})
class DateTest(FieldTest):
"""
Tests of the Date field.
"""
field_totest = DateTime
def test_json_equals(self):
self.assertJSONOrSetEquals(
dt.datetime(2014, 4, 1, 2, 3, 4, 567890).replace(tzinfo=pytz.utc),
'2014-04-01T02:03:04.567890'
)
self.assertJSONOrSetEquals(
dt.datetime(2014, 4, 1, 2, 3, 4).replace(tzinfo=pytz.utc),
'2014-04-01T02:03:04.000000'
)
self.assertJSONOrSetEquals(
dt.datetime(2014, 4, 1, 2, 3, 4).replace(tzinfo=pytz.utc),
'2014-04-01T02:03:04Z'
)
self.assertJSONOrSetEquals(
dt.datetime(2014, 4, 1, 2, 3, 4).replace(tzinfo=pytz.utc),
dt.datetime(2014, 4, 1, 2, 3, 4).replace(tzinfo=pytz.utc)
)
def test_serialize(self):
self.assertToJSONEquals(
'2014-04-01T02:03:04.567890',
dt.datetime(2014, 4, 1, 2, 3, 4, 567890).replace(tzinfo=pytz.utc)
)
self.assertToJSONEquals(
'2014-04-01T02:03:04.000000',
dt.datetime(2014, 4, 1, 2, 3, 4).replace(tzinfo=pytz.utc)
)
def test_none(self):
self.assertJSONOrSetEquals(None, None)
self.assertJSONOrSetEquals(None, '')
self.assertEqual(DateTime().to_json(None), None)
def test_error(self):
self.assertJSONOrSetTypeError(['a'])
self.assertJSONOrSetTypeError(5)
self.assertJSONOrSetTypeError(5.123)
def test_date_format_error(self):
with self.assertRaises(ValueError):
DateTime().from_json('invalid')
def test_serialize_error(self):
with self.assertRaises(TypeError):
DateTime().to_json('not a datetime')
class AnyTest(FieldTest):
"""
Tests the Any Field.
"""
field_totest = Any
def test_json_equals(self):
self.assertJSONOrSetEquals({'bar'}, {'bar'})
self.assertJSONOrSetEquals("abba", "abba")
self.assertJSONOrSetEquals('', '')
self.assertJSONOrSetEquals('3.2', '3.2')
self.assertJSONOrSetEquals(False, False)
self.assertJSONOrSetEquals([3, 4], [3, 4])
def test_none(self):
self.assertJSONOrSetEquals(None, None)
class ListTest(FieldTest):
"""
Tests the List Field.
"""
field_totest = List
def test_json_equals(self):
self.assertJSONOrSetEquals([], [])
self.assertJSONOrSetEquals(['foo', 'bar'], ['foo', 'bar'])
self.assertJSONOrSetEquals([1, 3.4], [1, 3.4])
def test_none(self):
self.assertJSONOrSetEquals(None, None)
def test_error(self):
self.assertJSONOrSetTypeError('abc')
self.assertJSONOrSetTypeError('')
self.assertJSONOrSetTypeError('1.23')
self.assertJSONOrSetTypeError('true')
self.assertJSONOrSetTypeError(3.7)
self.assertJSONOrSetTypeError(True)
self.assertJSONOrSetTypeError({})
class ReferenceTest(FieldTest):
"""
Tests the Reference Field.
"""
field_totest = Reference
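    # Note (an assumption drawn from the cases below): Reference performs no
    # coercion, so arbitrary JSON values round-trip through it unchanged,
    # much like Any.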
def test_json_equals(self):
self.assertJSONOrSetEquals({'id': 'bar', 'usage': 'baz'}, {'id': 'bar', 'usage': 'baz'})
self.assertJSONOrSetEquals("i4x://myu/mycourse/problem/myproblem", "i4x://myu/mycourse/problem/myproblem")
self.assertJSONOrSetEquals('', '')
self.assertJSONOrSetEquals(3.2, 3.2)
self.assertJSONOrSetEquals(False, False)
self.assertJSONOrSetEquals([3, 4], [3, 4])
def test_none(self):
self.assertJSONOrSetEquals(None, None)
class ReferenceListTest(FieldTest):
"""
Tests the ReferenceList Field.
"""
field_totest = ReferenceList
def test_json_equals(self):
self.assertJSONOrSetEquals([], [])
self.assertJSONOrSetEquals(['foo', 'bar'], ['foo', 'bar'])
self.assertJSONOrSetEquals([1, 3.4], [1, 3.4])
def test_none(self):
self.assertJSONOrSetEquals(None, None)
def test_error(self):
self.assertJSONOrSetTypeError('abc')
self.assertJSONOrSetTypeError('')
self.assertJSONOrSetTypeError('1.23')
self.assertJSONOrSetTypeError('true')
self.assertJSONOrSetTypeError(3.7)
self.assertJSONOrSetTypeError(True)
self.assertJSONOrSetTypeError({})
class DictTest(FieldTest):
"""
Tests the Dict Field.
"""
field_totest = Dict
def test_json_equals(self):
self.assertJSONOrSetEquals({}, {})
self.assertJSONOrSetEquals({'a': 'b', 'c': 3}, {'a': 'b', 'c': 3})
def test_none(self):
self.assertJSONOrSetEquals(None, None)
def test_error(self):
self.assertJSONOrSetTypeError(['foo', 'bar'])
self.assertJSONOrSetTypeError([])
self.assertJSONOrSetTypeError('abc')
self.assertJSONOrSetTypeError('1.23')
self.assertJSONOrSetTypeError('true')
self.assertJSONOrSetTypeError(3.7)
self.assertJSONOrSetTypeError(True)
def test_field_name_defaults():
# Tests field display name default values
attempts = Integer()
attempts._name = "max_problem_attempts"
assert_equals('max_problem_attempts', attempts.display_name)
class TestBlock(XBlock):
"""
Block for testing
"""
field_x = List()
assert_equals("field_x", TestBlock.field_x.display_name)
def test_scope_key():
# Tests field display name default values
class TestBlock(XBlock):
"""
Block for testing
"""
field_x = List(scope=Scope.settings, name='')
settings_lst = List(scope=Scope.settings, name='')
uss_lst = List(scope=Scope.user_state_summary, name='')
user_lst = List(scope=Scope.user_state, name='')
pref_lst = List(scope=Scope.preferences, name='')
user_info_lst = List(scope=Scope.user_info, name='')
sids = ScopeIds(user_id="_bob",
block_type="b.12#ob",
def_id="..",
usage_id="..")
field_data = DictFieldData({})
from test_runtime import TestRuntime
runtime = TestRuntime(Mock(), field_data, [])
block = TestBlock(runtime, field_data, sids)
# Format: usage or block ID/field_name/user_id
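    # The expected keys below suggest non-alphanumeric characters get escaped
    # -- '.' doubles to '..', '#' becomes '_35_' (its decimal ordinal), and
    # '_' expands to runs of underscores. This is read off the assertions,
    # not from the scope_key implementation itself.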
for item, correct_key in [[TestBlock.field_x, "__..../field__x/NONE.NONE"],
[TestBlock.user_info_lst, "NONE.NONE/user__info__lst/____bob"],
[TestBlock.pref_lst, "b..12_35_ob/pref__lst/____bob"],
[TestBlock.user_lst, "__..../user__lst/____bob"],
[TestBlock.uss_lst, "__..../uss__lst/NONE.NONE"],
[TestBlock.settings_lst, "__..../settings__lst/NONE.NONE"]]:
key = scope_key(item, block)
assert_equals(key, correct_key)
def test_field_display_name():
attempts = Integer(display_name='Maximum Problem Attempts')
attempts._name = "max_problem_attempts"
assert_equals("Maximum Problem Attempts", attempts.display_name)
boolean_field = Boolean(display_name="boolean field")
assert_equals("boolean field", boolean_field.display_name)
class TestBlock(XBlock):
"""
Block for testing
"""
field_x = List(display_name="Field Known as X")
assert_equals("Field Known as X", TestBlock.field_x.display_name)
def test_values():
# static return value
field_values = ['foo', 'bar']
test_field = String(values=field_values)
assert_equals(field_values, test_field.values)
# function to generate values
test_field = String(values=lambda: [1, 4])
assert_equals([1, 4], test_field.values)
# default if nothing specified
assert_equals(None, String().values)
def test_values_boolean():
# Test Boolean, which has values defined
test_field = Boolean()
assert_equals(
({'display_name': "True", "value": True}, {'display_name': "False", "value": False}),
test_field.values
)
def test_values_dict():
# Test that the format expected for integers is allowed
test_field = Integer(values={"min": 1, "max": 100})
assert_equals({"min": 1, "max": 100}, test_field.values)
def test_twofaced_field_access():
# Check that a field with different to_json and from_json representations
# persists and saves correctly.
class TwoFacedField(Field):
"""A field that emits different 'json' than it parses."""
def from_json(self, thestr):
"""Store an int, the length of the string parsed."""
return len(thestr)
def to_json(self, value):
"""Emit some number of X's."""
return "X" * value
class FieldTester(XBlock):
"""Test block for TwoFacedField."""
how_many = TwoFacedField(scope=Scope.settings)
original_json = "YYY"
field_tester = FieldTester(MagicMock(), DictFieldData({'how_many': original_json}), Mock())
# Test that the native value isn't equal to the original json we specified.
assert_not_equals(field_tester.how_many, original_json)
# Test that the native -> json value isn't equal to the original json we specified.
assert_not_equals(TwoFacedField().to_json(field_tester.how_many), original_json)
# The previous accesses will mark the field as dirty (via __get__)
assert_equals(len(field_tester._dirty_fields), 1)
# However, the field should not ACTUALLY be marked as a field that is needing to be saved.
assert_not_in('how_many', field_tester._get_fields_to_save()) # pylint: disable=W0212
class SentinelTest(unittest.TestCase):
"""
Tests of :ref:`xblock.fields.Sentinel`.
"""
def test_equality(self):
base = Sentinel('base')
self.assertEquals(base, base)
self.assertEquals(base, Sentinel('base'))
self.assertNotEquals(base, Sentinel('foo'))
self.assertNotEquals(base, 'base')
def test_hashing(self):
base = Sentinel('base')
a_dict = {base: True}
self.assertEquals(a_dict[Sentinel('base')], True)
self.assertEquals(a_dict[base], True)
self.assertNotIn(Sentinel('foo'), a_dict)
self.assertNotIn('base', a_dict)
| apache-2.0 |
mydongistiny/external_chromium_org | tools/valgrind/chrome_tests.py | 32 | 31784 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
''' Runs various chrome tests through valgrind_test.py.'''
import glob
import logging
import multiprocessing
import optparse
import os
import stat
import sys
import logging_utils
import path_utils
import common
import valgrind_test
class TestNotFound(Exception): pass
class MultipleGTestFiltersSpecified(Exception): pass
class BuildDirNotFound(Exception): pass
class BuildDirAmbiguous(Exception): pass
class ChromeTests:
SLOW_TOOLS = ["memcheck", "tsan", "tsan_rv", "drmemory"]
LAYOUT_TESTS_DEFAULT_CHUNK_SIZE = 300
def __init__(self, options, args, test):
if ':' in test:
(self._test, self._gtest_filter) = test.split(':', 1)
else:
self._test = test
self._gtest_filter = options.gtest_filter
if self._test not in self._test_list:
raise TestNotFound("Unknown test: %s" % test)
if options.gtest_filter and options.gtest_filter != self._gtest_filter:
raise MultipleGTestFiltersSpecified("Can not specify both --gtest_filter "
"and --test %s" % test)
self._options = options
self._args = args
script_dir = path_utils.ScriptDir()
# Compute the top of the tree (the "source dir") from the script dir (where
# this script lives). We assume that the script dir is in tools/valgrind/
# relative to the top of the tree.
self._source_dir = os.path.dirname(os.path.dirname(script_dir))
# since this path is used for string matching, make sure it's always
# an absolute Unix-style path
self._source_dir = os.path.abspath(self._source_dir).replace('\\', '/')
valgrind_test_script = os.path.join(script_dir, "valgrind_test.py")
self._command_preamble = ["--source-dir=%s" % (self._source_dir)]
if not self._options.build_dir:
dirs = [
os.path.join(self._source_dir, "xcodebuild", "Debug"),
os.path.join(self._source_dir, "out", "Debug"),
os.path.join(self._source_dir, "build", "Debug"),
]
build_dir = [d for d in dirs if os.path.isdir(d)]
if len(build_dir) > 1:
raise BuildDirAmbiguous("Found more than one suitable build dir:\n"
"%s\nPlease specify just one "
"using --build-dir" % ", ".join(build_dir))
elif build_dir:
self._options.build_dir = build_dir[0]
else:
self._options.build_dir = None
if self._options.build_dir:
build_dir = os.path.abspath(self._options.build_dir)
self._command_preamble += ["--build-dir=%s" % (self._options.build_dir)]
def _EnsureBuildDirFound(self):
if not self._options.build_dir:
raise BuildDirNotFound("Oops, couldn't find a build dir, please "
"specify it manually using --build-dir")
def _DefaultCommand(self, tool, exe=None, valgrind_test_args=None):
'''Generates the default command array that most tests will use.'''
if exe and common.IsWindows():
exe += '.exe'
cmd = list(self._command_preamble)
# Find all suppressions matching the following pattern:
# tools/valgrind/TOOL/suppressions[_PLATFORM].txt
# and list them with --suppressions= prefix.
script_dir = path_utils.ScriptDir()
tool_name = tool.ToolName()
suppression_file = os.path.join(script_dir, tool_name, "suppressions.txt")
if os.path.exists(suppression_file):
cmd.append("--suppressions=%s" % suppression_file)
# Platform-specific suppression
for platform in common.PlatformNames():
platform_suppression_file = \
os.path.join(script_dir, tool_name, 'suppressions_%s.txt' % platform)
if os.path.exists(platform_suppression_file):
cmd.append("--suppressions=%s" % platform_suppression_file)
if self._options.valgrind_tool_flags:
cmd += self._options.valgrind_tool_flags.split(" ")
if self._options.keep_logs:
cmd += ["--keep_logs"]
if valgrind_test_args != None:
for arg in valgrind_test_args:
cmd.append(arg)
if exe:
self._EnsureBuildDirFound()
cmd.append(os.path.join(self._options.build_dir, exe))
# Valgrind runs tests slowly, so slow tests hurt more; show elapsed time
# so we can find the slowpokes.
cmd.append("--gtest_print_time")
# Built-in test launcher for gtest-based executables runs tests using
# multiple process by default. Force the single-process mode back.
cmd.append("--single-process-tests")
if self._options.gtest_repeat:
cmd.append("--gtest_repeat=%s" % self._options.gtest_repeat)
if self._options.gtest_shuffle:
cmd.append("--gtest_shuffle")
if self._options.brave_new_test_launcher:
cmd.append("--brave-new-test-launcher")
if self._options.test_launcher_bot_mode:
cmd.append("--test-launcher-bot-mode")
return cmd
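# Illustrative sketch (editorial; paths and platform name are examples):
# for --tool=memcheck on Linux with exe="base_unittests", the command list
# assembled above comes out roughly as
#   --source-dir=/path/to/src
#   --build-dir=/path/to/src/out/Debug
#   --suppressions=.../tools/valgrind/memcheck/suppressions.txt
#   --suppressions=.../tools/valgrind/memcheck/suppressions_linux.txt
#   /path/to/src/out/Debug/base_unittests
#   --gtest_print_time
#   --single-process-tests
# plus whatever --tool_flags, --keep_logs and gtest options were requested.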
def Run(self):
''' Runs the test specified by command-line argument --test '''
logging.info("running test %s" % (self._test))
return self._test_list[self._test](self)
def _AppendGtestFilter(self, tool, name, cmd):
'''Append an appropriate --gtest_filter flag to the googletest binary
invocation.
If the user passed their own filter mentioning only one test, just use it.
Otherwise, filter out tests listed in the appropriate gtest_exclude files.
'''
if (self._gtest_filter and
":" not in self._gtest_filter and
"?" not in self._gtest_filter and
"*" not in self._gtest_filter):
cmd.append("--gtest_filter=%s" % self._gtest_filter)
return
filters = []
gtest_files_dir = os.path.join(path_utils.ScriptDir(), "gtest_exclude")
gtest_filter_files = [
os.path.join(gtest_files_dir, name + ".gtest-%s.txt" % tool.ToolName())]
# Use ".gtest.txt" files only for slow tools, as they now contain
# Valgrind- and Dr.Memory-specific filters.
# TODO(glider): rename the files to ".gtest_slow.txt"
if tool.ToolName() in ChromeTests.SLOW_TOOLS:
gtest_filter_files += [os.path.join(gtest_files_dir, name + ".gtest.txt")]
for platform_suffix in common.PlatformNames():
gtest_filter_files += [
os.path.join(gtest_files_dir, name + ".gtest_%s.txt" % platform_suffix),
os.path.join(gtest_files_dir, name + ".gtest-%s_%s.txt" % \
(tool.ToolName(), platform_suffix))]
logging.info("Reading gtest exclude filter files:")
for filename in gtest_filter_files:
# strip the leading absolute path (may be very long on the bot)
# and the following / or \.
readable_filename = filename.replace("\\", "/") # '\' on Windows
readable_filename = readable_filename.replace(self._source_dir, "")[1:]
if not os.path.exists(filename):
logging.info(" \"%s\" - not found" % readable_filename)
continue
logging.info(" \"%s\" - OK" % readable_filename)
f = open(filename, 'r')
for line in f.readlines():
if line.startswith("#") or line.startswith("//") or line.isspace():
continue
line = line.rstrip()
test_prefixes = ["FLAKY", "FAILS"]
for p in test_prefixes:
# Strip prefixes from the test names.
line = line.replace(".%s_" % p, ".")
# Exclude the original test name.
filters.append(line)
if line[-2:] != ".*":
# List all possible prefixes if line doesn't end with ".*".
for p in test_prefixes:
filters.append(line.replace(".", ".%s_" % p))
# Get rid of duplicates.
filters = set(filters)
gtest_filter = self._gtest_filter
if len(filters):
if gtest_filter:
gtest_filter += ":"
if gtest_filter.find("-") < 0:
gtest_filter += "-"
else:
gtest_filter = "-"
gtest_filter += ":".join(filters)
if gtest_filter:
cmd.append("--gtest_filter=%s" % gtest_filter)
@staticmethod
def ShowTests():
test_to_names = {}
for name, test_function in ChromeTests._test_list.iteritems():
test_to_names.setdefault(test_function, []).append(name)
name_to_aliases = {}
for names in test_to_names.itervalues():
names.sort(key=lambda name: len(name))
name_to_aliases[names[0]] = names[1:]
print
print "Available tests:"
print "----------------"
for name, aliases in sorted(name_to_aliases.iteritems()):
if aliases:
print " {} (aka {})".format(name, ', '.join(aliases))
else:
print " {}".format(name)
def SetupLdPath(self, requires_build_dir):
if requires_build_dir:
self._EnsureBuildDirFound()
elif not self._options.build_dir:
return
# Append build_dir to LD_LIBRARY_PATH so external libraries can be loaded.
if (os.getenv("LD_LIBRARY_PATH")):
os.putenv("LD_LIBRARY_PATH", "%s:%s" % (os.getenv("LD_LIBRARY_PATH"),
self._options.build_dir))
else:
os.putenv("LD_LIBRARY_PATH", self._options.build_dir)
def SimpleTest(self, module, name, valgrind_test_args=None, cmd_args=None):
tool = valgrind_test.CreateTool(self._options.valgrind_tool)
cmd = self._DefaultCommand(tool, name, valgrind_test_args)
self._AppendGtestFilter(tool, name, cmd)
cmd.extend(['--test-tiny-timeout=1000'])
if cmd_args:
cmd.extend(cmd_args)
self.SetupLdPath(True)
return tool.Run(cmd, module)
def RunCmdLine(self):
tool = valgrind_test.CreateTool(self._options.valgrind_tool)
cmd = self._DefaultCommand(tool, None, self._args)
self.SetupLdPath(False)
return tool.Run(cmd, None)
def TestAccessibility(self):
return self.SimpleTest("accessibility", "accessibility_unittests")
def TestAddressInput(self):
return self.SimpleTest("addressinput", "libaddressinput_unittests")
def TestAngle(self):
return self.SimpleTest("angle", "angle_unittests")
def TestAppList(self):
return self.SimpleTest("app_list", "app_list_unittests")
def TestAsh(self):
return self.SimpleTest("ash", "ash_unittests")
def TestAshShell(self):
return self.SimpleTest("ash_shelf", "ash_shell_unittests")
def TestAura(self):
return self.SimpleTest("aura", "aura_unittests")
def TestBase(self):
return self.SimpleTest("base", "base_unittests")
def TestBlinkHeap(self):
return self.SimpleTest("blink_heap", "blink_heap_unittests")
def TestBlinkPlatform(self):
return self.SimpleTest("blink_platform", "blink_platform_unittests")
def TestCacheInvalidation(self):
return self.SimpleTest("cacheinvalidation", "cacheinvalidation_unittests")
def TestCast(self):
return self.SimpleTest("chrome", "cast_unittests")
def TestCC(self):
return self.SimpleTest("cc", "cc_unittests")
def TestChromeApp(self):
return self.SimpleTest("chrome_app", "chrome_app_unittests")
def TestChromeElf(self):
return self.SimpleTest("chrome_elf", "chrome_elf_unittests")
def TestChromeDriver(self):
return self.SimpleTest("chromedriver", "chromedriver_unittests")
def TestChromeOS(self):
return self.SimpleTest("chromeos", "chromeos_unittests")
def TestCloudPrint(self):
return self.SimpleTest("cloud_print", "cloud_print_unittests")
def TestComponents(self):
return self.SimpleTest("components", "components_unittests")
def TestCompositor(self):
return self.SimpleTest("compositor", "compositor_unittests")
def TestContent(self):
return self.SimpleTest("content", "content_unittests")
def TestCourgette(self):
return self.SimpleTest("courgette", "courgette_unittests")
def TestCrypto(self):
return self.SimpleTest("crypto", "crypto_unittests")
def TestDevice(self):
return self.SimpleTest("device", "device_unittests")
def TestDisplay(self):
return self.SimpleTest("display", "display_unittests")
def TestEvents(self):
return self.SimpleTest("events", "events_unittests")
def TestExtensions(self):
return self.SimpleTest("extensions", "extensions_unittests")
def TestFFmpeg(self):
return self.SimpleTest("chrome", "ffmpeg_unittests")
def TestFFmpegRegressions(self):
return self.SimpleTest("chrome", "ffmpeg_regression_tests")
def TestGCM(self):
return self.SimpleTest("gcm", "gcm_unit_tests")
def TestGfx(self):
return self.SimpleTest("gfx", "gfx_unittests")
def TestGin(self):
return self.SimpleTest("gin", "gin_unittests")
def TestGoogleApis(self):
return self.SimpleTest("google_apis", "google_apis_unittests")
def TestGPU(self):
return self.SimpleTest("gpu", "gpu_unittests")
def TestIpc(self):
return self.SimpleTest("ipc", "ipc_tests",
valgrind_test_args=["--trace_children"])
def TestInstallerUtil(self):
return self.SimpleTest("installer_util", "installer_util_unittests")
def TestJingle(self):
return self.SimpleTest("chrome", "jingle_unittests")
def TestKeyboard(self):
return self.SimpleTest("keyboard", "keyboard_unittests")
def TestMedia(self):
return self.SimpleTest("chrome", "media_unittests")
def TestMessageCenter(self):
return self.SimpleTest("message_center", "message_center_unittests")
def TestMojoAppsJS(self):
return self.SimpleTest("mojo_apps_js", "mojo_apps_js_unittests")
def TestMojoCommon(self):
return self.SimpleTest("mojo_common", "mojo_common_unittests")
def TestMojoJS(self):
return self.SimpleTest("mojo_js", "mojo_js_unittests")
def TestMojoPublicBindings(self):
return self.SimpleTest("mojo_public_bindings",
"mojo_public_bindings_unittests")
def TestMojoPublicEnv(self):
return self.SimpleTest("mojo_public_env",
"mojo_public_environment_unittests")
def TestMojoPublicSystem(self):
return self.SimpleTest("mojo_public_system",
"mojo_public_system_unittests")
def TestMojoPublicSysPerf(self):
return self.SimpleTest("mojo_public_sysperf",
"mojo_public_system_perftests")
def TestMojoPublicUtility(self):
return self.SimpleTest("mojo_public_utility",
"mojo_public_utility_unittests")
def TestMojoApplicationManager(self):
return self.SimpleTest("mojo_application_manager",
"mojo_application_manager_unittests")
def TestMojoSystem(self):
return self.SimpleTest("mojo_system", "mojo_system_unittests")
def TestMojoViewManager(self):
return self.SimpleTest("mojo_view_manager", "mojo_view_manager_unittests")
def TestNet(self):
return self.SimpleTest("net", "net_unittests")
def TestNetPerf(self):
return self.SimpleTest("net", "net_perftests")
def TestPhoneNumber(self):
return self.SimpleTest("phonenumber", "libphonenumber_unittests")
def TestPPAPI(self):
return self.SimpleTest("chrome", "ppapi_unittests")
def TestPrinting(self):
return self.SimpleTest("chrome", "printing_unittests")
def TestRemoting(self):
return self.SimpleTest("chrome", "remoting_unittests",
cmd_args=[
"--ui-test-action-timeout=60000",
"--ui-test-action-max-timeout=150000"])
def TestSql(self):
return self.SimpleTest("chrome", "sql_unittests")
def TestSync(self):
return self.SimpleTest("chrome", "sync_unit_tests")
def TestLinuxSandbox(self):
return self.SimpleTest("sandbox", "sandbox_linux_unittests")
def TestUnit(self):
# http://crbug.com/51716
# Disabling all unit tests
# Problems reappeared after r119922
if common.IsMac() and (self._options.valgrind_tool == "memcheck"):
logging.warning("unit_tests are disabled for memcheck on MacOS.")
return 0
return self.SimpleTest("chrome", "unit_tests")
def TestUIUnit(self):
return self.SimpleTest("chrome", "ui_unittests")
def TestURL(self):
return self.SimpleTest("chrome", "url_unittests")
def TestViews(self):
return self.SimpleTest("views", "views_unittests")
# Valgrind timeouts are in seconds.
UI_VALGRIND_ARGS = ["--timeout=14400", "--trace_children", "--indirect"]
# UI test timeouts are in milliseconds.
UI_TEST_ARGS = ["--ui-test-action-timeout=60000",
"--ui-test-action-max-timeout=150000",
"--no-sandbox"]
# TODO(thestig) fine-tune these values.
# Valgrind timeouts are in seconds.
BROWSER_VALGRIND_ARGS = ["--timeout=50000", "--trace_children", "--indirect"]
# Browser test timeouts are in milliseconds.
BROWSER_TEST_ARGS = ["--ui-test-action-timeout=400000",
"--ui-test-action-max-timeout=800000",
"--no-sandbox"]
def TestBrowser(self):
return self.SimpleTest("chrome", "browser_tests",
valgrind_test_args=self.BROWSER_VALGRIND_ARGS,
cmd_args=self.BROWSER_TEST_ARGS)
def TestContentBrowser(self):
return self.SimpleTest("content", "content_browsertests",
valgrind_test_args=self.BROWSER_VALGRIND_ARGS,
cmd_args=self.BROWSER_TEST_ARGS)
def TestInteractiveUI(self):
return self.SimpleTest("chrome", "interactive_ui_tests",
valgrind_test_args=self.UI_VALGRIND_ARGS,
cmd_args=self.UI_TEST_ARGS)
def TestSafeBrowsing(self):
return self.SimpleTest("chrome", "safe_browsing_tests",
valgrind_test_args=self.UI_VALGRIND_ARGS,
cmd_args=(["--ui-test-action-max-timeout=450000"]))
def TestSyncIntegration(self):
return self.SimpleTest("chrome", "sync_integration_tests",
valgrind_test_args=self.UI_VALGRIND_ARGS,
cmd_args=(["--ui-test-action-max-timeout=450000"]))
def TestLayoutChunk(self, chunk_num, chunk_size):
# Run tests [chunk_num*chunk_size .. (chunk_num+1)*chunk_size) from the
# list of tests. Wrap around to beginning of list at end.
# If chunk_size is zero, run all tests in the list once.
# If a text file is given as argument, it is used as the list of tests.
assert((chunk_size == 0) != (len(self._args) == 0))
# Build the ginormous commandline in 'cmd'.
# It's going to be roughly
# python valgrind_test.py ... python run_webkit_tests.py ...
# but we'll use the --indirect flag to valgrind_test.py
# to avoid valgrinding python.
# Start by building the valgrind_test.py commandline.
tool = valgrind_test.CreateTool(self._options.valgrind_tool)
cmd = self._DefaultCommand(tool)
cmd.append("--trace_children")
cmd.append("--indirect_webkit_layout")
cmd.append("--ignore_exit_code")
# Now build script_cmd, the run_webkits_tests.py commandline
# Store each chunk in its own directory so that we can find the data later
chunk_dir = os.path.join("layout", "chunk_%05d" % chunk_num)
out_dir = os.path.join(path_utils.ScriptDir(), "latest")
out_dir = os.path.join(out_dir, chunk_dir)
if os.path.exists(out_dir):
old_files = glob.glob(os.path.join(out_dir, "*.txt"))
for f in old_files:
os.remove(f)
else:
os.makedirs(out_dir)
script = os.path.join(self._source_dir, "webkit", "tools", "layout_tests",
"run_webkit_tests.py")
# http://crbug.com/260627: After the switch to content_shell from DRT, each
# test now brings up 3 processes. Under Valgrind, they become memory bound
# and can eventually OOM if we don't reduce the total count.
# It'd be nice if content_shell automatically throttled the startup of new
# tests if we're low on memory.
jobs = max(1, int(multiprocessing.cpu_count() * 0.3))
script_cmd = ["python", script, "-v",
# run a separate DumpRenderTree for each test
"--batch-size=1",
"--fully-parallel",
"--child-processes=%d" % jobs,
"--time-out-ms=800000",
"--no-retry-failures", # retrying takes too much time
# http://crbug.com/176908: Don't launch a browser when done.
"--no-show-results",
"--nocheck-sys-deps"]
# Pass build mode to run_webkit_tests.py. We aren't passed it directly,
# so parse it out of build_dir. run_webkit_tests.py can only handle
# the two values "Release" and "Debug".
# TODO(Hercules): unify how all our scripts pass around build mode
# (--mode / --target / --build-dir / --debug)
if self._options.build_dir:
build_root, mode = os.path.split(self._options.build_dir)
script_cmd.extend(["--build-directory", build_root, "--target", mode])
if chunk_size > 0:
script_cmd.append("--run-chunk=%d:%d" % (chunk_num, chunk_size))
if len(self._args):
# if the arg is a txt file, then treat it as a list of tests
if os.path.isfile(self._args[0]) and self._args[0][-4:] == ".txt":
script_cmd.append("--test-list=%s" % self._args[0])
else:
script_cmd.extend(self._args)
self._AppendGtestFilter(tool, "layout", script_cmd)
# Now run script_cmd with the wrapper in cmd
cmd.extend(["--"])
cmd.extend(script_cmd)
# Layout tests often times fail quickly, but the buildbot remains green.
# Detect this situation when running with the default chunk size.
if chunk_size == self.LAYOUT_TESTS_DEFAULT_CHUNK_SIZE:
min_runtime_in_seconds = 120
else:
min_runtime_in_seconds = 0
ret = tool.Run(cmd, "layout", min_runtime_in_seconds=min_runtime_in_seconds)
return ret
def TestLayout(self):
# A "chunk file" is maintained in the local directory so that each test
# runs a slice of the layout tests of size chunk_size that increments with
# each run. Since tests can be added and removed from the layout tests at
# any time, this is not going to give exact coverage, but it will allow us
# to continuously run small slices of the layout tests under valgrind rather
# than having to run all of them in one shot.
chunk_size = self._options.num_tests
if chunk_size == 0 or len(self._args):
return self.TestLayoutChunk(0, 0)
chunk_num = 0
chunk_file = os.path.join("valgrind_layout_chunk.txt")
logging.info("Reading state from " + chunk_file)
try:
f = open(chunk_file)
if f:
chunk_str = f.read()
if len(chunk_str):
chunk_num = int(chunk_str)
# This should be enough so that we have a couple of complete runs
# of test data stored in the archive (although note that when we loop
# we are almost guaranteed not to be at the end of the test list).
if chunk_num > 10000:
chunk_num = 0
f.close()
except IOError, (errno, strerror):
logging.error("error reading from file %s (%d, %s)" % (chunk_file,
errno, strerror))
# Save the new chunk size before running the tests. Otherwise if a
# particular chunk hangs the bot, the chunk number will never get
# incremented and the bot will be wedged.
logging.info("Saving state to " + chunk_file)
try:
f = open(chunk_file, "w")
chunk_num += 1
f.write("%d" % chunk_num)
f.close()
except IOError, (errno, strerror):
logging.error("error writing to file %s (%d, %s)" % (chunk_file, errno,
strerror))
# Since we're running small chunks of the layout tests, it's important to
# mark the ones that have errors in them. These won't be visible in the
# summary list for long, but will be useful for someone reviewing this bot.
return self.TestLayoutChunk(chunk_num, chunk_size)
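# Worked example (editorial): on a fresh checkout there is no state file,
# so chunk_num stays 0, is incremented to 1 and written out, and this run
# executes chunk 1 -- tests [300, 600) with the default chunk size of 300.
# The next run reads "1", writes "2" and executes [600, 900), and so on,
# wrapping around the test list and resetting once the counter passes
# 10000.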
# The known list of tests.
# Recognise the original abbreviations as well as full executable names.
_test_list = {
"cmdline" : RunCmdLine,
"addressinput": TestAddressInput,
"libaddressinput_unittests": TestAddressInput,
"accessibility": TestAccessibility,
"angle": TestAngle, "angle_unittests": TestAngle,
"app_list": TestAppList, "app_list_unittests": TestAppList,
"ash": TestAsh, "ash_unittests": TestAsh,
"ash_shell": TestAshShell, "ash_shell_unittests": TestAshShell,
"aura": TestAura, "aura_unittests": TestAura,
"base": TestBase, "base_unittests": TestBase,
"blink_heap": TestBlinkHeap,
"blink_platform": TestBlinkPlatform,
"browser": TestBrowser, "browser_tests": TestBrowser,
"cacheinvalidation": TestCacheInvalidation,
"cacheinvalidation_unittests": TestCacheInvalidation,
"cast": TestCast, "cast_unittests": TestCast,
"cc": TestCC, "cc_unittests": TestCC,
"chrome_app": TestChromeApp,
"chrome_elf": TestChromeElf,
"chromedriver": TestChromeDriver,
"chromeos": TestChromeOS, "chromeos_unittests": TestChromeOS,
"cloud_print": TestCloudPrint,
"cloud_print_unittests": TestCloudPrint,
"components": TestComponents,"components_unittests": TestComponents,
"compositor": TestCompositor,"compositor_unittests": TestCompositor,
"content": TestContent, "content_unittests": TestContent,
"content_browsertests": TestContentBrowser,
"courgette": TestCourgette, "courgette_unittests": TestCourgette,
"crypto": TestCrypto, "crypto_unittests": TestCrypto,
"device": TestDevice, "device_unittests": TestDevice,
"display": TestDisplay, "display_unittests": TestDisplay,
"events": TestEvents, "events_unittests": TestEvents,
"extensions": TestExtensions, "extensions_unittests": TestExtensions,
"ffmpeg": TestFFmpeg, "ffmpeg_unittests": TestFFmpeg,
"ffmpeg_regression_tests": TestFFmpegRegressions,
"gcm": TestGCM, "gcm_unit_tests": TestGCM,
"gin": TestGin, "gin_unittests": TestGin,
"gfx": TestGfx, "gfx_unittests": TestGfx,
"google_apis": TestGoogleApis,
"gpu": TestGPU, "gpu_unittests": TestGPU,
"ipc": TestIpc, "ipc_tests": TestIpc,
"installer_util": TestInstallerUtil,
"interactive_ui": TestInteractiveUI,
"jingle": TestJingle, "jingle_unittests": TestJingle,
"keyboard": TestKeyboard, "keyboard_unittests": TestKeyboard,
"layout": TestLayout, "layout_tests": TestLayout,
"media": TestMedia, "media_unittests": TestMedia,
"message_center": TestMessageCenter,
"message_center_unittests" : TestMessageCenter,
"mojo_apps_js": TestMojoAppsJS,
"mojo_common": TestMojoCommon,
"mojo_js": TestMojoJS,
"mojo_system": TestMojoSystem,
"mojo_public_system": TestMojoPublicSystem,
"mojo_public_utility": TestMojoPublicUtility,
"mojo_public_bindings": TestMojoPublicBindings,
"mojo_public_env": TestMojoPublicEnv,
"mojo_public_sysperf": TestMojoPublicSysPerf,
"mojo_application_manager": TestMojoApplicationManager,
"mojo_view_manager": TestMojoViewManager,
"net": TestNet, "net_unittests": TestNet,
"net_perf": TestNetPerf, "net_perftests": TestNetPerf,
"phonenumber": TestPhoneNumber,
"libphonenumber_unittests": TestPhoneNumber,
"ppapi": TestPPAPI, "ppapi_unittests": TestPPAPI,
"printing": TestPrinting, "printing_unittests": TestPrinting,
"remoting": TestRemoting, "remoting_unittests": TestRemoting,
"safe_browsing": TestSafeBrowsing, "safe_browsing_tests": TestSafeBrowsing,
"sandbox": TestLinuxSandbox, "sandbox_linux_unittests": TestLinuxSandbox,
"sql": TestSql, "sql_unittests": TestSql,
"sync": TestSync, "sync_unit_tests": TestSync,
"sync_integration_tests": TestSyncIntegration,
"sync_integration": TestSyncIntegration,
"ui_unit": TestUIUnit, "ui_unittests": TestUIUnit,
"unit": TestUnit, "unit_tests": TestUnit,
"url": TestURL, "url_unittests": TestURL,
"views": TestViews, "views_unittests": TestViews,
"webkit": TestLayout,
}
def _main():
parser = optparse.OptionParser("usage: %prog -b <dir> -t <test> "
"[-t <test> ...]")
parser.add_option("--help-tests", dest="help_tests", action="store_true",
default=False, help="List all available tests")
parser.add_option("-b", "--build-dir",
help="the location of the compiler output")
parser.add_option("--target", help="Debug or Release")
parser.add_option("-t", "--test", action="append", default=[],
help="which test to run, supports test:gtest_filter format "
"as well.")
parser.add_option("--baseline", action="store_true", default=False,
help="generate baseline data instead of validating")
parser.add_option("--gtest_filter",
help="additional arguments to --gtest_filter")
parser.add_option("--gtest_repeat", help="argument for --gtest_repeat")
parser.add_option("--gtest_shuffle", action="store_true", default=False,
help="Randomize tests' orders on every iteration.")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="verbose output - enable debug log messages")
parser.add_option("--tool", dest="valgrind_tool", default="memcheck",
help="specify a valgrind tool to run the tests under")
parser.add_option("--tool_flags", dest="valgrind_tool_flags", default="",
help="specify custom flags for the selected valgrind tool")
parser.add_option("--keep_logs", action="store_true", default=False,
help="store memory tool logs in the <tool>.logs directory "
"instead of /tmp.\nThis can be useful for tool "
"developers/maintainers.\nPlease note that the <tool>"
".logs directory will be clobbered on tool startup.")
parser.add_option("-n", "--num_tests", type="int",
default=ChromeTests.LAYOUT_TESTS_DEFAULT_CHUNK_SIZE,
help="for layout tests: # of subtests per run. 0 for all.")
# TODO(thestig) Remove this if we can.
parser.add_option("--gtest_color", dest="gtest_color", default="no",
help="dummy compatibility flag for sharding_supervisor.")
parser.add_option("--brave-new-test-launcher", action="store_true",
help="run the tests with --brave-new-test-launcher")
parser.add_option("--test-launcher-bot-mode", action="store_true",
help="run the tests with --test-launcher-bot-mode")
options, args = parser.parse_args()
# Bake target into build_dir.
if options.target and options.build_dir:
assert (options.target !=
os.path.basename(os.path.dirname(options.build_dir)))
options.build_dir = os.path.join(os.path.abspath(options.build_dir),
options.target)
if options.verbose:
logging_utils.config_root(logging.DEBUG)
else:
logging_utils.config_root()
if options.help_tests:
ChromeTests.ShowTests()
return 0
if not options.test:
parser.error("--test not specified")
if len(options.test) != 1 and options.gtest_filter:
parser.error("--gtest_filter and multiple tests don't make sense together")
for t in options.test:
tests = ChromeTests(options, args, t)
ret = tests.Run()
if ret: return ret
return 0
if __name__ == "__main__":
sys.exit(_main())
| bsd-3-clause |
christophlsa/odoo | addons/hr_payroll/report/report_contribution_register.py | 377 | 3380 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from datetime import datetime
from dateutil import relativedelta
from openerp.osv import osv
from openerp.report import report_sxw
class contribution_register_report(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(contribution_register_report, self).__init__(cr, uid, name, context)
self.localcontext.update({
'get_payslip_lines': self._get_payslip_lines,
'sum_total': self.sum_total,
})
def set_context(self, objects, data, ids, report_type=None):
self.date_from = data['form'].get('date_from', time.strftime('%Y-%m-%d'))
self.date_to = data['form'].get('date_to', str(datetime.now() + relativedelta.relativedelta(months=+1, day=1, days=-1))[:10])
return super(contribution_register_report, self).set_context(objects, data, ids, report_type=report_type)
def sum_total(self):
return self.regi_total
def _get_payslip_lines(self, obj):
payslip_line = self.pool.get('hr.payslip.line')
payslip_lines = []
res = []
self.regi_total = 0.0
self.cr.execute("SELECT pl.id from hr_payslip_line as pl "\
"LEFT JOIN hr_payslip AS hp on (pl.slip_id = hp.id) "\
"WHERE (hp.date_from >= %s) AND (hp.date_to <= %s) "\
"AND pl.register_id = %s "\
"AND hp.state = 'done' "\
"ORDER BY pl.slip_id, pl.sequence",
(self.date_from, self.date_to, obj.id))
payslip_lines = [x[0] for x in self.cr.fetchall()]
for line in payslip_line.browse(self.cr, self.uid, payslip_lines):
res.append({
'payslip_name': line.slip_id.name,
'name': line.name,
'code': line.code,
'quantity': line.quantity,
'amount': line.amount,
'total': line.total,
})
self.regi_total += line.total
return res
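# Editorial sketch of one entry appended to `res` above (figures made up):
#   {'payslip_name': 'Salary Slip of John Doe', 'name': 'Basic Salary',
#    'code': 'BASIC', 'quantity': 1.0, 'amount': 3000.0, 'total': 3000.0}
# while self.regi_total accumulates the `total` column across every
# payslip line selected for the register.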
class wrapped_report_contribution_register(osv.AbstractModel):
_name = 'report.hr_payroll.report_contributionregister'
_inherit = 'report.abstract_report'
_template = 'hr_payroll.report_contributionregister'
_wrapped_report_class = contribution_register_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
tomhughes/mapnik | scons/scons-local-4.1.0/SCons/Scanner/Prog.py | 4 | 3536 | # MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Dependency scanner for program files."""
import SCons.Node
import SCons.Node.FS
import SCons.Scanner
import SCons.Util
# global, set by --debug=findlibs
print_find_libs = None
def ProgramScanner(**kw):
"""Return a prototype Scanner instance for scanning executable
files for static-lib dependencies"""
kw['path_function'] = SCons.Scanner.FindPathDirs('LIBPATH')
ps = SCons.Scanner.Base(scan, "ProgramScanner", **kw)
return ps
def _subst_libs(env, libs):
"""Substitute environment variables and split into list."""
if SCons.Util.is_String(libs):
libs = env.subst(libs)
if SCons.Util.is_String(libs):
libs = libs.split()
elif SCons.Util.is_Sequence(libs):
_libs = []
for l in libs:
_libs += _subst_libs(env, l)
libs = _libs
else:
# libs is an object (Node, for example)
libs = [libs]
return libs
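# Editorial sketch of how _subst_libs() flattens LIBS (variable values are
# hypothetical):
#   env = Environment(LIBS=['m', '$EXTRA_LIBS'], EXTRA_LIBS='dl pthread')
#   _subst_libs(env, env['LIBS'])   # -> ['m', 'dl', 'pthread']
# Strings are substituted and split on whitespace, sequences are flattened
# recursively, and any other object (a Node, say) is wrapped in a list.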
def scan(node, env, libpath = ()):
"""Scans program files for static-library dependencies.
It will search the LIBPATH environment variable
for libraries specified in the LIBS variable, returning any
files it finds as dependencies.
"""
try:
libs = env['LIBS']
except KeyError:
# There are no LIBS in this environment, so just return a null list:
return []
libs = _subst_libs(env, libs)
try:
prefix = env['LIBPREFIXES']
if not SCons.Util.is_List(prefix):
prefix = [ prefix ]
except KeyError:
prefix = [ '' ]
try:
suffix = env['LIBSUFFIXES']
if not SCons.Util.is_List(suffix):
suffix = [ suffix ]
except KeyError:
suffix = [ '' ]
pairs = []
for suf in map(env.subst, suffix):
for pref in map(env.subst, prefix):
pairs.append((pref, suf))
result = []
if callable(libpath):
libpath = libpath()
find_file = SCons.Node.FS.find_file
adjustixes = SCons.Util.adjustixes
for lib in libs:
if SCons.Util.is_String(lib):
for pref, suf in pairs:
l = adjustixes(lib, pref, suf)
l = find_file(l, libpath, verbose=print_find_libs)
if l:
result.append(l)
else:
result.append(lib)
return result
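# Editorial sketch (values hypothetical): with LIBS=['m'],
# LIBPREFIXES=['lib'] and LIBSUFFIXES=['.a'], the loop above builds the
# candidate name "libm.a" via adjustixes() and searches each LIBPATH
# directory for it, so the program node gains a dependency on the static
# library file and relinks whenever that library changes.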
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| lgpl-2.1 |
drodger/django-scheduler | schedule/forms.py | 2 | 1568 | from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from schedule.models import Event, Occurrence
from schedule.widgets import SpectrumColorPicker
class SpanForm(forms.ModelForm):
start = forms.SplitDateTimeField(label=_("start"))
end = forms.SplitDateTimeField(label=_("end"),
help_text=_("The end time must be later than start time."))
def clean(self):
if 'end' in self.cleaned_data and 'start' in self.cleaned_data:
if self.cleaned_data['end'] <= self.cleaned_data['start']:
raise forms.ValidationError(_("The end time must be later than start time."))
return self.cleaned_data
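# Usage sketch (editorial; field data is made up, and other required model
# fields may contribute their own errors). Because clean() rejects spans
# where end <= start, a reversed span fails validation:
#   form = OccurrenceForm(data={
#       'start_0': '2018-01-01', 'start_1': '10:00',
#       'end_0': '2018-01-01', 'end_1': '09:00',
#   })
#   form.is_valid()          # False
#   form.errors['__all__']   # includes "The end time must be later ..."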
class EventForm(SpanForm):
def __init__(self, *args, **kwargs):
super(EventForm, self).__init__(*args, **kwargs)
end_recurring_period = forms.DateTimeField(label=_("End recurring period"),
help_text=_("This date is ignored for one time only events."),
required=False)
class Meta(object):
model = Event
exclude = ('creator', 'created_on', 'calendar')
class OccurrenceForm(SpanForm):
class Meta(object):
model = Occurrence
exclude = ('original_start', 'original_end', 'event', 'cancelled')
class EventAdminForm(forms.ModelForm):
class Meta:
exclude = []
model = Event
widgets = {
'color_event': SpectrumColorPicker,
}
| bsd-3-clause |
ronakkhunt/kuma | kuma/attachments/feeds.py | 21 | 1299 | from django.utils.translation import ugettext as _
from kuma.wiki.feeds import DocumentsFeed
from .models import AttachmentRevision
class AttachmentsFeed(DocumentsFeed):
title = _("MDN recent file changes")
subtitle = _("Recent revisions to MDN file attachments")
def items(self):
return (AttachmentRevision.objects.prefetch_related('creator',
'attachment')
.order_by('-created')[:50])
def item_title(self, item):
return item.title
def item_description(self, item):
if item.get_previous() is None:
return '<p>Created by: %s</p>' % item.creator.username
else:
return '<p>Edited by %s: %s</p>' % (item.creator.username,
                                    item.comment)
def item_link(self, item):
return self.request.build_absolute_uri(
item.attachment.get_absolute_url())
def item_pubdate(self, item):
return item.created
def item_author_name(self, item):
return item.creator.username
def item_author_link(self, item):
return self.request.build_absolute_uri(item.creator.get_absolute_url())
def item_categories(self, item):
return []
| mpl-2.0 |
mistercrunch/airflow | tests/models/test_taskinstance.py | 3 | 79775 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
import os
import time
import unittest
import urllib
from typing import List, Optional, Union, cast
from unittest import mock
from unittest.mock import call, mock_open, patch
import pendulum
import pytest
from freezegun import freeze_time
from parameterized import param, parameterized
from sqlalchemy.orm.session import Session
from airflow import models, settings
from airflow.exceptions import AirflowException, AirflowFailException, AirflowSkipException
from airflow.jobs.scheduler_job import SchedulerJob
from airflow.models import (
DAG,
DagModel,
DagRun,
Pool,
RenderedTaskInstanceFields,
TaskInstance as TI,
TaskReschedule,
Variable,
)
from airflow.operators.bash import BashOperator
from airflow.operators.dummy import DummyOperator
from airflow.operators.python import PythonOperator
from airflow.sensors.base import BaseSensorOperator
from airflow.sensors.python import PythonSensor
from airflow.serialization.serialized_objects import SerializedBaseOperator
from airflow.stats import Stats
from airflow.ti_deps.dependencies_deps import REQUEUEABLE_DEPS, RUNNING_DEPS
from airflow.ti_deps.dependencies_states import RUNNABLE_STATES
from airflow.ti_deps.deps.base_ti_dep import TIDepStatus
from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep
from airflow.utils import timezone
from airflow.utils.session import create_session, provide_session
from airflow.utils.state import State
from airflow.utils.types import DagRunType
from airflow.version import version
from tests.models import DEFAULT_DATE
from tests.test_utils import db
from tests.test_utils.asserts import assert_queries_count
from tests.test_utils.config import conf_vars
class CallbackWrapper:
task_id: Optional[str] = None
dag_id: Optional[str] = None
execution_date: Optional[datetime.datetime] = None
task_state_in_callback: Optional[str] = None
callback_ran = False
def wrap_task_instance(self, ti):
self.task_id = ti.task_id
self.dag_id = ti.dag_id
self.execution_date = ti.execution_date
self.task_state_in_callback = ""
self.callback_ran = False
def success_handler(self, context): # pylint: disable=unused-argument
self.callback_ran = True
session = settings.Session()
temp_instance = (
session.query(TI)
.filter(TI.task_id == self.task_id)
.filter(TI.dag_id == self.dag_id)
.filter(TI.execution_date == self.execution_date)
.one()
)
self.task_state_in_callback = temp_instance.state
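# Editorial sketch of how the wrapper above is exercised (task and dag
# names are illustrative):
#   callback_wrapper = CallbackWrapper()
#   task = DummyOperator(task_id='op', dag=dag,
#                        on_success_callback=callback_wrapper.success_handler)
#   ti = TI(task=task, execution_date=timezone.utcnow())
#   callback_wrapper.wrap_task_instance(ti)
#   ti.run()
#   assert callback_wrapper.callback_ran
# task_state_in_callback then records the state the task instance had in
# the database while the success callback was running.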
class TestTaskInstance(unittest.TestCase):
@staticmethod
def clean_db():
db.clear_db_dags()
db.clear_db_pools()
db.clear_db_runs()
db.clear_db_task_fail()
db.clear_rendered_ti_fields()
db.clear_db_task_reschedule()
def setUp(self):
self.clean_db()
with create_session() as session:
test_pool = Pool(pool='test_pool', slots=1)
session.add(test_pool)
session.commit()
def tearDown(self):
self.clean_db()
def test_set_task_dates(self):
"""
Test that tasks properly take start/end dates from DAGs
"""
dag = DAG('dag', start_date=DEFAULT_DATE, end_date=DEFAULT_DATE + datetime.timedelta(days=10))
op1 = DummyOperator(task_id='op_1', owner='test')
self.assertTrue(op1.start_date is None and op1.end_date is None)
# dag should assign its dates to op1 because op1 has no dates
dag.add_task(op1)
self.assertTrue(op1.start_date == dag.start_date and op1.end_date == dag.end_date)
op2 = DummyOperator(
task_id='op_2',
owner='test',
start_date=DEFAULT_DATE - datetime.timedelta(days=1),
end_date=DEFAULT_DATE + datetime.timedelta(days=11),
)
# dag should assign its dates to op2 because they are more restrictive
dag.add_task(op2)
self.assertTrue(op2.start_date == dag.start_date and op2.end_date == dag.end_date)
op3 = DummyOperator(
task_id='op_3',
owner='test',
start_date=DEFAULT_DATE + datetime.timedelta(days=1),
end_date=DEFAULT_DATE + datetime.timedelta(days=9),
)
# op3 should keep its dates because they are more restrictive
dag.add_task(op3)
self.assertTrue(op3.start_date == DEFAULT_DATE + datetime.timedelta(days=1))
self.assertTrue(op3.end_date == DEFAULT_DATE + datetime.timedelta(days=9))
def test_timezone_awareness(self):
naive_datetime = DEFAULT_DATE.replace(tzinfo=None)
# check ti without dag (just for bw compat)
op_no_dag = DummyOperator(task_id='op_no_dag')
ti = TI(task=op_no_dag, execution_date=naive_datetime)
self.assertEqual(ti.execution_date, DEFAULT_DATE)
# check with dag without localized execution_date
dag = DAG('dag', start_date=DEFAULT_DATE)
op1 = DummyOperator(task_id='op_1')
dag.add_task(op1)
ti = TI(task=op1, execution_date=naive_datetime)
self.assertEqual(ti.execution_date, DEFAULT_DATE)
# with dag and localized execution_date
tzinfo = pendulum.timezone("Europe/Amsterdam")
execution_date = timezone.datetime(2016, 1, 1, 1, 0, 0, tzinfo=tzinfo)
utc_date = timezone.convert_to_utc(execution_date)
ti = TI(task=op1, execution_date=execution_date)
self.assertEqual(ti.execution_date, utc_date)
def test_task_naive_datetime(self):
naive_datetime = DEFAULT_DATE.replace(tzinfo=None)
op_no_dag = DummyOperator(
task_id='test_task_naive_datetime', start_date=naive_datetime, end_date=naive_datetime
)
self.assertTrue(op_no_dag.start_date.tzinfo)
self.assertTrue(op_no_dag.end_date.tzinfo)
def test_set_dag(self):
"""
Test assigning Operators to Dags, including deferred assignment
"""
dag = DAG('dag', start_date=DEFAULT_DATE)
dag2 = DAG('dag2', start_date=DEFAULT_DATE)
op = DummyOperator(task_id='op_1', owner='test')
# no dag assigned
self.assertFalse(op.has_dag())
self.assertRaises(AirflowException, getattr, op, 'dag')
# no improper assignment
with self.assertRaises(TypeError):
op.dag = 1
op.dag = dag
# no reassignment
with self.assertRaises(AirflowException):
op.dag = dag2
# but assigning the same dag is ok
op.dag = dag
self.assertIs(op.dag, dag)
self.assertIn(op, dag.tasks)
def test_infer_dag(self):
dag = DAG('dag', start_date=DEFAULT_DATE)
dag2 = DAG('dag2', start_date=DEFAULT_DATE)
op1 = DummyOperator(task_id='test_op_1', owner='test')
op2 = DummyOperator(task_id='test_op_2', owner='test')
op3 = DummyOperator(task_id='test_op_3', owner='test', dag=dag)
op4 = DummyOperator(task_id='test_op_4', owner='test', dag=dag2)
# double check dags
self.assertEqual([i.has_dag() for i in [op1, op2, op3, op4]], [False, False, True, True])
# can't combine operators with no dags
self.assertRaises(AirflowException, op1.set_downstream, op2)
# op2 should infer dag from op1
op1.dag = dag
op1.set_downstream(op2)
self.assertIs(op2.dag, dag)
# can't assign across multiple DAGs
self.assertRaises(AirflowException, op1.set_downstream, op4)
self.assertRaises(AirflowException, op1.set_downstream, [op3, op4])
def test_bitshift_compose_operators(self):
dag = DAG('dag', start_date=DEFAULT_DATE)
with dag:
op1 = DummyOperator(task_id='test_op_1', owner='test')
op2 = DummyOperator(task_id='test_op_2', owner='test')
op3 = DummyOperator(task_id='test_op_3', owner='test')
op1 >> op2 << op3
# op2 should be downstream of both
self.assertIn(op2, op1.downstream_list)
self.assertIn(op2, op3.downstream_list)
@patch.object(DAG, 'get_concurrency_reached')
def test_requeue_over_dag_concurrency(self, mock_concurrency_reached):
mock_concurrency_reached.return_value = True
dag = DAG(
dag_id='test_requeue_over_dag_concurrency',
start_date=DEFAULT_DATE,
max_active_runs=1,
concurrency=2,
)
task = DummyOperator(task_id='test_requeue_over_dag_concurrency_op', dag=dag)
ti = TI(task=task, execution_date=timezone.utcnow(), state=State.QUEUED)
# TI.run() will sync from DB before validating deps.
with create_session() as session:
session.add(ti)
session.commit()
ti.run()
self.assertEqual(ti.state, State.NONE)
def test_requeue_over_task_concurrency(self):
dag = DAG(
dag_id='test_requeue_over_task_concurrency',
start_date=DEFAULT_DATE,
max_active_runs=1,
concurrency=2,
)
task = DummyOperator(task_id='test_requeue_over_task_concurrency_op', dag=dag, task_concurrency=0)
ti = TI(task=task, execution_date=timezone.utcnow(), state=State.QUEUED)
# TI.run() will sync from DB before validating deps.
with create_session() as session:
session.add(ti)
session.commit()
ti.run()
self.assertEqual(ti.state, State.NONE)
def test_requeue_over_pool_concurrency(self):
dag = DAG(
dag_id='test_requeue_over_pool_concurrency',
start_date=DEFAULT_DATE,
max_active_runs=1,
concurrency=2,
)
task = DummyOperator(task_id='test_requeue_over_pool_concurrency_op', dag=dag, task_concurrency=0)
ti = TI(task=task, execution_date=timezone.utcnow(), state=State.QUEUED)
# TI.run() will sync from DB before validating deps.
with create_session() as session:
pool = session.query(Pool).filter(Pool.pool == 'test_pool').one()
pool.slots = 0
session.add(ti)
session.commit()
ti.run()
self.assertEqual(ti.state, State.NONE)
def test_not_requeue_non_requeueable_task_instance(self):
dag = models.DAG(dag_id='test_not_requeue_non_requeueable_task_instance')
# Use BaseSensorOperator because sensor got
# one additional DEP in BaseSensorOperator().deps
task = BaseSensorOperator(
task_id='test_not_requeue_non_requeueable_task_instance_op',
dag=dag,
pool='test_pool',
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow(), state=State.QUEUED)
with create_session() as session:
session.add(ti)
session.commit()
all_deps = RUNNING_DEPS | task.deps
all_non_requeueable_deps = all_deps - REQUEUEABLE_DEPS
patch_dict = {}
for dep in all_non_requeueable_deps:
class_name = dep.__class__.__name__
dep_patch = patch(f'{dep.__module__}.{class_name}.{dep._get_dep_statuses.__name__}')
method_patch = dep_patch.start()
method_patch.return_value = iter([TIDepStatus('mock_' + class_name, True, 'mock')])
patch_dict[class_name] = (dep_patch, method_patch)
for class_name, (dep_patch, method_patch) in patch_dict.items():
method_patch.return_value = iter([TIDepStatus('mock_' + class_name, False, 'mock')])
ti.run()
self.assertEqual(ti.state, State.QUEUED)
dep_patch.return_value = TIDepStatus('mock_' + class_name, True, 'mock')
for (dep_patch, method_patch) in patch_dict.values():
dep_patch.stop()
def test_mark_non_runnable_task_as_success(self):
"""
test that running task with mark_success param update task state
as SUCCESS without running task despite it fails dependency checks.
"""
non_runnable_state = (set(State.task_states) - RUNNABLE_STATES - set(State.SUCCESS)).pop()
dag = models.DAG(dag_id='test_mark_non_runnable_task_as_success')
task = DummyOperator(
task_id='test_mark_non_runnable_task_as_success_op',
dag=dag,
pool='test_pool',
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow(), state=non_runnable_state)
# TI.run() will sync from DB before validating deps.
with create_session() as session:
session.add(ti)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
session.commit()
ti.run(mark_success=True)
self.assertEqual(ti.state, State.SUCCESS)
def test_run_pooling_task(self):
"""
test that running a task in an existing pool update task state as SUCCESS.
"""
dag = models.DAG(dag_id='test_run_pooling_task')
task = DummyOperator(
task_id='test_run_pooling_task_op',
dag=dag,
pool='test_pool',
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run()
db.clear_db_pools()
self.assertEqual(ti.state, State.SUCCESS)
def test_pool_slots_property(self):
"""
test that try to create a task with pool_slots less than 1
"""
def create_task_instance():
dag = models.DAG(dag_id='test_run_pooling_task')
task = DummyOperator(
task_id='test_run_pooling_task_op',
dag=dag,
pool='test_pool',
pool_slots=0,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
return TI(task=task, execution_date=timezone.utcnow())
self.assertRaises(AirflowException, create_task_instance)
@provide_session
def test_ti_updates_with_task(self, session=None):
"""
test that updating the executor_config propagates to the TaskInstance DB
"""
with models.DAG(dag_id='test_run_pooling_task') as dag:
task = DummyOperator(
task_id='test_run_pooling_task_op',
owner='airflow',
executor_config={'foo': 'bar'},
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
ti.run(session=session)
tis = dag.get_task_instances()
self.assertEqual({'foo': 'bar'}, tis[0].executor_config)
with models.DAG(dag_id='test_run_pooling_task') as dag:
task2 = DummyOperator(
task_id='test_run_pooling_task_op',
owner='airflow',
executor_config={'bar': 'baz'},
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task2, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
ti.run(session=session)
tis = dag.get_task_instances()
self.assertEqual({'bar': 'baz'}, tis[1].executor_config)
session.rollback()
def test_run_pooling_task_with_mark_success(self):
"""
test that running task in an existing pool with mark_success param
update task state as SUCCESS without running task
despite it fails dependency checks.
"""
dag = models.DAG(dag_id='test_run_pooling_task_with_mark_success')
task = DummyOperator(
task_id='test_run_pooling_task_with_mark_success_op',
dag=dag,
pool='test_pool',
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run(mark_success=True)
self.assertEqual(ti.state, State.SUCCESS)
def test_run_pooling_task_with_skip(self):
"""
test that running task which returns AirflowSkipOperator will end
up in a SKIPPED state.
"""
def raise_skip_exception():
raise AirflowSkipException
dag = models.DAG(dag_id='test_run_pooling_task_with_skip')
task = PythonOperator(
task_id='test_run_pooling_task_with_skip',
dag=dag,
python_callable=raise_skip_exception,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run()
self.assertEqual(State.SKIPPED, ti.state)
def test_retry_delay(self):
"""
Test that retry delays are respected
"""
dag = models.DAG(dag_id='test_retry_handling')
task = BashOperator(
task_id='test_retry_handling_op',
bash_command='exit 1',
retries=1,
retry_delay=datetime.timedelta(seconds=3),
dag=dag,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
def run_with_error(ti):
try:
ti.run()
except AirflowException:
pass
ti = TI(task=task, execution_date=timezone.utcnow())
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
self.assertEqual(ti.try_number, 1)
# first run -- up for retry
run_with_error(ti)
self.assertEqual(ti.state, State.UP_FOR_RETRY)
self.assertEqual(ti.try_number, 2)
# second run -- still up for retry because retry_delay hasn't expired
run_with_error(ti)
self.assertEqual(ti.state, State.UP_FOR_RETRY)
# third run -- failed
time.sleep(3)
run_with_error(ti)
self.assertEqual(ti.state, State.FAILED)
def test_retry_handling(self):
"""
Test that task retries are handled properly
"""
expected_rendered_ti_fields = {'env': None, 'bash_command': 'echo test_retry_handling; exit 1'}
dag = models.DAG(dag_id='test_retry_handling')
task = BashOperator(
task_id='test_retry_handling_op',
bash_command='echo {{dag.dag_id}}; exit 1',
retries=1,
retry_delay=datetime.timedelta(seconds=0),
dag=dag,
owner='test_pool',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
def run_with_error(ti):
try:
ti.run()
except AirflowException:
pass
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(ti.try_number, 1)
# first run -- up for retry
run_with_error(ti)
self.assertEqual(ti.state, State.UP_FOR_RETRY)
self.assertEqual(ti._try_number, 1)
self.assertEqual(ti.try_number, 2)
# second run -- fail
run_with_error(ti)
self.assertEqual(ti.state, State.FAILED)
self.assertEqual(ti._try_number, 2)
self.assertEqual(ti.try_number, 3)
# Clear the TI state since you can't run a task with a FAILED state without
# clearing it first
dag.clear()
# third run -- up for retry
run_with_error(ti)
self.assertEqual(ti.state, State.UP_FOR_RETRY)
self.assertEqual(ti._try_number, 3)
self.assertEqual(ti.try_number, 4)
# fourth run -- fail
run_with_error(ti)
ti.refresh_from_db()
self.assertEqual(ti.state, State.FAILED)
self.assertEqual(ti._try_number, 4)
self.assertEqual(ti.try_number, 5)
self.assertEqual(RenderedTaskInstanceFields.get_templated_fields(ti), expected_rendered_ti_fields)
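# Editorial note: the assertions above rely on the fact that, while a task
# is not running, TI.try_number reports _try_number + 1 -- the stored
# counter counts completed attempts, while the property names the attempt
# that would execute next.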
def test_next_retry_datetime(self):
delay = datetime.timedelta(seconds=30)
max_delay = datetime.timedelta(minutes=60)
dag = models.DAG(dag_id='fail_dag')
task = BashOperator(
task_id='task_with_exp_backoff_and_max_delay',
bash_command='exit 1',
retries=3,
retry_delay=delay,
retry_exponential_backoff=True,
max_retry_delay=max_delay,
dag=dag,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=DEFAULT_DATE)
ti.end_date = pendulum.instance(timezone.utcnow())
date = ti.next_retry_datetime()
# between 30 * 2^-1 and 30 * 2^0 (15 and 30)
period = ti.end_date.add(seconds=30) - ti.end_date.add(seconds=15)
self.assertTrue(date in period)
ti.try_number = 3
date = ti.next_retry_datetime()
# between 30 * 2^2 and 30 * 2^3 (120 and 240)
period = ti.end_date.add(seconds=240) - ti.end_date.add(seconds=120)
self.assertTrue(date in period)
ti.try_number = 5
date = ti.next_retry_datetime()
# between 30 * 2^4 and 30 * 2^5 (480 and 960)
period = ti.end_date.add(seconds=960) - ti.end_date.add(seconds=480)
self.assertTrue(date in period)
ti.try_number = 9
date = ti.next_retry_datetime()
self.assertEqual(date, ti.end_date + max_delay)
ti.try_number = 50
date = ti.next_retry_datetime()
self.assertEqual(date, ti.end_date + max_delay)
def test_next_retry_datetime_short_intervals(self):
delay = datetime.timedelta(seconds=1)
max_delay = datetime.timedelta(minutes=60)
dag = models.DAG(dag_id='fail_dag')
task = BashOperator(
task_id='task_with_exp_backoff_and_short_time_interval',
bash_command='exit 1',
retries=3,
retry_delay=delay,
retry_exponential_backoff=True,
max_retry_delay=max_delay,
dag=dag,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=DEFAULT_DATE)
ti.end_date = pendulum.instance(timezone.utcnow())
date = ti.next_retry_datetime()
        # the minimum backoff is clamped to one second; expect a date between 1 and 15 seconds after end_date
period = ti.end_date.add(seconds=15) - ti.end_date.add(seconds=1)
self.assertTrue(date in period)
def test_reschedule_handling(self):
"""
Test that task reschedules are handled properly
"""
# Return values of the python sensor callable, modified during tests
done = False
fail = False
def func():
if fail:
raise AirflowException()
return done
dag = models.DAG(dag_id='test_reschedule_handling')
task = PythonSensor(
task_id='test_reschedule_handling_sensor',
poke_interval=0,
mode='reschedule',
python_callable=func,
retries=1,
retry_delay=datetime.timedelta(seconds=0),
dag=dag,
owner='airflow',
pool='test_pool',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(ti._try_number, 0)
self.assertEqual(ti.try_number, 1)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
def run_ti_and_assert(
run_date,
expected_start_date,
expected_end_date,
expected_duration,
expected_state,
expected_try_number,
expected_task_reschedule_count,
):
with freeze_time(run_date):
try:
ti.run()
except AirflowException:
if not fail:
raise
ti.refresh_from_db()
self.assertEqual(ti.state, expected_state)
self.assertEqual(ti._try_number, expected_try_number)
self.assertEqual(ti.try_number, expected_try_number + 1)
self.assertEqual(ti.start_date, expected_start_date)
self.assertEqual(ti.end_date, expected_end_date)
self.assertEqual(ti.duration, expected_duration)
trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter
self.assertEqual(len(trs), expected_task_reschedule_count)
date1 = timezone.utcnow()
date2 = date1 + datetime.timedelta(minutes=1)
date3 = date2 + datetime.timedelta(minutes=1)
date4 = date3 + datetime.timedelta(minutes=1)
# Run with multiple reschedules.
# During reschedule the try number remains the same, but each reschedule is recorded.
# The start date is expected to remain the initial date, hence the duration increases.
# When finished the try number is incremented and there is no reschedule expected
# for this try.
done, fail = False, False
run_ti_and_assert(date1, date1, date1, 0, State.UP_FOR_RESCHEDULE, 0, 1)
done, fail = False, False
run_ti_and_assert(date2, date1, date2, 60, State.UP_FOR_RESCHEDULE, 0, 2)
done, fail = False, False
run_ti_and_assert(date3, date1, date3, 120, State.UP_FOR_RESCHEDULE, 0, 3)
done, fail = True, False
run_ti_and_assert(date4, date1, date4, 180, State.SUCCESS, 1, 0)
# Clear the task instance.
dag.clear()
ti.refresh_from_db()
self.assertEqual(ti.state, State.NONE)
self.assertEqual(ti._try_number, 1)
# Run again after clearing with reschedules and a retry.
# The retry increments the try number, and for that try no reschedule is expected.
# After the retry the start date is reset, hence the duration is also reset.
done, fail = False, False
run_ti_and_assert(date1, date1, date1, 0, State.UP_FOR_RESCHEDULE, 1, 1)
done, fail = False, True
run_ti_and_assert(date2, date1, date2, 60, State.UP_FOR_RETRY, 2, 0)
done, fail = False, False
run_ti_and_assert(date3, date3, date3, 0, State.UP_FOR_RESCHEDULE, 2, 1)
done, fail = True, False
run_ti_and_assert(date4, date3, date4, 60, State.SUCCESS, 3, 0)
def test_reschedule_handling_clear_reschedules(self):
"""
Test that task reschedules clearing are handled properly
"""
# Return values of the python sensor callable, modified during tests
done = False
fail = False
def func():
if fail:
raise AirflowException()
return done
dag = models.DAG(dag_id='test_reschedule_handling')
task = PythonSensor(
task_id='test_reschedule_handling_sensor',
poke_interval=0,
mode='reschedule',
python_callable=func,
retries=1,
retry_delay=datetime.timedelta(seconds=0),
dag=dag,
owner='airflow',
pool='test_pool',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
)
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(ti._try_number, 0)
self.assertEqual(ti.try_number, 1)
def run_ti_and_assert(
run_date,
expected_start_date,
expected_end_date,
expected_duration,
expected_state,
expected_try_number,
expected_task_reschedule_count,
):
with freeze_time(run_date):
try:
ti.run()
except AirflowException:
if not fail:
raise
ti.refresh_from_db()
self.assertEqual(ti.state, expected_state)
self.assertEqual(ti._try_number, expected_try_number)
self.assertEqual(ti.try_number, expected_try_number + 1)
self.assertEqual(ti.start_date, expected_start_date)
self.assertEqual(ti.end_date, expected_end_date)
self.assertEqual(ti.duration, expected_duration)
trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter
self.assertEqual(len(trs), expected_task_reschedule_count)
date1 = timezone.utcnow()
done, fail = False, False
run_ti_and_assert(date1, date1, date1, 0, State.UP_FOR_RESCHEDULE, 0, 1)
# Clear the task instance.
dag.clear()
ti.refresh_from_db()
self.assertEqual(ti.state, State.NONE)
self.assertEqual(ti._try_number, 0)
# Check that reschedules for ti have also been cleared.
trs = TaskReschedule.find_for_task_instance(ti) # pylint: disable=no-value-for-parameter
self.assertFalse(trs)
def test_depends_on_past(self):
dag = DAG(dag_id='test_depends_on_past', start_date=DEFAULT_DATE)
task = DummyOperator(
task_id='test_dop_task',
dag=dag,
depends_on_past=True,
)
dag.clear()
run_date = task.start_date + datetime.timedelta(days=5)
dag.create_dagrun(
execution_date=run_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti = TI(task, run_date)
# depends_on_past prevents the run
task.run(start_date=run_date, end_date=run_date, ignore_first_depends_on_past=False)
ti.refresh_from_db()
self.assertIs(ti.state, None)
# ignore first depends_on_past to allow the run
task.run(start_date=run_date, end_date=run_date, ignore_first_depends_on_past=True)
ti.refresh_from_db()
self.assertEqual(ti.state, State.SUCCESS)
# Parameterized tests to check for the correct firing
# of the trigger_rule under various circumstances
# Numeric fields are in order:
# successes, skipped, failed, upstream_failed, done
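    # For example, the row ['all_success', 5, 0, 0, 0, 0, True, None, True] reads:
    # trigger_rule='all_success', successes=5, skipped=0, failed=0,
    # upstream_failed=0, done=0, flag_upstream_failed=True, expect_state=None,
    # expect_completed=True.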
@parameterized.expand(
[
#
# Tests for all_success
#
['all_success', 5, 0, 0, 0, 0, True, None, True],
['all_success', 2, 0, 0, 0, 0, True, None, False],
['all_success', 2, 0, 1, 0, 0, True, State.UPSTREAM_FAILED, False],
['all_success', 2, 1, 0, 0, 0, True, State.SKIPPED, False],
#
# Tests for one_success
#
['one_success', 5, 0, 0, 0, 5, True, None, True],
['one_success', 2, 0, 0, 0, 2, True, None, True],
['one_success', 2, 0, 1, 0, 3, True, None, True],
['one_success', 2, 1, 0, 0, 3, True, None, True],
#
# Tests for all_failed
#
['all_failed', 5, 0, 0, 0, 5, True, State.SKIPPED, False],
['all_failed', 0, 0, 5, 0, 5, True, None, True],
['all_failed', 2, 0, 0, 0, 2, True, State.SKIPPED, False],
['all_failed', 2, 0, 1, 0, 3, True, State.SKIPPED, False],
['all_failed', 2, 1, 0, 0, 3, True, State.SKIPPED, False],
#
# Tests for one_failed
#
['one_failed', 5, 0, 0, 0, 0, True, None, False],
['one_failed', 2, 0, 0, 0, 0, True, None, False],
['one_failed', 2, 0, 1, 0, 0, True, None, True],
['one_failed', 2, 1, 0, 0, 3, True, None, False],
['one_failed', 2, 3, 0, 0, 5, True, State.SKIPPED, False],
#
# Tests for done
#
['all_done', 5, 0, 0, 0, 5, True, None, True],
['all_done', 2, 0, 0, 0, 2, True, None, False],
['all_done', 2, 0, 1, 0, 3, True, None, False],
['all_done', 2, 1, 0, 0, 3, True, None, False],
]
)
def test_check_task_dependencies(
self,
trigger_rule,
successes,
skipped,
failed,
upstream_failed,
done,
flag_upstream_failed,
expect_state,
expect_completed,
):
start_date = timezone.datetime(2016, 2, 1, 0, 0, 0)
dag = models.DAG('test-dag', start_date=start_date)
downstream = DummyOperator(task_id='downstream', dag=dag, owner='airflow', trigger_rule=trigger_rule)
for i in range(5):
task = DummyOperator(task_id=f'runme_{i}', dag=dag, owner='airflow')
task.set_downstream(downstream)
run_date = task.start_date + datetime.timedelta(days=5)
ti = TI(downstream, run_date)
dep_results = TriggerRuleDep()._evaluate_trigger_rule( # pylint: disable=no-value-for-parameter
ti=ti,
successes=successes,
skipped=skipped,
failed=failed,
upstream_failed=upstream_failed,
done=done,
flag_upstream_failed=flag_upstream_failed,
)
completed = all(dep.passed for dep in dep_results)
self.assertEqual(completed, expect_completed)
self.assertEqual(ti.state, expect_state)
def test_respects_prev_dagrun_dep(self):
with DAG(dag_id='test_dag'):
task = DummyOperator(task_id='task', start_date=DEFAULT_DATE)
ti = TI(task, DEFAULT_DATE)
failing_status = [TIDepStatus('test fail status name', False, 'test fail reason')]
passing_status = [TIDepStatus('test pass status name', True, 'test passing reason')]
with patch(
'airflow.ti_deps.deps.prev_dagrun_dep.PrevDagrunDep.get_dep_statuses', return_value=failing_status
):
self.assertFalse(ti.are_dependencies_met())
with patch(
'airflow.ti_deps.deps.prev_dagrun_dep.PrevDagrunDep.get_dep_statuses', return_value=passing_status
):
self.assertTrue(ti.are_dependencies_met())
@parameterized.expand(
[
(State.SUCCESS, True),
(State.SKIPPED, True),
(State.RUNNING, False),
(State.FAILED, False),
(State.NONE, False),
]
)
def test_are_dependents_done(self, downstream_ti_state, expected_are_dependents_done):
with DAG(dag_id='test_dag'):
task = DummyOperator(task_id='task', start_date=DEFAULT_DATE)
downstream_task = DummyOperator(task_id='downstream_task', start_date=DEFAULT_DATE)
task >> downstream_task
ti = TI(task, DEFAULT_DATE)
downstream_ti = TI(downstream_task, DEFAULT_DATE)
downstream_ti.set_state(downstream_ti_state)
self.assertEqual(ti.are_dependents_done(), expected_are_dependents_done)
def test_xcom_pull(self):
"""
Test xcom_pull, using different filtering methods.
"""
dag = models.DAG(
dag_id='test_xcom',
schedule_interval='@monthly',
start_date=timezone.datetime(2016, 6, 1, 0, 0, 0),
)
exec_date = timezone.utcnow()
# Push a value
task1 = DummyOperator(task_id='test_xcom_1', dag=dag, owner='airflow')
ti1 = TI(task=task1, execution_date=exec_date)
ti1.xcom_push(key='foo', value='bar')
# Push another value with the same key (but by a different task)
task2 = DummyOperator(task_id='test_xcom_2', dag=dag, owner='airflow')
ti2 = TI(task=task2, execution_date=exec_date)
ti2.xcom_push(key='foo', value='baz')
# Pull with no arguments
result = ti1.xcom_pull()
self.assertEqual(result, None)
# Pull the value pushed most recently by any task.
result = ti1.xcom_pull(key='foo')
        self.assertEqual(result, 'baz')
# Pull the value pushed by the first task
result = ti1.xcom_pull(task_ids='test_xcom_1', key='foo')
self.assertEqual(result, 'bar')
# Pull the value pushed by the second task
result = ti1.xcom_pull(task_ids='test_xcom_2', key='foo')
self.assertEqual(result, 'baz')
# Pull the values pushed by both tasks & Verify Order of task_ids pass & values returned
result = ti1.xcom_pull(task_ids=['test_xcom_1', 'test_xcom_2'], key='foo')
self.assertEqual(result, ['bar', 'baz'])
def test_xcom_pull_after_success(self):
"""
tests xcom set/clear relative to a task in a 'success' rerun scenario
"""
key = 'xcom_key'
value = 'xcom_value'
dag = models.DAG(dag_id='test_xcom', schedule_interval='@monthly')
task = DummyOperator(
task_id='test_xcom',
dag=dag,
pool='test_xcom',
owner='airflow',
start_date=timezone.datetime(2016, 6, 2, 0, 0, 0),
)
exec_date = timezone.utcnow()
ti = TI(task=task, execution_date=exec_date)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run(mark_success=True)
ti.xcom_push(key=key, value=value)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
ti.run()
# The second run and assert is to handle AIRFLOW-131 (don't clear on
# prior success)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
# Test AIRFLOW-703: Xcom shouldn't be cleared if the task doesn't
# execute, even if dependencies are ignored
ti.run(ignore_all_deps=True, mark_success=True)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
# Xcom IS finally cleared once task has executed
ti.run(ignore_all_deps=True)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), None)
def test_xcom_pull_different_execution_date(self):
"""
tests xcom fetch behavior with different execution dates, using
both xcom_pull with "include_prior_dates" and without
"""
key = 'xcom_key'
value = 'xcom_value'
dag = models.DAG(dag_id='test_xcom', schedule_interval='@monthly')
task = DummyOperator(
task_id='test_xcom',
dag=dag,
pool='test_xcom',
owner='airflow',
start_date=timezone.datetime(2016, 6, 2, 0, 0, 0),
)
exec_date = timezone.utcnow()
ti = TI(task=task, execution_date=exec_date)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run(mark_success=True)
ti.xcom_push(key=key, value=value)
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
ti.run()
exec_date += datetime.timedelta(days=1)
ti = TI(task=task, execution_date=exec_date)
ti.run()
# We have set a new execution date (and did not pass in
# 'include_prior_dates'which means this task should now have a cleared
# xcom value
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), None)
# We *should* get a value using 'include_prior_dates'
self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key, include_prior_dates=True), value)
def test_xcom_push_flag(self):
"""
Tests the option for Operators to push XComs
"""
value = 'hello'
task_id = 'test_no_xcom_push'
dag = models.DAG(dag_id='test_xcom')
# nothing saved to XCom
task = PythonOperator(
task_id=task_id,
dag=dag,
python_callable=lambda: value,
do_xcom_push=False,
owner='airflow',
start_date=datetime.datetime(2017, 1, 1),
)
ti = TI(task=task, execution_date=datetime.datetime(2017, 1, 1))
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
)
ti.run()
self.assertEqual(ti.xcom_pull(task_ids=task_id, key=models.XCOM_RETURN_KEY), None)
def test_post_execute_hook(self):
"""
Test that post_execute hook is called with the Operator's result.
The result ('error') will cause an error to be raised and trapped.
"""
class TestError(Exception):
pass
class TestOperator(PythonOperator):
def post_execute(self, context, result=None):
if result == 'error':
raise TestError('expected error.')
dag = models.DAG(dag_id='test_post_execute_dag')
task = TestOperator(
task_id='test_operator',
dag=dag,
python_callable=lambda: 'error',
owner='airflow',
start_date=timezone.datetime(2017, 2, 1),
)
ti = TI(task=task, execution_date=timezone.utcnow())
with self.assertRaises(TestError):
ti.run()
def test_check_and_change_state_before_execution(self):
dag = models.DAG(dag_id='test_check_and_change_state_before_execution')
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(ti._try_number, 0)
self.assertTrue(ti.check_and_change_state_before_execution())
# State should be running, and try_number column should be incremented
self.assertEqual(ti.state, State.RUNNING)
self.assertEqual(ti._try_number, 1)
def test_check_and_change_state_before_execution_dep_not_met(self):
dag = models.DAG(dag_id='test_check_and_change_state_before_execution')
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
task2 = DummyOperator(task_id='task2', dag=dag, start_date=DEFAULT_DATE)
task >> task2
ti = TI(task=task2, execution_date=timezone.utcnow())
self.assertFalse(ti.check_and_change_state_before_execution())
def test_try_number(self):
"""
Test the try_number accessor behaves in various running states
"""
dag = models.DAG(dag_id='test_check_and_change_state_before_execution')
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
ti = TI(task=task, execution_date=timezone.utcnow())
self.assertEqual(1, ti.try_number)
ti.try_number = 2
ti.state = State.RUNNING
self.assertEqual(2, ti.try_number)
ti.state = State.SUCCESS
self.assertEqual(3, ti.try_number)
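    # Semantics exercised above (inferred from these assertions): while the TI
    # is RUNNING, try_number reports the current attempt (_try_number); in any
    # other state it reports the attempt the next run would receive
    # (_try_number + 1).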
def test_get_num_running_task_instances(self):
session = settings.Session()
dag = models.DAG(dag_id='test_get_num_running_task_instances')
dag2 = models.DAG(dag_id='test_get_num_running_task_instances_dummy')
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
task2 = DummyOperator(task_id='task', dag=dag2, start_date=DEFAULT_DATE)
ti1 = TI(task=task, execution_date=DEFAULT_DATE)
ti2 = TI(task=task, execution_date=DEFAULT_DATE + datetime.timedelta(days=1))
ti3 = TI(task=task2, execution_date=DEFAULT_DATE)
ti1.state = State.RUNNING
ti2.state = State.QUEUED
ti3.state = State.RUNNING
session.add(ti1)
session.add(ti2)
session.add(ti3)
session.commit()
self.assertEqual(1, ti1.get_num_running_task_instances(session=session))
self.assertEqual(1, ti2.get_num_running_task_instances(session=session))
self.assertEqual(1, ti3.get_num_running_task_instances(session=session))
def test_log_url(self):
dag = DAG('dag', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
ti = TI(task=task, execution_date=datetime.datetime(2018, 1, 1))
expected_url = (
'http://localhost:8080/log?'
'execution_date=2018-01-01T00%3A00%3A00%2B00%3A00'
'&task_id=op'
'&dag_id=dag'
)
self.assertEqual(ti.log_url, expected_url)
def test_mark_success_url(self):
now = pendulum.now('Europe/Brussels')
dag = DAG('dag', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
ti = TI(task=task, execution_date=now)
query = urllib.parse.parse_qs(
urllib.parse.urlparse(ti.mark_success_url).query, keep_blank_values=True, strict_parsing=True
)
self.assertEqual(query['dag_id'][0], 'dag')
self.assertEqual(query['task_id'][0], 'op')
self.assertEqual(pendulum.parse(query['execution_date'][0]), now)
def test_overwrite_params_with_dag_run_conf(self):
task = DummyOperator(task_id='op')
ti = TI(task=task, execution_date=datetime.datetime.now())
dag_run = DagRun()
dag_run.conf = {"override": True}
params = {"override": False}
ti.overwrite_params_with_dag_run_conf(params, dag_run)
self.assertEqual(True, params["override"])
def test_overwrite_params_with_dag_run_none(self):
task = DummyOperator(task_id='op')
ti = TI(task=task, execution_date=datetime.datetime.now())
params = {"override": False}
ti.overwrite_params_with_dag_run_conf(params, None)
self.assertEqual(False, params["override"])
def test_overwrite_params_with_dag_run_conf_none(self):
task = DummyOperator(task_id='op')
ti = TI(task=task, execution_date=datetime.datetime.now())
params = {"override": False}
dag_run = DagRun()
ti.overwrite_params_with_dag_run_conf(params, dag_run)
self.assertEqual(False, params["override"])
@patch('airflow.models.taskinstance.send_email')
def test_email_alert(self, mock_send_email):
dag = models.DAG(dag_id='test_failure_email')
task = BashOperator(
task_id='test_email_alert', dag=dag, bash_command='exit 1', start_date=DEFAULT_DATE, email='to'
)
ti = TI(task=task, execution_date=timezone.utcnow())
try:
ti.run()
except AirflowException:
pass
(email, title, body), _ = mock_send_email.call_args
self.assertEqual(email, 'to')
self.assertIn('test_email_alert', title)
self.assertIn('test_email_alert', body)
self.assertIn('Try 1', body)
@conf_vars(
{
('email', 'subject_template'): '/subject/path',
('email', 'html_content_template'): '/html_content/path',
}
)
@patch('airflow.models.taskinstance.send_email')
def test_email_alert_with_config(self, mock_send_email):
dag = models.DAG(dag_id='test_failure_email')
task = BashOperator(
task_id='test_email_alert_with_config',
dag=dag,
bash_command='exit 1',
start_date=DEFAULT_DATE,
email='to',
)
ti = TI(task=task, execution_date=timezone.utcnow())
opener = mock_open(read_data='template: {{ti.task_id}}')
with patch('airflow.models.taskinstance.open', opener, create=True):
try:
ti.run()
except AirflowException:
pass
(email, title, body), _ = mock_send_email.call_args
self.assertEqual(email, 'to')
self.assertEqual('template: test_email_alert_with_config', title)
self.assertEqual('template: test_email_alert_with_config', body)
def test_set_duration(self):
task = DummyOperator(task_id='op', email='[email protected]')
ti = TI(
task=task,
execution_date=datetime.datetime.now(),
)
ti.start_date = datetime.datetime(2018, 10, 1, 1)
ti.end_date = datetime.datetime(2018, 10, 1, 2)
ti.set_duration()
self.assertEqual(ti.duration, 3600)
def test_set_duration_empty_dates(self):
task = DummyOperator(task_id='op', email='[email protected]')
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.set_duration()
self.assertIsNone(ti.duration)
def test_success_callback_no_race_condition(self):
callback_wrapper = CallbackWrapper()
dag = DAG(
'test_success_callback_no_race_condition',
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
task = DummyOperator(
task_id='op',
email='[email protected]',
on_success_callback=callback_wrapper.success_handler,
dag=dag,
)
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.state = State.RUNNING
session = settings.Session()
session.merge(ti)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
session.commit()
callback_wrapper.wrap_task_instance(ti)
ti._run_raw_task()
self.assertTrue(callback_wrapper.callback_ran)
self.assertEqual(callback_wrapper.task_state_in_callback, State.RUNNING)
ti.refresh_from_db()
self.assertEqual(ti.state, State.SUCCESS)
@staticmethod
def _test_previous_dates_setup(
schedule_interval: Union[str, datetime.timedelta, None], catchup: bool, scenario: List[str]
) -> list:
dag_id = 'test_previous_dates'
dag = models.DAG(dag_id=dag_id, schedule_interval=schedule_interval, catchup=catchup)
task = DummyOperator(task_id='task', dag=dag, start_date=DEFAULT_DATE)
def get_test_ti(session, execution_date: pendulum.DateTime, state: str) -> TI:
dag.create_dagrun(
run_type=DagRunType.SCHEDULED,
state=state,
execution_date=execution_date,
start_date=pendulum.now('UTC'),
session=session,
)
ti = TI(task=task, execution_date=execution_date)
ti.set_state(state=State.SUCCESS, session=session)
return ti
with create_session() as session: # type: Session
date = cast(pendulum.DateTime, pendulum.parse('2019-01-01T00:00:00+00:00'))
ret = []
for idx, state in enumerate(scenario):
new_date = date.add(days=idx)
ti = get_test_ti(session, new_date, state)
ret.append(ti)
return ret
_prev_dates_param_list = (
        param('cron/catchup', '0 0 * * *', True),
param('cron/no-catchup', '0 0 * * *', False),
param('no-sched/catchup', None, True),
param('no-sched/no-catchup', None, False),
param('timedelta/catchup', datetime.timedelta(days=1), True),
param('timedelta/no-catchup', datetime.timedelta(days=1), False),
)
@parameterized.expand(_prev_dates_param_list)
def test_previous_ti(self, _, schedule_interval, catchup) -> None:
scenario = [State.SUCCESS, State.FAILED, State.SUCCESS]
ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)
self.assertIsNone(ti_list[0].get_previous_ti())
self.assertEqual(ti_list[2].get_previous_ti().execution_date, ti_list[1].execution_date)
self.assertNotEqual(ti_list[2].get_previous_ti().execution_date, ti_list[0].execution_date)
@parameterized.expand(_prev_dates_param_list)
def test_previous_ti_success(self, _, schedule_interval, catchup) -> None:
scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]
ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)
self.assertIsNone(ti_list[0].get_previous_ti(state=State.SUCCESS))
self.assertIsNone(ti_list[1].get_previous_ti(state=State.SUCCESS))
self.assertEqual(
ti_list[3].get_previous_ti(state=State.SUCCESS).execution_date, ti_list[1].execution_date
)
self.assertNotEqual(
ti_list[3].get_previous_ti(state=State.SUCCESS).execution_date, ti_list[2].execution_date
)
@parameterized.expand(_prev_dates_param_list)
def test_previous_execution_date_success(self, _, schedule_interval, catchup) -> None:
scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]
ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)
self.assertIsNone(ti_list[0].get_previous_execution_date(state=State.SUCCESS))
self.assertIsNone(ti_list[1].get_previous_execution_date(state=State.SUCCESS))
self.assertEqual(
ti_list[3].get_previous_execution_date(state=State.SUCCESS), ti_list[1].execution_date
)
self.assertNotEqual(
ti_list[3].get_previous_execution_date(state=State.SUCCESS), ti_list[2].execution_date
)
@parameterized.expand(_prev_dates_param_list)
def test_previous_start_date_success(self, _, schedule_interval, catchup) -> None:
scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]
ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)
self.assertIsNone(ti_list[0].get_previous_start_date(state=State.SUCCESS))
self.assertIsNone(ti_list[1].get_previous_start_date(state=State.SUCCESS))
self.assertEqual(
ti_list[3].get_previous_start_date(state=State.SUCCESS),
ti_list[1].start_date,
)
self.assertNotEqual(
ti_list[3].get_previous_start_date(state=State.SUCCESS),
ti_list[2].start_date,
)
def test_pendulum_template_dates(self):
dag = models.DAG(
dag_id='test_pendulum_template_dates',
schedule_interval='0 12 * * *',
start_date=timezone.datetime(2016, 6, 1, 0, 0, 0),
)
task = DummyOperator(task_id='test_pendulum_template_dates_task', dag=dag)
ti = TI(task=task, execution_date=timezone.utcnow())
template_context = ti.get_template_context()
self.assertIsInstance(template_context["execution_date"], pendulum.DateTime)
self.assertIsInstance(template_context["next_execution_date"], pendulum.DateTime)
self.assertIsInstance(template_context["prev_execution_date"], pendulum.DateTime)
@parameterized.expand(
[
('{{ var.value.a_variable }}', 'a test value'),
('{{ var.value.get("a_variable") }}', 'a test value'),
('{{ var.value.get("a_variable", "unused_fallback") }}', 'a test value'),
('{{ var.value.get("missing_variable", "fallback") }}', 'fallback'),
]
)
def test_template_with_variable(self, content, expected_output):
"""
Test the availability of variables in templates
"""
Variable.set('a_variable', 'a test value')
with DAG('test-dag', start_date=DEFAULT_DATE):
task = DummyOperator(task_id='op1')
ti = TI(task=task, execution_date=DEFAULT_DATE)
context = ti.get_template_context()
result = task.render_template(content, context)
self.assertEqual(result, expected_output)
def test_template_with_variable_missing(self):
"""
Test the availability of variables in templates
"""
with DAG('test-dag', start_date=DEFAULT_DATE):
task = DummyOperator(task_id='op1')
ti = TI(task=task, execution_date=DEFAULT_DATE)
context = ti.get_template_context()
with self.assertRaises(KeyError):
task.render_template('{{ var.value.get("missing_variable") }}', context)
@parameterized.expand(
[
('{{ var.value.a_variable }}', '{\n "a": {\n "test": "value"\n }\n}'),
('{{ var.json.a_variable["a"]["test"] }}', 'value'),
('{{ var.json.get("a_variable")["a"]["test"] }}', 'value'),
('{{ var.json.get("a_variable", {"a": {"test": "unused_fallback"}})["a"]["test"] }}', 'value'),
('{{ var.json.get("missing_variable", {"a": {"test": "fallback"}})["a"]["test"] }}', 'fallback'),
]
)
def test_template_with_json_variable(self, content, expected_output):
"""
Test the availability of variables in templates
"""
Variable.set('a_variable', {'a': {'test': 'value'}}, serialize_json=True)
with DAG('test-dag', start_date=DEFAULT_DATE):
task = DummyOperator(task_id='op1')
ti = TI(task=task, execution_date=DEFAULT_DATE)
context = ti.get_template_context()
result = task.render_template(content, context)
self.assertEqual(result, expected_output)
def test_template_with_json_variable_missing(self):
with DAG('test-dag', start_date=DEFAULT_DATE):
task = DummyOperator(task_id='op1')
ti = TI(task=task, execution_date=DEFAULT_DATE)
context = ti.get_template_context()
with self.assertRaises(KeyError):
task.render_template('{{ var.json.get("missing_variable") }}', context)
def test_execute_callback(self):
called = False
def on_execute_callable(context):
nonlocal called
called = True
            self.assertEqual(context['dag_run'].dag_id, 'test_execute_callback')
dag = DAG(
'test_execute_callback',
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
task = DummyOperator(
task_id='op', email='[email protected]', on_execute_callback=on_execute_callable, dag=dag
)
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.state = State.RUNNING
session = settings.Session()
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
session.merge(ti)
session.commit()
ti._run_raw_task()
assert called
ti.refresh_from_db()
assert ti.state == State.SUCCESS
def test_handle_failure(self):
start_date = timezone.datetime(2016, 6, 1)
dag = models.DAG(dag_id="test_handle_failure", schedule_interval=None, start_date=start_date)
mock_on_failure_1 = mock.MagicMock()
mock_on_retry_1 = mock.MagicMock()
task1 = DummyOperator(
task_id="test_handle_failure_on_failure",
on_failure_callback=mock_on_failure_1,
on_retry_callback=mock_on_retry_1,
dag=dag,
)
ti1 = TI(task=task1, execution_date=start_date)
ti1.state = State.FAILED
ti1.handle_failure("test failure handling")
context_arg_1 = mock_on_failure_1.call_args[0][0]
assert context_arg_1 and "task_instance" in context_arg_1
mock_on_retry_1.assert_not_called()
mock_on_failure_2 = mock.MagicMock()
mock_on_retry_2 = mock.MagicMock()
task2 = DummyOperator(
task_id="test_handle_failure_on_retry",
on_failure_callback=mock_on_failure_2,
on_retry_callback=mock_on_retry_2,
retries=1,
dag=dag,
)
ti2 = TI(task=task2, execution_date=start_date)
ti2.state = State.FAILED
ti2.handle_failure("test retry handling")
mock_on_failure_2.assert_not_called()
context_arg_2 = mock_on_retry_2.call_args[0][0]
assert context_arg_2 and "task_instance" in context_arg_2
# test the scenario where normally we would retry but have been asked to fail
mock_on_failure_3 = mock.MagicMock()
mock_on_retry_3 = mock.MagicMock()
task3 = DummyOperator(
task_id="test_handle_failure_on_force_fail",
on_failure_callback=mock_on_failure_3,
on_retry_callback=mock_on_retry_3,
retries=1,
dag=dag,
)
ti3 = TI(task=task3, execution_date=start_date)
ti3.state = State.FAILED
ti3.handle_failure("test force_fail handling", force_fail=True)
context_arg_3 = mock_on_failure_3.call_args[0][0]
assert context_arg_3 and "task_instance" in context_arg_3
mock_on_retry_3.assert_not_called()
def test_does_not_retry_on_airflow_fail_exception(self):
def fail():
raise AirflowFailException("hopeless")
dag = models.DAG(dag_id='test_does_not_retry_on_airflow_fail_exception')
task = PythonOperator(
task_id='test_raise_airflow_fail_exception',
dag=dag,
python_callable=fail,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
retries=1,
)
ti = TI(task=task, execution_date=timezone.utcnow())
try:
ti.run()
except AirflowFailException:
pass # expected
self.assertEqual(State.FAILED, ti.state)
def test_retries_on_other_exceptions(self):
def fail():
raise AirflowException("maybe this will pass?")
dag = models.DAG(dag_id='test_retries_on_other_exceptions')
task = PythonOperator(
task_id='test_raise_other_exception',
dag=dag,
python_callable=fail,
owner='airflow',
start_date=timezone.datetime(2016, 2, 1, 0, 0, 0),
retries=1,
)
ti = TI(task=task, execution_date=timezone.utcnow())
try:
ti.run()
except AirflowException:
pass # expected
self.assertEqual(State.UP_FOR_RETRY, ti.state)
def _env_var_check_callback(self):
self.assertEqual('test_echo_env_variables', os.environ['AIRFLOW_CTX_DAG_ID'])
self.assertEqual('hive_in_python_op', os.environ['AIRFLOW_CTX_TASK_ID'])
self.assertEqual(DEFAULT_DATE.isoformat(), os.environ['AIRFLOW_CTX_EXECUTION_DATE'])
self.assertEqual(
DagRun.generate_run_id(DagRunType.MANUAL, DEFAULT_DATE), os.environ['AIRFLOW_CTX_DAG_RUN_ID']
)
def test_echo_env_variables(self):
dag = DAG(
'test_echo_env_variables',
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
op = PythonOperator(
task_id='hive_in_python_op', dag=dag, python_callable=self._env_var_check_callback
)
dag.create_dagrun(
run_type=DagRunType.MANUAL,
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=False,
)
ti = TI(task=op, execution_date=DEFAULT_DATE)
ti.state = State.RUNNING
session = settings.Session()
session.merge(ti)
session.commit()
ti._run_raw_task()
ti.refresh_from_db()
self.assertEqual(ti.state, State.SUCCESS)
@patch.object(Stats, 'incr')
def test_task_stats(self, stats_mock):
dag = DAG(
'test_task_start_end_stats',
start_date=DEFAULT_DATE,
end_date=DEFAULT_DATE + datetime.timedelta(days=10),
)
op = DummyOperator(task_id='dummy_op', dag=dag)
dag.create_dagrun(
run_id='manual__' + DEFAULT_DATE.isoformat(),
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=False,
)
ti = TI(task=op, execution_date=DEFAULT_DATE)
ti.state = State.RUNNING
session = settings.Session()
session.merge(ti)
session.commit()
ti._run_raw_task()
ti.refresh_from_db()
stats_mock.assert_called_with(f'ti.finish.{dag.dag_id}.{op.task_id}.{ti.state}')
self.assertIn(call(f'ti.start.{dag.dag_id}.{op.task_id}'), stats_mock.mock_calls)
self.assertEqual(stats_mock.call_count, 5)
def test_generate_command_default_param(self):
dag_id = 'test_generate_command_default_param'
task_id = 'task'
assert_command = ['airflow', 'tasks', 'run', dag_id, task_id, DEFAULT_DATE.isoformat()]
generate_command = TI.generate_command(dag_id=dag_id, task_id=task_id, execution_date=DEFAULT_DATE)
assert assert_command == generate_command
def test_generate_command_specific_param(self):
dag_id = 'test_generate_command_specific_param'
task_id = 'task'
assert_command = [
'airflow',
'tasks',
'run',
dag_id,
task_id,
DEFAULT_DATE.isoformat(),
'--mark-success',
]
generate_command = TI.generate_command(
dag_id=dag_id, task_id=task_id, execution_date=DEFAULT_DATE, mark_success=True
)
assert assert_command == generate_command
def test_get_rendered_template_fields(self):
with DAG('test-dag', start_date=DEFAULT_DATE):
task = BashOperator(task_id='op1', bash_command="{{ task.task_id }}")
ti = TI(task=task, execution_date=DEFAULT_DATE)
with create_session() as session:
session.add(RenderedTaskInstanceFields(ti))
# Create new TI for the same Task
with DAG('test-dag', start_date=DEFAULT_DATE):
new_task = BashOperator(task_id='op1', bash_command="{{ task.task_id }}")
new_ti = TI(task=new_task, execution_date=DEFAULT_DATE)
new_ti.get_rendered_template_fields()
self.assertEqual("op1", ti.task.bash_command)
# CleanUp
with create_session() as session:
session.query(RenderedTaskInstanceFields).delete()
@mock.patch.dict(os.environ, {"AIRFLOW_IS_K8S_EXECUTOR_POD": "True"})
def test_get_rendered_k8s_spec(self):
with DAG('test_get_rendered_k8s_spec', start_date=DEFAULT_DATE):
task = BashOperator(task_id='op1', bash_command="{{ task.task_id }}")
ti = TI(task=task, execution_date=DEFAULT_DATE)
expected_pod_spec = {
'metadata': {
'annotations': {
'dag_id': 'test_get_rendered_k8s_spec',
'execution_date': '2016-01-01T00:00:00+00:00',
'task_id': 'op1',
'try_number': '1',
},
'labels': {
'airflow-worker': 'worker-config',
'airflow_version': version,
'dag_id': 'test_get_rendered_k8s_spec',
'execution_date': '2016-01-01T00_00_00_plus_00_00',
'kubernetes_executor': 'True',
'task_id': 'op1',
'try_number': '1',
},
'name': mock.ANY,
'namespace': 'default',
},
'spec': {
'containers': [
{
'args': [
'airflow',
'tasks',
'run',
'test_get_rendered_k8s_spec',
'op1',
'2016-01-01T00:00:00+00:00',
],
'image': ':',
'name': 'base',
'env': [{'name': 'AIRFLOW_IS_K8S_EXECUTOR_POD', 'value': 'True'}],
}
]
},
}
with create_session() as session:
rtif = RenderedTaskInstanceFields(ti)
session.add(rtif)
self.assertEqual(rtif.k8s_pod_yaml, expected_pod_spec)
# Create new TI for the same Task
with DAG('test_get_rendered_k8s_spec', start_date=DEFAULT_DATE):
new_task = BashOperator(task_id='op1', bash_command="{{ task.task_id }}")
new_ti = TI(task=new_task, execution_date=DEFAULT_DATE)
pod_spec = new_ti.get_rendered_k8s_spec()
self.assertEqual(expected_pod_spec, pod_spec)
# CleanUp
with create_session() as session:
session.query(RenderedTaskInstanceFields).delete()
def validate_ti_states(self, dag_run, ti_state_mapping, error_message):
for task_id, expected_state in ti_state_mapping.items():
task_instance = dag_run.get_task_instance(task_id=task_id)
self.assertEqual(task_instance.state, expected_state, error_message)
@parameterized.expand(
[
(
{('scheduler', 'schedule_after_task_execution'): 'True'},
{'A': 'B', 'B': 'C'},
{'A': State.QUEUED, 'B': State.NONE, 'C': State.NONE},
{'A': State.SUCCESS, 'B': State.SCHEDULED, 'C': State.NONE},
{'A': State.SUCCESS, 'B': State.SUCCESS, 'C': State.SCHEDULED},
"A -> B -> C, with fast-follow ON when A runs, B should be QUEUED. Same for B and C.",
),
(
{('scheduler', 'schedule_after_task_execution'): 'False'},
{'A': 'B', 'B': 'C'},
{'A': State.QUEUED, 'B': State.NONE, 'C': State.NONE},
{'A': State.SUCCESS, 'B': State.NONE, 'C': State.NONE},
None,
"A -> B -> C, with fast-follow OFF, when A runs, B shouldn't be QUEUED.",
),
(
{('scheduler', 'schedule_after_task_execution'): 'True'},
{'A': 'B', 'C': 'B', 'D': 'C'},
{'A': State.QUEUED, 'B': State.NONE, 'C': State.NONE, 'D': State.NONE},
{'A': State.SUCCESS, 'B': State.NONE, 'C': State.NONE, 'D': State.NONE},
None,
"D -> C -> B & A -> B, when A runs but C isn't QUEUED yet, B shouldn't be QUEUED.",
),
(
{('scheduler', 'schedule_after_task_execution'): 'True'},
{'A': 'C', 'B': 'C'},
{'A': State.QUEUED, 'B': State.FAILED, 'C': State.NONE},
{'A': State.SUCCESS, 'B': State.FAILED, 'C': State.UPSTREAM_FAILED},
None,
"A -> C & B -> C, when A is QUEUED but B has FAILED, C is marked UPSTREAM_FAILED.",
),
]
)
def test_fast_follow(
self, conf, dependencies, init_state, first_run_state, second_run_state, error_message
):
with conf_vars(conf):
session = settings.Session()
dag = DAG('test_dagrun_fast_follow', start_date=DEFAULT_DATE)
dag_model = DagModel(
dag_id=dag.dag_id,
next_dagrun=dag.start_date,
is_active=True,
)
session.add(dag_model)
session.flush()
python_callable = lambda: True
with dag:
task_a = PythonOperator(task_id='A', python_callable=python_callable)
task_b = PythonOperator(task_id='B', python_callable=python_callable)
task_c = PythonOperator(task_id='C', python_callable=python_callable)
if 'D' in init_state:
task_d = PythonOperator(task_id='D', python_callable=python_callable)
for upstream, downstream in dependencies.items():
dag.set_dependency(upstream, downstream)
scheduler = SchedulerJob(subdir=os.devnull)
scheduler.dagbag.bag_dag(dag, root_dag=dag)
dag_run = dag.create_dagrun(run_id='test_dagrun_fast_follow', state=State.RUNNING)
task_instance_a = dag_run.get_task_instance(task_id=task_a.task_id)
task_instance_a.task = task_a
task_instance_a.set_state(init_state['A'])
task_instance_b = dag_run.get_task_instance(task_id=task_b.task_id)
task_instance_b.task = task_b
task_instance_b.set_state(init_state['B'])
task_instance_c = dag_run.get_task_instance(task_id=task_c.task_id)
task_instance_c.task = task_c
task_instance_c.set_state(init_state['C'])
if 'D' in init_state:
task_instance_d = dag_run.get_task_instance(task_id=task_d.task_id)
task_instance_d.task = task_d
task_instance_d.state = init_state['D']
session.commit()
task_instance_a.run()
self.validate_ti_states(dag_run, first_run_state, error_message)
if second_run_state:
scheduler._critical_section_execute_task_instances(session=session)
task_instance_b.run()
self.validate_ti_states(dag_run, second_run_state, error_message)
def test_set_state_up_for_retry(self):
dag = DAG('dag', start_date=DEFAULT_DATE)
op1 = DummyOperator(task_id='op_1', owner='test', dag=dag)
ti = TI(task=op1, execution_date=timezone.utcnow(), state=State.RUNNING)
start_date = timezone.utcnow()
ti.start_date = start_date
ti.set_state(State.UP_FOR_RETRY)
assert ti.state == State.UP_FOR_RETRY
assert ti.start_date == start_date, "Start date should have been left alone"
assert ti.start_date < ti.end_date
assert ti.duration > 0
@pytest.mark.parametrize("pool_override", [None, "test_pool2"])
def test_refresh_from_task(pool_override):
task = DummyOperator(
task_id="dummy",
queue="test_queue",
pool="test_pool1",
pool_slots=3,
priority_weight=10,
run_as_user="test",
retries=30,
executor_config={"KubernetesExecutor": {"image": "myCustomDockerImage"}},
)
ti = TI(task, execution_date=pendulum.datetime(2020, 1, 1))
ti.refresh_from_task(task, pool_override=pool_override)
assert ti.queue == task.queue
if pool_override:
assert ti.pool == pool_override
else:
assert ti.pool == task.pool
assert ti.pool_slots == task.pool_slots
assert ti.priority_weight == task.priority_weight_total
assert ti.run_as_user == task.run_as_user
assert ti.max_tries == task.retries
assert ti.executor_config == task.executor_config
assert ti.operator == DummyOperator.__name__
class TestRunRawTaskQueriesCount(unittest.TestCase):
"""
These tests are designed to detect changes in the number of queries executed
when calling _run_raw_task
"""
@staticmethod
def _clean():
db.clear_db_runs()
db.clear_db_pools()
db.clear_db_dags()
db.clear_db_sla_miss()
db.clear_db_errors()
def setUp(self) -> None:
self._clean()
def tearDown(self) -> None:
self._clean()
@parameterized.expand(
[
# Expected queries, mark_success
(10, False),
(5, True),
]
)
def test_execute_queries_count(self, expected_query_count, mark_success):
with create_session() as session:
dag = DAG('test_queries', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.state = State.RUNNING
session.merge(ti)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
with assert_queries_count(expected_query_count):
ti._run_raw_task(mark_success=mark_success)
def test_execute_queries_count_store_serialized(self):
with create_session() as session:
dag = DAG('test_queries', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
ti = TI(task=task, execution_date=datetime.datetime.now())
ti.state = State.RUNNING
session.merge(ti)
dag.create_dagrun(
execution_date=ti.execution_date,
state=State.RUNNING,
run_type=DagRunType.SCHEDULED,
session=session,
)
with assert_queries_count(10):
ti._run_raw_task()
def test_operator_field_with_serialization(self):
dag = DAG('test_queries', start_date=DEFAULT_DATE)
task = DummyOperator(task_id='op', dag=dag)
self.assertEqual(task.task_type, 'DummyOperator')
# Verify that ti.operator field renders correctly "without" Serialization
ti = TI(task=task, execution_date=datetime.datetime.now())
self.assertEqual(ti.operator, "DummyOperator")
serialized_op = SerializedBaseOperator.serialize_operator(task)
deserialized_op = SerializedBaseOperator.deserialize_operator(serialized_op)
self.assertEqual(deserialized_op.task_type, 'DummyOperator')
# Verify that ti.operator field renders correctly "with" Serialization
ser_ti = TI(task=deserialized_op, execution_date=datetime.datetime.now())
self.assertEqual(ser_ti.operator, "DummyOperator")
| apache-2.0 |
heeraj123/oh-mainline | vendor/packages/sqlparse/tests/test_pipeline.py | 47 | 1945 | import unittest
from sqlparse.filters import ColumnsSelect
from sqlparse.lexer import tokenize
from sqlparse.pipeline import Pipeline
class Test(unittest.TestCase):
def setUp(self):
self.pipe = Pipeline()
self.pipe.append(tokenize)
self.pipe.append(ColumnsSelect())
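    # A Pipeline instance is callable: calling it feeds the SQL string through
    # each appended stage in order. A minimal standalone sketch (illustrative,
    # mirroring the setUp above; the example SQL and result are assumptions):
    #
    #   pipe = Pipeline()
    #   pipe.append(tokenize)
    #   pipe.append(ColumnsSelect())
    #   pipe("SELECT child_entry, asdf AS inode FROM links")
    #   # -> [u'child_entry', u'inode']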
def test_1(self):
sql = """
-- type: script
-- return: integer
INCLUDE "Direntry.make.sql";
INSERT INTO directories(inode)
VALUES(:inode)
LIMIT 1"""
self.assertEqual([], self.pipe(sql))
def test_2(self):
sql = """
SELECT child_entry,asdf AS inode, creation
FROM links
WHERE parent_dir == :parent_dir AND name == :name
LIMIT 1"""
self.assertEqual([u'child_entry', u'inode', u'creation'],
self.pipe(sql))
def test_3(self):
sql = """
SELECT
0 AS st_dev,
0 AS st_uid,
0 AS st_gid,
dir_entries.type AS st_mode,
dir_entries.inode AS st_ino,
COUNT(links.child_entry) AS st_nlink,
:creation AS st_ctime,
dir_entries.access AS st_atime,
dir_entries.modification AS st_mtime,
-- :creation AS st_ctime,
-- CAST(STRFTIME('%s',dir_entries.access) AS INTEGER) AS st_atime,
-- CAST(STRFTIME('%s',dir_entries.modification) AS INTEGER) AS st_mtime,
COALESCE(files.size,0) AS st_size, -- Python-FUSE
COALESCE(files.size,0) AS size -- PyFilesystem
FROM dir_entries
LEFT JOIN files
ON dir_entries.inode == files.inode
LEFT JOIN links
ON dir_entries.inode == links.child_entry
WHERE dir_entries.inode == :inode
GROUP BY dir_entries.inode
LIMIT 1"""
self.assertEqual([u'st_dev', u'st_uid', u'st_gid', u'st_mode',
u'st_ino', u'st_nlink', u'st_ctime',
u'st_atime', u'st_mtime', u'st_size', u'size'],
self.pipe(sql))
| agpl-3.0 |
toshywoshy/ansible | lib/ansible/modules/windows/win_webpicmd.py | 52 | 1398 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Peter Mounce <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_webpicmd
version_added: "2.0"
short_description: Installs packages using Web Platform Installer command-line
description:
- Installs packages using Web Platform Installer command-line
(U(http://www.iis.net/learn/install/web-platform-installer/web-platform-installer-v4-command-line-webpicmdexe-rtw-release)).
    - Must be installed and present in PATH (see M(win_chocolatey) module; 'webpicmd' is the package name, and you must install 'lessmsi' first too).
- Install IIS first (see M(win_feature) module).
notes:
    - Accepts EULAs and suppresses reboot - you will need to check and manage reboots yourself (see M(win_reboot) module)
options:
name:
description:
- Name of the package to be installed.
type: str
required: yes
seealso:
- module: win_package
author:
- Peter Mounce (@petemounce)
'''
EXAMPLES = r'''
- name: Install URLRewrite2.
win_webpicmd:
name: URLRewrite2
'''
| gpl-3.0 |
lowitty/selenium | libs/windows/paramiko/buffered_pipe.py | 3 | 6997 | # Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Attempt to generalize the "feeder" part of a `.Channel`: an object which can be
read from and closed, but is reading from a buffer fed by another thread. The
read operations are blocking and can have a timeout set.
"""
import array
import threading
import time
from paramiko.py3compat import PY2, b
class PipeTimeout (IOError):
"""
Indicates that a timeout was reached on a read from a `.BufferedPipe`.
"""
pass
class BufferedPipe (object):
"""
A buffer that obeys normal read (with timeout) & close semantics for a
file or socket, but is fed data from another thread. This is used by
`.Channel`.
"""
def __init__(self):
self._lock = threading.Lock()
self._cv = threading.Condition(self._lock)
self._event = None
self._buffer = array.array('B')
self._closed = False
if PY2:
def _buffer_frombytes(self, data):
self._buffer.fromstring(data)
def _buffer_tobytes(self, limit=None):
return self._buffer[:limit].tostring()
else:
def _buffer_frombytes(self, data):
self._buffer.frombytes(data)
def _buffer_tobytes(self, limit=None):
return self._buffer[:limit].tobytes()
def set_event(self, event):
"""
Set an event on this buffer. When data is ready to be read (or the
buffer has been closed), the event will be set. When no data is
ready, the event will be cleared.
:param threading.Event event: the event to set/clear
"""
self._event = event
if len(self._buffer) > 0:
event.set()
else:
event.clear()
def feed(self, data):
"""
Feed new data into this pipe. This method is assumed to be called
from a separate thread, so synchronization is done.
:param data: the data to add, as a `str` or `bytes`
"""
self._lock.acquire()
try:
if self._event is not None:
self._event.set()
self._buffer_frombytes(b(data))
self._cv.notifyAll()
finally:
self._lock.release()
def read_ready(self):
"""
Returns true if data is buffered and ready to be read from this
feeder. A ``False`` result does not mean that the feeder has closed;
it means you may need to wait before more data arrives.
:return:
``True`` if a `read` call would immediately return at least one
byte; ``False`` otherwise.
"""
self._lock.acquire()
try:
if len(self._buffer) == 0:
return False
return True
finally:
self._lock.release()
def read(self, nbytes, timeout=None):
"""
Read data from the pipe. The return value is a string representing
the data received. The maximum amount of data to be received at once
is specified by ``nbytes``. If a string of length zero is returned,
the pipe has been closed.
The optional ``timeout`` argument can be a nonnegative float expressing
seconds, or ``None`` for no timeout. If a float is given, a
`.PipeTimeout` will be raised if the timeout period value has elapsed
before any data arrives.
:param int nbytes: maximum number of bytes to read
:param float timeout:
maximum seconds to wait (or ``None``, the default, to wait forever)
:return: the read data, as a `bytes`
:raises PipeTimeout:
if a timeout was specified and no data was ready before that
timeout
"""
out = bytes()
self._lock.acquire()
try:
if len(self._buffer) == 0:
if self._closed:
return out
# should we block?
if timeout == 0.0:
raise PipeTimeout()
# loop here in case we get woken up but a different thread has
# grabbed everything in the buffer.
while (len(self._buffer) == 0) and not self._closed:
then = time.time()
self._cv.wait(timeout)
if timeout is not None:
timeout -= time.time() - then
if timeout <= 0.0:
raise PipeTimeout()
# something's in the buffer and we have the lock!
if len(self._buffer) <= nbytes:
out = self._buffer_tobytes()
del self._buffer[:]
if (self._event is not None) and not self._closed:
self._event.clear()
else:
out = self._buffer_tobytes(nbytes)
del self._buffer[:nbytes]
finally:
self._lock.release()
return out
def empty(self):
"""
Clear out the buffer and return all data that was in it.
:return:
any data that was in the buffer prior to clearing it out, as a
`str`
"""
self._lock.acquire()
try:
out = self._buffer_tobytes()
del self._buffer[:]
if (self._event is not None) and not self._closed:
self._event.clear()
return out
finally:
self._lock.release()
def close(self):
"""
Close this pipe object. Future calls to `read` after the buffer
has been emptied will return immediately with an empty string.
"""
self._lock.acquire()
try:
self._closed = True
self._cv.notifyAll()
if self._event is not None:
self._event.set()
finally:
self._lock.release()
def __len__(self):
"""
Return the number of bytes buffered.
:return: number (`int`) of bytes buffered
"""
self._lock.acquire()
try:
return len(self._buffer)
finally:
self._lock.release()
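# A minimal usage sketch (illustrative only, not part of the library): one
# thread feeds the pipe while another performs a blocking read with a timeout.
#
#   import threading
#   pipe = BufferedPipe()
#   feeder = threading.Thread(target=lambda: (pipe.feed(b'hello'), pipe.close()))
#   feeder.start()
#   data = pipe.read(5, timeout=1.0)  # blocks until fed; returns b'hello'
#   assert pipe.read(1) == b''        # closed and drained -> empty bytes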
| mit |
tumbl3w33d/ansible | lib/ansible/modules/network/fortios/fortios_firewall_ssl_setting.py | 14 | 12477 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_ssl_setting
short_description: SSL proxy settings in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify firewall_ssl feature and setting category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
firewall_ssl_setting:
description:
- SSL proxy settings.
default: null
type: dict
suboptions:
abbreviate_handshake:
description:
- Enable/disable use of SSL abbreviated handshake.
type: str
choices:
- enable
- disable
cert_cache_capacity:
description:
- Maximum capacity of the host certificate cache (0 - 500).
type: int
cert_cache_timeout:
description:
- Time limit to keep certificate cache (1 - 120 min).
type: int
kxp_queue_threshold:
description:
- Maximum length of the CP KXP queue. When the queue becomes full, the proxy switches cipher functions to the main CPU (0 - 512).
type: int
no_matching_cipher_action:
description:
- Bypass or drop the connection when no matching cipher is found.
type: str
choices:
- bypass
- drop
proxy_connect_timeout:
description:
- Time limit to make an internal connection to the appropriate proxy process (1 - 60 sec).
type: int
session_cache_capacity:
description:
- Capacity of the SSL session cache (--Obsolete--) (1 - 1000).
type: int
session_cache_timeout:
description:
- Time limit to keep SSL session state (1 - 60 min).
type: int
ssl_dh_bits:
description:
- Bit-size of Diffie-Hellman (DH) prime used in DHE-RSA negotiation.
type: str
choices:
- 768
- 1024
- 1536
- 2048
ssl_queue_threshold:
description:
- Maximum length of the CP SSL queue. When the queue becomes full, the proxy switches cipher functions to the main CPU (0 - 512).
type: int
ssl_send_empty_frags:
description:
- Enable/disable sending empty fragments to avoid attack on CBC IV (for SSL 3.0 and TLS 1.0 only).
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: SSL proxy settings.
fortios_firewall_ssl_setting:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
firewall_ssl_setting:
abbreviate_handshake: "enable"
cert_cache_capacity: "4"
cert_cache_timeout: "5"
kxp_queue_threshold: "6"
no_matching_cipher_action: "bypass"
proxy_connect_timeout: "8"
session_cache_capacity: "9"
session_cache_timeout: "10"
ssl_dh_bits: "768"
ssl_queue_threshold: "12"
ssl_send_empty_frags: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_firewall_ssl_setting_data(json):
option_list = ['abbreviate_handshake', 'cert_cache_capacity', 'cert_cache_timeout',
'kxp_queue_threshold', 'no_matching_cipher_action', 'proxy_connect_timeout',
'session_cache_capacity', 'session_cache_timeout', 'ssl_dh_bits',
'ssl_queue_threshold', 'ssl_send_empty_frags']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
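# For example (an illustrative call, not part of the module itself):
#   underscore_to_hyphen({'cert_cache_capacity': 4, 'ssl_dh_bits': '768'})
#   returns {'cert-cache-capacity': 4, 'ssl-dh-bits': '768'}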
def firewall_ssl_setting(data, fos):
vdom = data['vdom']
firewall_ssl_setting_data = data['firewall_ssl_setting']
filtered_data = underscore_to_hyphen(filter_firewall_ssl_setting_data(firewall_ssl_setting_data))
return fos.set('firewall.ssl',
'setting',
data=filtered_data,
vdom=vdom)
def is_successful_status(status):
    # a DELETE that returns 404 also counts as success (already absent)
    return status['status'] == "success" or \
        (status['http_method'] == "DELETE" and status['http_status'] == 404)
def fortios_firewall_ssl(data, fos):
if data['firewall_ssl_setting']:
resp = firewall_ssl_setting(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"firewall_ssl_setting": {
"required": False, "type": "dict", "default": None,
"options": {
"abbreviate_handshake": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"cert_cache_capacity": {"required": False, "type": "int"},
"cert_cache_timeout": {"required": False, "type": "int"},
"kxp_queue_threshold": {"required": False, "type": "int"},
"no_matching_cipher_action": {"required": False, "type": "str",
"choices": ["bypass", "drop"]},
"proxy_connect_timeout": {"required": False, "type": "int"},
"session_cache_capacity": {"required": False, "type": "int"},
"session_cache_timeout": {"required": False, "type": "int"},
"ssl_dh_bits": {"required": False, "type": "str",
"choices": ["768", "1024", "1536",
"2048"]},
"ssl_queue_threshold": {"required": False, "type": "int"},
"ssl_send_empty_frags": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_firewall_ssl(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_firewall_ssl(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
rajendrakrp/GeoMicroFormat | django/utils/unittest/suite.py | 353 | 9293 | """TestSuite"""
import sys
import unittest
from django.utils.unittest import case, util
__unittest = True
class BaseTestSuite(unittest.TestSuite):
"""A simple test suite that doesn't provide class or module shared fixtures.
"""
def __init__(self, tests=()):
self._tests = []
self.addTests(tests)
def __repr__(self):
return "<%s tests=%s>" % (util.strclass(self.__class__), list(self))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return list(self) == list(other)
def __ne__(self, other):
return not self == other
# Can't guarantee hash invariant, so flag as unhashable
__hash__ = None
def __iter__(self):
return iter(self._tests)
def countTestCases(self):
cases = 0
for test in self:
cases += test.countTestCases()
return cases
def addTest(self, test):
# sanity checks
if not hasattr(test, '__call__'):
raise TypeError("%r is not callable" % (repr(test),))
if isinstance(test, type) and issubclass(test,
(case.TestCase, TestSuite)):
raise TypeError("TestCases and TestSuites must be instantiated "
"before passing them to addTest()")
self._tests.append(test)
def addTests(self, tests):
if isinstance(tests, basestring):
raise TypeError("tests must be an iterable of tests, not a string")
for test in tests:
self.addTest(test)
def run(self, result):
for test in self:
if result.shouldStop:
break
test(result)
return result
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
for test in self:
test.debug()
class TestSuite(BaseTestSuite):
"""A test suite is a composite test consisting of a number of TestCases.
For use, create an instance of TestSuite, then add test case instances.
When all tests have been added, the suite can be passed to a test
runner, such as TextTestRunner. It will run the individual test cases
in the order in which they were added, aggregating the results. When
subclassing, do not forget to call the base class constructor.
"""
def run(self, result):
self._wrapped_run(result)
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
return result
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
debug = _DebugResult()
self._wrapped_run(debug, True)
self._tearDownPreviousClass(None, debug)
self._handleModuleTearDown(debug)
################################
# private methods
def _wrapped_run(self, result, debug=False):
for test in self:
if result.shouldStop:
break
if _isnotsuite(test):
self._tearDownPreviousClass(test, result)
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)):
continue
if hasattr(test, '_wrapped_run'):
test._wrapped_run(result, debug)
elif not debug:
test(result)
else:
test.debug()
def _handleClassSetUp(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if result._moduleSetUpFailed:
return
if getattr(currentClass, "__unittest_skip__", False):
return
try:
currentClass._classSetupFailed = False
except TypeError:
# test may actually be a function
# so its class will be a builtin-type
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
if setUpClass is not None:
try:
setUpClass()
except Exception, e:
if isinstance(result, _DebugResult):
raise
currentClass._classSetupFailed = True
className = util.strclass(currentClass)
errorName = 'setUpClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
def _get_previous_module(self, result):
previousModule = None
previousClass = getattr(result, '_previousTestClass', None)
if previousClass is not None:
previousModule = previousClass.__module__
return previousModule
def _handleModuleFixture(self, test, result):
previousModule = self._get_previous_module(result)
currentModule = test.__class__.__module__
if currentModule == previousModule:
return
self._handleModuleTearDown(result)
result._moduleSetUpFailed = False
try:
module = sys.modules[currentModule]
except KeyError:
return
setUpModule = getattr(module, 'setUpModule', None)
if setUpModule is not None:
try:
setUpModule()
except Exception, e:
if isinstance(result, _DebugResult):
raise
result._moduleSetUpFailed = True
errorName = 'setUpModule (%s)' % currentModule
self._addClassOrModuleLevelException(result, e, errorName)
def _addClassOrModuleLevelException(self, result, exception, errorName):
error = _ErrorHolder(errorName)
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None and isinstance(exception, case.SkipTest):
addSkip(error, str(exception))
else:
result.addError(error, sys.exc_info())
def _handleModuleTearDown(self, result):
previousModule = self._get_previous_module(result)
if previousModule is None:
return
if result._moduleSetUpFailed:
return
try:
module = sys.modules[previousModule]
except KeyError:
return
tearDownModule = getattr(module, 'tearDownModule', None)
if tearDownModule is not None:
try:
tearDownModule()
except Exception, e:
if isinstance(result, _DebugResult):
raise
errorName = 'tearDownModule (%s)' % previousModule
self._addClassOrModuleLevelException(result, e, errorName)
def _tearDownPreviousClass(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if getattr(previousClass, '_classSetupFailed', False):
return
if getattr(result, '_moduleSetUpFailed', False):
return
if getattr(previousClass, "__unittest_skip__", False):
return
tearDownClass = getattr(previousClass, 'tearDownClass', None)
if tearDownClass is not None:
try:
tearDownClass()
except Exception, e:
if isinstance(result, _DebugResult):
raise
className = util.strclass(previousClass)
errorName = 'tearDownClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
is concerned, this looks exactly like a unit test. Used to insert
arbitrary errors into a test suite run.
"""
# Inspired by the ErrorHolder from Twisted:
# http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py
# attribute used by TestResult._exc_info_to_string
failureException = None
def __init__(self, description):
self.description = description
def id(self):
return self.description
def shortDescription(self):
return None
def __repr__(self):
return "<ErrorHolder description=%r>" % (self.description,)
def __str__(self):
return self.id()
def run(self, result):
# could call result.addError(...) - but this test-like object
# shouldn't be run anyway
pass
def __call__(self, result):
return self.run(result)
def countTestCases(self):
return 0
def _isnotsuite(test):
"A crude way to tell apart testcases and suites with duck-typing"
try:
iter(test)
except TypeError:
return True
return False
class _DebugResult(object):
"Used by the TestSuite to hold previous class when running in debug."
_previousTestClass = None
_moduleSetUpFailed = False
shouldStop = False
| bsd-3-clause |
caromedellin/data-play | definingMarkovThings.py | 1 | 2434 | # -*- coding: utf-8 -*-
"""
Created on Thu Feb 25 17:58:31 2016
Markov Chain Monte Carlo (MCMC)
@author: katar
"""
import numpy
import random
"""
Concept: 1) define a Markov chain
         2) run a Monte Carlo simulation that walks the chain,
            sampling each next state from the transition matrix
"""
stateNames =['tropics','city','mountains']
""" the value of a process at time period t """
class State:
def __init__(self,stringname):#,timest):
self.name = stringname
#self.numvisits = 0
self.here = 0
#self.time = timestep
#self.idnum = 0
#def visited(self):
#self.numvisits += 1
def currentState(self):
self.here = 1
def leftState(self):
self.here = 0
# All states!
class ArrayStates:
def __init__(self,stateNames=[]):
self.stateSet = set()
self.listOfStates = stateNames
self.stateLocations = []
    def initialAddStates(self):
        # use the instance's own list, not the global stateNames
        for state in range(len(self.listOfStates)):
            self.stateSet.add(self.listOfStates[state])
            self.stateLocations.append(State(self.listOfStates[state]))
def length(self):
return len(self.listOfStates)
def addState(self,additionalStateName):
self.stateSet.add(additionalStateName)
self.listOfStates.append(additionalStateName)
self.stateLocations.append(State(additionalStateName))
""" one step transition probability matrix between states"""
transMatrix = numpy.matrix('0 0.7 0.3; 0.5 0 0.5; 0.7 0.2 0.1')
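# Each row is the distribution over next states, so rows must sum to 1.
# Quick sanity check (illustrative):
#   numpy.asarray(transMatrix).sum(axis=1)  ->  array([ 1.,  1.,  1.])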
""" numpy.matrix[i] to array """
def Matrix1dToArray(npmatrix):
matrix1d = numpy.asarray(npmatrix)[0]
return matrix1d
""" chose state based """
def nextState(currentIndex, transMatrix):
transProbs = transMatrix[currentIndex]
sortedProbs = transProbs.sort()
sortIndices = transProbs.argsort()
#sums = [for]
rand = random.random()
""" take n steps in the Markov chain"""
#def nSteps(n, ArrayOfStates, transMatrix):
#initState = randint(0,ArrayOfStates.length()-1)
#for i in range(n):
""" Example Markov Chain!
It's having ArrayStates, and defining the probabilities between
each of the states with the transMatrix! Then, the prob. of
being in some state next has to do with which state you are in
now,
"""
example = ArrayStates(stateNames)
example.initialAddStates()
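# With the helpers above completed, a short simulated walk (a sketch):
#   path = nSteps(10, example, transMatrix)
#   print([example.listOfStates[i] for i in path])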
| mit |
kentatogashi/ansible | v1/ansible/utils/vault.py | 112 | 18771 | # (c) 2014, James Tanner <[email protected]>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-pull is a script that runs ansible in local mode
# after checking out a playbooks directory from source repo. There is an
# example playbook to bootstrap this script in the examples/ dir which
# installs ansible and sets it up to run on cron.
import os
import shlex
import shutil
import tempfile
from io import BytesIO
from subprocess import call
from ansible import errors
from hashlib import sha256
# Note: Only used for loading obsolete VaultAES files. All files are written
# using the newer VaultAES256 which does not require md5
try:
from hashlib import md5
except ImportError:
try:
from md5 import md5
except ImportError:
# MD5 unavailable. Possibly FIPS mode
md5 = None
from binascii import hexlify
from binascii import unhexlify
from ansible import constants as C
try:
from Crypto.Hash import SHA256, HMAC
HAS_HASH = True
except ImportError:
HAS_HASH = False
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Util import Counter
HAS_COUNTER = True
except ImportError:
HAS_COUNTER = False
# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Protocol.KDF import PBKDF2
HAS_PBKDF2 = True
except ImportError:
HAS_PBKDF2 = False
# AES IMPORTS
try:
from Crypto.Cipher import AES as AES
HAS_AES = True
except ImportError:
HAS_AES = False
CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
HEADER='$ANSIBLE_VAULT'
CIPHER_WHITELIST=['AES', 'AES256']
class VaultLib(object):
def __init__(self, password):
self.password = password
self.cipher_name = None
self.version = '1.1'
def is_encrypted(self, data):
if data.startswith(HEADER):
return True
else:
return False
def encrypt(self, data):
if self.is_encrypted(data):
raise errors.AnsibleError("data is already encrypted")
if not self.cipher_name:
self.cipher_name = "AES256"
#raise errors.AnsibleError("the cipher must be set before encrypting data")
if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
cipher = globals()['Vault' + self.cipher_name]
this_cipher = cipher()
else:
raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)
"""
# combine sha + data
this_sha = sha256(data).hexdigest()
tmp_data = this_sha + "\n" + data
"""
# encrypt sha + data
enc_data = this_cipher.encrypt(data, self.password)
# add header
tmp_data = self._add_header(enc_data)
return tmp_data
def decrypt(self, data):
if self.password is None:
raise errors.AnsibleError("A vault password must be specified to decrypt data")
if not self.is_encrypted(data):
raise errors.AnsibleError("data is not encrypted")
# clean out header
data = self._split_header(data)
# create the cipher object
if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
cipher = globals()['Vault' + self.cipher_name]
this_cipher = cipher()
else:
raise errors.AnsibleError("%s cipher could not be found" % self.cipher_name)
# try to unencrypt data
data = this_cipher.decrypt(data, self.password)
if data is None:
raise errors.AnsibleError("Decryption failed")
return data
def _add_header(self, data):
# combine header and encrypted data in 80 char columns
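        # the first line comes out as e.g. "$ANSIBLE_VAULT;1.1;AES256"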
#tmpdata = hexlify(data)
tmpdata = [data[i:i+80] for i in range(0, len(data), 80)]
if not self.cipher_name:
raise errors.AnsibleError("the cipher must be set before adding a header")
dirty_data = HEADER + ";" + str(self.version) + ";" + self.cipher_name + "\n"
for l in tmpdata:
dirty_data += l + '\n'
return dirty_data
def _split_header(self, data):
# used by decrypt
tmpdata = data.split('\n')
tmpheader = tmpdata[0].strip().split(';')
self.version = str(tmpheader[1].strip())
self.cipher_name = str(tmpheader[2].strip())
clean_data = '\n'.join(tmpdata[1:])
"""
# strip out newline, join, unhex
clean_data = [ x.strip() for x in clean_data ]
clean_data = unhexlify(''.join(clean_data))
"""
return clean_data
def __enter__(self):
return self
def __exit__(self, *err):
pass
class VaultEditor(object):
# uses helper methods for write_file(self, filename, data)
# to write a file so that code isn't duplicated for simple
# file I/O, ditto read_file(self, filename) and launch_editor(self, filename)
# ... "Don't Repeat Yourself", etc.
def __init__(self, cipher_name, password, filename):
# instantiates a member variable for VaultLib
self.cipher_name = cipher_name
self.password = password
self.filename = filename
def _edit_file_helper(self, existing_data=None, cipher=None):
# make sure the umask is set to a sane value
old_umask = os.umask(0o077)
# Create a tempfile
_, tmp_path = tempfile.mkstemp()
if existing_data:
self.write_data(existing_data, tmp_path)
# drop the user into an editor on the tmp file
try:
call(self._editor_shell_command(tmp_path))
except OSError, e:
raise Exception("Failed to open editor (%s): %s" % (self._editor_shell_command(tmp_path)[0],str(e)))
tmpdata = self.read_data(tmp_path)
# create new vault
this_vault = VaultLib(self.password)
if cipher:
this_vault.cipher_name = cipher
# encrypt new data and write out to tmp
enc_data = this_vault.encrypt(tmpdata)
self.write_data(enc_data, tmp_path)
# shuffle tmp file into place
self.shuffle_files(tmp_path, self.filename)
# and restore umask
os.umask(old_umask)
def create_file(self):
""" create a new encrypted file """
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
if os.path.isfile(self.filename):
raise errors.AnsibleError("%s exists, please use 'edit' instead" % self.filename)
# Let the user specify contents and save file
self._edit_file_helper(cipher=self.cipher_name)
def decrypt_file(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
if not os.path.isfile(self.filename):
raise errors.AnsibleError("%s does not exist" % self.filename)
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
if this_vault.is_encrypted(tmpdata):
dec_data = this_vault.decrypt(tmpdata)
if dec_data is None:
raise errors.AnsibleError("Decryption failed")
else:
self.write_data(dec_data, self.filename)
else:
raise errors.AnsibleError("%s is not encrypted" % self.filename)
def edit_file(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
# decrypt to tmpfile
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
dec_data = this_vault.decrypt(tmpdata)
# let the user edit the data and save
self._edit_file_helper(existing_data=dec_data)
###we want the cipher to default to AES256 (get rid of files
# encrypted with the AES cipher)
#self._edit_file_helper(existing_data=dec_data, cipher=this_vault.cipher_name)
def view_file(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
# decrypt to tmpfile
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
dec_data = this_vault.decrypt(tmpdata)
old_umask = os.umask(0o077)
_, tmp_path = tempfile.mkstemp()
self.write_data(dec_data, tmp_path)
os.umask(old_umask)
# drop the user into pager on the tmp file
call(self._pager_shell_command(tmp_path))
os.remove(tmp_path)
def encrypt_file(self):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
if not os.path.isfile(self.filename):
raise errors.AnsibleError("%s does not exist" % self.filename)
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
this_vault.cipher_name = self.cipher_name
if not this_vault.is_encrypted(tmpdata):
enc_data = this_vault.encrypt(tmpdata)
self.write_data(enc_data, self.filename)
else:
raise errors.AnsibleError("%s is already encrypted" % self.filename)
def rekey_file(self, new_password):
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
# decrypt
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
dec_data = this_vault.decrypt(tmpdata)
# create new vault
new_vault = VaultLib(new_password)
# we want to force cipher to the default
#new_vault.cipher_name = this_vault.cipher_name
# re-encrypt data and re-write file
enc_data = new_vault.encrypt(dec_data)
self.write_data(enc_data, self.filename)
def read_data(self, filename):
f = open(filename, "rb")
tmpdata = f.read()
f.close()
return tmpdata
def write_data(self, data, filename):
if os.path.isfile(filename):
os.remove(filename)
f = open(filename, "wb")
f.write(data)
f.close()
def shuffle_files(self, src, dest):
# overwrite dest with src
if os.path.isfile(dest):
os.remove(dest)
shutil.move(src, dest)
def _editor_shell_command(self, filename):
EDITOR = os.environ.get('EDITOR','vim')
editor = shlex.split(EDITOR)
editor.append(filename)
return editor
def _pager_shell_command(self, filename):
PAGER = os.environ.get('PAGER','less')
pager = shlex.split(PAGER)
pager.append(filename)
return pager
########################################
# CIPHERS #
########################################
class VaultAES(object):
# this version has been obsoleted by the VaultAES256 class
# which uses encrypt-then-mac (fixing order) and also improving the KDF used
# code remains for upgrade purposes only
# http://stackoverflow.com/a/16761459
def __init__(self):
if not md5:
raise errors.AnsibleError('md5 hash is unavailable (Could be due to FIPS mode). Legacy VaultAES format is unavailable.')
if not HAS_AES:
raise errors.AnsibleError(CRYPTO_UPGRADE)
def aes_derive_key_and_iv(self, password, salt, key_length, iv_length):
""" Create a key and an initialization vector """
d = d_i = ''
while len(d) < key_length + iv_length:
d_i = md5(d_i + password + salt).digest()
d += d_i
key = d[:key_length]
iv = d[key_length:key_length+iv_length]
return key, iv
def encrypt(self, data, password, key_length=32):
""" Read plaintext data from in_file and write encrypted to out_file """
# combine sha + data
this_sha = sha256(data).hexdigest()
tmp_data = this_sha + "\n" + data
in_file = BytesIO(tmp_data)
in_file.seek(0)
out_file = BytesIO()
bs = AES.block_size
# Get a block of random data. EL does not have Crypto.Random.new()
# so os.urandom is used for cross platform purposes
salt = os.urandom(bs - len('Salted__'))
key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
cipher = AES.new(key, AES.MODE_CBC, iv)
out_file.write('Salted__' + salt)
finished = False
while not finished:
chunk = in_file.read(1024 * bs)
if len(chunk) == 0 or len(chunk) % bs != 0:
padding_length = (bs - len(chunk) % bs) or bs
chunk += padding_length * chr(padding_length)
finished = True
out_file.write(cipher.encrypt(chunk))
out_file.seek(0)
enc_data = out_file.read()
tmp_data = hexlify(enc_data)
return tmp_data
def decrypt(self, data, password, key_length=32):
""" Read encrypted data from in_file and write decrypted to out_file """
# http://stackoverflow.com/a/14989032
data = ''.join(data.split('\n'))
data = unhexlify(data)
in_file = BytesIO(data)
in_file.seek(0)
out_file = BytesIO()
bs = AES.block_size
salt = in_file.read(bs)[len('Salted__'):]
key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
cipher = AES.new(key, AES.MODE_CBC, iv)
next_chunk = ''
finished = False
while not finished:
chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
if len(next_chunk) == 0:
padding_length = ord(chunk[-1])
chunk = chunk[:-padding_length]
finished = True
out_file.write(chunk)
# reset the stream pointer to the beginning
out_file.seek(0)
new_data = out_file.read()
# split out sha and verify decryption
split_data = new_data.split("\n")
this_sha = split_data[0]
this_data = '\n'.join(split_data[1:])
test_sha = sha256(this_data).hexdigest()
if this_sha != test_sha:
raise errors.AnsibleError("Decryption failed")
#return out_file.read()
return this_data
class VaultAES256(object):
"""
Vault implementation using AES-CTR with an HMAC-SHA256 authentication code.
Keys are derived using PBKDF2
"""
# http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
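    # Payload layout produced by encrypt(), before the vault header is
    # added (everything hex-encoded):
    #   hexlify("hexlify(salt)\nHMAC-SHA256(ciphertext)\nhexlify(ciphertext)")
    # decrypt() unhexlifies once, splits on the two newlines, then
    # unhexlifies the salt and ciphertext fields again.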
def __init__(self):
if not HAS_PBKDF2 or not HAS_COUNTER or not HAS_HASH:
raise errors.AnsibleError(CRYPTO_UPGRADE)
def gen_key_initctr(self, password, salt):
# 16 for AES 128, 32 for AES256
keylength = 32
# match the size used for counter.new to avoid extra work
ivlength = 16
hash_function = SHA256
# make two keys and one iv
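        # derived material layout:
        #   [32-byte AES key | 32-byte HMAC key | 16-byte counter IV]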
pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()
derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
count=10000, prf=pbkdf2_prf)
key1 = derivedkey[:keylength]
key2 = derivedkey[keylength:(keylength * 2)]
iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength]
return key1, key2, hexlify(iv)
def encrypt(self, data, password):
salt = os.urandom(32)
key1, key2, iv = self.gen_key_initctr(password, salt)
# PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3
bs = AES.block_size
padding_length = (bs - len(data) % bs) or bs
data += padding_length * chr(padding_length)
# COUNTER.new PARAMETERS
# 1) nbits (integer) - Length of the counter, in bits.
# 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr
ctr = Counter.new(128, initial_value=long(iv, 16))
# AES.new PARAMETERS
# 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr
# 2) MODE_CTR, is the recommended mode
# 3) counter=<CounterObject>
cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
# ENCRYPT PADDED DATA
cryptedData = cipher.encrypt(data)
# COMBINE SALT, DIGEST AND DATA
hmac = HMAC.new(key2, cryptedData, SHA256)
message = "%s\n%s\n%s" % ( hexlify(salt), hmac.hexdigest(), hexlify(cryptedData) )
message = hexlify(message)
return message
def decrypt(self, data, password):
# SPLIT SALT, DIGEST, AND DATA
data = ''.join(data.split("\n"))
data = unhexlify(data)
salt, cryptedHmac, cryptedData = data.split("\n", 2)
salt = unhexlify(salt)
cryptedData = unhexlify(cryptedData)
key1, key2, iv = self.gen_key_initctr(password, salt)
# EXIT EARLY IF DIGEST DOESN'T MATCH
hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
if not self.is_equal(cryptedHmac, hmacDecrypt.hexdigest()):
return None
# SET THE COUNTER AND THE CIPHER
ctr = Counter.new(128, initial_value=long(iv, 16))
cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
# DECRYPT PADDED DATA
decryptedData = cipher.decrypt(cryptedData)
# UNPAD DATA
padding_length = ord(decryptedData[-1])
decryptedData = decryptedData[:-padding_length]
return decryptedData
def is_equal(self, a, b):
# http://codahale.com/a-lesson-in-timing-attacks/
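        # accumulate XOR differences instead of returning early, so the
        # time taken does not reveal how many leading bytes match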
if len(a) != len(b):
return False
result = 0
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
| gpl-3.0 |
ygol/odoo | addons/payment_paypal/tests/test_paypal.py | 378 | 11126 | # -*- coding: utf-8 -*-
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment.tests.common import PaymentAcquirerCommon
from openerp.addons.payment_paypal.controllers.main import PaypalController
from openerp.tools import mute_logger
from lxml import objectify
import urlparse
class PaypalCommon(PaymentAcquirerCommon):
def setUp(self):
super(PaypalCommon, self).setUp()
cr, uid = self.cr, self.uid
self.base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url')
# get the paypal account
model, self.paypal_id = self.registry('ir.model.data').get_object_reference(cr, uid, 'payment_paypal', 'payment_acquirer_paypal')
# [email protected] - [email protected] - [email protected]
# some CC
self.amex = (('378282246310005', '123'), ('371449635398431', '123'))
self.amex_corporate = (('378734493671000', '123'))
self.autralian_bankcard = (('5610591081018250', '123'))
self.dinersclub = (('30569309025904', '123'), ('38520000023237', '123'))
self.discover = (('6011111111111117', '123'), ('6011000990139424', '123'))
self.jcb = (('3530111333300000', '123'), ('3566002020360505', '123'))
self.mastercard = (('5555555555554444', '123'), ('5105105105105100', '123'))
self.visa = (('4111111111111111', '123'), ('4012888888881881', '123'), ('4222222222222', '123'))
self.dankord_pbs = (('76009244561', '123'), ('5019717010103742', '123'))
self.switch_polo = (('6331101999990016', '123'))
class PaypalServer2Server(PaypalCommon):
def test_00_tx_management(self):
cr, uid, context = self.cr, self.uid, {}
# be sure not to do stupid things
paypal = self.payment_acquirer.browse(self.cr, self.uid, self.paypal_id, None)
self.assertEqual(paypal.environment, 'test', 'test without test environment')
res = self.payment_acquirer._paypal_s2s_get_access_token(cr, uid, [self.paypal_id], context=context)
self.assertTrue(res[self.paypal_id] is not False, 'paypal: did not generate access token')
tx_id = self.payment_transaction.s2s_create(
cr, uid, {
'amount': 0.01,
'acquirer_id': self.paypal_id,
'currency_id': self.currency_euro_id,
'reference': 'test_reference',
'partner_id': self.buyer_id,
}, {
'number': self.visa[0][0],
'cvc': self.visa[0][1],
'brand': 'visa',
'expiry_mm': 9,
'expiry_yy': 2015,
}, context=context
)
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertTrue(tx.paypal_txn_id is not False, 'paypal: txn_id should have been set after s2s request')
self.payment_transaction.write(cr, uid, tx_id, {'paypal_txn_id': False}, context=context)
class PaypalForm(PaypalCommon):
def test_10_paypal_form_render(self):
cr, uid, context = self.cr, self.uid, {}
# be sure not to do stupid things
self.payment_acquirer.write(cr, uid, self.paypal_id, {'fees_active': False}, context)
paypal = self.payment_acquirer.browse(cr, uid, self.paypal_id, context)
self.assertEqual(paypal.environment, 'test', 'test without test environment')
# ----------------------------------------
# Test: button direct rendering
# ----------------------------------------
# render the button
res = self.payment_acquirer.render(
cr, uid, self.paypal_id,
'test_ref0', 0.01, self.currency_euro_id,
partner_id=None,
partner_values=self.buyer_values,
context=context)
form_values = {
'cmd': '_xclick',
'business': '[email protected]',
'item_name': 'test_ref0',
'item_number': 'test_ref0',
'first_name': 'Buyer',
'last_name': 'Norbert',
'amount': '0.01',
'currency_code': 'EUR',
'address1': 'Huge Street 2/543',
'city': 'Sin City',
'zip': '1000',
'country': 'Belgium',
'email': '[email protected]',
'return': '%s' % urlparse.urljoin(self.base_url, PaypalController._return_url),
'notify_url': '%s' % urlparse.urljoin(self.base_url, PaypalController._notify_url),
'cancel_return': '%s' % urlparse.urljoin(self.base_url, PaypalController._cancel_url),
}
# check form result
tree = objectify.fromstring(res)
self.assertEqual(tree.get('action'), 'https://www.sandbox.paypal.com/cgi-bin/webscr', 'paypal: wrong form POST url')
for form_input in tree.input:
if form_input.get('name') in ['submit']:
continue
self.assertEqual(
form_input.get('value'),
form_values[form_input.get('name')],
'paypal: wrong value for input %s: received %s instead of %s' % (form_input.get('name'), form_input.get('value'), form_values[form_input.get('name')])
)
def test_11_paypal_form_with_fees(self):
cr, uid, context = self.cr, self.uid, {}
# be sure not to do stupid things
paypal = self.payment_acquirer.browse(self.cr, self.uid, self.paypal_id, None)
self.assertEqual(paypal.environment, 'test', 'test without test environment')
# update acquirer: compute fees
self.payment_acquirer.write(cr, uid, self.paypal_id, {
'fees_active': True,
'fees_dom_fixed': 1.0,
'fees_dom_var': 0.35,
'fees_int_fixed': 1.5,
'fees_int_var': 0.50,
}, context)
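        # With these settings the expected international fee on 12.50 EUR
        # appears to be (12.50 * 0.50 / 100 + 1.5) / (1 - 0.50 / 100) ~= 1.57,
        # matching the 'handling' value asserted below (a sketch of the fee
        # gross-up; the exact computation lives in the payment module).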
# render the button
res = self.payment_acquirer.render(
cr, uid, self.paypal_id,
'test_ref0', 12.50, self.currency_euro,
partner_id=None,
partner_values=self.buyer_values,
context=context)
# check form result
handling_found = False
tree = objectify.fromstring(res)
self.assertEqual(tree.get('action'), 'https://www.sandbox.paypal.com/cgi-bin/webscr', 'paypal: wrong form POST url')
for form_input in tree.input:
if form_input.get('name') in ['handling']:
handling_found = True
self.assertEqual(form_input.get('value'), '1.57', 'paypal: wrong computed fees')
self.assertTrue(handling_found, 'paypal: fees_active did not add handling input in rendered form')
@mute_logger('openerp.addons.payment_paypal.models.paypal', 'ValidationError')
def test_20_paypal_form_management(self):
cr, uid, context = self.cr, self.uid, {}
# be sure not to do stupid things
paypal = self.payment_acquirer.browse(cr, uid, self.paypal_id, context)
self.assertEqual(paypal.environment, 'test', 'test without test environment')
# typical data posted by paypal after client has successfully paid
paypal_post_data = {
'protection_eligibility': u'Ineligible',
'last_name': u'Poilu',
'txn_id': u'08D73520KX778924N',
'receiver_email': u'[email protected]',
'payment_status': u'Pending',
'payment_gross': u'',
'tax': u'0.00',
'residence_country': u'FR',
'address_state': u'Alsace',
'payer_status': u'verified',
'txn_type': u'web_accept',
'address_street': u'Av. de la Pelouse, 87648672 Mayet',
'handling_amount': u'0.00',
'payment_date': u'03:21:19 Nov 18, 2013 PST',
'first_name': u'Norbert',
'item_name': u'test_ref_2',
'address_country': u'France',
'charset': u'windows-1252',
'custom': u'',
'notify_version': u'3.7',
'address_name': u'Norbert Poilu',
'pending_reason': u'multi_currency',
'item_number': u'test_ref_2',
'receiver_id': u'DEG7Z7MYGT6QA',
'transaction_subject': u'',
'business': u'[email protected]',
'test_ipn': u'1',
'payer_id': u'VTDKRZQSAHYPS',
'verify_sign': u'An5ns1Kso7MWUdW4ErQKJJJ4qi4-AVoiUf-3478q3vrSmqh08IouiYpM',
'address_zip': u'75002',
'address_country_code': u'FR',
'address_city': u'Paris',
'address_status': u'unconfirmed',
'mc_currency': u'EUR',
'shipping': u'0.00',
'payer_email': u'[email protected]',
'payment_type': u'instant',
'mc_gross': u'1.95',
'ipn_track_id': u'866df2ccd444b',
'quantity': u'1'
}
# should raise error about unknown tx
with self.assertRaises(ValidationError):
self.payment_transaction.form_feedback(cr, uid, paypal_post_data, 'paypal', context=context)
# create tx
tx_id = self.payment_transaction.create(
cr, uid, {
'amount': 1.95,
'acquirer_id': self.paypal_id,
'currency_id': self.currency_euro_id,
'reference': 'test_ref_2',
'partner_name': 'Norbert Buyer',
'partner_country_id': self.country_france_id,
}, context=context
)
# validate it
self.payment_transaction.form_feedback(cr, uid, paypal_post_data, 'paypal', context=context)
# check
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertEqual(tx.state, 'pending', 'paypal: wrong state after receiving a valid pending notification')
self.assertEqual(tx.state_message, 'multi_currency', 'paypal: wrong state message after receiving a valid pending notification')
self.assertEqual(tx.paypal_txn_id, '08D73520KX778924N', 'paypal: wrong txn_id after receiving a valid pending notification')
self.assertFalse(tx.date_validate, 'paypal: validation date should not be updated whenr receiving pending notification')
# update tx
self.payment_transaction.write(cr, uid, [tx_id], {
'state': 'draft',
'paypal_txn_id': False,
}, context=context)
# update notification from paypal
paypal_post_data['payment_status'] = 'Completed'
# validate it
self.payment_transaction.form_feedback(cr, uid, paypal_post_data, 'paypal', context=context)
# check
tx = self.payment_transaction.browse(cr, uid, tx_id, context=context)
self.assertEqual(tx.state, 'done', 'paypal: wrong state after receiving a valid pending notification')
self.assertEqual(tx.paypal_txn_id, '08D73520KX778924N', 'paypal: wrong txn_id after receiving a valid pending notification')
self.assertEqual(tx.date_validate, '2013-11-18 03:21:19', 'paypal: wrong validation date')
| agpl-3.0 |
Nexenta/cinder | cinder/volume/drivers/huawei/rest_client.py | 1 | 87896 | # Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import json
import re
import six
import socket
import time
from oslo_log import log as logging
from oslo_utils import excutils
from six.moves import http_cookiejar
from six.moves import urllib
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder import utils
from cinder.volume.drivers.huawei import constants
LOG = logging.getLogger(__name__)
class RestClient(object):
"""Common class for Huawei OceanStor storage system."""
def __init__(self, configuration, san_address, san_user, san_password,
**kwargs):
self.configuration = configuration
self.san_address = san_address
self.san_user = san_user
self.san_password = san_password
self.init_http_head()
self.storage_pools = kwargs.get('storage_pools',
self.configuration.storage_pools)
self.iscsi_info = kwargs.get('iscsi_info',
self.configuration.iscsi_info)
self.iscsi_default_target_ip = kwargs.get(
'iscsi_default_target_ip',
self.configuration.iscsi_default_target_ip)
def init_http_head(self):
self.cookie = http_cookiejar.CookieJar()
self.url = None
self.device_id = None
self.headers = {
"Connection": "keep-alive",
"Content-Type": "application/json",
}
def do_call(self, url=None, data=None, method=None,
calltimeout=constants.SOCKET_TIMEOUT):
"""Send requests to Huawei storage server.
        Send the HTTPS call, get the response in JSON, convert it
        into a Python object and return it.
"""
if self.url:
url = self.url + url
handler = urllib.request.HTTPCookieProcessor(self.cookie)
opener = urllib.request.build_opener(handler)
urllib.request.install_opener(opener)
res_json = None
try:
socket.setdefaulttimeout(calltimeout)
if data:
data = json.dumps(data)
req = urllib.request.Request(url, data, self.headers)
if method:
req.get_method = lambda: method
res = urllib.request.urlopen(req).read().decode("utf-8")
if "xx/sessions" not in url:
LOG.info(_LI('\n\n\n\nRequest URL: %(url)s\n\n'
'Call Method: %(method)s\n\n'
'Request Data: %(data)s\n\n'
'Response Data:%(res)s\n\n'), {'url': url,
'method': method,
'data': data,
'res': res})
except Exception as err:
LOG.error(_LE('Bad response from server: %(url)s.'
' Error: %(err)s'), {'url': url, 'err': err})
json_msg = ('{"error":{"code": %s,"description": "Connect to '
'server error."}}') % constants.ERROR_CONNECT_TO_SERVER
res_json = json.loads(json_msg)
return res_json
try:
res_json = json.loads(res)
except Exception as err:
LOG.error(_LE('JSON transfer error: %s.'), err)
raise
return res_json
def login(self):
"""Login Huawei storage array."""
device_id = None
for item_url in self.san_address:
url = item_url + "xx/sessions"
data = {"username": self.san_user,
"password": self.san_password,
"scope": "0"}
self.init_http_head()
result = self.do_call(url, data,
calltimeout=constants.LOGIN_SOCKET_TIMEOUT)
if (result['error']['code'] != 0) or ("data" not in result):
LOG.error(_LE("Login error. URL: %(url)s\n"
"Reason: %(reason)s."),
{"url": item_url, "reason": result})
continue
LOG.debug('Login success: %(url)s', {'url': item_url})
device_id = result['data']['deviceid']
self.device_id = device_id
self.url = item_url + device_id
self.headers['iBaseToken'] = result['data']['iBaseToken']
if (result['data']['accountstate']
in (constants.PWD_EXPIRED, constants.PWD_RESET)):
self.logout()
msg = _("Password has expired or has been reset, "
"please change the password.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
break
if device_id is None:
msg = _("Failed to login with all rest URLs.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return device_id
def try_login(self):
try:
self.login()
except Exception as err:
LOG.warning(_LW('Login failed. Error: %s.'), err)
@utils.synchronized('huawei_cinder_call')
def call(self, url, data=None, method=None):
"""Send requests to server.
If fail, try another RestURL.
"""
device_id = None
old_url = self.url
result = self.do_call(url, data, method)
error_code = result['error']['code']
if (error_code == constants.ERROR_CONNECT_TO_SERVER
or error_code == constants.ERROR_UNAUTHORIZED_TO_SERVER):
LOG.error(_LE("Can't open the recent url, relogin."))
device_id = self.login()
if device_id is not None:
LOG.debug('Replace URL: \n'
'Old URL: %(old_url)s\n,'
'New URL: %(new_url)s\n.',
{'old_url': old_url,
'new_url': self.url})
result = self.do_call(url, data, method)
if result['error']['code'] in constants.RELOGIN_ERROR_PASS:
result['error']['code'] = 0
return result
def logout(self):
"""Logout the session."""
url = "/sessions"
if self.url:
result = self.do_call(url, None, "DELETE")
self._assert_rest_result(result, _('Logout session error.'))
def _assert_rest_result(self, result, err_str):
if result['error']['code'] != 0:
msg = (_('%(err)s\nresult: %(res)s.') % {'err': err_str,
'res': result})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
def _assert_data_in_result(self, result, msg):
if 'data' not in result:
err_msg = _('%s "data" is not in result.') % msg
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
def create_lun(self, lun_params):
url = "/lun"
result = self.call(url, lun_params)
if result['error']['code'] == constants.ERROR_VOLUME_ALREADY_EXIST:
lun_id = self.get_lun_id_by_name(lun_params['NAME'])
if lun_id:
return self.get_lun_info(lun_id)
msg = _('Create lun error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def check_lun_exist(self, lun_id, lun_wwn=None):
url = "/lun/" + lun_id
result = self.call(url, None, "GET")
error_code = result['error']['code']
if error_code != 0:
return False
if lun_wwn and result['data']['WWN'] != lun_wwn:
LOG.debug("LUN ID %(id)s with WWN %(wwn)s does not exist on "
"the array.", {"id": lun_id, "wwn": lun_wwn})
return False
return True
def delete_lun(self, lun_id):
url = "/lun/" + lun_id
data = {"TYPE": "11",
"ID": lun_id}
result = self.call(url, data, "DELETE")
self._assert_rest_result(result, _('Delete lun error.'))
def get_all_pools(self):
url = "/storagepool"
result = self.call(url, None)
msg = _('Query resource pool error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def get_pool_info(self, pool_name=None, pools=None):
info = {}
if not pool_name:
return info
for pool in pools:
if pool_name.strip() != pool['NAME']:
continue
if pool.get('USAGETYPE') == constants.FILE_SYSTEM_POOL_TYPE:
break
info['ID'] = pool['ID']
info['CAPACITY'] = pool.get('DATASPACE', pool['USERFREECAPACITY'])
info['TOTALCAPACITY'] = pool['USERTOTALCAPACITY']
info['TIER0CAPACITY'] = pool['TIER0CAPACITY']
info['TIER1CAPACITY'] = pool['TIER1CAPACITY']
info['TIER2CAPACITY'] = pool['TIER2CAPACITY']
return info
def get_pool_id(self, pool_name):
pools = self.get_all_pools()
pool_info = self.get_pool_info(pool_name, pools)
if not pool_info:
# The following code is to keep compatibility with old version of
# Huawei driver.
for pool_name in self.storage_pools:
pool_info = self.get_pool_info(pool_name, pools)
if pool_info:
break
if not pool_info:
msg = _('Can not get pool info. pool: %s') % pool_name
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return pool_info['ID']
def _get_id_from_result(self, result, name, key):
if 'data' in result:
for item in result['data']:
if name == item.get(key):
return item['ID']
def get_lun_id_by_name(self, name):
url = "/lun?range=[0-65535]"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get lun id by name error.'))
return self._get_id_from_result(result, name, 'NAME')
def activate_snapshot(self, snapshot_id):
url = "/snapshot/activate"
data = ({"SNAPSHOTLIST": snapshot_id}
if type(snapshot_id) in (list, tuple)
else {"SNAPSHOTLIST": [snapshot_id]})
result = self.call(url, data)
self._assert_rest_result(result, _('Activate snapshot error.'))
def create_snapshot(self, lun_id, snapshot_name, snapshot_description):
url = "/snapshot"
data = {"TYPE": "27",
"NAME": snapshot_name,
"PARENTTYPE": "11",
"DESCRIPTION": snapshot_description,
"PARENTID": lun_id}
result = self.call(url, data)
msg = _('Create snapshot error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def get_lun_id(self, volume, volume_name):
lun_id = (volume.provider_location or
self.get_lun_id_by_name(volume_name))
if not lun_id:
msg = (_("Can't find lun info on the array. "
"volume: %(id)s, lun name: %(name)s.") %
{'id': volume.id, 'name': volume_name})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return lun_id
def check_snapshot_exist(self, snapshot_id):
url = "/snapshot/%s" % snapshot_id
result = self.call(url, None, "GET")
error_code = result['error']['code']
if error_code != 0:
return False
return True
def stop_snapshot(self, snapshot_id):
url = "/snapshot/stop"
stopdata = {"ID": snapshot_id}
result = self.call(url, stopdata, "PUT")
self._assert_rest_result(result, _('Stop snapshot error.'))
def delete_snapshot(self, snapshotid):
url = "/snapshot/%s" % snapshotid
data = {"TYPE": "27", "ID": snapshotid}
result = self.call(url, data, "DELETE")
self._assert_rest_result(result, _('Delete snapshot error.'))
def get_snapshot_id_by_name(self, name):
url = "/snapshot?range=[0-32767]"
description = 'The snapshot license file is unavailable.'
result = self.call(url, None, "GET")
if 'error' in result:
if description == result['error']['description']:
return
self._assert_rest_result(result, _('Get snapshot id error.'))
return self._get_id_from_result(result, name, 'NAME')
def create_luncopy(self, luncopyname, srclunid, tgtlunid):
"""Create a luncopy."""
url = "/luncopy"
data = {"TYPE": 219,
"NAME": luncopyname,
"DESCRIPTION": luncopyname,
"COPYSPEED": 2,
"LUNCOPYTYPE": "1",
"SOURCELUN": ("INVALID;%s;INVALID;INVALID;INVALID"
% srclunid),
"TARGETLUN": ("INVALID;%s;INVALID;INVALID;INVALID"
% tgtlunid)}
result = self.call(url, data)
msg = _('Create luncopy error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']['ID']
def add_host_to_hostgroup(self, host_id):
"""Associate host to hostgroup.
If hostgroup doesn't exist, create one.
"""
hostgroup_name = constants.HOSTGROUP_PREFIX + host_id
hostgroup_id = self.create_hostgroup_with_check(hostgroup_name)
is_associated = self._is_host_associate_to_hostgroup(hostgroup_id,
host_id)
if not is_associated:
self._associate_host_to_hostgroup(hostgroup_id, host_id)
return hostgroup_id
def get_tgt_port_group(self, tgt_port_group):
"""Find target portgroup id by target port group name."""
url = "/portgroup?range=[0-8191]&TYPE=257"
result = self.call(url, None, "GET")
msg = _('Find portgroup error.')
self._assert_rest_result(result, msg)
return self._get_id_from_result(result, tgt_port_group, 'NAME')
def _associate_portgroup_to_view(self, view_id, portgroup_id):
url = "/MAPPINGVIEW/CREATE_ASSOCIATE"
data = {"ASSOCIATEOBJTYPE": "257",
"ASSOCIATEOBJID": portgroup_id,
"TYPE": "245",
"ID": view_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Associate portgroup to mapping '
'view error.'))
def _portgroup_associated(self, view_id, portgroup_id):
url = ("/mappingview/associate?TYPE=245&"
"ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=%s" % portgroup_id)
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Check portgroup associate error.'))
if self._get_id_from_result(result, view_id, 'ID'):
return True
return False
def do_mapping(self, lun_id, hostgroup_id, host_id, portgroup_id=None,
lun_type=constants.LUN_TYPE):
"""Add hostgroup and lungroup to mapping view."""
lungroup_name = constants.LUNGROUP_PREFIX + host_id
mapping_view_name = constants.MAPPING_VIEW_PREFIX + host_id
lungroup_id = self._find_lungroup(lungroup_name)
view_id = self.find_mapping_view(mapping_view_name)
map_info = {}
LOG.info(_LI(
'do_mapping, lun_group: %(lun_group)s, '
'view_id: %(view_id)s, lun_id: %(lun_id)s.'),
{'lun_group': lungroup_id,
'view_id': view_id,
'lun_id': lun_id})
try:
# Create lungroup and add LUN into to lungroup.
if lungroup_id is None:
lungroup_id = self._create_lungroup(lungroup_name)
is_associated = self._is_lun_associated_to_lungroup(lungroup_id,
lun_id,
lun_type)
if not is_associated:
self.associate_lun_to_lungroup(lungroup_id, lun_id, lun_type)
if view_id is None:
view_id = self._add_mapping_view(mapping_view_name)
self._associate_hostgroup_to_view(view_id, hostgroup_id)
self._associate_lungroup_to_view(view_id, lungroup_id)
if portgroup_id:
self._associate_portgroup_to_view(view_id, portgroup_id)
else:
if not self.hostgroup_associated(view_id, hostgroup_id):
self._associate_hostgroup_to_view(view_id, hostgroup_id)
if not self.lungroup_associated(view_id, lungroup_id):
self._associate_lungroup_to_view(view_id, lungroup_id)
if portgroup_id:
if not self._portgroup_associated(view_id,
portgroup_id):
self._associate_portgroup_to_view(view_id,
portgroup_id)
version = self.find_array_version()
if version >= constants.ARRAY_VERSION:
aval_luns = self.find_view_by_id(view_id)
map_info["lun_id"] = lun_id
map_info["view_id"] = view_id
map_info["aval_luns"] = aval_luns
except Exception:
with excutils.save_and_reraise_exception():
LOG.error(_LE(
'Error occurred when adding hostgroup and lungroup to '
'view. Remove lun from lungroup now.'))
self.remove_lun_from_lungroup(lungroup_id, lun_id, lun_type)
return map_info
def check_iscsi_initiators_exist_in_host(self, host_id):
url = "/iscsi_initiator?range=[0-256]&PARENTID=%s" % host_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, 'Get host initiators info failed.')
if "data" in result:
return True
return False
def ensure_initiator_added(self, initiator_name, host_id):
added = self._initiator_is_added_to_array(initiator_name)
if not added:
self._add_initiator_to_array(initiator_name)
if not self.is_initiator_associated_to_host(initiator_name):
self._associate_initiator_to_host(initiator_name,
host_id)
def _get_iscsi_tgt_port(self):
url = "/iscsidevicename"
result = self.call(url, None)
msg = _('Get iSCSI target port error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data'][0]['CMO_ISCSI_DEVICE_NAME']
def find_hostgroup(self, groupname):
"""Get the given hostgroup id."""
url = "/hostgroup?range=[0-8191]"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get hostgroup information error.'))
return self._get_id_from_result(result, groupname, 'NAME')
def _find_lungroup(self, lungroup_name):
"""Get the given hostgroup id."""
url = "/lungroup?range=[0-8191]"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get lungroup information error.'))
return self._get_id_from_result(result, lungroup_name, 'NAME')
def create_hostgroup_with_check(self, hostgroup_name):
"""Check if host exists on the array, or create it."""
hostgroup_id = self.find_hostgroup(hostgroup_name)
if hostgroup_id:
LOG.info(_LI(
'create_hostgroup_with_check. '
'hostgroup name: %(name)s, '
'hostgroup id: %(id)s'),
{'name': hostgroup_name,
'id': hostgroup_id})
return hostgroup_id
try:
hostgroup_id = self._create_hostgroup(hostgroup_name)
except Exception:
LOG.info(_LI(
'Failed to create hostgroup: %(name)s. '
'Please check if it exists on the array.'),
{'name': hostgroup_name})
hostgroup_id = self.find_hostgroup(hostgroup_name)
if hostgroup_id is None:
err_msg = (_(
'Failed to create hostgroup: %(name)s. '
'Check if it exists on the array.')
% {'name': hostgroup_name})
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
LOG.info(_LI(
'create_hostgroup_with_check. '
'Create hostgroup success. '
'hostgroup name: %(name)s, '
'hostgroup id: %(id)s'),
{'name': hostgroup_name,
'id': hostgroup_id})
return hostgroup_id
def _create_hostgroup(self, hostgroup_name):
url = "/hostgroup"
data = {"TYPE": "14", "NAME": hostgroup_name}
result = self.call(url, data)
msg = _('Create hostgroup error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']['ID']
def _create_lungroup(self, lungroup_name):
url = "/lungroup"
data = {"DESCRIPTION": lungroup_name,
"APPTYPE": '0',
"GROUPTYPE": '0',
"NAME": lungroup_name}
result = self.call(url, data)
msg = _('Create lungroup error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']['ID']
def delete_lungroup(self, lungroup_id):
url = "/LUNGroup/" + lungroup_id
result = self.call(url, None, "DELETE")
self._assert_rest_result(result, _('Delete lungroup error.'))
def lungroup_associated(self, view_id, lungroup_id):
url = ("/mappingview/associate?TYPE=245&"
"ASSOCIATEOBJTYPE=256&ASSOCIATEOBJID=%s" % lungroup_id)
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Check lungroup associate error.'))
if self._get_id_from_result(result, view_id, 'ID'):
return True
return False
def hostgroup_associated(self, view_id, hostgroup_id):
url = ("/mappingview/associate?TYPE=245&"
"ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=%s" % hostgroup_id)
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Check hostgroup associate error.'))
if self._get_id_from_result(result, view_id, 'ID'):
return True
return False
def get_host_lun_id(self, host_id, lun_id, lun_type=constants.LUN_TYPE):
cmd_type = 'lun' if lun_type == constants.LUN_TYPE else 'snapshot'
url = ("/%s/associate?TYPE=%s&ASSOCIATEOBJTYPE=21"
"&ASSOCIATEOBJID=%s" % (cmd_type, lun_type, host_id))
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Find host lun id error.'))
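        # Default host LUN id; overwritten below when the array reports an
        # existing mapping for this LUN.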
host_lun_id = 1
if 'data' in result:
for item in result['data']:
if lun_id == item['ID']:
associate_data = item['ASSOCIATEMETADATA']
try:
hostassoinfo = json.loads(associate_data)
host_lun_id = hostassoinfo['HostLUNID']
break
except Exception as err:
LOG.error(_LE("JSON transfer data error. %s."), err)
raise
return host_lun_id
def get_host_id_by_name(self, host_name):
"""Get the given host ID."""
url = "/host?range=[0-65535]"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Find host in hostgroup error.'))
return self._get_id_from_result(result, host_name, 'NAME')
def add_host_with_check(self, host_name, host_name_before_hash):
host_id = self.get_host_id_by_name(host_name)
if host_id:
LOG.info(_LI(
'add_host_with_check. '
'host name: %(name)s, '
'host id: %(id)s'),
{'name': host_name,
'id': host_id})
return host_id
try:
host_id = self._add_host(host_name, host_name_before_hash)
except Exception:
LOG.info(_LI(
'Failed to create host: %(name)s. '
'Check if it exists on the array.'),
{'name': host_name})
host_id = self.get_host_id_by_name(host_name)
if not host_id:
            err_msg = (_(
                'Failed to create host: %(name)s. '
                'Please check if it exists on the array.')
                % {'name': host_name})
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
LOG.info(_LI(
'add_host_with_check. '
'create host success. '
'host name: %(name)s, '
'host id: %(id)s'),
{'name': host_name,
'id': host_id})
return host_id
def _add_host(self, hostname, host_name_before_hash):
"""Add a new host."""
url = "/host"
data = {"TYPE": "21",
"NAME": hostname,
"OPERATIONSYSTEM": "0",
"DESCRIPTION": host_name_before_hash}
result = self.call(url, data)
self._assert_rest_result(result, _('Add new host error.'))
if 'data' in result:
return result['data']['ID']
def _is_host_associate_to_hostgroup(self, hostgroup_id, host_id):
"""Check whether the host is associated to the hostgroup."""
url = ("/host/associate?TYPE=21&"
"ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=%s" % hostgroup_id)
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Check hostgroup associate error.'))
if self._get_id_from_result(result, host_id, 'ID'):
return True
return False
def _is_lun_associated_to_lungroup(self, lungroup_id, lun_id,
lun_type=constants.LUN_TYPE):
"""Check whether the lun is associated to the lungroup."""
cmd_type = 'lun' if lun_type == constants.LUN_TYPE else 'snapshot'
url = ("/%s/associate?TYPE=%s&"
"ASSOCIATEOBJTYPE=256&ASSOCIATEOBJID=%s"
% (cmd_type, lun_type, lungroup_id))
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Check lungroup associate error.'))
if self._get_id_from_result(result, lun_id, 'ID'):
return True
return False
def _associate_host_to_hostgroup(self, hostgroup_id, host_id):
url = "/hostgroup/associate"
data = {"TYPE": "14",
"ID": hostgroup_id,
"ASSOCIATEOBJTYPE": "21",
"ASSOCIATEOBJID": host_id}
result = self.call(url, data)
self._assert_rest_result(result, _('Associate host to hostgroup '
'error.'))
def associate_lun_to_lungroup(self, lungroup_id, lun_id,
lun_type=constants.LUN_TYPE):
"""Associate lun to lungroup."""
url = "/lungroup/associate"
data = {"ID": lungroup_id,
"ASSOCIATEOBJTYPE": lun_type,
"ASSOCIATEOBJID": lun_id}
result = self.call(url, data)
self._assert_rest_result(result, _('Associate lun to lungroup error.'))
def remove_lun_from_lungroup(self, lungroup_id, lun_id,
lun_type=constants.LUN_TYPE):
"""Remove lun from lungroup."""
url = ("/lungroup/associate?ID=%s&ASSOCIATEOBJTYPE=%s"
"&ASSOCIATEOBJID=%s" % (lungroup_id, lun_type, lun_id))
result = self.call(url, None, 'DELETE')
self._assert_rest_result(
result, _('Delete associated lun from lungroup error.'))
def _initiator_is_added_to_array(self, ininame):
"""Check whether the initiator is already added on the array."""
url = "/iscsi_initiator?range=[0-256]"
result = self.call(url, None, "GET")
self._assert_rest_result(result,
_('Check initiator added to array error.'))
if self._get_id_from_result(result, ininame, 'ID'):
return True
return False
def is_initiator_associated_to_host(self, ininame):
"""Check whether the initiator is associated to the host."""
url = "/iscsi_initiator?range=[0-256]"
result = self.call(url, None, "GET")
self._assert_rest_result(
result, _('Check initiator associated to host error.'))
if 'data' in result:
for item in result['data']:
if item['ID'] == ininame and item['ISFREE'] == "true":
return False
return True
def _add_initiator_to_array(self, initiator_name):
"""Add a new initiator to storage device."""
url = "/iscsi_initiator"
data = {"TYPE": "222",
"ID": initiator_name,
"USECHAP": "false"}
result = self.call(url, data, "POST")
self._assert_rest_result(result,
_('Add initiator to array error.'))
def _add_initiator_to_host(self, initiator_name, host_id):
url = "/iscsi_initiator/" + initiator_name
data = {"TYPE": "222",
"ID": initiator_name,
"USECHAP": "false",
"PARENTTYPE": "21",
"PARENTID": host_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(result,
_('Associate initiator to host error.'))
def _associate_initiator_to_host(self,
initiator_name,
host_id):
"""Associate initiator with the host."""
chapinfo = self.find_chap_info(self.iscsi_info,
initiator_name)
multipath_type = self._find_alua_info(self.iscsi_info,
initiator_name)
if chapinfo:
LOG.info(_LI('Use CHAP when adding initiator to host.'))
self._use_chap(chapinfo, initiator_name, host_id)
else:
self._add_initiator_to_host(initiator_name, host_id)
if multipath_type:
LOG.info(_LI('Use ALUA when adding initiator to host.'))
self._use_alua(initiator_name, multipath_type)
def find_chap_info(self, iscsi_info, initiator_name):
"""Find CHAP info from xml."""
chapinfo = None
for ini in iscsi_info:
if ini['Name'] == initiator_name:
if 'CHAPinfo' in ini:
chapinfo = ini['CHAPinfo']
break
return chapinfo
def _find_alua_info(self, iscsi_info, initiator_name):
"""Find ALUA info from xml."""
multipath_type = 0
for ini in iscsi_info:
if ini['Name'] == initiator_name:
if 'ALUA' in ini:
if ini['ALUA'] != '1' and ini['ALUA'] != '0':
msg = (_(
'Invalid ALUA value. '
'ALUA value must be 1 or 0.'))
LOG.error(msg)
                        raise exception.InvalidInput(reason=msg)
else:
multipath_type = ini['ALUA']
break
return multipath_type
def _use_chap(self, chapinfo, initiator_name, host_id):
"""Use CHAP when adding initiator to host."""
(chap_username, chap_password) = chapinfo.split(";")
url = "/iscsi_initiator/" + initiator_name
data = {"TYPE": "222",
"USECHAP": "true",
"CHAPNAME": chap_username,
"CHAPPASSWORD": chap_password,
"ID": initiator_name,
"PARENTTYPE": "21",
"PARENTID": host_id}
result = self.call(url, data, "PUT")
msg = _('Use CHAP to associate initiator to host error. '
'Please check the CHAP username and password.')
self._assert_rest_result(result, msg)
def _use_alua(self, initiator_name, multipath_type):
"""Use ALUA when adding initiator to host."""
url = "/iscsi_initiator"
data = {"ID": initiator_name,
"MULTIPATHTYPE": multipath_type}
result = self.call(url, data, "PUT")
self._assert_rest_result(
result, _('Use ALUA to associate initiator to host error.'))
def remove_chap(self, initiator_name):
"""Remove CHAP when terminate connection."""
url = "/iscsi_initiator"
data = {"USECHAP": "false",
"MULTIPATHTYPE": "0",
"ID": initiator_name}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Remove CHAP error.'))
def find_mapping_view(self, name):
"""Find mapping view."""
url = "/mappingview?range=[0-8191]"
result = self.call(url, None, "GET")
msg = _('Find mapping view error.')
self._assert_rest_result(result, msg)
return self._get_id_from_result(result, name, 'NAME')
def _add_mapping_view(self, name):
url = "/mappingview"
data = {"NAME": name, "TYPE": "245"}
result = self.call(url, data)
self._assert_rest_result(result, _('Add mapping view error.'))
return result['data']['ID']
def _associate_hostgroup_to_view(self, view_id, hostgroup_id):
url = "/MAPPINGVIEW/CREATE_ASSOCIATE"
data = {"ASSOCIATEOBJTYPE": "14",
"ASSOCIATEOBJID": hostgroup_id,
"TYPE": "245",
"ID": view_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Associate host to mapping view '
'error.'))
def _associate_lungroup_to_view(self, view_id, lungroup_id):
url = "/MAPPINGVIEW/CREATE_ASSOCIATE"
data = {"ASSOCIATEOBJTYPE": "256",
"ASSOCIATEOBJID": lungroup_id,
"TYPE": "245",
"ID": view_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(
result, _('Associate lungroup to mapping view error.'))
def delete_lungroup_mapping_view(self, view_id, lungroup_id):
"""Remove lungroup associate from the mapping view."""
url = "/mappingview/REMOVE_ASSOCIATE"
data = {"ASSOCIATEOBJTYPE": "256",
"ASSOCIATEOBJID": lungroup_id,
"TYPE": "245",
"ID": view_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Delete lungroup from mapping view '
'error.'))
def delete_hostgoup_mapping_view(self, view_id, hostgroup_id):
"""Remove hostgroup associate from the mapping view."""
url = "/mappingview/REMOVE_ASSOCIATE"
data = {"ASSOCIATEOBJTYPE": "14",
"ASSOCIATEOBJID": hostgroup_id,
"TYPE": "245",
"ID": view_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(
result, _('Delete hostgroup from mapping view error.'))
def delete_portgroup_mapping_view(self, view_id, portgroup_id):
"""Remove portgroup associate from the mapping view."""
url = "/mappingview/REMOVE_ASSOCIATE"
data = {"ASSOCIATEOBJTYPE": "257",
"ASSOCIATEOBJID": portgroup_id,
"TYPE": "245",
"ID": view_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(
result, _('Delete portgroup from mapping view error.'))
def delete_mapping_view(self, view_id):
"""Remove mapping view from the storage."""
url = "/mappingview/" + view_id
result = self.call(url, None, "DELETE")
self._assert_rest_result(result, _('Delete mapping view error.'))
def get_obj_count_from_lungroup(self, lungroup_id):
"""Get all objects count associated to the lungroup."""
lun_count = self._get_obj_count_from_lungroup_by_type(
lungroup_id, constants.LUN_TYPE)
snapshot_count = self._get_obj_count_from_lungroup_by_type(
lungroup_id, constants.SNAPSHOT_TYPE)
return int(lun_count) + int(snapshot_count)
def _get_obj_count_from_lungroup_by_type(self, lungroup_id,
lun_type=constants.LUN_TYPE):
cmd_type = 'lun' if lun_type == constants.LUN_TYPE else 'snapshot'
lunnum = 0
if not lungroup_id:
return lunnum
url = ("/%s/count?TYPE=%s&ASSOCIATEOBJTYPE=256&"
"ASSOCIATEOBJID=%s" % (cmd_type, lun_type, lungroup_id))
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Find obj number error.'))
if 'data' in result:
lunnum = int(result['data']['COUNT'])
return lunnum
def is_portgroup_associated_to_view(self, view_id, portgroup_id):
"""Check whether the port group is associated to the mapping view."""
url = ("/portgroup/associate?ASSOCIATEOBJTYPE=245&"
"ASSOCIATEOBJID=%s&range=[0-8191]" % view_id)
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Find portgroup from mapping view '
'error.'))
if self._get_id_from_result(result, portgroup_id, 'ID'):
return True
return False
def find_lungroup_from_map(self, view_id):
"""Get lungroup from the given map"""
url = ("/mappingview/associate/lungroup?TYPE=256&"
"ASSOCIATEOBJTYPE=245&ASSOCIATEOBJID=%s" % view_id)
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Find lun group from mapping view '
'error.'))
lungroup_id = None
if 'data' in result:
# One map can have only one lungroup.
for item in result['data']:
lungroup_id = item['ID']
return lungroup_id
def start_luncopy(self, luncopy_id):
"""Start a LUNcopy."""
url = "/LUNCOPY/start"
data = {"TYPE": "219", "ID": luncopy_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Start LUNcopy error.'))
def _get_capacity(self, pool_name, result):
"""Get free capacity and total capacity of the pool."""
pool_info = self.get_pool_info(pool_name, result)
pool_capacity = {'total_capacity': 0.0,
'free_capacity': 0.0}
if pool_info:
total = float(pool_info['TOTALCAPACITY']) / constants.CAPACITY_UNIT
free = float(pool_info['CAPACITY']) / constants.CAPACITY_UNIT
pool_capacity['total_capacity'] = total
pool_capacity['free_capacity'] = free
return pool_capacity
def _get_disk_type(self, pool_name, result):
"""Get disk type of the pool."""
pool_info = self.get_pool_info(pool_name, result)
if not pool_info:
return None
pool_disk = []
for i, x in enumerate(['ssd', 'sas', 'nl_sas']):
if pool_info['TIER%dCAPACITY' % i] != '0':
pool_disk.append(x)
if len(pool_disk) > 1:
pool_disk = ['mix']
return pool_disk[0] if pool_disk else None
def get_luncopy_info(self, luncopy_id):
"""Get LUNcopy information."""
url = "/LUNCOPY?range=[0-1023]"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get LUNcopy information error.'))
luncopyinfo = {}
if 'data' in result:
for item in result['data']:
if luncopy_id == item['ID']:
luncopyinfo['name'] = item['NAME']
luncopyinfo['id'] = item['ID']
luncopyinfo['state'] = item['HEALTHSTATUS']
luncopyinfo['status'] = item['RUNNINGSTATUS']
break
return luncopyinfo
def delete_luncopy(self, luncopy_id):
"""Delete a LUNcopy."""
url = "/LUNCOPY/%s" % luncopy_id
result = self.call(url, None, "DELETE")
self._assert_rest_result(result, _('Delete LUNcopy error.'))
def get_init_targ_map(self, wwns):
init_targ_map = {}
tgt_port_wwns = []
for wwn in wwns:
tgtwwpns = self.get_fc_target_wwpns(wwn)
if not tgtwwpns:
continue
init_targ_map[wwn] = tgtwwpns
for tgtwwpn in tgtwwpns:
if tgtwwpn not in tgt_port_wwns:
tgt_port_wwns.append(tgtwwpn)
return (tgt_port_wwns, init_targ_map)
def get_online_free_wwns(self):
"""Get online free WWNs.
        If no new ports are connected, return an empty list.
"""
url = "/fc_initiator?ISFREE=true&range=[0-8191]"
result = self.call(url, None, "GET")
msg = _('Get connected free FC wwn error.')
self._assert_rest_result(result, msg)
wwns = []
if 'data' in result:
for item in result['data']:
if item['RUNNINGSTATUS'] == constants.FC_INIT_ONLINE:
wwns.append(item['ID'])
return wwns
def add_fc_port_to_host(self, host_id, wwn):
"""Add a FC port to the host."""
url = "/fc_initiator/" + wwn
data = {"TYPE": "223",
"ID": wwn,
"PARENTTYPE": 21,
"PARENTID": host_id}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Add FC port to host error.'))
def _get_iscsi_port_info(self, ip):
"""Get iscsi port info in order to build the iscsi target iqn."""
url = "/eth_port"
result = self.call(url, None, "GET")
msg = _('Get iSCSI port information error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
iscsi_port_info = None
for item in result['data']:
if ip == item['IPV4ADDR']:
iscsi_port_info = item['LOCATION']
break
return iscsi_port_info
def _get_tgt_iqn(self, iscsi_ip):
"""Get target iSCSI iqn."""
ip_info = self._get_iscsi_port_info(iscsi_ip)
iqn_prefix = self._get_iscsi_tgt_port()
if not ip_info:
err_msg = (_(
'Get iSCSI port info error, please check the target IP '
'configured in huawei conf file.'))
LOG.error(err_msg)
raise exception.VolumeBackendAPIException(data=err_msg)
LOG.debug('Request ip info is: %s.', ip_info)
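        # ip_info (the port LOCATION) is assumed to look like "CTE0.A1.P3":
        # joining the last two dot-separated parts gives e.g. "A1P3", i.e.
        # controller letter, interface digit, 'P', port digit.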
split_list = ip_info.split(".")
newstr = split_list[1] + split_list[2]
LOG.info(_LI('New str info is: %s.'), newstr)
if ip_info:
if newstr[0] == 'A':
ctr = "0"
elif newstr[0] == 'B':
ctr = "1"
interface = '0' + newstr[1]
port = '0' + newstr[3]
iqn_suffix = ctr + '02' + interface + port
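            # Strip leading zeros, e.g. "0020103" -> "20103".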
for i in range(0, len(iqn_suffix)):
if iqn_suffix[i] != '0':
iqn_suffix = iqn_suffix[i:]
break
iqn = iqn_prefix + ':' + iqn_suffix + ':' + iscsi_ip
LOG.info(_LI('_get_tgt_iqn: iSCSI target iqn is: %s.'), iqn)
return iqn
def get_fc_target_wwpns(self, wwn):
url = ("/host_link?INITIATOR_TYPE=223&INITIATOR_PORT_WWN=" + wwn)
result = self.call(url, None, "GET")
msg = _('Get FC target wwpn error.')
self._assert_rest_result(result, msg)
fc_wwpns = []
if "data" in result:
for item in result['data']:
if wwn == item['INITIATOR_PORT_WWN']:
fc_wwpns.append(item['TARGET_PORT_WWN'])
return fc_wwpns
def update_volume_stats(self):
data = {}
data['pools'] = []
result = self.get_all_pools()
for pool_name in self.storage_pools:
capacity = self._get_capacity(pool_name, result)
disk_type = self._get_disk_type(pool_name, result)
pool = {}
pool.update(dict(
location_info=self.device_id,
pool_name=pool_name,
total_capacity_gb=capacity['total_capacity'],
free_capacity_gb=capacity['free_capacity'],
reserved_percentage=self.configuration.safe_get(
'reserved_percentage'),
QoS_support=True,
max_over_subscription_ratio=self.configuration.safe_get(
'max_over_subscription_ratio'),
thin_provisioning_support=True,
thick_provisioning_support=True,
smarttier=True,
smartcache=True,
smartpartition=True,
hypermetro=True,
consistencygroup_support=True,
))
if disk_type:
pool['disk_type'] = disk_type
data['pools'].append(pool)
return data
def _find_qos_policy_info(self, policy_name):
url = "/ioclass"
result = self.call(url, None, "GET")
msg = _('Get QoS policy error.')
self._assert_rest_result(result, msg)
qos_info = {}
if 'data' in result:
for item in result['data']:
if policy_name == item['NAME']:
qos_info['ID'] = item['ID']
lun_list = json.loads(item['LUNLIST'])
qos_info['LUNLIST'] = lun_list
qos_info['RUNNINGSTATUS'] = item['RUNNINGSTATUS']
break
return qos_info
def _update_qos_policy_lunlist(self, lun_list, policy_id):
url = "/ioclass/" + policy_id
data = {"TYPE": "230",
"ID": policy_id,
"LUNLIST": lun_list}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Update QoS policy error.'))
def _get_tgt_ip_from_portgroup(self, portgroup_id):
target_ips = []
url = ("/eth_port/associate?TYPE=213&ASSOCIATEOBJTYPE=257"
"&ASSOCIATEOBJID=%s" % portgroup_id)
result = self.call(url, None, "GET")
msg = _('Get target IP error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
if 'data' in result:
for item in result['data']:
if (item['IPV4ADDR'] and item['HEALTHSTATUS'] ==
constants.STATUS_HEALTH
and item['RUNNINGSTATUS'] == constants.STATUS_RUNNING):
target_ip = item['IPV4ADDR']
LOG.info(_LI('_get_tgt_ip_from_portgroup: Get ip: %s.'),
target_ip)
target_ips.append(target_ip)
return target_ips
def get_iscsi_params(self, connector):
"""Get target iSCSI params, including iqn, IP."""
initiator = connector['initiator']
multipath = connector['multipath']
target_ips = []
target_iqns = []
temp_tgt_ips = []
portgroup = None
portgroup_id = None
if multipath:
for ini in self.iscsi_info:
if ini['Name'] == initiator:
portgroup = ini.get('TargetPortGroup')
if portgroup:
portgroup_id = self.get_tgt_port_group(portgroup)
temp_tgt_ips = self._get_tgt_ip_from_portgroup(portgroup_id)
valid_port_info = self._get_tgt_port_ip_from_rest()
valid_tgt_ips = valid_port_info
for ip in temp_tgt_ips:
if ip in valid_tgt_ips:
target_ips.append(ip)
if not target_ips:
msg = (_(
'get_iscsi_params: No valid port in portgroup. '
'portgroup_id: %(id)s, please check it on storage.')
% {'id': portgroup_id})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
else:
target_ips = self._get_target_ip(initiator)
else:
target_ips = self._get_target_ip(initiator)
# Deal with the remote tgt ip.
if 'remote_target_ip' in connector:
target_ips.append(connector['remote_target_ip'])
LOG.info(_LI('Get the default ip: %s.'), target_ips)
for ip in target_ips:
target_iqn = self._get_tgt_iqn_from_rest(ip)
if not target_iqn:
target_iqn = self._get_tgt_iqn(ip)
if target_iqn:
target_iqns.append(target_iqn)
return (target_iqns, target_ips, portgroup_id)
def _get_target_ip(self, initiator):
target_ips = []
for ini in self.iscsi_info:
if ini['Name'] == initiator:
if ini.get('TargetIP'):
target_ips.append(ini.get('TargetIP'))
        # If no target IP is specified for the initiator, use the default IP.
if not target_ips:
default_target_ips = self.iscsi_default_target_ip
if default_target_ips:
target_ips.append(default_target_ips[0])
else:
msg = (_(
'get_iscsi_params: Failed to get target IP '
'for initiator %(ini)s, please check config file.')
% {'ini': initiator})
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return target_ips
def _get_tgt_port_ip_from_rest(self):
url = "/iscsi_tgt_port"
result = self.call(url, None, "GET")
info_list = []
target_ips = []
if result['error']['code'] != 0:
LOG.warning(_LW("Can't find target port info from rest."))
return target_ips
elif not result['data']:
msg = (_(
"Can't find valid IP from rest, please check it on storage."))
LOG.error(msg)
            raise exception.VolumeBackendAPIException(data=msg)
if 'data' in result:
for item in result['data']:
info_list.append(item['ID'])
if not info_list:
LOG.warning(_LW("Can't find target port info from rest."))
return target_ips
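        # Each ID is assumed to have the form "<prefix>+<iqn>,<rest>"; the
        # target IP is the last ':'-separated field of the iqn.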
for info in info_list:
split_list = info.split(",")
info_before = split_list[0]
iqn_info = info_before.split("+")
target_iqn = iqn_info[1]
ip_info = target_iqn.split(":")
target_ip = ip_info[-1]
target_ips.append(target_ip)
return target_ips
def _get_tgt_iqn_from_rest(self, target_ip):
url = "/iscsi_tgt_port"
result = self.call(url, None, "GET")
target_iqn = None
if result['error']['code'] != 0:
LOG.warning(_LW("Can't find target iqn from rest."))
return target_iqn
ip_pattern = re.compile(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}')
if 'data' in result:
for item in result['data']:
ips = re.findall(ip_pattern, item['ID'])
for ip in ips:
if target_ip == ip:
target_iqn = item['ID']
break
if not target_iqn:
LOG.warning(_LW("Can't find target iqn from rest."))
return target_iqn
split_list = target_iqn.split(",")
target_iqn_before = split_list[0]
split_list_new = target_iqn_before.split("+")
target_iqn = split_list_new[1]
return target_iqn
def create_qos_policy(self, qos, lun_id):
# Get local time.
localtime = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
# Package QoS name.
qos_name = constants.QOS_NAME_PREFIX + lun_id + '_' + localtime
data = {"TYPE": "230",
"NAME": qos_name,
"LUNLIST": ["%s" % lun_id],
"CLASSTYPE": "1",
"SCHEDULEPOLICY": "2",
"SCHEDULESTARTTIME": "1410969600",
"STARTTIME": "08:00",
"DURATION": "86400",
"CYCLESET": "[1,2,3,4,5,6,0]",
}
data.update(qos)
url = "/ioclass/"
result = self.call(url, data)
self._assert_rest_result(result, _('Create QoS policy error.'))
return result['data']['ID']
def delete_qos_policy(self, qos_id):
"""Delete a QoS policy."""
url = "/ioclass/" + qos_id
data = {"TYPE": "230", "ID": qos_id}
result = self.call(url, data, 'DELETE')
self._assert_rest_result(result, _('Delete QoS policy error.'))
def activate_deactivate_qos(self, qos_id, enablestatus):
"""Activate or deactivate QoS.
enablestatus: true (activate)
        enablestatus: false (deactivate)
"""
url = "/ioclass/active/" + qos_id
data = {"TYPE": 230,
"ID": qos_id,
"ENABLESTATUS": enablestatus}
result = self.call(url, data, "PUT")
self._assert_rest_result(
result, _('Activate or deactivate QoS error.'))
def get_qos_info(self, qos_id):
"""Get QoS information."""
url = "/ioclass/" + qos_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get QoS information error.'))
return result['data']
    def get_lun_list_in_qos(self, qos_id, qos_info):
        """Get the lun list in QoS."""
        lun_list = []
        # LUNLIST arrives as a string like '["1","2"]'; strip the brackets,
        # then the quotes around each id.
        lun_string = qos_info['LUNLIST'][1:-1]
        for lun in lun_string.split(","):
            lun_list.append(lun[1:-1])
        return lun_list
def remove_lun_from_qos(self, lun_id, lun_list, qos_id):
"""Remove lun from QoS."""
lun_list = [i for i in lun_list if i != lun_id]
url = "/ioclass/" + qos_id
data = {"LUNLIST": lun_list,
"TYPE": 230,
"ID": qos_id}
result = self.call(url, data, "PUT")
msg = _('Remove lun from QoS error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
def change_lun_priority(self, lun_id):
"""Change lun priority to high."""
url = "/lun/" + lun_id
data = {"TYPE": "11",
"ID": lun_id,
"IOPRIORITY": "3"}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Change lun priority error.'))
def change_lun_smarttier(self, lunid, smarttier_policy):
"""Change lun smarttier policy."""
url = "/lun/" + lunid
data = {"TYPE": "11",
"ID": lunid,
"DATATRANSFERPOLICY": smarttier_policy}
result = self.call(url, data, "PUT")
self._assert_rest_result(
result, _('Change lun smarttier policy error.'))
def get_qosid_by_lunid(self, lun_id):
"""Get QoS id by lun id."""
url = "/lun/" + lun_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get QoS id by lun id error.'))
return result['data']['IOCLASSID']
def get_lungroupids_by_lunid(self, lun_id, lun_type=constants.LUN_TYPE):
"""Get lungroup ids by lun id."""
url = ("/lungroup/associate?TYPE=256"
"&ASSOCIATEOBJTYPE=%s&ASSOCIATEOBJID=%s" % (lun_type, lun_id))
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get lungroup id by lun id error.'))
lungroup_ids = []
if 'data' in result:
for item in result['data']:
lungroup_ids.append(item['ID'])
return lungroup_ids
    def get_lun_info(self, lun_id, lun_type=constants.LUN_TYPE):
cmd_type = 'lun' if lun_type == constants.LUN_TYPE else 'snapshot'
url = ("/%s/%s" % (cmd_type, lun_id))
result = self.call(url, None, "GET")
msg = _('Get volume error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def get_snapshot_info(self, snapshot_id):
url = "/snapshot/" + snapshot_id
result = self.call(url, None, "GET")
msg = _('Get snapshot error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def extend_lun(self, lun_id, new_volume_size):
url = "/lun/expand"
data = {"TYPE": 11, "ID": lun_id,
"CAPACITY": new_volume_size}
result = self.call(url, data, 'PUT')
msg = _('Extend volume error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def create_lun_migration(self, src_id, dst_id, speed=2):
url = "/LUN_MIGRATION"
data = {"TYPE": '253',
"PARENTID": src_id,
"TARGETLUNID": dst_id,
"SPEED": speed,
"WORKMODE": 0}
result = self.call(url, data, "POST")
msg = _('Create lun migration error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
def get_lun_migration_task(self):
url = '/LUN_MIGRATION?range=[0-256]'
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get lun migration task error.'))
return result
def delete_lun_migration(self, src_id, dst_id):
url = '/LUN_MIGRATION/' + src_id
result = self.call(url, None, "DELETE")
msg = _('Delete lun migration error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
def get_partition_id_by_name(self, name):
url = "/cachepartition"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get partition by name error.'))
return self._get_id_from_result(result, name, 'NAME')
def get_partition_info_by_id(self, partition_id):
url = '/cachepartition/' + partition_id
result = self.call(url, None, "GET")
self._assert_rest_result(result,
_('Get partition by partition id error.'))
return result['data']
def add_lun_to_partition(self, lun_id, partition_id):
url = "/lun/associate/cachepartition"
data = {"ID": partition_id,
"ASSOCIATEOBJTYPE": 11,
"ASSOCIATEOBJID": lun_id}
result = self.call(url, data, "POST")
self._assert_rest_result(result, _('Add lun to partition error.'))
def remove_lun_from_partition(self, lun_id, partition_id):
url = ('/lun/associate/cachepartition?ID=' + partition_id
+ '&ASSOCIATEOBJTYPE=11&ASSOCIATEOBJID=' + lun_id)
result = self.call(url, None, "DELETE")
self._assert_rest_result(result, _('Remove lun from partition error.'))
def get_cache_id_by_name(self, name):
url = "/SMARTCACHEPARTITION"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get cache by name error.'))
return self._get_id_from_result(result, name, 'NAME')
def get_cache_info_by_id(self, cacheid):
url = "/SMARTCACHEPARTITION/" + cacheid
data = {"TYPE": "273",
"ID": cacheid}
result = self.call(url, data, "GET")
self._assert_rest_result(
result, _('Get smartcache by cache id error.'))
return result['data']
def remove_lun_from_cache(self, lun_id, cache_id):
url = "/SMARTCACHEPARTITION/REMOVE_ASSOCIATE"
data = {"ID": cache_id,
"ASSOCIATEOBJTYPE": 11,
"ASSOCIATEOBJID": lun_id,
"TYPE": 273}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Remove lun from cache error.'))
def get_qos(self):
url = "/ioclass"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get QoS information error.'))
return result
def find_available_qos(self, qos):
""""Find available QoS on the array."""
qos_id = None
lun_list = []
extra_qos = [i for i in constants.EXTRA_QOS_KEYS if i not in qos]
result = self.get_qos()
if 'data' in result:
for items in result['data']:
qos_flag = 0
extra_flag = False
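                # extra_flag marks an ioclass that carries settings beyond
                # the requested spec; such a policy must not be reused.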
if 'LATENCY' not in qos and items['LATENCY'] != '0':
extra_flag = True
else:
for item in items:
if item in extra_qos:
extra_flag = True
break
for key in qos:
if key not in items:
break
elif qos[key] != items[key]:
break
qos_flag = qos_flag + 1
lun_num = len(items['LUNLIST'].split(","))
qos_name = items['NAME']
qos_status = items['RUNNINGSTATUS']
                # Reuse this QoS only if it holds fewer than
                # MAX_LUN_NUM_IN_QOS LUNs, was created by OpenStack (name
                # prefix), is active, and contains no filesystem; otherwise
                # no more LUNs can be added to it.
if (qos_flag == len(qos)
and not extra_flag
and lun_num < constants.MAX_LUN_NUM_IN_QOS
and qos_name.startswith(constants.QOS_NAME_PREFIX)
and qos_status == constants.STATUS_QOS_ACTIVE
and items['FSLIST'] == '[""]'):
qos_id = items['ID']
lun_list = items['LUNLIST']
break
return (qos_id, lun_list)
def add_lun_to_qos(self, qos_id, lun_id, lun_list):
"""Add lun to QoS."""
url = "/ioclass/" + qos_id
new_lun_list = []
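        # lun_list is the raw '["1","2"]'-style string from the array;
        # rebuild the id list without brackets/quotes, dropping any stale
        # entry for this LUN before appending it.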
lun_list_string = lun_list[1:-1]
for lun_string in lun_list_string.split(","):
tmp_lun_id = lun_string[1:-1]
if '' != tmp_lun_id and tmp_lun_id != lun_id:
new_lun_list.append(tmp_lun_id)
new_lun_list.append(lun_id)
data = {"LUNLIST": new_lun_list,
"TYPE": 230,
"ID": qos_id}
result = self.call(url, data, "PUT")
msg = _('Associate lun to QoS error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
def add_lun_to_cache(self, lun_id, cache_id):
url = "/SMARTCACHEPARTITION/CREATE_ASSOCIATE"
data = {"ID": cache_id,
"ASSOCIATEOBJTYPE": 11,
"ASSOCIATEOBJID": lun_id,
"TYPE": 273}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Add lun to cache error.'))
def get_array_info(self):
url = "/system/"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get array info error.'))
return result.get('data', None)
def find_array_version(self):
info = self.get_array_info()
return info.get('PRODUCTVERSION', None)
def remove_host(self, host_id):
url = "/host/%s" % host_id
result = self.call(url, None, "DELETE")
self._assert_rest_result(result, _('Remove host from array error.'))
def delete_hostgroup(self, hostgroup_id):
url = "/hostgroup/%s" % hostgroup_id
result = self.call(url, None, "DELETE")
self._assert_rest_result(result, _('Delete hostgroup error.'))
def remove_host_from_hostgroup(self, hostgroup_id, host_id):
        url = ("/host/associate?TYPE=14&ID=%s"
               "&ASSOCIATEOBJTYPE=21&ASSOCIATEOBJID=%s"
               % (hostgroup_id, host_id))
result = self.call(url, None, "DELETE")
self._assert_rest_result(result,
_('Remove host from hostgroup error.'))
def remove_iscsi_from_host(self, initiator):
url = "/iscsi_initiator/remove_iscsi_from_host"
data = {"TYPE": '222',
"ID": initiator}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Remove iscsi from host error.'))
def get_host_online_fc_initiators(self, host_id):
url = "/fc_initiator?PARENTTYPE=21&PARENTID=%s" % host_id
result = self.call(url, None, "GET")
initiators = []
if 'data' in result:
for item in result['data']:
if (('PARENTID' in item) and (item['PARENTID'] == host_id)
and (item['RUNNINGSTATUS'] == constants.FC_INIT_ONLINE)):
initiators.append(item['ID'])
return initiators
def get_host_fc_initiators(self, host_id):
url = "/fc_initiator?PARENTTYPE=21&PARENTID=%s" % host_id
result = self.call(url, None, "GET")
initiators = []
if 'data' in result:
for item in result['data']:
if (('PARENTID' in item) and (item['PARENTID'] == host_id)):
initiators.append(item['ID'])
return initiators
def get_host_iscsi_initiators(self, host_id):
url = "/iscsi_initiator?PARENTTYPE=21&PARENTID=%s" % host_id
result = self.call(url, None, "GET")
initiators = []
if 'data' in result:
for item in result['data']:
if (('PARENTID' in item) and (item['PARENTID'] == host_id)):
initiators.append(item['ID'])
return initiators
def rename_lun(self, lun_id, new_name, description=None):
url = "/lun/" + lun_id
data = {"NAME": new_name}
if description:
data.update({"DESCRIPTION": description})
result = self.call(url, data, "PUT")
msg = _('Rename lun on array error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
def rename_snapshot(self, snapshot_id, new_name, description=None):
url = "/snapshot/" + snapshot_id
data = {"NAME": new_name}
if description:
data.update({"DESCRIPTION": description})
result = self.call(url, data, "PUT")
msg = _('Rename snapshot on array error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
def is_fc_initiator_associated_to_host(self, ininame):
"""Check whether the initiator is associated to the host."""
url = '/fc_initiator?range=[0-256]'
result = self.call(url, None, "GET")
self._assert_rest_result(result,
'Check initiator associated to host error.')
if "data" in result:
for item in result['data']:
if item['ID'] == ininame and item['ISFREE'] != "true":
return True
return False
def remove_fc_from_host(self, initiator):
url = '/fc_initiator/remove_fc_from_host'
data = {"TYPE": '223',
"ID": initiator}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Remove fc from host error.'))
def check_fc_initiators_exist_in_host(self, host_id):
url = "/fc_initiator?range=[0-256]&PARENTID=%s" % host_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get host initiators info failed.'))
if 'data' in result:
return True
return False
def _fc_initiator_is_added_to_array(self, ininame):
"""Check whether the fc initiator is already added on the array."""
url = "/fc_initiator/" + ininame
result = self.call(url, None, "GET")
error_code = result['error']['code']
if error_code != 0:
return False
return True
def _add_fc_initiator_to_array(self, ininame):
"""Add a fc initiator to storage device."""
url = '/fc_initiator/'
data = {"TYPE": '223',
"ID": ininame}
result = self.call(url, data)
self._assert_rest_result(result, _('Add fc initiator to array error.'))
def ensure_fc_initiator_added(self, initiator_name, host_id):
added = self._fc_initiator_is_added_to_array(initiator_name)
if not added:
self._add_fc_initiator_to_array(initiator_name)
        # Just add it; no need to check whether it has already been added.
self.add_fc_port_to_host(host_id, initiator_name)
def get_fc_ports_on_array(self):
url = '/fc_port'
result = self.call(url, None, "GET")
msg = _('Get FC ports from array error.')
self._assert_rest_result(result, msg)
return result['data']
def get_fc_ports_from_contr(self, contr):
port_list_from_contr = []
data = self.get_fc_ports_on_array()
for item in data:
location = item['PARENTID'].split('.')
if (location[0][1] == contr) and (item['RUNNINGSTATUS'] ==
constants.FC_PORT_CONNECTED):
port_list_from_contr.append(item['WWN'])
return port_list_from_contr
def get_hyper_domain_id(self, domain_name):
url = "/HyperMetroDomain?range=[0-32]"
result = self.call(url, None, "GET")
domain_id = None
if "data" in result:
for item in result['data']:
if domain_name == item['NAME']:
domain_id = item['ID']
break
msg = _('get_hyper_domain_id error.')
self._assert_rest_result(result, msg)
return domain_id
def create_hypermetro(self, hcp_param):
url = "/HyperMetroPair"
result = self.call(url, hcp_param, "POST")
msg = _('create_hypermetro_pair error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def delete_hypermetro(self, metro_id):
url = "/HyperMetroPair/" + metro_id
result = self.call(url, None, "DELETE")
msg = _('delete_hypermetro error.')
self._assert_rest_result(result, msg)
def sync_hypermetro(self, metro_id):
url = "/HyperMetroPair/synchronize_hcpair"
data = {"ID": metro_id,
"TYPE": "15361"}
result = self.call(url, data, "PUT")
msg = _('sync_hypermetro error.')
self._assert_rest_result(result, msg)
def stop_hypermetro(self, metro_id):
url = '/HyperMetroPair/disable_hcpair'
data = {"ID": metro_id,
"TYPE": "15361"}
result = self.call(url, data, "PUT")
msg = _('stop_hypermetro error.')
self._assert_rest_result(result, msg)
def get_hypermetro_by_id(self, metro_id):
url = "/HyperMetroPair/" + metro_id
result = self.call(url, None, "GET")
msg = _('get_hypermetro_by_id error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def check_hypermetro_exist(self, metro_id):
url = "/HyperMetroPair/" + metro_id
result = self.call(url, None, "GET")
error_code = result['error']['code']
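        # A connect or unauthorized error usually means the session has
        # expired; log in again and retry the query once.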
if (error_code == constants.ERROR_CONNECT_TO_SERVER
or error_code == constants.ERROR_UNAUTHORIZED_TO_SERVER):
LOG.error(_LE("Can not open the recent url, login again."))
self.login()
result = self.call(url, None, "GET")
error_code = result['error']['code']
if (error_code == constants.ERROR_CONNECT_TO_SERVER
or error_code == constants.ERROR_UNAUTHORIZED_TO_SERVER):
msg = _("check_hypermetro_exist error.")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
if error_code != 0:
return False
return True
def change_hostlun_id(self, map_info, hostlun_id):
url = "/mappingview"
view_id = six.text_type(map_info['view_id'])
lun_id = six.text_type(map_info['lun_id'])
hostlun_id = six.text_type(hostlun_id)
data = {"TYPE": 245,
"ID": view_id,
"ASSOCIATEOBJTYPE": 11,
"ASSOCIATEOBJID": lun_id,
"ASSOCIATEMETADATA": [{"LUNID": lun_id,
"hostLUNId": hostlun_id}]}
result = self.call(url, data, "PUT")
        msg = _('Change hostlun id error.')
self._assert_rest_result(result, msg)
def find_view_by_id(self, view_id):
url = "/MAPPINGVIEW/" + view_id
result = self.call(url, None, "GET")
        msg = _('Find view by id error.')
self._assert_rest_result(result, msg)
if 'data' in result:
return result["data"]["AVAILABLEHOSTLUNIDLIST"]
def get_metrogroup_by_name(self, name):
url = "/HyperMetro_ConsistentGroup?type='15364'"
result = self.call(url, None, "GET")
msg = _('Get hypermetro group by name error.')
self._assert_rest_result(result, msg)
return self._get_id_from_result(result, name, 'NAME')
    def get_metrogroup_by_id(self, metrogroup_id):
        url = "/HyperMetro_ConsistentGroup/" + metrogroup_id
result = self.call(url, None, "GET")
msg = _('Get hypermetro group by id error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def create_metrogroup(self, name, description, domain_id):
url = "/HyperMetro_ConsistentGroup"
data = {"NAME": name,
"TYPE": "15364",
"DESCRIPTION": description,
"RECOVERYPOLICY": "1",
"SPEED": "2",
"PRIORITYSTATIONTYPE": "0",
"DOMAINID": domain_id}
result = self.call(url, data, "POST")
msg = _('create hypermetro group error.')
self._assert_rest_result(result, msg)
if 'data' in result:
return result["data"]["ID"]
def delete_metrogroup(self, metrogroup_id):
url = "/HyperMetro_ConsistentGroup/" + metrogroup_id
result = self.call(url, None, "DELETE")
msg = _('Delete hypermetro group error.')
self._assert_rest_result(result, msg)
def get_metrogroup(self, metrogroup_id):
url = "/HyperMetro_ConsistentGroup/" + metrogroup_id
result = self.call(url, None, "GET")
msg = _('Get hypermetro group error.')
self._assert_rest_result(result, msg)
def stop_metrogroup(self, metrogroup_id):
url = "/HyperMetro_ConsistentGroup/stop"
data = {"TYPE": "15364",
"ID": metrogroup_id
}
result = self.call(url, data, "PUT")
msg = _('stop hypermetro group error.')
self._assert_rest_result(result, msg)
def sync_metrogroup(self, metrogroup_id):
url = "/HyperMetro_ConsistentGroup/sync"
data = {"TYPE": "15364",
"ID": metrogroup_id
}
result = self.call(url, data, "PUT")
msg = _('sync hypermetro group error.')
self._assert_rest_result(result, msg)
def add_metro_to_metrogroup(self, metrogroup_id, metro_id):
url = "/hyperMetro/associate/pair"
data = {"TYPE": "15364",
"ID": metrogroup_id,
"ASSOCIATEOBJTYPE": "15361",
"ASSOCIATEOBJID": metro_id}
result = self.call(url, data, "POST")
msg = _('Add hypermetro to metrogroup error.')
self._assert_rest_result(result, msg)
def remove_metro_from_metrogroup(self, metrogroup_id, metro_id):
url = "/hyperMetro/associate/pair"
data = {"TYPE": "15364",
"ID": metrogroup_id,
"ASSOCIATEOBJTYPE": "15361",
"ASSOCIATEOBJID": metro_id}
result = self.call(url, data, "DELETE")
msg = _('Delete hypermetro from metrogroup error.')
self._assert_rest_result(result, msg)
def get_hypermetro_pairs(self):
url = "/HyperMetroPair?range=[0-4095]"
result = self.call(url, None, "GET")
msg = _('Get HyperMetroPair error.')
self._assert_rest_result(result, msg)
return result.get('data', [])
def get_split_mirrors(self):
url = "/splitmirror?range=[0-8191]"
result = self.call(url, None, "GET")
if result['error']['code'] == constants.NO_SPLITMIRROR_LICENSE:
msg = _('License is unavailable.')
raise exception.VolumeBackendAPIException(data=msg)
msg = _('Get SplitMirror error.')
self._assert_rest_result(result, msg)
return result.get('data', [])
    def get_target_luns(self, parent_id):
        url = ("/SPLITMIRRORTARGETLUN/targetLUN?TYPE=228&PARENTID=%s&"
               "PARENTTYPE=220") % parent_id
result = self.call(url, None, "GET")
msg = _('Get target LUN of SplitMirror error.')
self._assert_rest_result(result, msg)
target_luns = []
for item in result.get('data', []):
target_luns.append(item.get('ID'))
return target_luns
def get_migration_task(self):
url = "/LUN_MIGRATION?range=[0-256]"
result = self.call(url, None, "GET")
if result['error']['code'] == constants.NO_MIGRATION_LICENSE:
msg = _('License is unavailable.')
raise exception.VolumeBackendAPIException(data=msg)
msg = _('Get migration task error.')
self._assert_rest_result(result, msg)
return result.get('data', [])
def is_lun_in_mirror(self, lun_id):
url = "/lun?range=[0-65535]"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get volume by name error.'))
for item in result.get('data', []):
rss_obj = item.get('HASRSSOBJECT')
if rss_obj:
rss_obj = ast.literal_eval(rss_obj)
if (item.get('ID') == lun_id and
rss_obj.get('LUNMirror') == 'TRUE'):
return True
return False
def get_portgs_by_portid(self, port_id):
portgs = []
if not port_id:
return portgs
url = ("/portgroup/associate/fc_port?TYPE=257&ASSOCIATEOBJTYPE=212&"
"ASSOCIATEOBJID=%s") % port_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get port groups by port error.'))
for item in result.get("data", []):
portgs.append(item["ID"])
return portgs
def get_views_by_portg(self, portg_id):
views = []
if not portg_id:
return views
url = ("/mappingview/associate/portgroup?TYPE=245&ASSOCIATEOBJTYPE="
"257&ASSOCIATEOBJID=%s") % portg_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get views by port group error.'))
for item in result.get("data", []):
views.append(item["ID"])
return views
def get_lungroup_by_view(self, view_id):
if not view_id:
return None
url = ("/lungroup/associate/mappingview?TYPE=256&ASSOCIATEOBJTYPE="
"245&ASSOCIATEOBJID=%s") % view_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get LUN group by view error.'))
for item in result.get("data", []):
# In fact, there is just one lungroup in a view.
return item["ID"]
def get_portgroup_by_view(self, view_id):
if not view_id:
return None
url = ("/portgroup/associate/mappingview?TYPE=257&ASSOCIATEOBJTYPE="
"245&ASSOCIATEOBJID=%s") % view_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get port group by view error.'))
return result.get("data", [])
def get_fc_ports_by_portgroup(self, portg_id):
ports = {}
if not portg_id:
return ports
url = ("/fc_port/associate/portgroup?TYPE=212&ASSOCIATEOBJTYPE=257"
"&ASSOCIATEOBJID=%s") % portg_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get FC ports by port group '
'error.'))
for item in result.get("data", []):
ports[item["WWN"]] = item["ID"]
return ports
def create_portg(self, portg_name, description=""):
url = "/PortGroup"
data = {"DESCRIPTION": description,
"NAME": portg_name,
"TYPE": 257}
result = self.call(url, data, "POST")
self._assert_rest_result(result, _('Create port group error.'))
if "data" in result:
return result['data']['ID']
def add_port_to_portg(self, portg_id, port_id):
url = "/port/associate/portgroup"
data = {"ASSOCIATEOBJID": port_id,
"ASSOCIATEOBJTYPE": 212,
"ID": portg_id,
"TYPE": 257}
result = self.call(url, data, "POST")
self._assert_rest_result(result, _('Add port to port group error.'))
def delete_portgroup(self, portg_id):
url = "/PortGroup/%s" % portg_id
result = self.call(url, None, "DELETE")
self._assert_rest_result(result, _('Delete port group error.'))
def remove_port_from_portgroup(self, portg_id, port_id):
url = (("/port/associate/portgroup?ID=%(portg_id)s&TYPE=257&"
"ASSOCIATEOBJTYPE=212&ASSOCIATEOBJID=%(port_id)s")
% {"portg_id": portg_id, "port_id": port_id})
result = self.call(url, None, "DELETE")
self._assert_rest_result(result, _('Remove port from port group'
' error.'))
def get_all_engines(self):
url = "/storageengine"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get engines error.'))
return result.get("data", [])
def get_portg_info(self, portg_id):
url = "/portgroup/%s" % portg_id
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get port group error.'))
return result.get("data", {})
def append_portg_desc(self, portg_id, description):
portg_info = self.get_portg_info(portg_id)
new_description = portg_info.get('DESCRIPTION') + ',' + description
url = "/portgroup/%s" % portg_id
data = {"DESCRIPTION": new_description,
"ID": portg_id,
"TYPE": 257}
result = self.call(url, data, "PUT")
self._assert_rest_result(result, _('Append port group description'
' error.'))
def get_ports_by_portg(self, portg_id):
wwns = []
url = ("/fc_port/associate?TYPE=213&ASSOCIATEOBJTYPE=257"
"&ASSOCIATEOBJID=%s" % portg_id)
result = self.call(url, None, "GET")
msg = _('Get ports by port group error.')
self._assert_rest_result(result, msg)
for item in result.get('data', []):
wwns.append(item['WWN'])
return wwns
def get_remote_devices(self):
url = "/remote_device"
result = self.call(url, None, "GET")
self._assert_rest_result(result, _('Get remote devices error.'))
return result.get('data', [])
def create_pair(self, pair_params):
url = "/REPLICATIONPAIR"
result = self.call(url, pair_params, "POST")
msg = _('Create replication error.')
self._assert_rest_result(result, msg)
self._assert_data_in_result(result, msg)
return result['data']
def get_pair_by_id(self, pair_id):
url = "/REPLICATIONPAIR/" + pair_id
result = self.call(url, None, "GET")
msg = _('Get pair failed.')
self._assert_rest_result(result, msg)
return result.get('data', {})
def switch_pair(self, pair_id):
url = '/REPLICATIONPAIR/switch'
data = {"ID": pair_id,
"TYPE": "263"}
result = self.call(url, data, "PUT")
msg = _('Switch over pair error.')
self._assert_rest_result(result, msg)
def split_pair(self, pair_id):
url = '/REPLICATIONPAIR/split'
data = {"ID": pair_id,
"TYPE": "263"}
result = self.call(url, data, "PUT")
msg = _('Split pair error.')
self._assert_rest_result(result, msg)
def delete_pair(self, pair_id, force=False):
url = "/REPLICATIONPAIR/" + pair_id
data = None
if force:
data = {"ISLOCALDELETE": force}
result = self.call(url, data, "DELETE")
msg = _('delete_replication error.')
self._assert_rest_result(result, msg)
def sync_pair(self, pair_id):
url = "/REPLICATIONPAIR/sync"
data = {"ID": pair_id,
"TYPE": "263"}
result = self.call(url, data, "PUT")
msg = _('Sync pair error.')
self._assert_rest_result(result, msg)
def check_pair_exist(self, pair_id):
url = "/REPLICATIONPAIR/" + pair_id
result = self.call(url, None, "GET")
return result['error']['code'] == 0
def set_pair_second_access(self, pair_id, access):
url = "/REPLICATIONPAIR/" + pair_id
data = {"ID": pair_id,
"SECRESACCESS": access}
result = self.call(url, data, "PUT")
msg = _('Set pair secondary access error.')
self._assert_rest_result(result, msg)
def is_host_associated_to_hostgroup(self, host_id):
url = "/host/" + host_id
result = self.call(url, None, "GET")
data = result.get('data')
if data is not None:
return data.get('ISADD2HOSTGROUP') == 'true'
return False
| apache-2.0 |
AeanSR/simc | dbc_extract3/dbc/config.py | 6 | 2934 | import configparser, sys, importlib, logging, types, os
import dbc.db
class Config:
def __init__(self, options):
self.options = options
self.data_store = dbc.db.DataStore(self.options)
self.config = {}
self.base_module_path = None
self.base_module = None
self.base_output_path = None
def output_file(self, file_name):
return os.path.join(self.base_output_path, file_name)
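    # Expected configuration layout (illustrative; section and generator
    # names are examples, not taken from a real config):
    #
    #   [general]
    #   module_base = dbc.generator
    #   output_base = build/
    #
    #   [SpellData.inc]
    #   generators = SpellDataGenerator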
def open(self):
if len(self.options.args) < 1:
return False
config = configparser.ConfigParser()
config.read(self.options.args[0])
for section in config.sections():
if section == 'general':
self.base_module_path = config.get('general', 'module_base')
self.base_output_path = config.get('general', 'output_base')
else:
if section not in self.config:
self.config[section] = { 'generators': [], 'objects': [] }
for i in config.get(section, 'generators').split():
self.config[section]['generators'].append(i)
if not self.base_module_path:
logging.error('No "module_base" defined in general section')
return False
if not self.base_output_path:
logging.error('No "output_base" defined in general section')
return False
try:
self.base_module = importlib.import_module(self.base_module_path)
        except Exception as exc:
            logging.error('Unable to import %s: %s', self.base_module_path, exc)
return False
return True
def generate(self):
for section, config in self.config.items():
for generator in config['generators']:
try:
obj = getattr(self.base_module, generator)
except AttributeError:
logging.error('Unable to instantiate generator %s', generator)
return False
config['objects'].append(obj(self.options, self.data_store))
for section, config in self.config.items():
for generator in config['objects']:
logging.info('Initializing %s ...', generator.__class__.__name__)
if not generator.initialize():
logging.error('Unable to initialize %s, exiting ...', generator.__class__.__name__)
return False
for section, config in self.config.items():
output_file = self.output_file(section)
for generator in config['objects']:
if not generator.set_output(output_file, generator != config['objects'][0]):
return False
ids = generator.filter()
logging.info('Outputting %s to %s ...', generator.__class__.__name__, output_file)
generator.generate(ids)
generator.close()
| gpl-3.0 |
TheMOOCAgency/edx-platform | openedx/core/djangolib/testing/utils.py | 7 | 5318 | """
Utility classes for testing django applications.
:py:class:`CacheIsolationMixin`
    A mixin helping to write tests which are isolated from cached data.
:py:class:`CacheIsolationTestCase`
    A TestCase baseclass that has per-test isolated caches.
"""
import copy
import crum
from django import db
from django.contrib.auth.models import AnonymousUser
from django.core.cache import caches
from django.test import RequestFactory, TestCase, override_settings
from django.conf import settings
from django.contrib import sites
from nose.plugins import Plugin
from request_cache.middleware import RequestCache
class CacheIsolationMixin(object):
"""
    This class can be used to enable specific django caches for
    the specific TestCase that it's mixed into.
Usage:
Use the ENABLED_CACHES to list the names of caches that should
be enabled in the context of this TestCase. These caches will
use a loc_mem_cache with the default settings.
Set the class variable CACHES to explicitly specify the cache settings
that should be overridden. This class will insert those values into
django.conf.settings, and will reset all named caches before each
test.
If both CACHES and ENABLED_CACHES are not None, raises an error.
"""
CACHES = None
ENABLED_CACHES = None
__settings_overrides = []
__old_settings = []
@classmethod
def start_cache_isolation(cls):
"""
Start cache isolation by overriding the settings.CACHES and
flushing the cache.
"""
cache_settings = None
if cls.CACHES is not None and cls.ENABLED_CACHES is not None:
raise Exception(
"Use either CACHES or ENABLED_CACHES, but not both"
)
if cls.CACHES is not None:
cache_settings = cls.CACHES
elif cls.ENABLED_CACHES is not None:
cache_settings = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
cache_settings.update({
cache_name: {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': cache_name,
'KEY_FUNCTION': 'util.memcache.safe_key',
} for cache_name in cls.ENABLED_CACHES
})
if cache_settings is None:
return
cls.__old_settings.append(copy.deepcopy(settings.CACHES))
override = override_settings(CACHES=cache_settings)
override.__enter__()
cls.__settings_overrides.append(override)
assert settings.CACHES == cache_settings
# Start with empty caches
cls.clear_caches()
@classmethod
def end_cache_isolation(cls):
"""
End cache isolation by flushing the cache and then returning
settings.CACHES to its original state.
"""
# Make sure that cache contents don't leak out after the isolation is ended
cls.clear_caches()
if cls.__settings_overrides:
cls.__settings_overrides.pop().__exit__(None, None, None)
assert settings.CACHES == cls.__old_settings.pop()
@classmethod
def clear_caches(cls):
"""
Clear all of the caches defined in settings.CACHES.
"""
# N.B. As of 2016-04-20, Django won't return any caches
# from django.core.cache.caches.all() that haven't been
# accessed using caches[name] previously, so we loop
# over our list of overridden caches, instead.
for cache in settings.CACHES:
caches[cache].clear()
# The sites framework caches in a module-level dictionary.
# Clear that.
sites.models.SITE_CACHE.clear()
RequestCache.clear_request_cache()
class CacheIsolationTestCase(CacheIsolationMixin, TestCase):
"""
A TestCase that isolates caches (as described in
:py:class:`CacheIsolationMixin`) at class setup, and flushes the cache
between every test.
"""
@classmethod
def setUpClass(cls):
super(CacheIsolationTestCase, cls).setUpClass()
cls.start_cache_isolation()
@classmethod
def tearDownClass(cls):
cls.end_cache_isolation()
super(CacheIsolationTestCase, cls).tearDownClass()
def setUp(self):
super(CacheIsolationTestCase, self).setUp()
self.clear_caches()
self.addCleanup(self.clear_caches)
class NoseDatabaseIsolation(Plugin):
"""
nosetest plugin that resets django databases before any tests begin.
Used to make sure that tests running in multi processes aren't sharing
a database connection.
"""
name = "database-isolation"
def begin(self):
"""
Before any tests start, reset all django database connections.
"""
for db_ in db.connections.all():
db_.close()
def get_mock_request(user=None):
"""
Create a request object for the user, if specified.
"""
request = RequestFactory().get('/')
if user is not None:
request.user = user
else:
request.user = AnonymousUser()
request.is_secure = lambda: True
request.get_host = lambda: "edx.org"
crum.set_current_request(request)
return request
| agpl-3.0 |
prarthitm/edxplatform | common/djangoapps/student/tests/factories.py | 2 | 5162 | """Provides factories for student models."""
import random
from student.models import (User, UserProfile, Registration,
CourseEnrollmentAllowed, CourseEnrollment,
PendingEmailChange, UserStanding,
CourseAccessRole)
from course_modes.models import CourseMode
from django.contrib.auth.models import AnonymousUser, Group, Permission
from django.contrib.contenttypes.models import ContentType
from datetime import datetime
import factory
from factory import lazy_attribute
from factory.django import DjangoModelFactory
from uuid import uuid4
from pytz import UTC
from opaque_keys.edx.locations import SlashSeparatedCourseKey
# Factories are self documenting
# pylint: disable=missing-docstring
TEST_PASSWORD = 'test'
class GroupFactory(DjangoModelFactory):
class Meta(object):
model = Group
django_get_or_create = ('name', )
name = factory.Sequence(u'group{0}'.format)
class UserStandingFactory(DjangoModelFactory):
class Meta(object):
model = UserStanding
user = None
account_status = None
changed_by = None
class UserProfileFactory(DjangoModelFactory):
class Meta(object):
model = UserProfile
django_get_or_create = ('user', )
user = None
name = factory.LazyAttribute(u'{0.user.first_name} {0.user.last_name}'.format)
level_of_education = None
gender = u'm'
mailing_address = None
goals = u'Learn a lot'
allow_certificate = True
class CourseModeFactory(DjangoModelFactory):
class Meta(object):
model = CourseMode
course_id = None
mode_display_name = CourseMode.DEFAULT_MODE.name
mode_slug = CourseMode.DEFAULT_MODE_SLUG
suggested_prices = ''
currency = 'usd'
expiration_datetime = None
@lazy_attribute
def min_price(self):
if CourseMode.is_verified_slug(self.mode_slug):
return random.randint(1, 100)
return 0
class RegistrationFactory(DjangoModelFactory):
class Meta(object):
model = Registration
user = None
activation_key = uuid4().hex.decode('ascii')
class UserFactory(DjangoModelFactory):
class Meta(object):
model = User
django_get_or_create = ('email', 'username')
username = factory.Sequence(u'robot{0}'.format)
email = factory.Sequence(u'robot+test+{0}@edx.org'.format)
password = factory.PostGenerationMethodCall('set_password', 'test')
first_name = factory.Sequence(u'Robot{0}'.format)
last_name = 'Test'
is_staff = False
is_active = True
is_superuser = False
last_login = datetime(2012, 1, 1, tzinfo=UTC)
date_joined = datetime(2011, 1, 1, tzinfo=UTC)
@factory.post_generation
def profile(obj, create, extracted, **kwargs): # pylint: disable=unused-argument, no-self-argument
if create:
obj.save()
return UserProfileFactory.create(user=obj, **kwargs)
elif kwargs:
raise Exception("Cannot build a user profile without saving the user")
else:
return None
@factory.post_generation
def groups(self, create, extracted, **kwargs):
if extracted is None:
return
if isinstance(extracted, basestring):
extracted = [extracted]
for group_name in extracted:
self.groups.add(GroupFactory.simple_generate(create, name=group_name))
class AnonymousUserFactory(factory.Factory):
class Meta(object):
model = AnonymousUser
class AdminFactory(UserFactory):
is_staff = True
class SuperuserFactory(UserFactory):
is_superuser = True
class CourseEnrollmentFactory(DjangoModelFactory):
class Meta(object):
model = CourseEnrollment
user = factory.SubFactory(UserFactory)
course_id = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
class CourseAccessRoleFactory(DjangoModelFactory):
class Meta(object):
model = CourseAccessRole
user = factory.SubFactory(UserFactory)
course_id = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
role = 'TestRole'
class CourseEnrollmentAllowedFactory(DjangoModelFactory):
class Meta(object):
model = CourseEnrollmentAllowed
email = '[email protected]'
course_id = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
class PendingEmailChangeFactory(DjangoModelFactory):
"""Factory for PendingEmailChange objects
user: generated by UserFactory
new_email: sequence of new+email+{}@edx.org
activation_key: sequence of integers, padded to 30 characters
"""
class Meta(object):
model = PendingEmailChange
user = factory.SubFactory(UserFactory)
new_email = factory.Sequence(u'new+email+{0}@edx.org'.format)
activation_key = factory.Sequence(u'{:0<30d}'.format)
class ContentTypeFactory(DjangoModelFactory):
class Meta(object):
model = ContentType
app_label = factory.Faker('app_name')
class PermissionFactory(DjangoModelFactory):
class Meta(object):
model = Permission
codename = factory.Faker('codename')
content_type = factory.SubFactory(ContentTypeFactory)
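# Editorial usage sketch (not part of the original module): DjangoModelFactory
# subclasses are called directly to create saved objects, or via .build() for
# unsaved instances; the username below is a made-up value.
#
#     user = UserFactory(username='learner1')
#     enrollment = CourseEnrollmentFactory(user=user)
#     assert enrollment.course_id == SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
#     unsaved = UserFactory.build()  # built but not saved; no profile is created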
| agpl-3.0 |
hyperized/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_job_template.py | 17 | 10884 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_job_template
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: create, update, or destroy Ansible Tower job template.
description:
- Create, update, or destroy Ansible Tower job templates. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- Name to use for the job template.
required: True
description:
description:
- Description to use for the job template.
job_type:
description:
- The job type to use for the job template.
required: True
choices: ["run", "check", "scan"]
inventory:
description:
- Name of the inventory to use for the job template.
project:
description:
- Name of the project to use for the job template.
required: True
playbook:
description:
- Path to the playbook to use for the job template within the project provided.
required: True
credential:
description:
- Name of the credential to use for the job template.
version_added: 2.7
vault_credential:
description:
- Name of the vault credential to use for the job template.
version_added: 2.7
forks:
description:
- The number of parallel or simultaneous processes to use while executing the playbook.
limit:
description:
        - A host pattern to further constrain the list of hosts managed or affected by the playbook.
verbosity:
description:
- Control the output level Ansible produces as the playbook runs. 0 - Normal, 1 - Verbose, 2 - More Verbose, 3 - Debug, 4 - Connection Debug.
choices: [0, 1, 2, 3, 4]
default: 0
extra_vars_path:
description:
- Path to the C(extra_vars) YAML file.
job_tags:
description:
- Comma separated list of the tags to use for the job template.
force_handlers_enabled:
description:
- Enable forcing playbook handlers to run even if a task fails.
version_added: 2.7
type: bool
default: 'no'
skip_tags:
description:
- Comma separated list of the tags to skip for the job template.
start_at_task:
description:
- Start the playbook at the task matching this name.
version_added: 2.7
diff_mode_enabled:
description:
- Enable diff mode for the job template.
version_added: 2.7
type: bool
default: 'no'
fact_caching_enabled:
description:
- Enable use of fact caching for the job template.
version_added: 2.7
type: bool
default: 'no'
host_config_key:
description:
- Allow provisioning callbacks using this host config key.
ask_diff_mode:
description:
- Prompt user to enable diff mode (show changes) to files when supported by modules.
version_added: 2.7
type: bool
default: 'no'
ask_extra_vars:
description:
        - Prompt user for C(extra_vars) on launch.
type: bool
default: 'no'
ask_limit:
description:
- Prompt user for a limit on launch.
version_added: 2.7
type: bool
default: 'no'
ask_tags:
description:
- Prompt user for job tags on launch.
type: bool
default: 'no'
ask_skip_tags:
description:
- Prompt user for job tags to skip on launch.
version_added: 2.7
type: bool
default: 'no'
ask_job_type:
description:
- Prompt user for job type on launch.
type: bool
default: 'no'
ask_verbosity:
description:
- Prompt user to choose a verbosity level on launch.
version_added: 2.7
type: bool
default: 'no'
ask_inventory:
description:
        - Prompt user for inventory on launch.
type: bool
default: 'no'
ask_credential:
description:
- Prompt user for credential on launch.
type: bool
default: 'no'
survey_enabled:
description:
- Enable a survey on the job template.
version_added: 2.7
type: bool
default: 'no'
survey_spec:
description:
- JSON/YAML dict formatted survey definition.
version_added: 2.8
type: dict
required: False
become_enabled:
description:
- Activate privilege escalation.
type: bool
default: 'no'
concurrent_jobs_enabled:
description:
- Allow simultaneous runs of the job template.
version_added: 2.7
type: bool
default: 'no'
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
extends_documentation_fragment: tower
notes:
- JSON for survey_spec can be found in Tower API Documentation. See
U(https://docs.ansible.com/ansible-tower/latest/html/towerapi/api_ref.html#/Job_Templates/Job_Templates_job_templates_survey_spec_create)
for POST operation payload example.
'''
EXAMPLES = '''
- name: Create tower Ping job template
tower_job_template:
name: "Ping"
job_type: "run"
inventory: "Local"
project: "Demo"
playbook: "ping.yml"
credential: "Local"
state: "present"
tower_config_file: "~/tower_cli.cfg"
survey_enabled: yes
survey_spec: "{{ lookup('file', 'my_survey.json') }}"
'''
from ansible.module_utils.ansible_tower import TowerModule, tower_auth_config, tower_check_mode
try:
import tower_cli
import tower_cli.exceptions as exc
from tower_cli.conf import settings
except ImportError:
pass
def update_fields(p):
'''This updates the module field names
to match the field names tower-cli expects to make
calling of the modify/delete methods easier.
'''
params = p.copy()
field_map = {
'fact_caching_enabled': 'use_fact_cache',
'ask_diff_mode': 'ask_diff_mode_on_launch',
'ask_extra_vars': 'ask_variables_on_launch',
'ask_limit': 'ask_limit_on_launch',
'ask_tags': 'ask_tags_on_launch',
'ask_skip_tags': 'ask_skip_tags_on_launch',
'ask_verbosity': 'ask_verbosity_on_launch',
'ask_inventory': 'ask_inventory_on_launch',
'ask_credential': 'ask_credential_on_launch',
'ask_job_type': 'ask_job_type_on_launch',
'diff_mode_enabled': 'diff_mode',
'concurrent_jobs_enabled': 'allow_simultaneous',
'force_handlers_enabled': 'force_handlers',
}
params_update = {}
for old_k, new_k in field_map.items():
v = params.pop(old_k)
params_update[new_k] = v
extra_vars = params.get('extra_vars_path')
if extra_vars is not None:
params_update['extra_vars'] = ['@' + extra_vars]
params.update(params_update)
return params
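# Editorial worked example (not part of the original module): given module
# params containing, e.g.,
#     {'ask_limit': True, 'extra_vars_path': '/tmp/vars.yml', ...}
# update_fields() renames the keys and wraps the extra-vars path so the result
# contains
#     {'ask_limit_on_launch': True, 'extra_vars': ['@/tmp/vars.yml'], ...}
# which matches the field names tower-cli's modify()/delete() expect.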
def update_resources(module, p):
params = p.copy()
identity_map = {
'project': 'name',
'inventory': 'name',
'credential': 'name',
'vault_credential': 'name',
}
for k, v in identity_map.items():
try:
if params[k]:
key = 'credential' if '_credential' in k else k
result = tower_cli.get_resource(key).get(**{v: params[k]})
params[k] = result['id']
elif k in params:
# unset empty parameters to avoid ValueError: invalid literal for int() with base 10: ''
del(params[k])
except (exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update job template: {0}'.format(excinfo), changed=False)
return params
def main():
argument_spec = dict(
name=dict(required=True),
description=dict(default=''),
job_type=dict(choices=['run', 'check', 'scan'], required=True),
inventory=dict(default=''),
project=dict(required=True),
playbook=dict(required=True),
credential=dict(default=''),
vault_credential=dict(default=''),
forks=dict(type='int'),
limit=dict(default=''),
verbosity=dict(type='int', choices=[0, 1, 2, 3, 4], default=0),
extra_vars_path=dict(type='path', required=False),
job_tags=dict(default=''),
force_handlers_enabled=dict(type='bool', default=False),
skip_tags=dict(default=''),
start_at_task=dict(default=''),
timeout=dict(type='int', default=0),
fact_caching_enabled=dict(type='bool', default=False),
host_config_key=dict(default=''),
ask_diff_mode=dict(type='bool', default=False),
ask_extra_vars=dict(type='bool', default=False),
ask_limit=dict(type='bool', default=False),
ask_tags=dict(type='bool', default=False),
ask_skip_tags=dict(type='bool', default=False),
ask_job_type=dict(type='bool', default=False),
ask_verbosity=dict(type='bool', default=False),
ask_inventory=dict(type='bool', default=False),
ask_credential=dict(type='bool', default=False),
survey_enabled=dict(type='bool', default=False),
survey_spec=dict(type='dict', required=False),
become_enabled=dict(type='bool', default=False),
diff_mode_enabled=dict(type='bool', default=False),
concurrent_jobs_enabled=dict(type='bool', default=False),
state=dict(choices=['present', 'absent'], default='present'),
)
module = TowerModule(argument_spec=argument_spec, supports_check_mode=True)
name = module.params.get('name')
state = module.params.pop('state')
json_output = {'job_template': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
jt = tower_cli.get_resource('job_template')
params = update_resources(module, module.params)
params = update_fields(params)
params['create_on_missing'] = True
try:
if state == 'present':
result = jt.modify(**params)
json_output['id'] = result['id']
elif state == 'absent':
result = jt.delete(**params)
except (exc.ConnectionError, exc.BadRequest, exc.NotFound, exc.AuthError) as excinfo:
module.fail_json(msg='Failed to update job template: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
if __name__ == '__main__':
main()
| gpl-3.0 |
hrhtspr/IkaLog | tools/IkaClips.py | 5 | 6548 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Automatically extract various scenes using IkaLog
import sys
import os
import pprint
sys.path.append('.')
from ikalog.inputs.cvcapture import *
from ikalog.engine import *
from ikalog.outputs.preview import *
from ikalog.utils import *
class IkaClips:
def on_game_start(self, context):
self.t_GameStart = context['engine']['msec']
def on_game_go_sign(self, context):
start = context['engine'][
'msec'] if not self.t_GameStart else self.t_GameStart
clip = {
'file': self.file,
'type': 'GameStart',
'start': start - 5 * 1000,
'end': context['engine']['msec'] + 1 * 1000,
}
self.clips.append(clip)
def on_game_killed(self, context):
clip = {
'file': self.file,
'type': 'kill',
'start': context['engine']['msec'] - 6 * 1000,
'end': context['engine']['msec'] + 2.5 * 1000,
}
self.clips.append(clip)
def on_game_dead(self, context):
clip = {
'file': self.file,
'type': 'death',
'start': context['engine']['msec'] - 6 * 1000,
'end': context['engine']['msec'] + 2.5 * 1000,
}
self.clips.append(clip)
def on_game_finish(self, context):
clip = {
'file': self.file,
'type': 'finish',
'start': context['engine']['msec'] - 6 * 1000,
'end': context['engine']['msec'] + 10 * 1000,
}
self.clips.append(clip)
def on_frame_read_failed(self, context):
self.engine.stop()
def on_game_individual_result(self, context):
        # Forcibly close the capture and stop IkaEngine as well
self.engine.capture.cap.release()
self.engine.capture.cap = None
self.engine.stop()
def merge(self):
        # Merge overlapping clips
new_clips = []
last_clip = None
for clip in self.clips:
if last_clip and last_clip['end'] < clip['start']:
new_clips.append(last_clip)
last_clip = None
if last_clip is None:
last_clip = clip
continue
if clip['start'] < last_clip['end']:
last_clip['type'] = '%s+%s' % (last_clip['type'], clip['type'])
last_clip['end'] = clip['end']
if not last_clip is None:
new_clips.append(last_clip)
IkaUtils.dprint('%d clips are merged to %d clips.' %
(len(self.clips), len(new_clips)))
self.clips = new_clips
def analyze(self, file):
self.file = file
self.clips = []
self.t_GameStart = None
        # Read the file specified as input
source = CVCapture()
source.start_recorded_file(file)
source.need_resize = True
        # Show the screen, since progress can't be followed otherwise
screen = Screen(0, size=(640, 360))
        # Register ourselves (and the screen) as plugins to receive callbacks
outputPlugins = [self, screen]
        # Run IkaEngine
self.engine = IkaEngine()
self.engine.pause(False)
self.engine.set_capture(source)
self.engine.set_plugins(outputPlugins)
try:
self.engine.run()
except:
pass
        # print('IkaEngine stopped, so the analysis is finished')
IkaUtils.dprint('%d clips found.' % (len(self.clips)))
_cut_number = 0
def msec2time(self, msec):
sec = int(msec / 100.0) / 10.0
if (sec <= 0.0):
return '00:00:00.0'
hh = '{0:02d}'.format(int(sec / 3600))
mm = '{0:02d}'.format(int(sec / 60) % 60)
ss = '{0:02d}'.format(int(sec % 60))
ms = int((sec - int(sec)) * 10)
s = '%s:%s:%s.%s' % (hh, mm, ss, ms)
return s
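    # Editorial worked example (not in the original source): 61500 ms is one
    # minute and 1.5 seconds, so, together with the duration() helper below,
    #     self.msec2time(61500)       -> '00:01:01.5'
    #     self.duration(5000, 12500)  -> '7.5'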
def duration(self, msec1, msec2):
duration = max(msec1, msec2) - min(msec1, msec2)
duration = int(duration / 100.0) / 10.0
return '%.1f' % duration
def cutVideoFile1(self, clip):
self._cut_number = self._cut_number + 1
srcname, ext = os.path.splitext(os.path.basename(self.file))
destfile = os.path.join(self.tmp_dir, '%s.%d.%s.avi' % (
srcname, self._cut_number, clip['type']))
at = self.msec2time(clip['start'])
dur = self.duration(clip['start'], clip['end'])
cmd = 'ffmpeg -y -i %s -ss %s -t %s -f avi -acodec copy -vcodec huffyuv %s' % (
clip['file'], at, dur, destfile)
IkaUtils.dprint(cmd)
os.system(cmd)
clip['file_out'] = destfile
def cutVideoFile(self):
IkaUtils.dprint('Cutting clips from original file...')
for clip in self.clips:
self.cutVideoFile1(clip)
def concatenateClips(self):
        IkaUtils.dprint('Concatenating clips...')
srcname, ext = os.path.splitext(os.path.basename(self.file))
destfile = os.path.join(self.out_dir, srcname + '.summary.mp4')
files = []
for clip in self.clips:
files.append(clip['file_out'])
args_i = '-i "concat:%s|"' % ("|".join(files))
cmd = 'ffmpeg -y %s -vf scale=640:360 -f mp4 -c:v libx264 -b:v 1M -c:a copy %s' % (
args_i, destfile)
IkaUtils.dprint(cmd)
os.system(cmd)
for clip in self.clips:
os.remove(clip['file_out'])
def __init__(self):
self.scenes = []
self.tmp_dir = '/tmp/'
self.out_dir = './'
if __name__ == "__main__":
for file in sys.argv[1:]:
clips = IkaClips()
clips.analyze(file)
pprint.pprint(clips.clips)
clips.merge()
pprint.pprint(clips.clips)
clips.cutVideoFile()
clips.concatenateClips()
| apache-2.0 |
rnowling/pop-gen-models | single-pop/single_pop.py | 1 | 3379 | import sys
import numpy as np
import numpy.random as npr
from sklearn.neighbors.kde import KernelDensity
from scipy.special import gammaln
import matplotlib.pyplot as plt
from calculate_phist import read_counts
from calculate_phist import normalize_haplotypes
def log_factorial(n):
return gammaln(n+1)
def log_multinomial(xs, ps):
n = np.sum(xs)
log_prob = log_factorial(n) - np.sum(log_factorial(xs)) + np.sum(xs * np.log(ps + 0.0000000000001))
return log_prob
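# Editorial note (not in the original source): log_multinomial computes the
# log-density of the multinomial distribution,
#     log P(x | p) = log(n!) - sum_i log(x_i!) + sum_i x_i * log(p_i + eps),
# with n = sum_i x_i and eps guarding against log(0). For example,
# log_multinomial(np.array([2, 1]), np.array([0.5, 0.5])) is approximately
# log(3) + 3 * log(0.5) = log(3 / 8).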
class KDE_MCMC_Sampler(object):
def __init__(self, observed_counts):
"""
Observed counts is 3D matrix of pop, locus, haplotype
"""
self.observed_counts = observed_counts
self.individual_counts = observed_counts.sum(axis=2)
self.observed_frequencies = normalize_haplotypes(observed_counts)
self.n_loci, self.n_pop, self.n_haplotypes = self.observed_counts.shape
# from bamova
self.DWEIGHT = 1.0
self.DADD = 0.00001
self.SMALL_NUM = 0.0000000000001
print "initializing frequencies"
self.freq = np.zeros((self.n_loci, self.n_haplotypes))
for l in xrange(self.n_loci):
self.freq[l, :] = self.sample_locus_freq(self.observed_frequencies[l, 0, :])
def sample_locus_freq(self, freq):
alphas = self.DWEIGHT * freq + self.DADD + self.SMALL_NUM
return npr.dirichlet(alphas)
def locus_prob(self, locus_obs_counts, locus_freq):
log_prob_sum = 0.0
for p in xrange(self.n_pop):
log_prob_sum += log_multinomial(locus_obs_counts[p], locus_freq)
return log_prob_sum
def step(self):
total_log_prob = 0.0
for l in xrange(self.n_loci):
locus_indiv_counts = self.individual_counts[l, :]
locus_obs_counts = self.observed_counts[l, :, :]
log_prob = self.locus_prob(locus_obs_counts, self.freq[l, :])
proposed_locus_freq = self.sample_locus_freq(self.freq[l, :])
proposed_log_prob = self.locus_prob(locus_obs_counts, proposed_locus_freq)
log_prob_ratio = proposed_log_prob - log_prob
log_r = np.log(npr.random())
if proposed_log_prob >= log_prob or log_r <= log_prob_ratio:
self.freq[l, :] = proposed_locus_freq
log_prob = proposed_log_prob
total_log_prob += log_prob
locus_prob = []
for l in xrange(self.n_loci):
			log_prob = self.locus_prob(self.observed_counts[l, :, :], self.freq[l, :])  # use this locus's own counts
locus_prob.append(log_prob)
return self.freq, total_log_prob, locus_prob
def plot_log_prob(flname, log_probs):
plt.clf()
plt.hold(True)
plt.hist(log_probs, bins=30)
plt.xlabel("Log Probability", fontsize=16)
plt.xlim([min(log_probs), 0.0])
plt.ylabel("Occurrences (Loci)", fontsize=16)
	plt.savefig(flname, dpi=200)
def simulate(occur_fl, n_steps, plot_flname, prob_flname):
print "reading occurrences"
observed_counts = read_counts(occur_fl)
individual_counts = observed_counts.sum(axis=2)
observed_frequencies = normalize_haplotypes(observed_counts)
sampler = KDE_MCMC_Sampler(observed_counts)
fl = open(prob_flname, "w")
locus_log_prob = []
for i in xrange(n_steps):
freq, log_prob, locus_log_prob = sampler.step()
print "step", i, "log prob", log_prob
if i % 100 == 0:
for j, prob in enumerate(locus_log_prob):
fl.write("%s %s %s\n" % (i, j, prob))
fl.close()
plot_log_prob(plot_flname, locus_log_prob)
if __name__ == "__main__":
occur_fl = sys.argv[1]
n_steps = int(sys.argv[2])
plot_flname = sys.argv[3]
prob_flname = sys.argv[4]
simulate(occur_fl, n_steps, plot_flname, prob_flname)
| apache-2.0 |
SUNET/eduid-webapp | src/eduid_webapp/reset_password/schemas.py | 1 | 4836 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2019 SUNET
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# 3. Neither the name of SUNET nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from marshmallow import fields
from eduid_common.api.schemas.base import EduidSchema, FluxStandardAction
from eduid_common.api.schemas.csrf import CSRFRequestMixin, CSRFResponseMixin
from eduid_common.api.schemas.email import LowercaseEmail
__author__ = 'eperez'
class ResetPasswordEmailRequestSchema(EduidSchema, CSRFRequestMixin):
email = LowercaseEmail(required=True)
class ResetPasswordEmailCodeRequestSchema(EduidSchema, CSRFRequestMixin):
email_code = fields.String(required=True)
class ResetPasswordResponseSchema(FluxStandardAction):
class ResetPasswordResponsePayload(EduidSchema, CSRFResponseMixin):
pass
payload = fields.Nested(ResetPasswordResponsePayload)
class ResetPasswordVerifyEmailResponseSchema(FluxStandardAction):
class ResetPasswordVerifyEmailResponsePayload(EduidSchema, CSRFResponseMixin):
suggested_password = fields.String(required=True)
email_code = fields.String(required=True)
email_address = fields.String(required=True)
extra_security = fields.Dict(required=True)
password_entropy = fields.Integer(required=True)
password_length = fields.Integer(required=True)
password_service_url = fields.String(required=True)
success = fields.Bool(required=True)
zxcvbn_terms = fields.List(required=True, cls_or_instance=fields.String)
payload = fields.Nested(ResetPasswordVerifyEmailResponsePayload)
class ResetPasswordExtraSecPhoneSchema(EduidSchema, CSRFRequestMixin):
email_code = fields.String(required=True)
phone_index = fields.Integer(required=True)
class ResetPasswordWithCodeSchema(EduidSchema, CSRFRequestMixin):
email_code = fields.String(required=True)
password = fields.String(required=True)
class ResetPasswordWithPhoneCodeSchema(ResetPasswordWithCodeSchema):
phone_code = fields.String(required=True)
class ResetPasswordWithSecTokenSchema(ResetPasswordWithCodeSchema):
authenticator_data = fields.String(required=False, data_key='authenticatorData')
client_data_json = fields.String(required=False, data_key='clientDataJSON')
credential_id = fields.String(required=False, data_key='credentialId')
signature = fields.String(required=True)
class SuggestedPasswordResponseSchema(FluxStandardAction):
class SuggestedPasswordPayload(EduidSchema, CSRFResponseMixin):
suggested_password = fields.String(required=True)
payload = fields.Nested(SuggestedPasswordPayload, many=False)
class NewPasswordSecurePhoneRequestSchema(EduidSchema, CSRFRequestMixin):
email_code = fields.String(required=True)
password = fields.String(required=True)
phone_code = fields.String(required=True)
class NewPasswordSecureTokenRequestSchema(EduidSchema, CSRFRequestMixin):
email_code = fields.String(required=True)
password = fields.String(required=True)
token_response = fields.String(required=False, data_key='tokenResponse')
authenticator_data = fields.String(required=False, data_key='authenticatorData')
client_data_json = fields.String(required=False, data_key='clientDataJSON')
credential_id = fields.String(required=False, data_key='credentialId')
signature = fields.String(required=False)
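# Editorial usage sketch (not part of the original module): request schemas
# like these are typically instantiated to validate incoming payloads, e.g.
#
#     schema = ResetPasswordEmailRequestSchema()
#     result = schema.load({'email': '[email protected]', 'csrf_token': token})
#
# The csrf_token field is an assumption here, inherited from CSRFRequestMixin.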
| bsd-3-clause |
mj10777/QGIS | python/pyplugin_installer/unzip.py | 53 | 2524 | # -*- coding:utf-8 -*-
"""
/***************************************************************************
Plugin Installer module
unzip function
-------------------
Date : May 2013
Copyright : (C) 2013 by Borys Jurgiel
Email : info at borysjurgiel dot pl
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import zipfile
import os
def unzip(file, targetDir, password=None):
""" Creates directory structure and extracts the zip contents to it.
file (file object) - the zip file to extract
targetDir (str) - target location
password (str; optional) - password to decrypt the zip file (if encrypted)
"""
# convert password to bytes
if isinstance(password, str):
password = bytes(password, 'utf8')
# create destination directory if doesn't exist
if not targetDir.endswith(':') and not os.path.exists(targetDir):
os.makedirs(targetDir)
zf = zipfile.ZipFile(file)
for name in zf.namelist():
# Skip directories - they will be created when necessary by os.makedirs
if name.endswith('/'):
continue
# Read the source file before creating any output,
# so no directories are created if user doesn't know the password
memberContent = zf.read(name, password)
# create directory if doesn't exist
localDir = os.path.split(name)[0]
fullDir = os.path.normpath(os.path.join(targetDir, localDir))
if not os.path.exists(fullDir):
os.makedirs(fullDir)
# extract file
fullPath = os.path.normpath(os.path.join(targetDir, name))
outfile = open(fullPath, 'wb')
outfile.write(memberContent)
outfile.flush()
outfile.close()
zf.close()
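# Editorial usage sketch (not in the original file); the paths and password
# are made-up values:
#
#     with open('/tmp/plugin.zip', 'rb') as f:
#         unzip(f, '/home/user/.qgis2/python/plugins', password='secret')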
| gpl-2.0 |
sirkubax/ansible-modules-extras | database/vertica/vertica_schema.py | 148 | 11740 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: vertica_schema
version_added: '2.0'
short_description: Adds or removes Vertica database schema and roles.
description:
- Adds or removes Vertica database schema and, optionally, roles
with schema access privileges.
- A schema will not be removed until all the objects have been dropped.
- In such a situation, if the module tries to remove the schema it
will fail and only remove roles created for the schema if they have
no dependencies.
options:
name:
description:
- Name of the schema to add or remove.
required: true
usage_roles:
description:
- Comma separated list of roles to create and grant usage access to the schema.
aliases: ['usage_role']
required: false
default: null
create_roles:
description:
- Comma separated list of roles to create and grant usage and create access to the schema.
aliases: ['create_role']
required: false
default: null
owner:
description:
- Name of the user to set as owner of the schema.
required: false
default: null
state:
description:
- Whether to create C(present), or drop C(absent) a schema.
required: false
default: present
choices: ['present', 'absent']
db:
description:
- Name of the Vertica database.
required: false
default: null
cluster:
description:
- Name of the Vertica cluster.
required: false
default: localhost
port:
description:
- Vertica cluster port to connect to.
required: false
default: 5433
login_user:
description:
- The username used to authenticate with.
required: false
default: dbadmin
login_password:
description:
- The password used to authenticate with.
required: false
default: null
notes:
- The default authentication assumes that you are either logging in as or sudo'ing
to the C(dbadmin) account on the host.
- This module uses C(pyodbc), a Python ODBC database adapter. You must ensure
that C(unixODBC) and C(pyodbc) is installed on the host and properly configured.
- Configuring C(unixODBC) for Vertica requires C(Driver = /opt/vertica/lib64/libverticaodbc.so)
to be added to the C(Vertica) section of either C(/etc/odbcinst.ini) or C($HOME/.odbcinst.ini)
and both C(ErrorMessagesPath = /opt/vertica/lib64) and C(DriverManagerEncoding = UTF-16)
to be added to the C(Driver) section of either C(/etc/vertica.ini) or C($HOME/.vertica.ini).
requirements: [ 'unixODBC', 'pyodbc' ]
author: "Dariusz Owczarek (@dareko)"
"""
EXAMPLES = """
- name: creating a new vertica schema
vertica_schema: name=schema_name db=db_name state=present
- name: creating a new schema with specific schema owner
vertica_schema: name=schema_name owner=dbowner db=db_name state=present
- name: creating a new schema with roles
vertica_schema:
name=schema_name
create_roles=schema_name_all
usage_roles=schema_name_ro,schema_name_rw
db=db_name
state=present
"""
try:
import pyodbc
except ImportError:
pyodbc_found = False
else:
pyodbc_found = True
class NotSupportedError(Exception):
pass
class CannotDropError(Exception):
pass
# module specific functions
def get_schema_facts(cursor, schema=''):
facts = {}
cursor.execute("""
select schema_name, schema_owner, create_time
from schemata
where not is_system_schema and schema_name not in ('public', 'TxtIndex')
and (? = '' or schema_name ilike ?)
""", schema, schema)
while True:
rows = cursor.fetchmany(100)
if not rows:
break
for row in rows:
facts[row.schema_name.lower()] = {
'name': row.schema_name,
'owner': row.schema_owner,
'create_time': str(row.create_time),
'usage_roles': [],
'create_roles': []}
cursor.execute("""
select g.object_name as schema_name, r.name as role_name,
lower(g.privileges_description) privileges_description
from roles r join grants g
on g.grantee_id = r.role_id and g.object_type='SCHEMA'
and g.privileges_description like '%USAGE%'
and g.grantee not in ('public', 'dbadmin')
and (? = '' or g.object_name ilike ?)
""", schema, schema)
while True:
rows = cursor.fetchmany(100)
if not rows:
break
for row in rows:
schema_key = row.schema_name.lower()
if 'create' in row.privileges_description:
facts[schema_key]['create_roles'].append(row.role_name)
else:
facts[schema_key]['usage_roles'].append(row.role_name)
return facts
def update_roles(schema_facts, cursor, schema,
existing, required,
create_existing, create_required):
for role in set(existing + create_existing) - set(required + create_required):
cursor.execute("drop role {0} cascade".format(role))
for role in set(create_existing) - set(create_required):
cursor.execute("revoke create on schema {0} from {1}".format(schema, role))
for role in set(required + create_required) - set(existing + create_existing):
cursor.execute("create role {0}".format(role))
cursor.execute("grant usage on schema {0} to {1}".format(schema, role))
for role in set(create_required) - set(create_existing):
cursor.execute("grant create on schema {0} to {1}".format(schema, role))
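# Editorial worked example (not part of the original module): for schema 'app'
# with existing=['app_ro'], required=['app_ro', 'app_rw'], create_existing=[]
# and create_required=['app_all'], update_roles() issues roughly:
#     create role app_rw;  grant usage on schema app to app_rw;
#     create role app_all; grant usage on schema app to app_all;
#     grant create on schema app to app_all;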
def check(schema_facts, schema, usage_roles, create_roles, owner):
schema_key = schema.lower()
if schema_key not in schema_facts:
return False
    if owner and owner.lower() != schema_facts[schema_key]['owner'].lower():
return False
if cmp(sorted(usage_roles), sorted(schema_facts[schema_key]['usage_roles'])) != 0:
return False
if cmp(sorted(create_roles), sorted(schema_facts[schema_key]['create_roles'])) != 0:
return False
return True
def present(schema_facts, cursor, schema, usage_roles, create_roles, owner):
schema_key = schema.lower()
if schema_key not in schema_facts:
query_fragments = ["create schema {0}".format(schema)]
if owner:
query_fragments.append("authorization {0}".format(owner))
cursor.execute(' '.join(query_fragments))
update_roles(schema_facts, cursor, schema, [], usage_roles, [], create_roles)
schema_facts.update(get_schema_facts(cursor, schema))
return True
else:
changed = False
if owner and owner.lower() != schema_facts[schema_key]['owner'].lower():
raise NotSupportedError((
"Changing schema owner is not supported. "
"Current owner: {0}."
).format(schema_facts[schema_key]['owner']))
if cmp(sorted(usage_roles), sorted(schema_facts[schema_key]['usage_roles'])) != 0 or \
cmp(sorted(create_roles), sorted(schema_facts[schema_key]['create_roles'])) != 0:
update_roles(schema_facts, cursor, schema,
schema_facts[schema_key]['usage_roles'], usage_roles,
schema_facts[schema_key]['create_roles'], create_roles)
changed = True
if changed:
schema_facts.update(get_schema_facts(cursor, schema))
return changed
def absent(schema_facts, cursor, schema, usage_roles, create_roles):
schema_key = schema.lower()
if schema_key in schema_facts:
update_roles(schema_facts, cursor, schema,
schema_facts[schema_key]['usage_roles'], [], schema_facts[schema_key]['create_roles'], [])
try:
cursor.execute("drop schema {0} restrict".format(schema_facts[schema_key]['name']))
except pyodbc.Error:
raise CannotDropError("Dropping schema failed due to dependencies.")
del schema_facts[schema_key]
return True
else:
return False
# module logic
def main():
module = AnsibleModule(
argument_spec=dict(
schema=dict(required=True, aliases=['name']),
usage_roles=dict(default=None, aliases=['usage_role']),
create_roles=dict(default=None, aliases=['create_role']),
owner=dict(default=None),
state=dict(default='present', choices=['absent', 'present']),
db=dict(default=None),
cluster=dict(default='localhost'),
port=dict(default='5433'),
login_user=dict(default='dbadmin'),
login_password=dict(default=None),
), supports_check_mode = True)
if not pyodbc_found:
module.fail_json(msg="The python pyodbc module is required.")
schema = module.params['schema']
usage_roles = []
if module.params['usage_roles']:
usage_roles = module.params['usage_roles'].split(',')
usage_roles = filter(None, usage_roles)
create_roles = []
if module.params['create_roles']:
create_roles = module.params['create_roles'].split(',')
create_roles = filter(None, create_roles)
owner = module.params['owner']
state = module.params['state']
db = ''
if module.params['db']:
db = module.params['db']
changed = False
try:
dsn = (
"Driver=Vertica;"
"Server={0};"
"Port={1};"
"Database={2};"
"User={3};"
"Password={4};"
"ConnectionLoadBalance={5}"
).format(module.params['cluster'], module.params['port'], db,
module.params['login_user'], module.params['login_password'], 'true')
db_conn = pyodbc.connect(dsn, autocommit=True)
cursor = db_conn.cursor()
except Exception, e:
module.fail_json(msg="Unable to connect to database: {0}.".format(e))
try:
schema_facts = get_schema_facts(cursor)
if module.check_mode:
changed = not check(schema_facts, schema, usage_roles, create_roles, owner)
elif state == 'absent':
try:
changed = absent(schema_facts, cursor, schema, usage_roles, create_roles)
except pyodbc.Error, e:
module.fail_json(msg=str(e))
elif state == 'present':
try:
changed = present(schema_facts, cursor, schema, usage_roles, create_roles, owner)
except pyodbc.Error, e:
module.fail_json(msg=str(e))
except NotSupportedError, e:
module.fail_json(msg=str(e), ansible_facts={'vertica_schemas': schema_facts})
except CannotDropError, e:
module.fail_json(msg=str(e), ansible_facts={'vertica_schemas': schema_facts})
except SystemExit:
# avoid catching this on python 2.4
raise
except Exception, e:
module.fail_json(msg=e)
module.exit_json(changed=changed, schema=schema, ansible_facts={'vertica_schemas': schema_facts})
# import ansible utilities
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
USGSDenverPychron/pychron | pychron/experiment/run_history_view.py | 1 | 3330 | # ===============================================================================
# Copyright 2017 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from traits.api import HasTraits, List, Instance, Str, Int, on_trait_change, Any
from traitsui.api import View, UItem, TabularEditor, Item, EnumEditor, HGroup, VGroup, InstanceEditor, Controller, \
VSplit
from pychron.envisage.browser.adapters import AnalysisAdapter
from pychron.pychron_constants import NULL_STR
class RunHistoryModel(HasTraits):
analyses = List
dvc = Instance('pychron.dvc.dvc.DVC')
mass_spectrometer = Str
mass_spectrometers = List
n = Int(10, auto_set=False, enter_set=True)
analysis_view = Instance('pychron.processing.analyses.view.analysis_view.AnalysisView')
selected = Any
_cache = None
def load(self):
self.dvc.create_session()
self._load_analyses()
self.mass_spectrometers = [NULL_STR] + self.dvc.get_mass_spectrometer_names()
def destroy(self):
self.dvc.close_session()
self._cache = None
@on_trait_change('mass_spectrometer, n')
def _load_analyses(self):
ms = self.mass_spectrometer
if ms == NULL_STR:
ms = None
self.analyses = self.dvc.get_last_n_analyses(self.n, ms)
def _selected_changed(self, new):
if self._cache is None:
self._cache = {}
new = new.item
uuid = new.uuid
if uuid in self._cache:
av = self._cache[uuid]
else:
a = self.dvc.make_analysis(new.record_views[0])
av = a.analysis_view
self._cache[new.uuid] = av
self.analysis_view = av
class RunHistoryView(Controller):
def closed(self, info, is_ok):
self.model.destroy()
def traits_view(self):
agrp = HGroup(UItem('mass_spectrometer', editor=EnumEditor(name='mass_spectrometers')),
Item('n', label='Limit'))
adapter = AnalysisAdapter()
adapter.run_history_columns()
v = View(VSplit(VGroup(agrp,
UItem('analyses', editor=TabularEditor(dclicked='selected',
editable=False,
adapter=adapter))),
UItem('analysis_view',
visible_when='analysis_view',
style='custom', editor=InstanceEditor())),
title='Run History',
width=700,
height=700,
resizable=True,
buttons=['OK'])
return v
# ============= EOF =============================================
| apache-2.0 |
tanglu-org/tgl-misago | misago/apps/admin/sections/users.py | 3 | 8888 | from django.conf.urls import patterns, include, url
from django.utils.translation import ugettext_lazy as _
from misago.admin import AdminAction
from misago.models import Ban, Newsletter, PruningPolicy, Rank, User, WarnLevel
ADMIN_ACTIONS = (
AdminAction(
section='users',
id='users',
name=_("Users List"),
help=_("Search and browse users"),
icon='user',
model=User,
actions=[
{
'id': 'list',
'name': _("Browse Users"),
'help': _("Browse all registered user accounts"),
'link': 'admin_users'
},
{
'id': 'new',
'name': _("Add User"),
'help': _("Create new user account"),
'link': 'admin_users_new'
},
],
link='admin_users',
urlpatterns=patterns('misago.apps.admin.users.views',
url(r'^$', 'List', name='admin_users'),
url(r'^(?P<page>[1-9]([0-9]+)?)/$', 'List', name='admin_users'),
url(r'^inactive/$', 'inactive', name='admin_users_inactive'),
url(r'^new/$', 'New', name='admin_users_new'),
url(r'^edit/(?P<slug>[a-z0-9]+)-(?P<target>\d+)/$', 'Edit', name='admin_users_edit'),
url(r'^delete/(?P<slug>[a-z0-9]+)-(?P<target>\d+)/$', 'Delete', name='admin_users_delete'),
),
),
AdminAction(
section='users',
id='ranks',
name=_("Ranks"),
help=_("Administrate User Ranks"),
icon='star',
model=Rank,
actions=[
{
'id': 'list',
'name': _("Browse Ranks"),
'help': _("Browse all existing ranks"),
'link': 'admin_ranks'
},
{
'id': 'new',
'name': _("Add Rank"),
'help': _("Create new rank"),
'link': 'admin_ranks_new'
},
],
link='admin_ranks',
urlpatterns=patterns('misago.apps.admin.ranks.views',
url(r'^$', 'List', name='admin_ranks'),
url(r'^new/$', 'New', name='admin_ranks_new'),
url(r'^edit/(?P<slug>([a-z0-9]|-)+)-(?P<target>\d+)/$', 'Edit', name='admin_ranks_edit'),
url(r'^delete/(?P<slug>([a-z0-9]|-)+)-(?P<target>\d+)/$', 'Delete', name='admin_ranks_delete'),
),
),
AdminAction(
section='users',
id='warning_levels',
name=_("Warning Levels"),
help=_("Define penalties for different warning levels."),
icon='exclamation-sign',
model=WarnLevel,
actions=[
{
'id': 'list',
'name': _("Browse Warning Levels"),
'help': _("Browse all existing warning levels"),
'link': 'admin_warning_levels'
},
{
'id': 'new',
'name': _("Set New Warning Level"),
'help': None,
'link': 'admin_warning_levels_new'
},
],
link='admin_warning_levels',
urlpatterns=patterns('misago.apps.admin.warninglevels.views',
url(r'^$', 'List', name='admin_warning_levels'),
url(r'^new/$', 'New', name='admin_warning_levels_new'),
url(r'^edit/(?P<slug>([a-z0-9]|-)+)-(?P<target>\d+)/$', 'Edit', name='admin_warning_levels_edit'),
url(r'^delete/(?P<slug>([a-z0-9]|-)+)-(?P<target>\d+)/$', 'Delete', name='admin_warning_levels_delete'),
),
),
AdminAction(
section='users',
id='bans',
name=_("Bans"),
help=_("Ban or unban users from forums."),
icon='lock',
model=Ban,
actions=[
{
'id': 'list',
'name': _("Browse Bans"),
'help': _("Browse all existing bans"),
'link': 'admin_bans'
},
{
'id': 'new',
'name': _("Set Ban"),
'help': _("Set new Ban"),
'link': 'admin_bans_new'
},
],
link='admin_bans',
urlpatterns=patterns('misago.apps.admin.bans.views',
url(r'^$', 'List', name='admin_bans'),
url(r'^(?P<page>[1-9]([0-9]+)?)/$', 'List', name='admin_bans'),
url(r'^new/$', 'New', name='admin_bans_new'),
url(r'^edit/(?P<target>\d+)/$', 'Edit', name='admin_bans_edit'),
url(r'^delete/(?P<target>\d+)/$', 'Delete', name='admin_bans_delete'),
),
),
AdminAction(
section='users',
id='prune_users',
name=_("Prune Users"),
help=_("Delete multiple Users"),
icon='remove',
model=PruningPolicy,
actions=[
{
'id': 'list',
'name': _("Pruning Policies"),
'help': _("Browse all existing pruning policies"),
'link': 'admin_prune_users'
},
{
'id': 'new',
'name': _("Set New Policy"),
'help': _("Set new pruning policy"),
'link': 'admin_prune_users_new'
},
],
link='admin_prune_users',
urlpatterns=patterns('misago.apps.admin.pruneusers.views',
url(r'^$', 'List', name='admin_prune_users'),
url(r'^new/$', 'New', name='admin_prune_users_new'),
url(r'^edit/(?P<target>\d+)/$', 'Edit', name='admin_prune_users_edit'),
url(r'^delete/(?P<target>\d+)/$', 'Delete', name='admin_prune_users_delete'),
url(r'^apply/(?P<target>\d+)/$', 'Apply', name='admin_prune_users_apply'),
),
),
AdminAction(
section='users',
id='newsletters',
name=_("Newsletters"),
help=_("Manage and send Newsletters"),
icon='envelope',
model=Newsletter,
actions=[
{
'id': 'list',
'name': _("Browse Newsletters"),
'help': _("Browse all existing Newsletters"),
'link': 'admin_newsletters'
},
{
'id': 'new',
'name': _("New Newsletter"),
'help': _("Create new Newsletter"),
'link': 'admin_newsletters_new'
},
],
link='admin_newsletters',
urlpatterns=patterns('misago.apps.admin.newsletters.views',
url(r'^$', 'List', name='admin_newsletters'),
url(r'^(?P<page>[1-9]([0-9]+)?)/$', 'List', name='admin_newsletters'),
url(r'^new/$', 'New', name='admin_newsletters_new'),
url(r'^send/(?P<target>\d+)/(?P<token>[a-zA-Z0-9]+)/$', 'send', name='admin_newsletters_send'),
url(r'^edit/(?P<target>\d+)/$', 'Edit', name='admin_newsletters_edit'),
url(r'^delete/(?P<target>\d+)/$', 'Delete', name='admin_newsletters_delete'),
),
),
)
| gpl-3.0 |
robhudson/django | tests/model_formsets/models.py | 118 | 7878 | from __future__ import unicode_literals
import datetime
import uuid
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Author(models.Model):
name = models.CharField(max_length=100)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class BetterAuthor(Author):
write_speed = models.IntegerField()
@python_2_unicode_compatible
class Book(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
title = models.CharField(max_length=100)
class Meta:
unique_together = (
('author', 'title'),
)
ordering = ['id']
def __str__(self):
return self.title
def clean(self):
# Ensure author is always accessible in clean method
assert self.author.name is not None
@python_2_unicode_compatible
class BookWithCustomPK(models.Model):
my_pk = models.DecimalField(max_digits=5, decimal_places=0, primary_key=True)
author = models.ForeignKey(Author, models.CASCADE)
title = models.CharField(max_length=100)
def __str__(self):
return '%s: %s' % (self.my_pk, self.title)
class Editor(models.Model):
name = models.CharField(max_length=100)
@python_2_unicode_compatible
class BookWithOptionalAltEditor(models.Model):
author = models.ForeignKey(Author, models.CASCADE)
# Optional secondary author
alt_editor = models.ForeignKey(Editor, models.SET_NULL, blank=True, null=True)
title = models.CharField(max_length=100)
class Meta:
unique_together = (
('author', 'title', 'alt_editor'),
)
def __str__(self):
return self.title
@python_2_unicode_compatible
class AlternateBook(Book):
notes = models.CharField(max_length=100)
def __str__(self):
return '%s - %s' % (self.title, self.notes)
@python_2_unicode_compatible
class AuthorMeeting(models.Model):
name = models.CharField(max_length=100)
authors = models.ManyToManyField(Author)
created = models.DateField(editable=False)
def __str__(self):
return self.name
class CustomPrimaryKey(models.Model):
my_pk = models.CharField(max_length=10, primary_key=True)
some_field = models.CharField(max_length=100)
# models for inheritance tests.
@python_2_unicode_compatible
class Place(models.Model):
name = models.CharField(max_length=50)
city = models.CharField(max_length=50)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Owner(models.Model):
auto_id = models.AutoField(primary_key=True)
name = models.CharField(max_length=100)
place = models.ForeignKey(Place, models.CASCADE)
def __str__(self):
return "%s at %s" % (self.name, self.place)
class Location(models.Model):
place = models.ForeignKey(Place, models.CASCADE, unique=True)
# this is purely for testing the data doesn't matter here :)
lat = models.CharField(max_length=100)
lon = models.CharField(max_length=100)
@python_2_unicode_compatible
class OwnerProfile(models.Model):
owner = models.OneToOneField(Owner, models.CASCADE, primary_key=True)
age = models.PositiveIntegerField()
def __str__(self):
return "%s is %d" % (self.owner.name, self.age)
@python_2_unicode_compatible
class Restaurant(Place):
serves_pizza = models.BooleanField(default=False)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Product(models.Model):
slug = models.SlugField(unique=True)
def __str__(self):
return self.slug
@python_2_unicode_compatible
class Price(models.Model):
price = models.DecimalField(max_digits=10, decimal_places=2)
quantity = models.PositiveIntegerField()
def __str__(self):
return "%s for %s" % (self.quantity, self.price)
class Meta:
unique_together = (('price', 'quantity'),)
class MexicanRestaurant(Restaurant):
serves_tacos = models.BooleanField(default=False)
class ClassyMexicanRestaurant(MexicanRestaurant):
restaurant = models.OneToOneField(MexicanRestaurant, models.CASCADE, parent_link=True, primary_key=True)
tacos_are_yummy = models.BooleanField(default=False)
# models for testing unique_together validation when a fk is involved and
# using inlineformset_factory.
@python_2_unicode_compatible
class Repository(models.Model):
name = models.CharField(max_length=25)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Revision(models.Model):
repository = models.ForeignKey(Repository, models.CASCADE)
revision = models.CharField(max_length=40)
class Meta:
unique_together = (("repository", "revision"),)
def __str__(self):
return "%s (%s)" % (self.revision, six.text_type(self.repository))
# models for testing callable defaults (see bug #7975). If you define a model
# with a callable default value, you cannot rely on the initial value in a
# form.
class Person(models.Model):
name = models.CharField(max_length=128)
class Membership(models.Model):
person = models.ForeignKey(Person, models.CASCADE)
date_joined = models.DateTimeField(default=datetime.datetime.now)
karma = models.IntegerField()
# models for testing a null=True fk to a parent
class Team(models.Model):
name = models.CharField(max_length=100)
@python_2_unicode_compatible
class Player(models.Model):
team = models.ForeignKey(Team, models.SET_NULL, null=True)
name = models.CharField(max_length=100)
def __str__(self):
return self.name
# Models for testing custom ModelForm save methods in formsets and inline formsets
@python_2_unicode_compatible
class Poet(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Poem(models.Model):
poet = models.ForeignKey(Poet, models.CASCADE)
name = models.CharField(max_length=100)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Post(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField()
def __str__(self):
        return self.title
# Models for testing UUID primary keys
class UUIDPKParent(models.Model):
uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=255)
class UUIDPKChild(models.Model):
uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=255)
parent = models.ForeignKey(UUIDPKParent, models.CASCADE)
class ChildWithEditablePK(models.Model):
name = models.CharField(max_length=255, primary_key=True)
parent = models.ForeignKey(UUIDPKParent, models.CASCADE)
class AutoPKChildOfUUIDPKParent(models.Model):
name = models.CharField(max_length=255)
parent = models.ForeignKey(UUIDPKParent, models.CASCADE)
class AutoPKParent(models.Model):
name = models.CharField(max_length=255)
class UUIDPKChildOfAutoPKParent(models.Model):
uuid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=255)
parent = models.ForeignKey(AutoPKParent, models.CASCADE)
class ParentWithUUIDAlternateKey(models.Model):
uuid = models.UUIDField(unique=True, default=uuid.uuid4, editable=False)
name = models.CharField(max_length=50)
class ChildRelatedViaAK(models.Model):
name = models.CharField(max_length=255)
parent = models.ForeignKey(ParentWithUUIDAlternateKey, models.CASCADE, to_field='uuid')
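# Editorial sketch (not part of the original test models): the UUID-keyed
# parent/child pairs above are typically exercised with inline formsets, e.g.
#
#     from django.forms.models import inlineformset_factory
#     FormSet = inlineformset_factory(UUIDPKParent, UUIDPKChild, fields='__all__')
#     formset = FormSet(instance=UUIDPKParent.objects.create(name='parent'))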
| bsd-3-clause |
pombreda/syzygy | syzygy/build/app_verifier.py | 4 | 10601 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper for running a unittest under Application Verifier."""
import logging
import optparse
import os
import re
import subprocess
import sys
import verifier
_THIRD_PARTY = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', 'third_party'))
sys.path.append(_THIRD_PARTY)
import colorama
_LOGGER = logging.getLogger(os.path.basename(__file__))
# A list of per-test Application Verifier checks to not run.
_DISABLED_CHECKS = {
'agent_common_unittests.exe': [
# We have a test that deliberately causes an exception which is caught and
    # handled by the code under test. However, AV propagates this exception and
    # launches a modal dialog window, which causes the test to time out.
'Exceptions'
],
}
# A list of per-test Application Verifier exceptions.
_EXCEPTIONS = {
'basic_block_entry_unittests.exe': [
# This leak occurs due to a leaky global variable in ScopedHandle.
('Error', 'Leak', 2304, '.*::BasicBlockEntryTest::UnloadDll'),
# This leak occurs due to a leaky global lock in ScopedHandle.
('Error', 'Locks', 513, '.*::BasicBlockEntryTest::UnloadDll'),
# This is a known (semi-intentional) leak of the TLS index and the last
# active thread's TLS data on module unload.
('Error', 'TLS', 848, '.*::BasicBlockEntryTest::UnloadDll'),
],
'coverage_unittests.exe': [
# This leak occurs due to a leaky global variable in ScopedHandle.
('Error', 'Leak', 2304, '.*::CoverageClientTest::UnloadDll'),
# This leak occurs only in Debug, which leaks a thread local variable
# used to check thread restrictions.
('Error', 'TLS', 848, '.*::CoverageClientTest::UnloadDll'),
],
'instrument_unittests.exe': [
# The ASAN runtime ends up freeing a heap while holding it's critical
# section.
('Error', 'Locks', 513, '.*::PELibUnitTest::CheckTestDll'),
# This leak occurs due to a leaky global lock in ScopedHandle.
('Error', 'Locks', 514, '.*::PELibUnitTest::CheckTestDll'),
# This leak occurs only in Debug, which leaks a thread local variable
# used to check thread restrictions.
('Error', 'TLS', 848, '.*::PELibUnitTest::CheckTestDll'),
],
'parse_unittests.exe': [
# This leak occurs due to a leaky global variable in ScopedHandle.
('Error', 'Leak', 2304, '.*::ParseEngineRpcTest::UnloadCallTraceDll'),
# This leak occurs only in Debug, which leaks a thread local variable
# used to check thread restrictions.
('Error', 'TLS', 848, '.*::ParseEngineRpcTest::UnloadCallTraceDll'),
],
'profile_unittests.exe': [
# This leak occurs due to a leaky global variable in ScopedHandle.
('Error', 'Leak', 2304, '.*::ProfilerTest::UnloadDll'),
('Error', 'Leak', 2305, '.*::ProfilerTest::UnloadDll'),
# This leak occurs due to a leaky global lock in ScopedHandle.
('Error', 'Locks', 513, 'agent::profiler::.*::ProfilerTest::UnloadDll'),
# This leak occurs only in Debug, which leaks a thread local variable
# used to check thread restrictions.
('Error', 'TLS', 848, 'agent::profiler::.*::ProfilerTest::UnloadDll'),
],
'memprof_unittests.exe': [
# This leak occurs due to a leaky global variable in ScopedHandle.
('Error', 'Leak', 2304, '.*::MemoryProfilerTest::UnloadDll'),
# This leak occurs due to a leaky global lock in ScopedHandle.
('Error', 'Locks', 513, '.*::MemoryProfilerTest::UnloadDll'),
# This leak occurs only in Debug, which leaks a thread local variable
# used to check thread restrictions.
('Error', 'TLS', 848, '.*::MemoryProfilerTest::UnloadDll'),
],
}
# A list of unittests that should not be run under the application verifier at
# all.
_BLACK_LIST = [
# These can't be run under AppVerifier because we end up double hooking the
# operating system heap function, leading to nonsense.
'integration_tests.exe',
'syzyasan_rtl_unittests.exe',
]
class Error(Exception):
"""Base class used for exceptions thrown in this module."""
pass
def Colorize(text):
"""Colorizes the given app verifier output with ANSI color codes."""
fore = colorama.Fore
style = colorama.Style
def _ColorizeLine(line):
line = re.sub('^(Error.*:)(.*)',
style.BRIGHT + fore.RED + '\\1' + fore.YELLOW + '\\2' +
style.RESET_ALL,
line)
line = re.sub('^(Warning:)(.*)',
style.BRIGHT + fore.YELLOW + '\\1' + style.RESET_ALL + '\\2',
line)
return line
return '\n'.join([_ColorizeLine(line) for line in text.split('\n')])
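# Illustrative sketch (the sample input is hypothetical, not from this
# script): Colorize rewrites only lines beginning with 'Error...:' or
# 'Warning:', so
#   Colorize('Error (stop 2304): leak detected\nall clear')
# wraps the first line in bright red/yellow ANSI escapes and returns the
# second line unchanged.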
def FilterExceptions(image_name, errors):
"""Filter out the Application Verifier errors that have exceptions."""
exceptions = _EXCEPTIONS.get(image_name, [])
def _HasNoException(error):
# Iterate over all the exceptions.
for (severity, layer, stopcode, regexp) in exceptions:
# And see if they match, first by type.
if (error.severity == severity and
error.layer == layer and
error.stopcode == stopcode):
# And then by regexp match against the trace symbols.
for trace in error.trace:
if trace.symbol and re.match(regexp, trace.symbol):
return False
return True
filtered_errors = filter(_HasNoException, errors)
error_count = len(filtered_errors)
filtered_count = len(errors) - error_count
if error_count:
suffix = '' if error_count == 1 else 's'
filtered_errors.append(
'Error: Encountered %d AppVerifier exception%s for %s.' %
(error_count, suffix, image_name))
if filtered_count:
suffix1 = '' if filtered_count == 1 else 's'
suffix2 = '' if len(exceptions) == 1 else 's'
filtered_errors.append(
'Warning: Filtered %d AppVerifier exception%s for %s using %d rule%s.' %
(filtered_count, suffix1, image_name, len(exceptions), suffix2))
return (error_count, filtered_errors)
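# Hedged walkthrough (hypothetical error object): an exception rule is a
# (severity, layer, stopcode, regexp) tuple, so an error with severity
# 'Error', layer 'Leak', stopcode 2304 and a trace symbol matching
# '.*::BasicBlockEntryTest::UnloadDll' is filtered out for
# 'basic_block_entry_unittests.exe'; any error matching no rule is kept and
# counted in error_count.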
def _RunUnderAppVerifier(command):
runner = verifier.AppverifierTestRunner(False)
image_path = os.path.abspath(command[0])
image_name = os.path.basename(image_path)
disabled_checks = _DISABLED_CHECKS.get(image_name, [])
if not os.path.isfile(image_path):
raise Error('Path not found: %s' % image_path)
# Set up the verifier configuration.
runner.SetImageDefaults(image_name, disabled_checks=disabled_checks)
runner.ClearImageLogs(image_name)
# Run the executable. We disable exception catching as it interferes with
# Application Verifier.
command = [image_path] + command[1:] + ['--gtest_catch_exceptions=0']
_LOGGER.info('Running %s.', command)
popen = subprocess.Popen(command)
(dummy_stdout, dummy_stderr) = popen.communicate()
# Process the AppVerifier logs, filtering exceptions.
app_verifier_errors = runner.ProcessLogs(image_name)
(error_count, app_verifier_errors) = FilterExceptions(
image_name, app_verifier_errors)
# Generate warnings for error categories that were disabled.
for check in disabled_checks:
app_verifier_errors.append(
'Warning: Disabled AppVerifier %s checks.' % check)
# Output all warnings and errors.
for error in app_verifier_errors:
msg = Colorize(str(error) + '\n')
sys.stderr.write(msg)
# Clear the verifier settings for the image.
runner.ClearImageLogs(image_name)
runner.ResetImage(image_name)
if popen.returncode:
_LOGGER.error('%s failed with return code %d.', image_name,
popen.returncode)
if error_count:
suffix = '' if error_count == 1 else 's'
_LOGGER.error('%s failed AppVerifier test with %d exception%s.',
image_name, error_count, suffix)
if popen.returncode:
return popen.returncode
return error_count
def _RunNormally(command):
# We reset the image settings so that AppVerifier isn't left incidentally
# configured.
runner = verifier.AppverifierTestRunner(False)
image_path = os.path.abspath(command[0])
image_name = os.path.basename(image_path)
runner.ClearImageLogs(image_name)
runner.ResetImage(image_name)
command = [image_path] + command[1:]
_LOGGER.info('Running %s outside of AppVerifier.', command)
popen = subprocess.Popen(command)
(dummy_stdout, dummy_stderr) = popen.communicate()
# To be consistent with _RunUnderAppVerifier we output warnings at the end.
sys.stderr.write(Colorize(
'Warning: AppVerifier was disabled for this test.\n'))
return popen.returncode
_USAGE = '%prog [options] APPLICATION -- [application options]'
def _IsBlacklisted(command):
image_base = os.path.basename(command[0])
if image_base in _BLACK_LIST:
_LOGGER.info('Executable is blacklisted: %s.', image_base)
return True
return False
def _ParseArgs():
parser = optparse.OptionParser(usage=_USAGE)
parser.add_option('-v', '--verbose', dest='verbose',
action='store_true', default=False,
help='Enable verbose logging.')
parser.add_option('--on-waterfall', dest='on_waterfall',
action='store_true', default=False,
help='Indicate that we are running on the waterfall.')
(opts, args) = parser.parse_args()
if not len(args):
parser.error('You must specify an application.')
if opts.verbose:
logging.basicConfig(level=logging.INFO)
else:
logging.basicConfig(level=logging.ERROR)
return (opts, args)
def Main():
colorama.init()
(opts, args) = _ParseArgs()
if _IsBlacklisted(args):
return_code = _RunNormally(args)
else:
return_code = _RunUnderAppVerifier(args)
if return_code and opts.on_waterfall:
command = [args[0]] + ['--'] + args[1:]
command = 'python build\\app_verifier.py %s' % ' '.join(command)
sys.stderr.write('To reproduce this error locally run the following '
'command from the Syzygy root directory:\n')
sys.stderr.write(command + '\n')
sys.exit(return_code)
if __name__ == '__main__':
Main()
| apache-2.0 |
Eddy0402/Environment | vim/ycmd/ycmd/completers/cpp/clang_helpers.py | 38 | 1037 | #!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
def PrepareClangFlags( flags, filename ):
# When flags come from the compile_commands.json file, the first flag is
# usually the path to the compiler that should be invoked. We want to strip
# that.
if flags and not flags[ 0 ].startswith( '-' ):
return flags[ 1: ]
return flags
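# Illustrative example (assumed flag lists, not from a real
# compile_commands.json):
#   PrepareClangFlags(['/usr/bin/clang++', '-Wall', 'foo.cc'], 'foo.cc')
# returns ['-Wall', 'foo.cc'], while a list already starting with a '-'
# flag is returned unchanged.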
| gpl-3.0 |
heddle317/moto | tests/test_datapipeline/test_datapipeline.py | 3 | 5276 | from __future__ import unicode_literals
import boto.datapipeline
import sure # noqa
from moto import mock_datapipeline_deprecated
from moto.datapipeline.utils import remove_capitalization_of_dict_keys
def get_value_from_fields(key, fields):
for field in fields:
if field['key'] == key:
return field['stringValue']
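# Minimal usage sketch (hypothetical field data): given
#   fields = [{'key': '@pipelineState', 'stringValue': 'PENDING'}]
# get_value_from_fields('@pipelineState', fields) returns 'PENDING'; a key
# with no matching field falls through to an implicit None.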
@mock_datapipeline_deprecated
def test_create_pipeline():
conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"]
pipeline_descriptions = conn.describe_pipelines(
[pipeline_id])["pipelineDescriptionList"]
pipeline_descriptions.should.have.length_of(1)
pipeline_description = pipeline_descriptions[0]
pipeline_description['name'].should.equal("mypipeline")
pipeline_description["pipelineId"].should.equal(pipeline_id)
fields = pipeline_description['fields']
get_value_from_fields('@pipelineState', fields).should.equal("PENDING")
get_value_from_fields('uniqueId', fields).should.equal("some-unique-id")
PIPELINE_OBJECTS = [
{
"id": "Default",
"name": "Default",
"fields": [{
"key": "workerGroup",
"stringValue": "workerGroup"
}]
},
{
"id": "Schedule",
"name": "Schedule",
"fields": [{
"key": "startDateTime",
"stringValue": "2012-12-12T00:00:00"
}, {
"key": "type",
"stringValue": "Schedule"
}, {
"key": "period",
"stringValue": "1 hour"
}, {
"key": "endDateTime",
"stringValue": "2012-12-21T18:00:00"
}]
},
{
"id": "SayHello",
"name": "SayHello",
"fields": [{
"key": "type",
"stringValue": "ShellCommandActivity"
}, {
"key": "command",
"stringValue": "echo hello"
}, {
"key": "parent",
"refValue": "Default"
}, {
"key": "schedule",
"refValue": "Schedule"
}]
}
]
@mock_datapipeline_deprecated
def test_creating_pipeline_definition():
conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"]
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
pipeline_definition = conn.get_pipeline_definition(pipeline_id)
pipeline_definition['pipelineObjects'].should.have.length_of(3)
default_object = pipeline_definition['pipelineObjects'][0]
default_object['name'].should.equal("Default")
default_object['id'].should.equal("Default")
default_object['fields'].should.equal([{
"key": "workerGroup",
"stringValue": "workerGroup"
}])
@mock_datapipeline_deprecated
def test_describing_pipeline_objects():
conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"]
conn.put_pipeline_definition(PIPELINE_OBJECTS, pipeline_id)
objects = conn.describe_objects(["Schedule", "Default"], pipeline_id)[
'pipelineObjects']
objects.should.have.length_of(2)
default_object = [x for x in objects if x['id'] == 'Default'][0]
default_object['name'].should.equal("Default")
default_object['fields'].should.equal([{
"key": "workerGroup",
"stringValue": "workerGroup"
}])
@mock_datapipeline_deprecated
def test_activate_pipeline():
conn = boto.datapipeline.connect_to_region("us-west-2")
res = conn.create_pipeline("mypipeline", "some-unique-id")
pipeline_id = res["pipelineId"]
conn.activate_pipeline(pipeline_id)
pipeline_descriptions = conn.describe_pipelines(
[pipeline_id])["pipelineDescriptionList"]
pipeline_descriptions.should.have.length_of(1)
pipeline_description = pipeline_descriptions[0]
fields = pipeline_description['fields']
get_value_from_fields('@pipelineState', fields).should.equal("SCHEDULED")
@mock_datapipeline_deprecated
def test_listing_pipelines():
conn = boto.datapipeline.connect_to_region("us-west-2")
res1 = conn.create_pipeline("mypipeline1", "some-unique-id1")
res2 = conn.create_pipeline("mypipeline2", "some-unique-id2")
response = conn.list_pipelines()
response["hasMoreResults"].should.be(False)
response["marker"].should.be.none
response["pipelineIdList"].should.have.length_of(2)
response["pipelineIdList"].should.contain({
"id": res1["pipelineId"],
"name": "mypipeline1",
})
response["pipelineIdList"].should.contain({
"id": res2["pipelineId"],
"name": "mypipeline2"
})
# testing a helper function
def test_remove_capitalization_of_dict_keys():
result = remove_capitalization_of_dict_keys(
{
"Id": "IdValue",
"Fields": [{
"Key": "KeyValue",
"StringValue": "StringValueValue"
}]
}
)
result.should.equal({
"id": "IdValue",
"fields": [{
"key": "KeyValue",
"stringValue": "StringValueValue"
}],
})
| apache-2.0 |
HBehrens/feedsanitizer | django/views/generic/base.py | 5 | 5448 | from django import http
from django.core.exceptions import ImproperlyConfigured
from django.template import RequestContext, loader
from django.template.response import TemplateResponse
from django.utils.functional import update_wrapper
from django.utils.log import getLogger
from django.utils.decorators import classonlymethod
logger = getLogger('django.request')
class View(object):
"""
Intentionally simple parent class for all views. Only implements
dispatch-by-method and simple sanity checking.
"""
http_method_names = ['get', 'post', 'put', 'delete', 'head', 'options', 'trace']
def __init__(self, **kwargs):
"""
Constructor. Called in the URLconf; can contain helpful extra
keyword arguments, and other things.
"""
# Go through keyword arguments, and either save their values to our
# instance, or raise an error.
for key, value in kwargs.iteritems():
setattr(self, key, value)
@classonlymethod
def as_view(cls, **initkwargs):
"""
Main entry point for a request-response process.
"""
# sanitize keyword arguments
for key in initkwargs:
if key in cls.http_method_names:
raise TypeError(u"You tried to pass in the %s method name as a "
u"keyword argument to %s(). Don't do that."
% (key, cls.__name__))
if not hasattr(cls, key):
raise TypeError(u"%s() received an invalid keyword %r" % (
cls.__name__, key))
def view(request, *args, **kwargs):
self = cls(**initkwargs)
return self.dispatch(request, *args, **kwargs)
# take name and docstring from class
update_wrapper(view, cls, updated=())
# and possible attributes set by decorators
# like csrf_exempt from dispatch
update_wrapper(view, cls.dispatch, assigned=())
return view
def dispatch(self, request, *args, **kwargs):
# Try to dispatch to the right method; if a method doesn't exist,
# defer to the error handler. Also defer to the error handler if the
# request method isn't on the approved list.
if request.method.lower() in self.http_method_names:
handler = getattr(self, request.method.lower(), self.http_method_not_allowed)
else:
handler = self.http_method_not_allowed
self.request = request
self.args = args
self.kwargs = kwargs
return handler(request, *args, **kwargs)
def http_method_not_allowed(self, request, *args, **kwargs):
allowed_methods = [m for m in self.http_method_names if hasattr(self, m)]
logger.warning('Method Not Allowed (%s): %s' % (request.method, request.path),
extra={
'status_code': 405,
'request': self.request
}
)
return http.HttpResponseNotAllowed(allowed_methods)
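# Usage sketch (hypothetical subclass and URLconf, not part of this module):
# a subclass implements one method per HTTP verb and is wired up through
# as_view(), e.g.
#
#   class PingView(View):
#       def get(self, request, *args, **kwargs):
#           return http.HttpResponse('pong')
#
#   # urls.py: url(r'^ping/$', PingView.as_view())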
class TemplateResponseMixin(object):
"""
A mixin that can be used to render a template.
"""
template_name = None
response_class = TemplateResponse
def render_to_response(self, context, **response_kwargs):
"""
Returns a response with a template rendered with the given context.
"""
return self.response_class(
request = self.request,
template = self.get_template_names(),
context = context,
**response_kwargs
)
def get_template_names(self):
"""
Returns a list of template names to be used for the request. Must return
a list. May not be called if render_to_response is overridden.
"""
if self.template_name is None:
return []
else:
return [self.template_name]
class TemplateView(TemplateResponseMixin, View):
"""
A view that renders a template.
"""
def get_context_data(self, **kwargs):
return {
'params': kwargs
}
def get(self, request, *args, **kwargs):
context = self.get_context_data(**kwargs)
return self.render_to_response(context)
class RedirectView(View):
"""
A view that provides a redirect on any GET request.
"""
permanent = True
url = None
query_string = False
def get_redirect_url(self, **kwargs):
"""
Return the URL redirect to. Keyword arguments from the
URL pattern match generating the redirect request
are provided as kwargs to this method.
"""
if self.url:
args = self.request.META["QUERY_STRING"]
if args and self.query_string:
url = "%s?%s" % (self.url, args)
else:
url = self.url
return url % kwargs
else:
return None
def get(self, request, *args, **kwargs):
url = self.get_redirect_url(**kwargs)
if url:
if self.permanent:
return http.HttpResponsePermanentRedirect(url)
else:
return http.HttpResponseRedirect(url)
else:
logger.warning('Gone: %s' % self.request.path,
extra={
'status_code': 410,
'request': self.request
})
return http.HttpResponseGone()
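# Usage sketch (hypothetical URLconf entry): get_redirect_url interpolates
# the URL kwargs into `url`, so
#   url(r'^go/(?P<id>\d+)/$', RedirectView.as_view(url='/items/%(id)s/'))
# answers with a 301 to /items/<id>/ (permanent defaults to True), and with
# 410 Gone when no url is configured.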
| mit |
matrix-org/synapse | synapse/rest/client/v2_alpha/sendtodevice.py | 1 | 2257 | # Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import Tuple
from synapse.http import servlet
from synapse.http.servlet import assert_params_in_dict, parse_json_object_from_request
from synapse.logging.opentracing import set_tag, trace
from synapse.rest.client.transactions import HttpTransactionCache
from ._base import client_patterns
logger = logging.getLogger(__name__)
class SendToDeviceRestServlet(servlet.RestServlet):
PATTERNS = client_patterns(
"/sendToDevice/(?P<message_type>[^/]*)/(?P<txn_id>[^/]*)$"
)
def __init__(self, hs):
"""
Args:
hs (synapse.server.HomeServer): server
"""
super().__init__()
self.hs = hs
self.auth = hs.get_auth()
self.txns = HttpTransactionCache(hs)
self.device_message_handler = hs.get_device_message_handler()
@trace(opname="sendToDevice")
def on_PUT(self, request, message_type, txn_id):
set_tag("message_type", message_type)
set_tag("txn_id", txn_id)
return self.txns.fetch_or_execute_request(
request, self._put, request, message_type, txn_id
)
async def _put(self, request, message_type, txn_id):
requester = await self.auth.get_user_by_req(request, allow_guest=True)
content = parse_json_object_from_request(request)
assert_params_in_dict(content, ("messages",))
await self.device_message_handler.send_device_message(
requester, message_type, content["messages"]
)
response = (200, {}) # type: Tuple[int, dict]
return response
def register_servlets(hs, http_server):
SendToDeviceRestServlet(hs).register(http_server)
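# Usage sketch (hypothetical request, not part of this module): a client PUTs
# to a path like /_matrix/client/r0/sendToDevice/m.example.event/<txn_id>
# with a body such as
#   {"messages": {"@alice:example.org": {"DEVICEID": {"hello": 1}}}}
# and HttpTransactionCache replays the cached (200, {}) response if the same
# txn_id is retried.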
| apache-2.0 |
michaelsmit/openparliament | parliament/hansards/views.py | 1 | 10309 | import datetime
from urllib import urlencode
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.core import urlresolvers
from django.http import HttpResponse, Http404, HttpResponsePermanentRedirect, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.template import loader, RequestContext
from django.views.generic.dates import (ArchiveIndexView, YearArchiveView, MonthArchiveView)
from django.views.decorators.vary import vary_on_headers
from parliament.committees.models import CommitteeMeeting
from parliament.core.api import ModelDetailView, ModelListView, APIFilters
from parliament.hansards.models import Document, Statement
def _get_hansard(year, month, day):
return get_object_or_404(Document.debates,
date=datetime.date(int(year), int(month), int(day)))
class HansardView(ModelDetailView):
resource_name = 'House debate'
def get_object(self, request, **kwargs):
return _get_hansard(**kwargs)
def get_html(self, request, **kwargs):
return document_view(request, _get_hansard(**kwargs))
def get_related_resources(self, request, obj, result):
return {
'speeches_url': urlresolvers.reverse('speeches') + '?' +
urlencode({'document': result['url']}),
'debates_url': urlresolvers.reverse('debates')
}
hansard = HansardView.as_view()
class HansardStatementView(ModelDetailView):
resource_name = 'Speech (House debate)'
def get_object(self, request, year, month, day, slug):
date = datetime.date(int(year), int(month), int(day))
return Statement.objects.get(
document__document_type='D',
document__date=date,
slug=slug
)
def get_related_resources(self, request, qs, result):
return {
'document_speeches_url': urlresolvers.reverse('speeches') + '?' +
urlencode({'document': result['document_url']}),
}
def get_html(self, request, year, month, day, slug):
return document_view(request, _get_hansard(year, month, day), slug=slug)
hansard_statement = HansardStatementView.as_view()
def document_redirect(request, document_id, slug=None):
try:
document = Document.objects.select_related(
'committeemeeting', 'committeemeeting__committee').get(
pk=document_id)
except Document.DoesNotExist:
raise Http404
url = document.get_absolute_url()
if slug:
url += "%s/" % slug
return HttpResponsePermanentRedirect(url)
@vary_on_headers('X-Requested-With')
def document_view(request, document, meeting=None, slug=None):
per_page = 15
if 'singlepage' in request.GET:
per_page = 50000
statement_qs = Statement.objects.filter(document=document)\
.select_related('member__politician', 'member__riding', 'member__party')
paginator = Paginator(statement_qs, per_page)
highlight_statement = None
try:
if slug is not None and 'page' not in request.GET:
if slug.isdigit():
highlight_statement = int(slug)
else:
highlight_statement = statement_qs.filter(slug=slug).values_list('sequence', flat=True)[0]
page = int(highlight_statement/per_page) + 1
else:
page = int(request.GET.get('page', '1'))
except (ValueError, IndexError):
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
statements = paginator.page(page)
except (EmptyPage, InvalidPage):
statements = paginator.page(paginator.num_pages)
if highlight_statement is not None:
try:
highlight_statement = filter(
lambda s: s.sequence == highlight_statement, statements.object_list)[0]
except IndexError:
raise Http404
if request.is_ajax():
t = loader.get_template("hansards/statement_page.inc")
else:
if document.document_type == Document.DEBATE:
t = loader.get_template("hansards/hansard_detail.html")
elif document.document_type == Document.EVIDENCE:
t = loader.get_template("committees/meeting_evidence.html")
ctx = {
'document': document,
'page': statements,
'highlight_statement': highlight_statement,
'singlepage': 'singlepage' in request.GET,
}
if document.document_type == Document.DEBATE:
ctx.update({
'hansard': document,
'pagination_url': document.get_absolute_url(),
})
elif document.document_type == Document.EVIDENCE:
ctx.update({
'meeting': meeting,
'committee': meeting.committee,
'pagination_url': meeting.get_absolute_url(),
})
return HttpResponse(t.render(RequestContext(request, ctx)))
class SpeechesView(ModelListView):
def document_filter(qs, view, filter_name, filter_extra, val):
u = val.rstrip('/').split('/')
if u[-4] == 'debates':
# /debates/2013/2/15/
date = datetime.date(int(u[-3]), int(u[-2]), int(u[-1]))
return qs.filter(
document__document_type='D',
document__date=date
).order_by('sequence')
elif u[-4] == 'committees':
# /commmittees/national-defence/41-1/63/
meeting = CommitteeMeeting.objects.get(
committee__slug=u[-3], session=u[-2], number=u[-1])
return qs.filter(document=meeting.evidence_id).order_by('sequence')
document_filter.help = "the URL of the debate or committee meeting"
filters = {
'procedural': APIFilters.dbfield(help="is this a short, routine procedural speech? True or False"),
'document': document_filter,
'politician': APIFilters.politician(),
'politician_membership': APIFilters.fkey(lambda u: {'member': u[-1]}),
'time': APIFilters.dbfield(filter_types=APIFilters.numeric_filters,
help="e.g. time__range=2012-10-19 10:00,2012-10-19 11:00"),
'mentioned_politician': APIFilters.politician('mentioned_politicians'),
'mentioned_bill': APIFilters.fkey(lambda u: {
'bills__billinsession__session': u[-2],
'bills__number': u[-1]
}, help="e.g. /bills/41-1/C-14/")
}
resource_name = 'Speeches'
def get_qs(self, request):
qs = Statement.objects.all().prefetch_related('politician')
if 'document' not in request.GET:
qs = qs.order_by('-time')
return qs
speeches = SpeechesView.as_view()
class DebatePermalinkView(ModelDetailView):
def _get_objs(self, request, slug, year, month, day):
doc = _get_hansard(year, month, day)
if slug.isdigit():
statement = get_object_or_404(Statement, document=doc, sequence=slug)
else:
statement = get_object_or_404(Statement, document=doc, slug=slug)
return doc, statement
def get_json(self, request, **kwargs):
url = self._get_objs(request, **kwargs)[1].get_absolute_url()
return HttpResponseRedirect(url + '?' + request.GET.urlencode())
def get_html(self, request, **kwargs):
doc, statement = self._get_objs(request, **kwargs)
return statement_permalink(request, doc, statement, "hansards/statement_permalink.html",
hansard=doc)
debate_permalink = DebatePermalinkView.as_view()
def statement_permalink(request, doc, statement, template, **kwargs):
"""A page displaying only a single statement. Used as a non-JS permalink."""
if statement.politician:
who = statement.politician.name
else:
who = statement.who
title = who
if statement.topic:
title += u' on %s' % statement.topic
elif 'committee' in kwargs:
title += u' at the ' + kwargs['committee'].title
t = loader.get_template(template)
ctx = {
'title': title,
'who': who,
'page': {'object_list': [statement]},
'doc': doc,
'statement': statement,
'statements_full_date': True,
'statement_url': statement.get_absolute_url(),
#'statements_context_link': True
}
ctx.update(kwargs)
return HttpResponse(t.render(RequestContext(request, ctx)))
def document_cache(request, document_id, language):
document = get_object_or_404(Document, pk=document_id)
xmlfile = document.get_cached_xml(language)
resp = HttpResponse(xmlfile.read(), content_type="text/xml")
xmlfile.close()
return resp
class TitleAdder(object):
def get_context_data(self, **kwargs):
context = super(TitleAdder, self).get_context_data(**kwargs)
context.update(title=self.page_title)
return context
class APIArchiveView(ModelListView):
resource_name = 'House debates'
filters = {
'session': APIFilters.dbfield(help='e.g. 41-1'),
'date': APIFilters.dbfield(help='e.g. date__range=2010-01-01,2010-09-01'),
'number': APIFilters.dbfield(help='each Hansard in a session is given a sequential #'),
}
def get_html(self, request, **kwargs):
return self.get(request, **kwargs)
def get_qs(self, request, **kwargs):
return self.get_dated_items()[1]
class DebateIndexView(TitleAdder, ArchiveIndexView, APIArchiveView):
queryset = Document.debates.all()
date_field = 'date'
template_name = "hansards/hansard_archive.html"
page_title = 'The Debates of the House of Commons'
index = DebateIndexView.as_view()
class DebateYearArchive(TitleAdder, YearArchiveView, APIArchiveView):
queryset = Document.debates.all().order_by('date')
date_field = 'date'
make_object_list = True
template_name = "hansards/hansard_archive_year.html"
page_title = lambda self: 'Debates from %s' % self.get_year()
by_year = DebateYearArchive.as_view()
class DebateMonthArchive(TitleAdder, MonthArchiveView, APIArchiveView):
queryset = Document.debates.all().order_by('date')
date_field = 'date'
make_object_list = True
month_format = "%m"
template_name = "hansards/hansard_archive_year.html"
page_title = lambda self: 'Debates from %s' % self.get_year()
by_month = DebateMonthArchive.as_view() | agpl-3.0 |
lyceel/engine | third_party/libevent/event_rpcgen.py | 296 | 45502 | #!/usr/bin/env python
#
# Copyright (c) 2005 Niels Provos <[email protected]>
# All rights reserved.
#
# Generates marshaling code based on libevent.
import sys
import re
#
_NAME = "event_rpcgen.py"
_VERSION = "0.1"
_STRUCT_RE = '[a-z][a-z_0-9]*'
# Globals
line_count = 0
white = re.compile(r'^\s+')
cppcomment = re.compile(r'\/\/.*$')
headerdirect = []
cppdirect = []
# Holds everything that makes a struct
class Struct:
def __init__(self, name):
self._name = name
self._entries = []
self._tags = {}
print >>sys.stderr, ' Created struct: %s' % name
def AddEntry(self, entry):
if self._tags.has_key(entry.Tag()):
print >>sys.stderr, ( 'Entry "%s" duplicates tag number '
'%d from "%s" around line %d' ) % (
entry.Name(), entry.Tag(),
self._tags[entry.Tag()], line_count)
sys.exit(1)
self._entries.append(entry)
self._tags[entry.Tag()] = entry.Name()
print >>sys.stderr, ' Added entry: %s' % entry.Name()
def Name(self):
return self._name
def EntryTagName(self, entry):
"""Creates the name inside an enumeration for distinguishing data
types."""
name = "%s_%s" % (self._name, entry.Name())
return name.upper()
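# Illustrative example (hypothetical struct): for a struct named 'msg' with
# an entry 'run_count', EntryTagName yields 'MSG_RUN_COUNT', the constant
# that PrintTags emits into the generated tag enumeration.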
def PrintIdented(self, file, ident, code):
"""Takes an array, add indentation to each entry and prints it."""
for entry in code:
print >>file, '%s%s' % (ident, entry)
def PrintTags(self, file):
"""Prints the tag definitions for a structure."""
print >>file, '/* Tag definition for %s */' % self._name
print >>file, 'enum %s_ {' % self._name.lower()
for entry in self._entries:
print >>file, ' %s=%d,' % (self.EntryTagName(entry),
entry.Tag())
print >>file, ' %s_MAX_TAGS' % (self._name.upper())
print >>file, '};\n'
def PrintForwardDeclaration(self, file):
print >>file, 'struct %s;' % self._name
def PrintDeclaration(self, file):
print >>file, '/* Structure declaration for %s */' % self._name
print >>file, 'struct %s_access_ {' % self._name
for entry in self._entries:
dcl = entry.AssignDeclaration('(*%s_assign)' % entry.Name())
dcl.extend(
entry.GetDeclaration('(*%s_get)' % entry.Name()))
if entry.Array():
dcl.extend(
entry.AddDeclaration('(*%s_add)' % entry.Name()))
self.PrintIdented(file, ' ', dcl)
print >>file, '};\n'
print >>file, 'struct %s {' % self._name
print >>file, ' struct %s_access_ *base;\n' % self._name
for entry in self._entries:
dcl = entry.Declaration()
self.PrintIdented(file, ' ', dcl)
print >>file, ''
for entry in self._entries:
print >>file, ' ev_uint8_t %s_set;' % entry.Name()
print >>file, '};\n'
print >>file, \
"""struct %(name)s *%(name)s_new(void);
void %(name)s_free(struct %(name)s *);
void %(name)s_clear(struct %(name)s *);
void %(name)s_marshal(struct evbuffer *, const struct %(name)s *);
int %(name)s_unmarshal(struct %(name)s *, struct evbuffer *);
int %(name)s_complete(struct %(name)s *);
void evtag_marshal_%(name)s(struct evbuffer *, ev_uint32_t,
const struct %(name)s *);
int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t,
struct %(name)s *);""" % { 'name' : self._name }
# Write a setting function of every variable
for entry in self._entries:
self.PrintIdented(file, '', entry.AssignDeclaration(
entry.AssignFuncName()))
self.PrintIdented(file, '', entry.GetDeclaration(
entry.GetFuncName()))
if entry.Array():
self.PrintIdented(file, '', entry.AddDeclaration(
entry.AddFuncName()))
print >>file, '/* --- %s done --- */\n' % self._name
def PrintCode(self, file):
print >>file, ('/*\n'
' * Implementation of %s\n'
' */\n') % self._name
print >>file, \
'static struct %(name)s_access_ __%(name)s_base = {' % \
{ 'name' : self._name }
for entry in self._entries:
self.PrintIdented(file, ' ', entry.CodeBase())
print >>file, '};\n'
# Creation
print >>file, (
'struct %(name)s *\n'
'%(name)s_new(void)\n'
'{\n'
' struct %(name)s *tmp;\n'
' if ((tmp = malloc(sizeof(struct %(name)s))) == NULL) {\n'
' event_warn("%%s: malloc", __func__);\n'
' return (NULL);\n'
' }\n'
' tmp->base = &__%(name)s_base;\n') % { 'name' : self._name }
for entry in self._entries:
self.PrintIdented(file, ' ', entry.CodeNew('tmp'))
print >>file, ' tmp->%s_set = 0;\n' % entry.Name()
print >>file, (
' return (tmp);\n'
'}\n')
# Adding
for entry in self._entries:
if entry.Array():
self.PrintIdented(file, '', entry.CodeAdd())
print >>file, ''
# Assigning
for entry in self._entries:
self.PrintIdented(file, '', entry.CodeAssign())
print >>file, ''
# Getting
for entry in self._entries:
self.PrintIdented(file, '', entry.CodeGet())
print >>file, ''
# Clearing
print >>file, ( 'void\n'
'%(name)s_clear(struct %(name)s *tmp)\n'
'{'
) % { 'name' : self._name }
for entry in self._entries:
self.PrintIdented(file, ' ', entry.CodeClear('tmp'))
print >>file, '}\n'
# Freeing
print >>file, ( 'void\n'
'%(name)s_free(struct %(name)s *tmp)\n'
'{'
) % { 'name' : self._name }
for entry in self._entries:
self.PrintIdented(file, ' ', entry.CodeFree('tmp'))
print >>file, (' free(tmp);\n'
'}\n')
# Marshaling
print >>file, ('void\n'
'%(name)s_marshal(struct evbuffer *evbuf, '
'const struct %(name)s *tmp)'
'{') % { 'name' : self._name }
for entry in self._entries:
indent = ' '
# Optional entries do not have to be set
if entry.Optional():
indent += ' '
print >>file, ' if (tmp->%s_set) {' % entry.Name()
self.PrintIdented(
file, indent,
entry.CodeMarshal('evbuf', self.EntryTagName(entry), 'tmp'))
if entry.Optional():
print >>file, ' }'
print >>file, '}\n'
# Unmarshaling
print >>file, ('int\n'
'%(name)s_unmarshal(struct %(name)s *tmp, '
' struct evbuffer *evbuf)\n'
'{\n'
' ev_uint32_t tag;\n'
' while (EVBUFFER_LENGTH(evbuf) > 0) {\n'
' if (evtag_peek(evbuf, &tag) == -1)\n'
' return (-1);\n'
' switch (tag) {\n'
) % { 'name' : self._name }
for entry in self._entries:
print >>file, ' case %s:\n' % self.EntryTagName(entry)
if not entry.Array():
print >>file, (
' if (tmp->%s_set)\n'
' return (-1);'
) % (entry.Name())
self.PrintIdented(
file, ' ',
entry.CodeUnmarshal('evbuf',
self.EntryTagName(entry), 'tmp'))
print >>file, ( ' tmp->%s_set = 1;\n' % entry.Name() +
' break;\n' )
print >>file, ( ' default:\n'
' return -1;\n'
' }\n'
' }\n' )
# Check if it was decoded completely
print >>file, ( ' if (%(name)s_complete(tmp) == -1)\n'
' return (-1);'
) % { 'name' : self._name }
# Successfully decoded
print >>file, ( ' return (0);\n'
'}\n')
# Checking if a structure has all the required data
print >>file, (
'int\n'
'%(name)s_complete(struct %(name)s *msg)\n'
'{' ) % { 'name' : self._name }
for entry in self._entries:
self.PrintIdented(
file, ' ',
entry.CodeComplete('msg'))
print >>file, (
' return (0);\n'
'}\n' )
# Complete message unmarshaling
print >>file, (
'int\n'
'evtag_unmarshal_%(name)s(struct evbuffer *evbuf, '
'ev_uint32_t need_tag, struct %(name)s *msg)\n'
'{\n'
' ev_uint32_t tag;\n'
' int res = -1;\n'
'\n'
' struct evbuffer *tmp = evbuffer_new();\n'
'\n'
' if (evtag_unmarshal(evbuf, &tag, tmp) == -1'
' || tag != need_tag)\n'
' goto error;\n'
'\n'
' if (%(name)s_unmarshal(msg, tmp) == -1)\n'
' goto error;\n'
'\n'
' res = 0;\n'
'\n'
' error:\n'
' evbuffer_free(tmp);\n'
' return (res);\n'
'}\n' ) % { 'name' : self._name }
# Complete message marshaling
print >>file, (
'void\n'
'evtag_marshal_%(name)s(struct evbuffer *evbuf, ev_uint32_t tag, '
'const struct %(name)s *msg)\n'
'{\n'
' struct evbuffer *_buf = evbuffer_new();\n'
' assert(_buf != NULL);\n'
' evbuffer_drain(_buf, -1);\n'
' %(name)s_marshal(_buf, msg);\n'
' evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), '
'EVBUFFER_LENGTH(_buf));\n'
' evbuffer_free(_buf);\n'
'}\n' ) % { 'name' : self._name }
class Entry:
def __init__(self, type, name, tag):
self._type = type
self._name = name
self._tag = int(tag)
self._ctype = type
self._optional = 0
self._can_be_array = 0
self._array = 0
self._line_count = -1
self._struct = None
self._refname = None
def GetTranslation(self):
return { "parent_name" : self._struct.Name(),
"name" : self._name,
"ctype" : self._ctype,
"refname" : self._refname
}
def SetStruct(self, struct):
self._struct = struct
def LineCount(self):
assert self._line_count != -1
return self._line_count
def SetLineCount(self, number):
self._line_count = number
def Array(self):
return self._array
def Optional(self):
return self._optional
def Tag(self):
return self._tag
def Name(self):
return self._name
def Type(self):
return self._type
def MakeArray(self, yes=1):
self._array = yes
def MakeOptional(self):
self._optional = 1
def GetFuncName(self):
return '%s_%s_get' % (self._struct.Name(), self._name)
def GetDeclaration(self, funcname):
code = [ 'int %s(struct %s *, %s *);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def CodeGet(self):
code = (
'int',
'%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, '
'%(ctype)s *value)',
'{',
' if (msg->%(name)s_set != 1)',
' return (-1);',
' *value = msg->%(name)s_data;',
' return (0);',
'}' )
code = '\n'.join(code)
code = code % self.GetTranslation()
return code.split('\n')
def AssignFuncName(self):
return '%s_%s_assign' % (self._struct.Name(), self._name)
def AddFuncName(self):
return '%s_%s_add' % (self._struct.Name(), self._name)
def AssignDeclaration(self, funcname):
code = [ 'int %s(struct %s *, const %s);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def CodeAssign(self):
code = [ 'int',
'%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,'
' const %(ctype)s value)',
'{',
' msg->%(name)s_set = 1;',
' msg->%(name)s_data = value;',
' return (0);',
'}' ]
code = '\n'.join(code)
code = code % self.GetTranslation()
return code.split('\n')
def CodeClear(self, structname):
code = [ '%s->%s_set = 0;' % (structname, self.Name()) ]
return code
def CodeComplete(self, structname):
if self.Optional():
return []
code = [ 'if (!%s->%s_set)' % (structname, self.Name()),
' return (-1);' ]
return code
def CodeFree(self, name):
return []
def CodeBase(self):
code = [
'%(parent_name)s_%(name)s_assign,',
'%(parent_name)s_%(name)s_get,'
]
if self.Array():
code.append('%(parent_name)s_%(name)s_add,')
code = '\n'.join(code)
code = code % self.GetTranslation()
return code.split('\n')
def Verify(self):
if self.Array() and not self._can_be_array:
print >>sys.stderr, (
'Entry "%s" cannot be created as an array '
'around line %d' ) % (self._name, self.LineCount())
sys.exit(1)
if not self._struct:
print >>sys.stderr, (
'Entry "%s" does not know which struct it belongs to '
'around line %d' ) % (self._name, self.LineCount())
sys.exit(1)
if self._optional and self._array:
print >>sys.stderr, ( 'Entry "%s" has illegal combination of '
'optional and array around line %d' ) % (
self._name, self.LineCount() )
sys.exit(1)
class EntryBytes(Entry):
def __init__(self, type, name, tag, length):
# Init base class
Entry.__init__(self, type, name, tag)
self._length = length
self._ctype = 'ev_uint8_t'
def GetDeclaration(self, funcname):
code = [ 'int %s(struct %s *, %s **);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def AssignDeclaration(self, funcname):
code = [ 'int %s(struct %s *, const %s *);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def Declaration(self):
dcl = ['ev_uint8_t %s_data[%s];' % (self._name, self._length)]
return dcl
def CodeGet(self):
name = self._name
code = [ 'int',
'%s_%s_get(struct %s *msg, %s **value)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' if (msg->%s_set != 1)' % name,
' return (-1);',
' *value = msg->%s_data;' % name,
' return (0);',
'}' ]
return code
def CodeAssign(self):
name = self._name
code = [ 'int',
'%s_%s_assign(struct %s *msg, const %s *value)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' msg->%s_set = 1;' % name,
' memcpy(msg->%s_data, value, %s);' % (
name, self._length),
' return (0);',
'}' ]
return code
def CodeUnmarshal(self, buf, tag_name, var_name):
code = [ 'if (evtag_unmarshal_fixed(%s, %s, ' % (buf, tag_name) +
'%s->%s_data, ' % (var_name, self._name) +
'sizeof(%s->%s_data)) == -1) {' % (
var_name, self._name),
' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
self._name ),
' return (-1);',
'}'
]
return code
def CodeMarshal(self, buf, tag_name, var_name):
code = ['evtag_marshal(%s, %s, %s->%s_data, sizeof(%s->%s_data));' % (
buf, tag_name, var_name, self._name, var_name, self._name )]
return code
def CodeClear(self, structname):
code = [ '%s->%s_set = 0;' % (structname, self.Name()),
'memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
structname, self._name, structname, self._name)]
return code
def CodeNew(self, name):
code = ['memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
name, self._name, name, self._name)]
return code
def Verify(self):
if not self._length:
print >>sys.stderr, 'Entry "%s" needs a length around line %d' % (
self._name, self.LineCount() )
sys.exit(1)
Entry.Verify(self)
class EntryInt(Entry):
def __init__(self, type, name, tag):
# Init base class
Entry.__init__(self, type, name, tag)
self._ctype = 'ev_uint32_t'
def CodeUnmarshal(self, buf, tag_name, var_name):
code = ['if (evtag_unmarshal_int(%s, %s, &%s->%s_data) == -1) {' % (
buf, tag_name, var_name, self._name),
' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
self._name ),
' return (-1);',
'}' ]
return code
def CodeMarshal(self, buf, tag_name, var_name):
code = ['evtag_marshal_int(%s, %s, %s->%s_data);' % (
buf, tag_name, var_name, self._name)]
return code
def Declaration(self):
dcl = ['ev_uint32_t %s_data;' % self._name]
return dcl
def CodeNew(self, name):
code = ['%s->%s_data = 0;' % (name, self._name)]
return code
class EntryString(Entry):
def __init__(self, type, name, tag):
# Init base class
Entry.__init__(self, type, name, tag)
self._ctype = 'char *'
def CodeAssign(self):
name = self._name
code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,
const %(ctype)s value)
{
if (msg->%(name)s_data != NULL)
free(msg->%(name)s_data);
if ((msg->%(name)s_data = strdup(value)) == NULL)
return (-1);
msg->%(name)s_set = 1;
return (0);
}""" % self.GetTranslation()
return code.split('\n')
def CodeUnmarshal(self, buf, tag_name, var_name):
code = ['if (evtag_unmarshal_string(%s, %s, &%s->%s_data) == -1) {' % (
buf, tag_name, var_name, self._name),
' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
self._name ),
' return (-1);',
'}'
]
return code
def CodeMarshal(self, buf, tag_name, var_name):
code = ['evtag_marshal_string(%s, %s, %s->%s_data);' % (
buf, tag_name, var_name, self._name)]
return code
def CodeClear(self, structname):
code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
' free (%s->%s_data);' % (structname, self.Name()),
' %s->%s_data = NULL;' % (structname, self.Name()),
' %s->%s_set = 0;' % (structname, self.Name()),
'}'
]
return code
def CodeNew(self, name):
code = ['%s->%s_data = NULL;' % (name, self._name)]
return code
def CodeFree(self, name):
code = ['if (%s->%s_data != NULL)' % (name, self._name),
' free (%s->%s_data); ' % (name, self._name)]
return code
def Declaration(self):
dcl = ['char *%s_data;' % self._name]
return dcl
class EntryStruct(Entry):
def __init__(self, type, name, tag, refname):
# Init base class
Entry.__init__(self, type, name, tag)
self._can_be_array = 1
self._refname = refname
self._ctype = 'struct %s*' % refname
def CodeGet(self):
name = self._name
code = [ 'int',
'%s_%s_get(struct %s *msg, %s *value)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' if (msg->%s_set != 1) {' % name,
' msg->%s_data = %s_new();' % (name, self._refname),
' if (msg->%s_data == NULL)' % name,
' return (-1);',
' msg->%s_set = 1;' % name,
' }',
' *value = msg->%s_data;' % name,
' return (0);',
'}' ]
return code
def CodeAssign(self):
name = self._name
code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,
const %(ctype)s value)
{
struct evbuffer *tmp = NULL;
if (msg->%(name)s_set) {
%(refname)s_clear(msg->%(name)s_data);
msg->%(name)s_set = 0;
} else {
msg->%(name)s_data = %(refname)s_new();
if (msg->%(name)s_data == NULL) {
event_warn("%%s: %(refname)s_new()", __func__);
goto error;
}
}
if ((tmp = evbuffer_new()) == NULL) {
event_warn("%%s: evbuffer_new()", __func__);
goto error;
}
%(refname)s_marshal(tmp, value);
if (%(refname)s_unmarshal(msg->%(name)s_data, tmp) == -1) {
event_warnx("%%s: %(refname)s_unmarshal", __func__);
goto error;
}
msg->%(name)s_set = 1;
evbuffer_free(tmp);
return (0);
error:
if (tmp != NULL)
evbuffer_free(tmp);
if (msg->%(name)s_data != NULL) {
%(refname)s_free(msg->%(name)s_data);
msg->%(name)s_data = NULL;
}
return (-1);
}""" % self.GetTranslation()
return code.split('\n')
def CodeComplete(self, structname):
if self.Optional():
code = [ 'if (%s->%s_set && %s_complete(%s->%s_data) == -1)' % (
structname, self.Name(),
self._refname, structname, self.Name()),
' return (-1);' ]
else:
code = [ 'if (%s_complete(%s->%s_data) == -1)' % (
self._refname, structname, self.Name()),
' return (-1);' ]
return code
def CodeUnmarshal(self, buf, tag_name, var_name):
code = ['%s->%s_data = %s_new();' % (
var_name, self._name, self._refname),
'if (%s->%s_data == NULL)' % (var_name, self._name),
' return (-1);',
'if (evtag_unmarshal_%s(%s, %s, %s->%s_data) == -1) {' % (
self._refname, buf, tag_name, var_name, self._name),
' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
self._name ),
' return (-1);',
'}'
]
return code
def CodeMarshal(self, buf, tag_name, var_name):
code = ['evtag_marshal_%s(%s, %s, %s->%s_data);' % (
self._refname, buf, tag_name, var_name, self._name)]
return code
def CodeClear(self, structname):
code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
' %s_free(%s->%s_data);' % (
self._refname, structname, self.Name()),
' %s->%s_data = NULL;' % (structname, self.Name()),
' %s->%s_set = 0;' % (structname, self.Name()),
'}'
]
return code
def CodeNew(self, name):
code = ['%s->%s_data = NULL;' % (name, self._name)]
return code
def CodeFree(self, name):
code = ['if (%s->%s_data != NULL)' % (name, self._name),
' %s_free(%s->%s_data); ' % (
self._refname, name, self._name)]
return code
def Declaration(self):
dcl = ['%s %s_data;' % (self._ctype, self._name)]
return dcl
class EntryVarBytes(Entry):
def __init__(self, type, name, tag):
# Init base class
Entry.__init__(self, type, name, tag)
self._ctype = 'ev_uint8_t *'
def GetDeclaration(self, funcname):
code = [ 'int %s(struct %s *, %s *, ev_uint32_t *);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def AssignDeclaration(self, funcname):
code = [ 'int %s(struct %s *, const %s, ev_uint32_t);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def CodeAssign(self):
name = self._name
code = [ 'int',
'%s_%s_assign(struct %s *msg, '
'const %s value, ev_uint32_t len)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' if (msg->%s_data != NULL)' % name,
' free (msg->%s_data);' % name,
' msg->%s_data = malloc(len);' % name,
' if (msg->%s_data == NULL)' % name,
' return (-1);',
' msg->%s_set = 1;' % name,
' msg->%s_length = len;' % name,
' memcpy(msg->%s_data, value, len);' % name,
' return (0);',
'}' ]
return code
def CodeGet(self):
name = self._name
code = [ 'int',
'%s_%s_get(struct %s *msg, %s *value, ev_uint32_t *plen)' % (
self._struct.Name(), name,
self._struct.Name(), self._ctype),
'{',
' if (msg->%s_set != 1)' % name,
' return (-1);',
' *value = msg->%s_data;' % name,
' *plen = msg->%s_length;' % name,
' return (0);',
'}' ]
return code
def CodeUnmarshal(self, buf, tag_name, var_name):
code = ['if (evtag_payload_length(%s, &%s->%s_length) == -1)' % (
buf, var_name, self._name),
' return (-1);',
# We do not want DoS opportunities
'if (%s->%s_length > EVBUFFER_LENGTH(%s))' % (
var_name, self._name, buf),
' return (-1);',
'if ((%s->%s_data = malloc(%s->%s_length)) == NULL)' % (
var_name, self._name, var_name, self._name),
' return (-1);',
'if (evtag_unmarshal_fixed(%s, %s, %s->%s_data, '
'%s->%s_length) == -1) {' % (
buf, tag_name, var_name, self._name, var_name, self._name),
' event_warnx("%%s: failed to unmarshal %s", __func__);' % (
self._name ),
' return (-1);',
'}'
]
return code
def CodeMarshal(self, buf, tag_name, var_name):
code = ['evtag_marshal(%s, %s, %s->%s_data, %s->%s_length);' % (
buf, tag_name, var_name, self._name, var_name, self._name)]
return code
def CodeClear(self, structname):
code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
' free (%s->%s_data);' % (structname, self.Name()),
' %s->%s_data = NULL;' % (structname, self.Name()),
' %s->%s_length = 0;' % (structname, self.Name()),
' %s->%s_set = 0;' % (structname, self.Name()),
'}'
]
return code
def CodeNew(self, name):
code = ['%s->%s_data = NULL;' % (name, self._name),
'%s->%s_length = 0;' % (name, self._name) ]
return code
def CodeFree(self, name):
code = ['if (%s->%s_data != NULL)' % (name, self._name),
' free (%s->%s_data); ' % (name, self._name)]
return code
def Declaration(self):
dcl = ['ev_uint8_t *%s_data;' % self._name,
'ev_uint32_t %s_length;' % self._name]
return dcl
class EntryArray(Entry):
def __init__(self, entry):
# Init base class
Entry.__init__(self, entry._type, entry._name, entry._tag)
self._entry = entry
self._refname = entry._refname
self._ctype = 'struct %s *' % self._refname
def GetDeclaration(self, funcname):
"""Allows direct access to elements of the array."""
translate = self.GetTranslation()
translate["funcname"] = funcname
code = [
'int %(funcname)s(struct %(parent_name)s *, int, %(ctype)s *);' %
translate ]
return code
def AssignDeclaration(self, funcname):
code = [ 'int %s(struct %s *, int, const %s);' % (
funcname, self._struct.Name(), self._ctype ) ]
return code
def AddDeclaration(self, funcname):
code = [ '%s %s(struct %s *);' % (
self._ctype, funcname, self._struct.Name() ) ]
return code
def CodeGet(self):
code = """int
%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, int offset,
%(ctype)s *value)
{
if (!msg->%(name)s_set || offset < 0 || offset >= msg->%(name)s_length)
return (-1);
*value = msg->%(name)s_data[offset];
return (0);
}""" % self.GetTranslation()
return code.split('\n')
def CodeAssign(self):
code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg, int off,
const %(ctype)s value)
{
struct evbuffer *tmp = NULL;
if (!msg->%(name)s_set || off < 0 || off >= msg->%(name)s_length)
return (-1);
%(refname)s_clear(msg->%(name)s_data[off]);
if ((tmp = evbuffer_new()) == NULL) {
event_warn("%%s: evbuffer_new()", __func__);
goto error;
}
%(refname)s_marshal(tmp, value);
if (%(refname)s_unmarshal(msg->%(name)s_data[off], tmp) == -1) {
event_warnx("%%s: %(refname)s_unmarshal", __func__);
goto error;
}
evbuffer_free(tmp);
return (0);
error:
if (tmp != NULL)
evbuffer_free(tmp);
%(refname)s_clear(msg->%(name)s_data[off]);
return (-1);
}""" % self.GetTranslation()
return code.split('\n')
def CodeAdd(self):
code = \
"""%(ctype)s
%(parent_name)s_%(name)s_add(struct %(parent_name)s *msg)
{
if (++msg->%(name)s_length >= msg->%(name)s_num_allocated) {
int tobe_allocated = msg->%(name)s_num_allocated;
%(ctype)s* new_data = NULL;
tobe_allocated = !tobe_allocated ? 1 : tobe_allocated << 1;
new_data = (%(ctype)s*) realloc(msg->%(name)s_data,
tobe_allocated * sizeof(%(ctype)s));
if (new_data == NULL)
goto error;
msg->%(name)s_data = new_data;
msg->%(name)s_num_allocated = tobe_allocated;
}
msg->%(name)s_data[msg->%(name)s_length - 1] = %(refname)s_new();
if (msg->%(name)s_data[msg->%(name)s_length - 1] == NULL)
goto error;
msg->%(name)s_set = 1;
return (msg->%(name)s_data[msg->%(name)s_length - 1]);
error:
--msg->%(name)s_length;
return (NULL);
}
""" % self.GetTranslation()
return code.split('\n')
def CodeComplete(self, structname):
code = []
translate = self.GetTranslation()
if self.Optional():
code.append( 'if (%(structname)s->%(name)s_set)' % translate)
translate["structname"] = structname
tmp = """{
int i;
for (i = 0; i < %(structname)s->%(name)s_length; ++i) {
if (%(refname)s_complete(%(structname)s->%(name)s_data[i]) == -1)
return (-1);
}
}""" % translate
code.extend(tmp.split('\n'))
return code
def CodeUnmarshal(self, buf, tag_name, var_name):
translate = self.GetTranslation()
translate["var_name"] = var_name
translate["buf"] = buf
translate["tag_name"] = tag_name
code = """if (%(parent_name)s_%(name)s_add(%(var_name)s) == NULL)
return (-1);
if (evtag_unmarshal_%(refname)s(%(buf)s, %(tag_name)s,
%(var_name)s->%(name)s_data[%(var_name)s->%(name)s_length - 1]) == -1) {
--%(var_name)s->%(name)s_length;
event_warnx("%%s: failed to unmarshal %(name)s", __func__);
return (-1);
}""" % translate
return code.split('\n')
def CodeMarshal(self, buf, tag_name, var_name):
code = ['{',
' int i;',
' for (i = 0; i < %s->%s_length; ++i) {' % (
var_name, self._name),
' evtag_marshal_%s(%s, %s, %s->%s_data[i]);' % (
self._refname, buf, tag_name, var_name, self._name),
' }',
'}'
]
return code
def CodeClear(self, structname):
code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
' int i;',
' for (i = 0; i < %s->%s_length; ++i) {' % (
structname, self.Name()),
' %s_free(%s->%s_data[i]);' % (
self._refname, structname, self.Name()),
' }',
' free(%s->%s_data);' % (structname, self.Name()),
' %s->%s_data = NULL;' % (structname, self.Name()),
' %s->%s_set = 0;' % (structname, self.Name()),
' %s->%s_length = 0;' % (structname, self.Name()),
' %s->%s_num_allocated = 0;' % (structname, self.Name()),
'}'
]
return code
def CodeNew(self, name):
code = ['%s->%s_data = NULL;' % (name, self._name),
'%s->%s_length = 0;' % (name, self._name),
'%s->%s_num_allocated = 0;' % (name, self._name)]
return code
def CodeFree(self, name):
code = ['if (%s->%s_data != NULL) {' % (name, self._name),
' int i;',
' for (i = 0; i < %s->%s_length; ++i) {' % (
name, self._name),
' %s_free(%s->%s_data[i]); ' % (
self._refname, name, self._name),
' %s->%s_data[i] = NULL;' % (name, self._name),
' }',
' free(%s->%s_data);' % (name, self._name),
' %s->%s_data = NULL;' % (name, self._name),
' %s->%s_length = 0;' % (name, self._name),
' %s->%s_num_allocated = 0;' % (name, self._name),
'}'
]
return code
def Declaration(self):
dcl = ['struct %s **%s_data;' % (self._refname, self._name),
'int %s_length;' % self._name,
'int %s_num_allocated;' % self._name ]
return dcl
def NormalizeLine(line):
global white
global cppcomment
line = cppcomment.sub('', line)
line = line.strip()
line = white.sub(' ', line)
return line
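# Illustrative example (assumed input): NormalizeLine drops C++-style '//'
# comments and trims the surrounding whitespace, so
#   '  int count = 1; // the tag  '
# becomes 'int count = 1;'.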
def ProcessOneEntry(newstruct, entry):
optional = 0
array = 0
entry_type = ''
name = ''
tag = ''
tag_set = None
separator = ''
fixed_length = ''
tokens = entry.split(' ')
while tokens:
token = tokens[0]
tokens = tokens[1:]
if not entry_type:
if not optional and token == 'optional':
optional = 1
continue
if not array and token == 'array':
array = 1
continue
if not entry_type:
entry_type = token
continue
if not name:
res = re.match(r'^([^\[\]]+)(\[.*\])?$', token)
if not res:
print >>sys.stderr, 'Cannot parse name: \"%s\" around %d' % (
entry, line_count)
sys.exit(1)
name = res.group(1)
fixed_length = res.group(2)
if fixed_length:
fixed_length = fixed_length[1:-1]
continue
if not separator:
separator = token
if separator != '=':
print >>sys.stderr, 'Expected "=" after name \"%s\" got %s' % (
name, token)
sys.exit(1)
continue
if not tag_set:
tag_set = 1
if not re.match(r'^(0x)?[0-9]+$', token):
print >>sys.stderr, 'Expected tag number: \"%s\"' % entry
sys.exit(1)
tag = int(token, 0)
continue
print >>sys.stderr, 'Cannot parse \"%s\"' % entry
sys.exit(1)
if not tag_set:
print >>sys.stderr, 'Need tag number: \"%s\"' % entry
sys.exit(1)
# Create the right entry
if entry_type == 'bytes':
if fixed_length:
newentry = EntryBytes(entry_type, name, tag, fixed_length)
else:
newentry = EntryVarBytes(entry_type, name, tag)
elif entry_type == 'int' and not fixed_length:
newentry = EntryInt(entry_type, name, tag)
elif entry_type == 'string' and not fixed_length:
newentry = EntryString(entry_type, name, tag)
else:
res = re.match(r'^struct\[(%s)\]$' % _STRUCT_RE,
entry_type, re.IGNORECASE)
if res:
# References another struct defined in our file
newentry = EntryStruct(entry_type, name, tag, res.group(1))
else:
print >>sys.stderr, 'Bad type: "%s" in "%s"' % (entry_type, entry)
sys.exit(1)
structs = []
if optional:
newentry.MakeOptional()
if array:
newentry.MakeArray()
newentry.SetStruct(newstruct)
newentry.SetLineCount(line_count)
newentry.Verify()
if array:
# We need to wrap this entry into an array; borgify the new entry.
newentry = EntryArray(newentry)
newentry.SetStruct(newstruct)
newentry.SetLineCount(line_count)
newentry.MakeArray()
newstruct.AddEntry(newentry)
return structs
def ProcessStruct(data):
tokens = data.split(' ')
# First three tokens are: 'struct' 'name' '{'
newstruct = Struct(tokens[1])
inside = ' '.join(tokens[3:-1])
tokens = inside.split(';')
structs = []
for entry in tokens:
entry = NormalizeLine(entry)
if not entry:
continue
# It's possible that new structs get defined in here
structs.extend(ProcessOneEntry(newstruct, entry))
structs.append(newstruct)
return structs
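# Hedged example of the .rpc syntax this parser accepts (hypothetical input
# mirroring the entry grammar handled by ProcessOneEntry):
#   struct kill {
#     string weapon = 1;
#     optional int how_often = 2;
#     array struct[kill] action = 3;
#   }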
def GetNextStruct(file):
global line_count
global cppdirect
got_struct = 0
processed_lines = []
have_c_comment = 0
data = ''
while 1:
line = file.readline()
if not line:
break
line_count += 1
line = line[:-1]
if not have_c_comment and re.search(r'/\*', line):
if re.search(r'/\*.*\*/', line):
line = re.sub(r'/\*.*\*/', '', line)
else:
line = re.sub(r'/\*.*$', '', line)
have_c_comment = 1
if have_c_comment:
if not re.search(r'\*/', line):
continue
have_c_comment = 0
line = re.sub(r'^.*\*/', '', line)
line = NormalizeLine(line)
if not line:
continue
if not got_struct:
if re.match(r'#include ["<].*[>"]', line):
cppdirect.append(line)
continue
if re.match(r'^#(if( |def)|endif)', line):
cppdirect.append(line)
continue
if re.match(r'^#define', line):
headerdirect.append(line)
continue
if not re.match(r'^struct %s {$' % _STRUCT_RE,
line, re.IGNORECASE):
print >>sys.stderr, 'Missing struct on line %d: %s' % (
line_count, line)
sys.exit(1)
else:
got_struct = 1
data += line
continue
# We are inside the struct
tokens = line.split('}')
if len(tokens) == 1:
data += ' ' + line
continue
if len(tokens[1]):
print >>sys.stderr, 'Trailing garbage after struct on line %d' % (
line_count )
sys.exit(1)
# We found the end of the struct
data += ' %s}' % tokens[0]
break
# Remove any comments that might be in there
data = re.sub(r'/\*.*\*/', '', data)
return data
def Parse(file):
"""
Parses the input file and returns the list of struct entities found in it.
"""
entities = []
while 1:
# Just gets the whole struct nicely formatted
data = GetNextStruct(file)
if not data:
break
entities.extend(ProcessStruct(data))
return entities
def GuardName(name):
name = '_'.join(name.split('.'))
name = '_'.join(name.split('/'))
guard = '_'+name.upper()+'_'
return guard
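# Illustrative example: GuardName('dir/kill.rpc') maps '.' and '/' to '_'
# and upper-cases the result, yielding '_DIR_KILL_RPC_'.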
def HeaderPreamble(name):
guard = GuardName(name)
pre = (
'/*\n'
' * Automatically generated from %s\n'
' */\n\n'
'#ifndef %s\n'
'#define %s\n\n' ) % (
name, guard, guard)
# insert stdint.h - let's hope everyone has it
pre += (
'#include <event-config.h>\n'
'#ifdef _EVENT_HAVE_STDINT_H\n'
'#include <stdint.h>\n'
'#endif\n' )
for statement in headerdirect:
pre += '%s\n' % statement
if headerdirect:
pre += '\n'
pre += (
'#define EVTAG_HAS(msg, member) ((msg)->member##_set == 1)\n'
'#ifdef __GNUC__\n'
'#define EVTAG_ASSIGN(msg, member, args...) '
'(*(msg)->base->member##_assign)(msg, ## args)\n'
'#define EVTAG_GET(msg, member, args...) '
'(*(msg)->base->member##_get)(msg, ## args)\n'
'#else\n'
'#define EVTAG_ASSIGN(msg, member, ...) '
'(*(msg)->base->member##_assign)(msg, ## __VA_ARGS__)\n'
'#define EVTAG_GET(msg, member, ...) '
'(*(msg)->base->member##_get)(msg, ## __VA_ARGS__)\n'
'#endif\n'
'#define EVTAG_ADD(msg, member) (*(msg)->base->member##_add)(msg)\n'
'#define EVTAG_LEN(msg, member) ((msg)->member##_length)\n'
)
return pre
def HeaderPostamble(name):
guard = GuardName(name)
return '#endif /* %s */' % guard
def BodyPreamble(name):
global _NAME
global _VERSION
header_file = '.'.join(name.split('.')[:-1]) + '.gen.h'
pre = ( '/*\n'
' * Automatically generated from %s\n'
' * by %s/%s. DO NOT EDIT THIS FILE.\n'
' */\n\n' ) % (name, _NAME, _VERSION)
pre += ( '#include <sys/types.h>\n'
'#ifdef _EVENT_HAVE_SYS_TIME_H\n'
'#include <sys/time.h>\n'
'#endif\n'
'#include <stdlib.h>\n'
'#include <string.h>\n'
'#include <assert.h>\n'
'#define EVENT_NO_STRUCT\n'
'#include <event.h>\n\n'
'#ifdef _EVENT___func__\n'
'#define __func__ _EVENT___func__\n'
'#endif\n' )
for statement in cppdirect:
pre += '%s\n' % statement
pre += '\n#include "%s"\n\n' % header_file
pre += 'void event_err(int eval, const char *fmt, ...);\n'
pre += 'void event_warn(const char *fmt, ...);\n'
pre += 'void event_errx(int eval, const char *fmt, ...);\n'
pre += 'void event_warnx(const char *fmt, ...);\n\n'
return pre
def main(argv):
if len(argv) < 2 or not argv[1]:
print >>sys.stderr, 'Need RPC description file as first argument.'
sys.exit(1)
filename = argv[1]
ext = filename.split('.')[-1]
if ext != 'rpc':
print >>sys.stderr, 'Unrecognized file extension: %s' % ext
sys.exit(1)
print >>sys.stderr, 'Reading \"%s\"' % filename
fp = open(filename, 'r')
entities = Parse(fp)
fp.close()
header_file = '.'.join(filename.split('.')[:-1]) + '.gen.h'
impl_file = '.'.join(filename.split('.')[:-1]) + '.gen.c'
print >>sys.stderr, '... creating "%s"' % header_file
header_fp = open(header_file, 'w')
print >>header_fp, HeaderPreamble(filename)
# Create forward declarations: allows other structs to reference
# each other
for entry in entities:
entry.PrintForwardDeclaration(header_fp)
print >>header_fp, ''
for entry in entities:
entry.PrintTags(header_fp)
entry.PrintDeclaration(header_fp)
print >>header_fp, HeaderPostamble(filename)
header_fp.close()
print >>sys.stderr, '... creating "%s"' % impl_file
impl_fp = open(impl_file, 'w')
print >>impl_fp, BodyPreamble(filename)
for entry in entities:
entry.PrintCode(impl_fp)
impl_fp.close()
if __name__ == '__main__':
main(sys.argv)
| bsd-3-clause |
sarvex/tensorflow | tensorflow/python/compiler/tensorrt/model_tests/result_analyzer.py | 9 | 7962 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Analyzes the latency and numerics information of sample model inference."""
import itertools
import json
from typing import Any, Callable, Optional, Sequence, Tuple, Union
from absl import logging
import numpy as np
from tensorflow.python.compiler.tensorrt.model_tests import model_handler
import tensorflow.python.compiler.tensorrt.trt_convert as trt
# pylint: disable=bad-whitespace
class DataFrame:
"""Lightweight immutable Dataframe similar to Pandas Dataframe."""
def __init__(self,
column_names: Sequence[str],
               rows: Optional[Sequence[Sequence[Any]]] = None,
               columns: Optional[Sequence[Sequence[Any]]] = None):
self._column_names = column_names
if not rows and not columns:
raise ValueError("Cannot initialize with empty data!")
self._rows = rows
self._columns = columns
@property
def n_rows(self) -> int:
return len(self._rows) if self._rows else len(self._columns[0])
@property
def n_columns(self) -> int:
return len(self._columns) if self._columns else len(self._rows[0])
@property
def column_names(self) -> Sequence[str]:
return self._column_names
@property
def rows(self) -> Sequence[Sequence[Any]]:
return self._rows if self._rows else [
[c[i] for c in self._columns] for i in range(len(self._columns[0]))
]
@property
def columns(self) -> Sequence[Sequence[Any]]:
return self._columns if self._columns else [
[r[i] for r in self._rows] for i in range(len(self._rows[0]))
]
def __add__(self, other: "DataFrame") -> "DataFrame":
if (not set(self.column_names).intersection(other.column_names) and
len(self.rows) == len(other.rows)):
return DataFrame(
column_names=list(
itertools.chain(self.column_names, other.column_names)),
columns=list(itertools.chain(self.columns, other.columns)))
if self.column_names == other.column_names:
return DataFrame(
column_names=self.column_names,
rows=list(itertools.chain(self.rows, other.rows)))
raise ValueError("Cannot combine two DataFrame")
def __iadd__(self, other: "DataFrame") -> "DataFrame":
tmp = self + other
self._column_names = tmp._column_names
self._rows, self._columns = tmp._rows, tmp._columns
return self
def __call__(self, r: int, c: Optional[Union[int, str]] = None) -> Any:
if c is None:
return dict(zip(self.column_names, self.rows[r]))
c = self._column_names.index(c) if isinstance(c, str) else c
return self._rows[r][c] if self._rows else self._columns[c][r]
def __str__(self) -> str:
return ",".join(self.column_names) + "\n" + "\n".join(",".join(
"N/A" if v is None else str(v) for v in row) for row in self.rows)
def to_csv(self, path: str):
with open(path, "w") as file:
file.write(str(self))
def to_json(self, path: str):
with open(path, "w") as file:
json.dump([dict(zip(self.column_names, r)) for r in self.rows], file)
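# A minimal usage sketch for DataFrame (hypothetical data; the path below is
# a placeholder):
#
#   df = DataFrame(column_names=["model", "time(ms)"],
#                  rows=[["net_a", 12.5], ["net_b", 20.1]])
#   df.n_rows             # 2
#   df(1, "time(ms)")     # 20.1
#   df.to_csv("/tmp/results.csv")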
def extract_test_info(
test_results: model_handler.TestResultCollection) -> DataFrame:
"""Extracts the test infomation."""
column_names = list(
itertools.chain(model_handler.ModelConfig._fields,
["enable_gpu", "trt_model"],
trt.TrtConversionParams._fields))
rows = []
for result in test_results.results:
r = list(result.model_config) + [result.enable_gpu]
if result.trt_convert_params is not None:
r += [True] + list(result.trt_convert_params)
else:
r += [False] + [None for _ in trt.TrtConversionParams._fields]
rows.append(r)
return DataFrame(column_names=column_names, rows=rows)
def analyze_test_latency(test_results: model_handler.TestResultCollection,
use_cpu_baseline: bool) -> DataFrame:
"""Analyzes test latency."""
base_result = (
test_results.cpu_base_result
if use_cpu_baseline else test_results.gpu_base_result)
if base_result is None:
raise ValueError(
f"No {'CPU' if use_cpu_baseline else 'GPU'} baseline found!")
  base_mean_time = float(np.mean(base_result.model_latency))
column_names = ["time(ms)", "speedup"]
rows = []
for result in test_results.results:
    mean_time = float(np.mean(result.model_latency))
rows.append([mean_time * 1000.0, base_mean_time / mean_time])
return DataFrame(column_names=column_names, rows=rows)
def analyze_test_numerics(test_results: model_handler.TestResultCollection,
use_cpu_baseline: bool) -> DataFrame:
"""Analyzes test numerics."""
preprocess_funcs = {
"diff": lambda x, y: np.fabs(x - y),
# Ensures dividends are not zero to avoid exceptions/NaNs.
"rel_diff": lambda x, y: np.fabs(x - y) / np.fmax(np.fabs(y), 1.0e-6)
}
postprocess_funcs = {"mean": np.mean, "std": np.std}
column_names = []
columns = []
base_result = (
test_results.cpu_base_result
if use_cpu_baseline else test_results.gpu_base_result)
if base_result is None:
raise ValueError(
f"No {'CPU' if use_cpu_baseline else 'GPU'} baseline found!")
for fn0, fn1 in itertools.product(preprocess_funcs, postprocess_funcs):
func0, func1 = preprocess_funcs[fn0], postprocess_funcs[fn1]
column_names.append("{}_{}".format(fn0, fn1))
columns.append([])
for result in test_results.results:
columns[-1].append(dict())
for idx, tensor in enumerate(result.output_tensors):
name = base_result.output_names[idx]
cpu_tensor = base_result.output_tensors[idx]
        metric_value = float(func1(func0(tensor, cpu_tensor)))
columns[-1][-1][name] = metric_value
return DataFrame(column_names=column_names, columns=columns)
def check_column(df: DataFrame, name: str, fn: Callable[[float], bool]) -> bool:
"""Checks the values of a column using a custom function and logs abnormals.
The check is only performed on TensorRT models, not native CPU/GPU models.
Args:
df: The DataFrame to be checked.
name: The name of the column to be checked.
    fn: The function that takes a value at the specified column and returns
      whether the value satisfies the check.
  Returns:
    Whether all the values of the specified column satisfy the provided check.
"""
is_ok = True
for r in range(df.n_rows):
if df(r, "trt_model"):
if not fn(df(r, name)):
logging.error("Unsatisfied %s found at: %s", name, df(r))
is_ok = False
return is_ok
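# Example checkers (thresholds are hypothetical): require at least a 1.2x
# speedup, and a mean relative difference below 1e-2 for every output tensor
# of each TensorRT row:
#
#   speedup_ok = check_column(df, "speedup", lambda v: v >= 1.2)
#   numerics_ok = check_column(
#       df, "rel_diff_mean", lambda d: all(v < 1e-2 for v in d.values()))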
class ResultAnalyzer:
"""Analyzes ModelHandlerManager results."""
def __init__(
self,
use_cpu_latency_baseline: bool,
use_cpu_numerics_baseline: bool,
checkers: Sequence[Callable[[DataFrame], bool]],
):
self._use_cpu_latency_baseline = use_cpu_latency_baseline
self._use_cpu_numerics_baseline = use_cpu_numerics_baseline
self._checkers = checkers
def analysis(
self, test_results: model_handler.TestResultCollection
) -> Tuple[DataFrame, Sequence[bool]]:
df = extract_test_info(test_results)
df += analyze_test_latency(test_results, self._use_cpu_latency_baseline)
df += analyze_test_numerics(test_results, self._use_cpu_numerics_baseline)
checks = [c(df) for c in self._checkers]
return df, checks
| apache-2.0 |
watonyweng/neutron | neutron/tests/unit/test_auth.py | 43 | 5071 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_middleware import request_id
import webob
from neutron import auth
from neutron.tests import base
class NeutronKeystoneContextTestCase(base.BaseTestCase):
def setUp(self):
super(NeutronKeystoneContextTestCase, self).setUp()
@webob.dec.wsgify
def fake_app(req):
self.context = req.environ['neutron.context']
return webob.Response()
self.context = None
self.middleware = auth.NeutronKeystoneContext(fake_app)
self.request = webob.Request.blank('/')
self.request.headers['X_AUTH_TOKEN'] = 'testauthtoken'
def test_no_user_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '401 Unauthorized')
def test_with_user_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.user_id, 'testuserid')
self.assertEqual(self.context.user, 'testuserid')
def test_with_tenant_id(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'test_user_id'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.tenant_id, 'testtenantid')
self.assertEqual(self.context.tenant, 'testtenantid')
def test_roles_no_admin(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_ROLES'] = 'role1, role2 , role3,role4,role5'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.roles, ['role1', 'role2', 'role3',
'role4', 'role5'])
self.assertEqual(self.context.is_admin, False)
def test_roles_with_admin(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_ROLES'] = ('role1, role2 , role3,role4,role5,'
'AdMiN')
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.roles, ['role1', 'role2', 'role3',
'role4', 'role5', 'AdMiN'])
self.assertEqual(self.context.is_admin, True)
def test_with_user_tenant_name(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.headers['X_PROJECT_NAME'] = 'testtenantname'
self.request.headers['X_USER_NAME'] = 'testusername'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.user_id, 'testuserid')
self.assertEqual(self.context.user_name, 'testusername')
self.assertEqual(self.context.tenant_id, 'testtenantid')
self.assertEqual(self.context.tenant_name, 'testtenantname')
def test_request_id_extracted_from_env(self):
req_id = 'dummy-request-id'
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
self.request.environ[request_id.ENV_REQUEST_ID] = req_id
self.request.get_response(self.middleware)
self.assertEqual(req_id, self.context.request_id)
def test_with_auth_token(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
response = self.request.get_response(self.middleware)
self.assertEqual(response.status, '200 OK')
self.assertEqual(self.context.auth_token, 'testauthtoken')
def test_without_auth_token(self):
self.request.headers['X_PROJECT_ID'] = 'testtenantid'
self.request.headers['X_USER_ID'] = 'testuserid'
del self.request.headers['X_AUTH_TOKEN']
self.request.get_response(self.middleware)
self.assertIsNone(self.context.auth_token)
| apache-2.0 |
zrzka/blackmamba | blackmamba/lib/docutils/parsers/rst/roles.py | 4 | 14779 | # $Id: roles.py 7937 2016-05-24 10:48:48Z milde $
# Author: Edward Loper <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
This module defines standard interpreted text role functions, a registry for
interpreted text roles, and an API for adding to and retrieving from the
registry.
The interface for interpreted role functions is as follows::
def role_fn(name, rawtext, text, lineno, inliner,
options={}, content=[]):
code...
# Set function attributes for customization:
role_fn.options = ...
role_fn.content = ...
Parameters:
- ``name`` is the local name of the interpreted text role, the role name
actually used in the document.
- ``rawtext`` is a string containing the entire interpreted text construct.
Return it as a ``problematic`` node linked to a system message if there is a
problem.
- ``text`` is the interpreted text content, with backslash escapes converted
to nulls (``\x00``).
- ``lineno`` is the line number where the interpreted text begins.
- ``inliner`` is the Inliner object that called the role function.
It defines the following useful attributes: ``reporter``,
``problematic``, ``memo``, ``parent``, ``document``.
- ``options``: A dictionary of directive options for customization, to be
interpreted by the role function. Used for additional attributes for the
generated elements and other functionality.
- ``content``: A list of strings, the directive content for customization
("role" directive). To be interpreted by the role function.
Function attributes for customization, interpreted by the "role" directive:
- ``options``: A dictionary, mapping known option names to conversion
functions such as `int` or `float`. ``None`` or an empty dict implies no
options to parse. Several directive option conversion functions are defined
in the `directives` module.
All role functions implicitly support the "class" option, unless disabled
with an explicit ``{'class': None}``.
- ``content``: A boolean; true if content is allowed. Client code must handle
the case where content is required but not supplied (an empty content list
will be supplied).
Note that unlike directives, the "arguments" function attribute is not
supported for role customization. Directive arguments are handled by the
"role" directive itself.
Interpreted role functions return a tuple of two values:
- A list of nodes which will be inserted into the document tree at the
point where the interpreted role was encountered (can be an empty
list).
- A list of system messages, which will be inserted into the document tree
immediately after the end of the current inline block (can also be empty).
"""
__docformat__ = 'reStructuredText'
from docutils import nodes, utils
from docutils.parsers.rst import directives
from docutils.parsers.rst.languages import en as _fallback_language_module
from docutils.utils.code_analyzer import Lexer, LexerError
DEFAULT_INTERPRETED_ROLE = 'title-reference'
"""
The canonical name of the default interpreted role. This role is used
when no role is specified for a piece of interpreted text.
"""
_role_registry = {}
"""Mapping of canonical role names to role functions. Language-dependent role
names are defined in the ``language`` subpackage."""
_roles = {}
"""Mapping of local or language-dependent interpreted text role names to role
functions."""
def role(role_name, language_module, lineno, reporter):
"""
Locate and return a role function from its language-dependent name, along
with a list of system messages. If the role is not found in the current
language, check English. Return a 2-tuple: role function (``None`` if the
named role cannot be found) and a list of system messages.
"""
normname = role_name.lower()
messages = []
msg_text = []
if normname in _roles:
return _roles[normname], messages
if role_name:
canonicalname = None
try:
canonicalname = language_module.roles[normname]
except AttributeError as error:
msg_text.append('Problem retrieving role entry from language '
'module %r: %s.' % (language_module, error))
except KeyError:
msg_text.append('No role entry for "%s" in module "%s".'
% (role_name, language_module.__name__))
else:
canonicalname = DEFAULT_INTERPRETED_ROLE
# If we didn't find it, try English as a fallback.
if not canonicalname:
try:
canonicalname = _fallback_language_module.roles[normname]
msg_text.append('Using English fallback for role "%s".'
% role_name)
except KeyError:
msg_text.append('Trying "%s" as canonical role name.'
% role_name)
# The canonical name should be an English name, but just in case:
canonicalname = normname
# Collect any messages that we generated.
if msg_text:
message = reporter.info('\n'.join(msg_text), line=lineno)
messages.append(message)
# Look the role up in the registry, and return it.
if canonicalname in _role_registry:
role_fn = _role_registry[canonicalname]
register_local_role(normname, role_fn)
return role_fn, messages
else:
return None, messages # Error message will be generated by caller.
def register_canonical_role(name, role_fn):
"""
Register an interpreted text role by its canonical name.
:Parameters:
- `name`: The canonical name of the interpreted role.
- `role_fn`: The role function. See the module docstring.
"""
set_implicit_options(role_fn)
_role_registry[name] = role_fn
def register_local_role(name, role_fn):
"""
Register an interpreted text role by its local or language-dependent name.
:Parameters:
- `name`: The local or language-dependent name of the interpreted role.
- `role_fn`: The role function. See the module docstring.
"""
set_implicit_options(role_fn)
_roles[name] = role_fn
def set_implicit_options(role_fn):
"""
Add customization options to role functions, unless explicitly set or
disabled.
"""
if not hasattr(role_fn, 'options') or role_fn.options is None:
role_fn.options = {'class': directives.class_option}
elif 'class' not in role_fn.options:
role_fn.options['class'] = directives.class_option
def register_generic_role(canonical_name, node_class):
"""For roles which simply wrap a given `node_class` around the text."""
role = GenericRole(canonical_name, node_class)
register_canonical_role(canonical_name, role)
class GenericRole:
"""
Generic interpreted text role, where the interpreted text is simply
wrapped with the provided node class.
"""
def __init__(self, role_name, node_class):
self.name = role_name
self.node_class = node_class
def __call__(self, role, rawtext, text, lineno, inliner,
options={}, content=[]):
set_classes(options)
return [self.node_class(rawtext, utils.unescape(text), **options)], []
class CustomRole:
"""
Wrapper for custom interpreted text roles.
"""
def __init__(self, role_name, base_role, options={}, content=[]):
self.name = role_name
self.base_role = base_role
self.options = None
if hasattr(base_role, 'options'):
self.options = base_role.options
self.content = None
if hasattr(base_role, 'content'):
self.content = base_role.content
self.supplied_options = options
self.supplied_content = content
def __call__(self, role, rawtext, text, lineno, inliner,
options={}, content=[]):
opts = self.supplied_options.copy()
opts.update(options)
cont = list(self.supplied_content)
if cont and content:
cont += '\n'
cont.extend(content)
return self.base_role(role, rawtext, text, lineno, inliner,
options=opts, content=cont)
def generic_custom_role(role, rawtext, text, lineno, inliner,
options={}, content=[]):
""""""
# Once nested inline markup is implemented, this and other methods should
# recursively call inliner.nested_parse().
set_classes(options)
return [nodes.inline(rawtext, utils.unescape(text), **options)], []
generic_custom_role.options = {'class': directives.class_option}
######################################################################
# Define and register the standard roles:
######################################################################
register_generic_role('abbreviation', nodes.abbreviation)
register_generic_role('acronym', nodes.acronym)
register_generic_role('emphasis', nodes.emphasis)
register_generic_role('literal', nodes.literal)
register_generic_role('strong', nodes.strong)
register_generic_role('subscript', nodes.subscript)
register_generic_role('superscript', nodes.superscript)
register_generic_role('title-reference', nodes.title_reference)
def pep_reference_role(role, rawtext, text, lineno, inliner,
options={}, content=[]):
try:
pepnum = int(text)
if pepnum < 0 or pepnum > 9999:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'PEP number must be a number from 0 to 9999; "%s" is invalid.'
% text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
# Base URL mainly used by inliner.pep_reference; so this is correct:
ref = (inliner.document.settings.pep_base_url
+ inliner.document.settings.pep_file_url_template % pepnum)
set_classes(options)
return [nodes.reference(rawtext, 'PEP ' + utils.unescape(text), refuri=ref,
**options)], []
register_canonical_role('pep-reference', pep_reference_role)
def rfc_reference_role(role, rawtext, text, lineno, inliner,
options={}, content=[]):
try:
rfcnum = int(text)
if rfcnum <= 0:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'RFC number must be a number greater than or equal to 1; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
# Base URL mainly used by inliner.rfc_reference, so this is correct:
ref = inliner.document.settings.rfc_base_url + inliner.rfc_url % rfcnum
set_classes(options)
node = nodes.reference(rawtext, 'RFC ' + utils.unescape(text), refuri=ref,
**options)
return [node], []
register_canonical_role('rfc-reference', rfc_reference_role)
def raw_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
if not inliner.document.settings.raw_enabled:
msg = inliner.reporter.warning('raw (and derived) roles disabled')
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
if 'format' not in options:
msg = inliner.reporter.error(
'No format (Writer name) is associated with this role: "%s".\n'
'The "raw" role cannot be used directly.\n'
'Instead, use the "role" directive to create a new role with '
'an associated format.' % role, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
set_classes(options)
node = nodes.raw(rawtext, utils.unescape(text, 1), **options)
node.source, node.line = inliner.reporter.get_source_and_line(lineno)
return [node], []
raw_role.options = {'format': directives.unchanged}
register_canonical_role('raw', raw_role)
def code_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
set_classes(options)
language = options.get('language', '')
classes = ['code']
if 'classes' in options:
classes.extend(options['classes'])
if language and language not in classes:
classes.append(language)
try:
tokens = Lexer(utils.unescape(text, 1), language,
inliner.document.settings.syntax_highlight)
except LexerError as error:
msg = inliner.reporter.warning(error)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
node = nodes.literal(rawtext, '', classes=classes)
# analyse content and add nodes for every token
for classes, value in tokens:
# print (classes, value)
if classes:
node += nodes.inline(value, value, classes=classes)
else:
# insert as Text to decrease the verbosity of the output
node += nodes.Text(value, value)
return [node], []
code_role.options = {'class': directives.class_option,
'language': directives.unchanged}
register_canonical_role('code', code_role)
def math_role(role, rawtext, text, lineno, inliner, options={}, content=[]):
set_classes(options)
    # Pick the raw text between the backticks so that backslashes reach the
    # math node unescaped.
    text = rawtext.split('`')[1]
node = nodes.math(rawtext, text, **options)
return [node], []
register_canonical_role('math', math_role)
######################################################################
# Register roles that are currently unimplemented.
######################################################################
def unimplemented_role(role, rawtext, text, lineno, inliner, attributes={}):
msg = inliner.reporter.error(
'Interpreted text role "%s" not implemented.' % role, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
register_canonical_role('index', unimplemented_role)
register_canonical_role('named-reference', unimplemented_role)
register_canonical_role('anonymous-reference', unimplemented_role)
register_canonical_role('uri-reference', unimplemented_role)
register_canonical_role('footnote-reference', unimplemented_role)
register_canonical_role('citation-reference', unimplemented_role)
register_canonical_role('substitution-reference', unimplemented_role)
register_canonical_role('target', unimplemented_role)
# This should remain unimplemented, for testing purposes:
register_canonical_role('restructuredtext-unimplemented-role',
unimplemented_role)
def set_classes(options):
"""
Auxiliary function to set options['classes'] and delete
options['class'].
"""
if 'class' in options:
assert 'classes' not in options
options['classes'] = options['class']
del options['class']
| mit |
supergis/QGIS | python/ext-libs/pygments/styles/emacs.py | 364 | 2486 | # -*- coding: utf-8 -*-
"""
pygments.styles.emacs
~~~~~~~~~~~~~~~~~~~~~
A highlighting style for Pygments, inspired by Emacs.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class EmacsStyle(Style):
"""
The default style (inspired by Emacs 22).
"""
background_color = "#f8f8f8"
default_style = ""
styles = {
Whitespace: "#bbbbbb",
Comment: "italic #008800",
Comment.Preproc: "noitalic",
Comment.Special: "noitalic bold",
Keyword: "bold #AA22FF",
Keyword.Pseudo: "nobold",
Keyword.Type: "bold #00BB00",
Operator: "#666666",
Operator.Word: "bold #AA22FF",
Name.Builtin: "#AA22FF",
Name.Function: "#00A000",
Name.Class: "#0000FF",
Name.Namespace: "bold #0000FF",
Name.Exception: "bold #D2413A",
Name.Variable: "#B8860B",
Name.Constant: "#880000",
Name.Label: "#A0A000",
Name.Entity: "bold #999999",
Name.Attribute: "#BB4444",
Name.Tag: "bold #008000",
Name.Decorator: "#AA22FF",
String: "#BB4444",
String.Doc: "italic",
String.Interpol: "bold #BB6688",
String.Escape: "bold #BB6622",
String.Regex: "#BB6688",
String.Symbol: "#B8860B",
String.Other: "#008000",
Number: "#666666",
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
Generic.Deleted: "#A00000",
Generic.Inserted: "#00A000",
Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold #000080",
Generic.Output: "#888",
Generic.Traceback: "#04D",
Error: "border:#FF0000"
}
| gpl-2.0 |
StefanRijnhart/odoo | addons/product_margin/wizard/__init__.py | 444 | 1078 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import product_margin
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
klahnakoski/ActiveData-ETL | vendor/mo_hg/parse.py | 2 | 6797 | # encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import, division, unicode_literals
import re
from jx_base import DataClass
from mo_dots import wrap
from mo_logs import Log, strings
MAX_CONTENT_LENGTH = 500 # SOME "lines" FOR CODE ARE REALLY TOO LONG
GET_DIFF = "{{location}}/rev/{{rev}}"
GET_FILE = "{{location}}/file/{{rev}}{{path}}"
HUNK_HEADER = re.compile(r"^-(\d+),(\d+) \+(\d+),(\d+) @@.*")
FILE_SEP = re.compile(r"^--- ", re.MULTILINE)
HUNK_SEP = re.compile(r"^@@ ", re.MULTILINE)
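# Example: FILE_SEP/HUNK_SEP split on "^--- " and "^@@ ", so each hunk begins
# with a header such as "-12,7 +12,8 @@ def foo():", which HUNK_HEADER parses
# into (old_start, old_length, new_start, new_length) == ('12', '7', '12', '8').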
MOVE = {
" ": lambda c: (c[0] + 1, c[1] + 1),
"\\": lambda c: c, # FOR "\ no newline at end of file
"+": lambda c: (c[0] + 1, c[1]),
"-": lambda c: (c[0], c[1] + 1),
}
no_change = MOVE[" "]
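# The cursor c is a (new_line, old_line) pair of 0-based counters, e.g.:
#   no_change((10, 10))   # -> (11, 11): a context line advances both files
#   MOVE['+']((10, 10))   # -> (11, 10): an addition advances only the new file
#   MOVE['-']((10, 10))   # -> (10, 11): a removal advances only the old file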
def diff_to_json(unified_diff):
"""
CONVERT UNIFIED DIFF TO EASY-TO-STORE JSON FORMAT
:param unified_diff: text
:return: JSON details
"""
output = []
files = FILE_SEP.split(unified_diff)[1:]
for file_ in files:
changes = []
old_file_header, new_file_header, file_diff = file_.split("\n", 2)
old_file_path = old_file_header[
1:
] # eg old_file_header == "a/testing/marionette/harness/marionette_harness/tests/unit/unit-tests.ini"
new_file_path = new_file_header[
5:
] # eg new_file_header == "+++ b/tests/resources/example_file.py"
c = 0, 0
hunks = HUNK_SEP.split(file_diff)[1:]
for hunk in hunks:
line_diffs = hunk.split("\n")
old_start, old_length, new_start, new_length = HUNK_HEADER.match(
line_diffs[0]
).groups()
next_c = max(0, int(new_start) - 1), max(0, int(old_start) - 1)
if next_c[0] - next_c[1] != c[0] - c[1]:
Log.error("expecting a skew of {{skew}}", skew=next_c[0] - next_c[1])
if c[0] > next_c[0]:
Log.error("can not handle out-of-order diffs")
while c[0] != next_c[0]:
c = no_change(c)
for line in line_diffs[1:]:
if not line:
continue
if (
line.startswith("new file mode")
or line.startswith("deleted file mode")
or line.startswith("index ")
or line.startswith("diff --git")
):
# HAPPENS AT THE TOP OF NEW FILES
# diff --git a/security/sandbox/linux/SandboxFilter.cpp b/security/sandbox/linux/SandboxFilter.cpp
# u'new file mode 100644'
# u'deleted file mode 100644'
# index a763e390731f5379ddf5fa77090550009a002d13..798826525491b3d762503a422b1481f140238d19
# GIT binary patch
# literal 30804
break
d = line[0]
if d == "+":
changes.append(
{
"new": {
"line": int(c[0]),
"content": strings.limit(line[1:], MAX_CONTENT_LENGTH),
}
}
)
elif d == "-":
changes.append(
{
"old": {
"line": int(c[1]),
"content": strings.limit(line[1:], MAX_CONTENT_LENGTH),
}
}
)
try:
c = MOVE[d](c)
except Exception as e:
Log.warning("bad line {{line|quote}}", line=line, cause=e)
output.append(
{"new": {"name": new_file_path}, "old": {"name": old_file_path}, "changes": changes}
)
return wrap(output)
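# A minimal usage sketch (hypothetical one-line change; mo_dots wrapping
# gives attribute access on the result):
#
#   sample = (
#       "--- a/hello.txt\n"
#       "+++ b/hello.txt\n"
#       "@@ -1,1 +1,1 @@\n"
#       "-hello\n"
#       "+goodbye\n"
#   )
#   details = diff_to_json(sample)
#   # details[0].new.name == '/hello.txt' (the a/b prefixes are stripped)
#   # details[0].changes == [{'old': {'line': 0, 'content': 'hello'}},
#   #                        {'new': {'line': 0, 'content': 'goodbye'}}]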
def diff_to_moves(unified_diff):
"""
TODO: WE SHOULD BE ABLE TO STREAM THE RAW DIFF SO WE HANDLE LARGE ONES
FOR EACH FILE, RETURN AN ARRAY OF (line, action) PAIRS
:param unified_diff: raw diff
:return: (file, line, action) triples
"""
output = []
files = FILE_SEP.split(unified_diff)[1:]
for file_ in files:
changes = []
old_file_header, new_file_header, file_diff = file_.split("\n", 2)
old_file_path = old_file_header[
1:
] # eg old_file_header == "a/testing/marionette/harness/marionette_harness/tests/unit/unit-tests.ini"
new_file_path = new_file_header[
5:
] # eg new_file_header == "+++ b/tests/resources/example_file.py"
c = 0, 0
hunks = HUNK_SEP.split(file_diff)[1:]
for hunk in hunks:
line_diffs = hunk.split("\n")
old_start, old_length, new_start, new_length = HUNK_HEADER.match(
line_diffs[0]
).groups()
next_c = max(0, int(new_start) - 1), max(0, int(old_start) - 1)
if next_c[0] - next_c[1] != c[0] - c[1]:
Log.error("expecting a skew of {{skew}}", skew=next_c[0] - next_c[1])
if c[0] > next_c[0]:
Log.error("can not handle out-of-order diffs")
while c[0] != next_c[0]:
c = no_change(c)
for line in line_diffs[1:]:
if not line:
continue
if (
line.startswith("new file mode")
or line.startswith("deleted file mode")
or line.startswith("index ")
or line.startswith("diff --git")
):
# HAPPENS AT THE TOP OF NEW FILES
# diff --git a/security/sandbox/linux/SandboxFilter.cpp b/security/sandbox/linux/SandboxFilter.cpp
# u'new file mode 100644'
# u'deleted file mode 100644'
# index a763e390731f5379ddf5fa77090550009a002d13..798826525491b3d762503a422b1481f140238d19
# GIT binary patch
# literal 30804
break
d = line[0]
if d != " ":
changes.append(Action(line=int(c[0]), action=d))
c = MOVE[d](c)
output.append(
{"new": {"name": new_file_path}, "old": {"name": old_file_path}, "changes": changes}
)
return wrap(output)
Action = DataClass(
"Action",
["line", "action"],
constraint=True, # TODO: remove when constrain=None is the same as True
)
| mpl-2.0 |
q474818917/solr-5.2.0 | dev-tools/scripts/buildAndPushRelease.py | 4 | 10086 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import datetime
import re
import time
import shutil
import os
import sys
import subprocess
import textwrap
LOG = '/tmp/release.log'
def log(msg):
f = open(LOG, mode='ab')
f.write(msg.encode('utf-8'))
f.close()
def run(command):
log('\n\n%s: RUN: %s\n' % (datetime.datetime.now(), command))
if os.system('%s >> %s 2>&1' % (command, LOG)):
msg = ' FAILED: %s [see log %s]' % (command, LOG)
print(msg)
raise RuntimeError(msg)
def runAndSendGPGPassword(command, password):
p = subprocess.Popen(command, shell=True, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE)
f = open(LOG, 'ab')
while True:
p.stdout.flush()
line = p.stdout.readline()
if len(line) == 0:
break
f.write(line)
if line.find(b'Enter GPG keystore password:') != -1:
time.sleep(1.0)
p.stdin.write((password + '\n').encode('UTF-8'))
p.stdin.write('\n'.encode('UTF-8'))
result = p.poll()
if result != 0:
msg = ' FAILED: %s [see log %s]' % (command, LOG)
print(msg)
raise RuntimeError(msg)
def scrubCheckout():
# removes any files not checked into svn
  unversionedRex = re.compile(r'^ ?[\?ID] *[1-9 ]*[a-zA-Z]* +(.*)')
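  # Matches 'svn status --no-ignore -v' lines for unversioned ('?'),
  # ignored ('I') or deleted ('D') items, e.g. "?          lucene/build",
  # capturing the path so it can be removed below.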
for l in os.popen('svn status --no-ignore -v').readlines():
match = unversionedRex.match(l)
if match:
s = match.group(1)
if os.path.exists(s):
print(' delete %s' % s)
if os.path.isdir(s) and not os.path.islink(s):
shutil.rmtree(s)
else:
os.remove(s)
def getSVNRev():
rev = os.popen('svnversion').read().strip()
try:
int(rev)
except (TypeError, ValueError):
raise RuntimeError('svn version is not clean: %s' % rev)
return rev
def prepare(root, version, gpgKeyID, gpgPassword):
print()
print('Prepare release...')
if os.path.exists(LOG):
os.remove(LOG)
os.chdir(root)
print(' svn up...')
run('svn up')
rev = getSVNRev()
print(' svn rev: %s' % rev)
log('\nSVN rev: %s\n' % rev)
print(' ant clean test')
run('ant clean test')
print(' clean checkout')
scrubCheckout()
open('rev.txt', mode='wb').write(rev.encode('UTF-8'))
print(' lucene prepare-release')
os.chdir('lucene')
cmd = 'ant -Dversion=%s' % version
if gpgKeyID is not None:
cmd += ' -Dgpg.key=%s prepare-release' % gpgKeyID
else:
cmd += ' prepare-release-no-sign'
if gpgPassword is not None:
runAndSendGPGPassword(cmd, gpgPassword)
else:
run(cmd)
print(' solr prepare-release')
os.chdir('../solr')
cmd = 'ant -Dversion=%s' % version
if gpgKeyID is not None:
cmd += ' -Dgpg.key=%s prepare-release' % gpgKeyID
else:
cmd += ' prepare-release-no-sign'
if gpgPassword is not None:
runAndSendGPGPassword(cmd, gpgPassword)
else:
run(cmd)
print(' done!')
print()
return rev
def push(version, root, rev, rcNum, username):
print('Push...')
dir = 'lucene-solr-%s-RC%d-rev%s' % (version, rcNum, rev)
s = os.popen('ssh %[email protected] "ls -ld public_html/staging_area/%s" 2>&1' % (username, dir)).read()
if 'no such file or directory' not in s.lower():
print(' Remove old dir...')
run('ssh %[email protected] "chmod -R u+rwX public_html/staging_area/%s; rm -rf public_html/staging_area/%s"' %
(username, dir, dir))
run('ssh %[email protected] "mkdir -p public_html/staging_area/%s/lucene public_html/staging_area/%s/solr"' % \
(username, dir, dir))
print(' Lucene')
os.chdir('%s/lucene/dist' % root)
print(' zip...')
if os.path.exists('lucene.tar.bz2'):
os.remove('lucene.tar.bz2')
run('tar cjf lucene.tar.bz2 *')
print(' copy...')
run('scp lucene.tar.bz2 %[email protected]:public_html/staging_area/%s/lucene' % (username, dir))
print(' unzip...')
run('ssh %[email protected] "cd public_html/staging_area/%s/lucene; tar xjf lucene.tar.bz2; rm -f lucene.tar.bz2"' % (username, dir))
os.remove('lucene.tar.bz2')
print(' Solr')
os.chdir('%s/solr/package' % root)
print(' zip...')
if os.path.exists('solr.tar.bz2'):
os.remove('solr.tar.bz2')
run('tar cjf solr.tar.bz2 *')
print(' copy...')
run('scp solr.tar.bz2 %[email protected]:public_html/staging_area/%s/solr' % (username, dir))
print(' unzip...')
run('ssh %[email protected] "cd public_html/staging_area/%s/solr; tar xjf solr.tar.bz2; rm -f solr.tar.bz2"' % (username, dir))
os.remove('solr.tar.bz2')
print(' chmod...')
run('ssh %[email protected] "chmod -R a+rX-w public_html/staging_area/%s"' % (username, dir))
print(' done!')
url = 'http://people.apache.org/~%s/staging_area/%s' % (username, dir)
return url
def pushLocal(version, root, rev, rcNum, localDir):
print('Push local [%s]...' % localDir)
os.makedirs(localDir)
dir = 'lucene-solr-%s-RC%d-rev%s' % (version, rcNum, rev)
os.makedirs('%s/%s/lucene' % (localDir, dir))
os.makedirs('%s/%s/solr' % (localDir, dir))
print(' Lucene')
os.chdir('%s/lucene/dist' % root)
print(' zip...')
if os.path.exists('lucene.tar.bz2'):
os.remove('lucene.tar.bz2')
run('tar cjf lucene.tar.bz2 *')
os.chdir('%s/%s/lucene' % (localDir, dir))
print(' unzip...')
run('tar xjf "%s/lucene/dist/lucene.tar.bz2"' % root)
os.remove('%s/lucene/dist/lucene.tar.bz2' % root)
print(' Solr')
os.chdir('%s/solr/package' % root)
print(' zip...')
if os.path.exists('solr.tar.bz2'):
os.remove('solr.tar.bz2')
run('tar cjf solr.tar.bz2 *')
print(' unzip...')
os.chdir('%s/%s/solr' % (localDir, dir))
run('tar xjf "%s/solr/package/solr.tar.bz2"' % root)
os.remove('%s/solr/package/solr.tar.bz2' % root)
print(' KEYS')
run('wget http://people.apache.org/keys/group/lucene.asc')
os.rename('lucene.asc', 'KEYS')
run('chmod a+r-w KEYS')
run('cp KEYS ../lucene')
print(' chmod...')
os.chdir('..')
run('chmod -R a+rX-w .')
print(' done!')
return 'file://%s/%s' % (os.path.abspath(localDir), dir)
def read_version(path):
version_props_file = os.path.join(path, 'lucene', 'version.properties')
return re.search(r'version\.base=(.*)', open(version_props_file).read()).group(1)
def parse_config():
epilogue = textwrap.dedent('''
Example usage for a Release Manager:
python3.2 -u buildAndPushRelease.py --push-remote mikemccand --sign 6E68DA61 --rc-num 1 --version 4.7.0 /path/to/lucene_solr_4_7
''')
description = 'Utility to build, push, and test a release.'
parser = argparse.ArgumentParser(description=description, epilog=epilogue,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--no-prepare', dest='prepare', default=True, action='store_false',
help='Use the already built release in the provided checkout')
parser.add_argument('--push-remote', metavar='USERNAME',
help='Push the release to people.apache.org for the given user')
parser.add_argument('--push-local', metavar='PATH',
help='Push the release to the local path')
parser.add_argument('--sign', metavar='KEYID',
help='Sign the release with the given gpg key')
parser.add_argument('--rc-num', metavar='NUM', type=int, default=1,
help='Release Candidate number, required')
parser.add_argument('--smoke-test', metavar='PATH',
help='Run the smoker tester on the release in the given directory')
parser.add_argument('root', metavar='checkout_path',
help='Root of SVN checkout for lucene-solr')
config = parser.parse_args()
if config.push_remote is not None and config.push_local is not None:
parser.error('Cannot specify --push-remote and --push-local together')
if not config.prepare and config.sign:
parser.error('Cannot sign already built release')
if config.push_local is not None and os.path.exists(config.push_local):
parser.error('Cannot push to local path that already exists')
if config.rc_num <= 0:
parser.error('Release Candidate number must be a positive integer')
if not os.path.isdir(config.root):
# TODO: add additional svn check to ensure dir is a real lucene-solr checkout
parser.error('Root path is not a valid lucene-solr checkout')
config.version = read_version(config.root)
print('Building version: %s' % config.version)
if config.sign:
sys.stdout.flush()
import getpass
config.key_id = config.sign
config.key_password = getpass.getpass('Enter GPG keystore password: ')
  else:
    config.key_id = None
    config.key_password = None
return config
def main():
c = parse_config()
if c.prepare:
rev = prepare(c.root, c.version, c.key_id, c.key_password)
else:
    os.chdir(c.root)
rev = open('rev.txt', encoding='UTF-8').read()
if c.push_remote:
url = push(c.version, c.root, rev, c.rc_num, c.push_remote)
elif c.push_local:
url = pushLocal(c.version, c.root, rev, c.rc_num, c.push_local)
else:
url = None
if url is not None:
print(' URL: %s' % url)
print('Next set the PYTHON_EXEC env var and you can run the smoker tester:')
print(' $PYTHON_EXEC %s %s' % (sys.argv[0], url))
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print('Keyboard interrupt...exiting')
| apache-2.0 |
banksee/budger | config/urls.py | 1 | 1482 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
from rest_framework_jwt.views import obtain_jwt_token
urlpatterns = [
# Django Admin, use {% url 'admin:index' %}
url(settings.ADMIN_URL, admin.site.urls),
# REST API Web Authorization
url(r'^api-auth/', include('rest_framework.urls',
namespace='rest_framework')),
url(r'^api-token-auth/', obtain_jwt_token),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
    # This allows the error pages to be debugged during development; just visit
    # these URLs in a browser to see what the error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| bsd-3-clause |
jchevin/MissionPlanner-master | packages/IronPython.StdLib.2.7.4/content/Lib/MimeWriter.py | 315 | 6482 | """Generic MIME writer.
This module defines the class MimeWriter. The MimeWriter class implements
a basic formatter for creating MIME multi-part files. It doesn't seek around
the output file nor does it use large amounts of buffer space. You must write
the parts out in the order that they should occur in the final file.
MimeWriter does buffer the headers you add, allowing you to rearrange their
order.
"""
import mimetools
__all__ = ["MimeWriter"]
import warnings
warnings.warn("the MimeWriter module is deprecated; use the email package instead",
DeprecationWarning, 2)
class MimeWriter:
"""Generic MIME writer.
Methods:
__init__()
addheader()
flushheaders()
startbody()
startmultipartbody()
nextpart()
lastpart()
A MIME writer is much more primitive than a MIME parser. It
doesn't seek around on the output file, and it doesn't use large
amounts of buffer space, so you have to write the parts in the
order they should occur on the output file. It does buffer the
headers you add, allowing you to rearrange their order.
General usage is:
f = <open the output file>
w = MimeWriter(f)
...call w.addheader(key, value) 0 or more times...
followed by either:
f = w.startbody(content_type)
...call f.write(data) for body data...
or:
w.startmultipartbody(subtype)
for each part:
subwriter = w.nextpart()
...use the subwriter's methods to create the subpart...
w.lastpart()
The subwriter is another MimeWriter instance, and should be
treated in the same way as the toplevel MimeWriter. This way,
writing recursive body parts is easy.
Warning: don't forget to call lastpart()!
XXX There should be more state so calls made in the wrong order
are detected.
Some special cases:
- startbody() just returns the file passed to the constructor;
but don't use this knowledge, as it may be changed.
- startmultipartbody() actually returns a file as well;
this can be used to write the initial 'if you can read this your
mailer is not MIME-aware' message.
- If you call flushheaders(), the headers accumulated so far are
written out (and forgotten); this is useful if you don't need a
body part at all, e.g. for a subpart of type message/rfc822
that's (mis)used to store some header-like information.
- Passing a keyword argument 'prefix=<flag>' to addheader(),
start*body() affects where the header is inserted; 0 means
append at the end, 1 means insert at the start; default is
append for addheader(), but insert for start*body(), which use
it to determine where the Content-Type header goes.
"""
def __init__(self, fp):
self._fp = fp
self._headers = []
def addheader(self, key, value, prefix=0):
"""Add a header line to the MIME message.
The key is the name of the header, where the value obviously provides
the value of the header. The optional argument prefix determines
where the header is inserted; 0 means append at the end, 1 means
insert at the start. The default is to append.
"""
lines = value.split("\n")
while lines and not lines[-1]: del lines[-1]
while lines and not lines[0]: del lines[0]
for i in range(1, len(lines)):
lines[i] = " " + lines[i].strip()
value = "\n".join(lines) + "\n"
line = key + ": " + value
if prefix:
self._headers.insert(0, line)
else:
self._headers.append(line)
def flushheaders(self):
"""Writes out and forgets all headers accumulated so far.
This is useful if you don't need a body part at all; for example,
for a subpart of type message/rfc822 that's (mis)used to store some
header-like information.
"""
self._fp.writelines(self._headers)
self._headers = []
def startbody(self, ctype, plist=[], prefix=1):
"""Returns a file-like object for writing the body of the message.
The content-type is set to the provided ctype, and the optional
parameter, plist, provides additional parameters for the
content-type declaration. The optional argument prefix determines
where the header is inserted; 0 means append at the end, 1 means
insert at the start. The default is to insert at the start.
"""
for name, value in plist:
ctype = ctype + ';\n %s=\"%s\"' % (name, value)
self.addheader("Content-Type", ctype, prefix=prefix)
self.flushheaders()
self._fp.write("\n")
return self._fp
def startmultipartbody(self, subtype, boundary=None, plist=[], prefix=1):
"""Returns a file-like object for writing the body of the message.
Additionally, this method initializes the multi-part code, where the
subtype parameter provides the multipart subtype, the boundary
parameter may provide a user-defined boundary specification, and the
plist parameter provides optional parameters for the subtype. The
optional argument, prefix, determines where the header is inserted;
0 means append at the end, 1 means insert at the start. The default
is to insert at the start. Subparts should be created using the
nextpart() method.
"""
self._boundary = boundary or mimetools.choose_boundary()
return self.startbody("multipart/" + subtype,
[("boundary", self._boundary)] + plist,
prefix=prefix)
def nextpart(self):
"""Returns a new instance of MimeWriter which represents an
individual part in a multipart message.
This may be used to write the part as well as used for creating
recursively complex multipart messages. The message must first be
initialized with the startmultipartbody() method before using the
nextpart() method.
"""
self._fp.write("\n--" + self._boundary + "\n")
return self.__class__(self._fp)
def lastpart(self):
"""This is used to designate the last part of a multipart message.
It should always be used when writing multipart messages.
"""
self._fp.write("\n--" + self._boundary + "--\n")
if __name__ == '__main__':
import test.test_MimeWriter
| gpl-3.0 |
phil1425/qualitySensor | neuralnet.py | 1 | 1427 | import tflearn
from tflearn.data_preprocessing import DataPreprocessing
from tflearn.layers.core import input_data, dropout, fully_connected, reshape
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.estimator import regression
from tflearn.metrics import Accuracy
from tflearn.data_augmentation import ImageAugmentation
acc = Accuracy()
augmentation = ImageAugmentation()
augmentation.add_random_blur()
augmentation.add_random_rotation(180)
network = input_data(shape=[None, 640, 480, 3], data_augmentation=augmentation)
network = conv_2d(network, 4, 5, strides=2, activation='relu', name = 'conv1')
network = max_pool_2d(network, 2, strides=2)
network = conv_2d(network, 4, 5, strides=1, activation='relu', name = 'conv2')
network = max_pool_2d(network, 2, strides=2)
network = conv_2d(network, 4, 3, strides=1, activation='relu', name = 'conv3')
network = max_pool_2d(network, 2, strides=2)
network = fully_connected(network, 128, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 64, activation='tanh')
network = dropout(network, 0.5)
network = fully_connected(network, 2, activation='softmax')
network = regression(network, optimizer='adam',
loss='categorical_crossentropy',
learning_rate=0.001, metric=acc)
model = tflearn.DNN(network, checkpoint_path='models/model-', best_checkpoint_path='models/best-model-')
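# Illustrative training call (a sketch: X and Y are assumed to be a numpy
# array of 640x480 RGB images and matching one-hot labels; the run id is a
# placeholder, not from the source):
#
#   model.fit(X, Y, n_epoch=20, validation_set=0.1, shuffle=True,
#             show_metric=True, batch_size=16, run_id='quality_sensor')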
| mit |
kalxas/QGIS | tests/src/python/test_qgsattributeformeditorwidget.py | 45 | 4566 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsAttributeFormEditorWidget.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '2016-05'
__copyright__ = 'Copyright 2016, The QGIS Project'
import qgis # NOQA
from qgis.gui import (QgsSearchWidgetWrapper,
QgsAttributeFormEditorWidget,
QgsDefaultSearchWidgetWrapper,
QgsAttributeForm,
QgsSearchWidgetToolButton,
QgsGui
)
from qgis.core import (QgsVectorLayer)
from qgis.PyQt.QtWidgets import QWidget, QDateTimeEdit
from qgis.PyQt.QtCore import QDateTime, QDate, QTime
from qgis.testing import start_app, unittest
start_app()
QgsGui.editorWidgetRegistry().initEditors()
class PyQgsAttributeFormEditorWidget(unittest.TestCase):
def testCurrentFilterExpression(self):
""" Test creating an expression using the widget"""
layer = QgsVectorLayer("Point?field=fldint:integer", "test", "memory")
parent = QWidget()
w = QgsDefaultSearchWidgetWrapper(layer, 0, parent)
setup = QgsGui.editorWidgetRegistry().findBest(layer, "fldint")
wrapper = QgsGui.editorWidgetRegistry().create(layer, 0, None, parent)
af = QgsAttributeFormEditorWidget(wrapper, setup.type(), None)
af.setSearchWidgetWrapper(w)
# test that filter combines both current value in search widget wrapper and flags from search tool button
w.lineEdit().setText('5.5')
sb = af.findChild(QWidget, "SearchWidgetToolButton")
sb.setActiveFlags(QgsSearchWidgetWrapper.EqualTo)
self.assertEqual(af.currentFilterExpression(), '"fldint"=5.5')
sb.setActiveFlags(QgsSearchWidgetWrapper.NotEqualTo)
self.assertEqual(af.currentFilterExpression(), '"fldint"<>5.5')
def testSetActive(self):
""" Test setting the search as active - should set active flags to match search widget wrapper's defaults """
layer = QgsVectorLayer("Point?field=fldtext:string&field=fldint:integer", "test", "memory")
parent = QWidget()
w = QgsDefaultSearchWidgetWrapper(layer, 0, parent)
setup = QgsGui.editorWidgetRegistry().findBest(layer, "fldint")
wrapper = QgsGui.editorWidgetRegistry().create(layer, 0, None, parent)
af = QgsAttributeFormEditorWidget(wrapper, setup.type(), None)
af.setSearchWidgetWrapper(w)
sb = af.findChild(QWidget, "SearchWidgetToolButton")
# start with inactive
sb.setActiveFlags(QgsSearchWidgetWrapper.FilterFlags())
# set to inactive
sb.setActive()
# check that correct default flag was taken from search widget wrapper
self.assertTrue(sb.activeFlags() & QgsSearchWidgetWrapper.Contains)
# try again with numeric field - default should be "EqualTo"
w = QgsDefaultSearchWidgetWrapper(layer, 1, parent)
af.setSearchWidgetWrapper(w)
# start with inactive
sb.setActiveFlags(QgsSearchWidgetWrapper.FilterFlags())
# set to inactive
sb.setActive()
# check that correct default flag was taken from search widget wrapper
self.assertTrue(sb.activeFlags() & QgsSearchWidgetWrapper.EqualTo)
def testBetweenFilter(self):
""" Test creating a between type filter """
layer = QgsVectorLayer("Point?field=fldtext:string&field=fldint:integer", "test", "memory")
form = QgsAttributeForm(layer)
wrapper = QgsGui.editorWidgetRegistry().create(layer, 0, None, form)
af = QgsAttributeFormEditorWidget(wrapper, 'DateTime', None)
af.createSearchWidgetWrappers()
d1 = af.findChildren(QDateTimeEdit)[0]
d2 = af.findChildren(QDateTimeEdit)[1]
d1.setDateTime(QDateTime(QDate(2013, 5, 6), QTime()))
d2.setDateTime(QDateTime(QDate(2013, 5, 16), QTime()))
sb = af.findChild(QWidget, "SearchWidgetToolButton")
sb.setActiveFlags(QgsSearchWidgetWrapper.Between)
self.assertEqual(af.currentFilterExpression(), '"fldtext">=\'2013-05-06\' AND "fldtext"<=\'2013-05-16\'')
sb.setActiveFlags(QgsSearchWidgetWrapper.IsNotBetween)
self.assertEqual(af.currentFilterExpression(), '"fldtext"<\'2013-05-06\' OR "fldtext">\'2013-05-16\'')
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
fqqb/yamcs-studio | bundles/org.csstudio.examples/examples/BOY Examples/scripts/addROI.py | 5 | 1595 | from org.csstudio.opibuilder.scriptUtil import PVUtil
from org.csstudio.swt.widgets.figures.IntensityGraphFigure import IROIListener, IROIInfoProvider
from org.csstudio.simplepv import IPVListener
from java.lang import Thread, Runnable
from org.eclipse.swt.widgets import Display
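# Script PVs (by convention of this example OPI): pvs[0] holds the ROI name;
# pvs[1..4] hold the ROI x index, y index, width and height.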
roiXPV = pvs[1]
roiYPV = pvs[2]
roiWPV = pvs[3]
roiHPV = pvs[4]
intensityGraph = widget.getFigure()
name = PVUtil.getString(pvs[0])
class MyROIInfoProvider(IROIInfoProvider):
'''Provide custom information for ROI.
'''
def getROIInfo(self, xIndex, yIndex, width, height):
return name + "(" + str(xIndex) + ", " + str(yIndex) + " )"
class MyROIListener(IROIListener):
'''Listener on ROI updates.
'''
def roiUpdated(self, xIndex, yIndex, width, height):
roiXPV.setValue(xIndex)
roiYPV.setValue(yIndex)
roiWPV.setValue(width)
roiHPV.setValue(height)
currentDisplay = Display.getCurrent()
class UpdateROIUITask(Runnable):
def run(self):
#this method must be called in UI thread
intensityGraph.setROIDataBounds(name, PVUtil.getLong(roiXPV), PVUtil.getLong(roiYPV), PVUtil.getLong(roiWPV),PVUtil.getLong(roiHPV))
class UpdateROIFromPVListener(IPVListener):
'''Update the ROI while ROI PV value updated'''
def valueChanged(self, pv):
currentDisplay.asyncExec(UpdateROIUITask())
intensityGraph.addROI(name, MyROIListener(), MyROIInfoProvider())
roiXPV.addListener(UpdateROIFromPVListener())
roiYPV.addListener(UpdateROIFromPVListener())
roiWPV.addListener(UpdateROIFromPVListener())
roiHPV.addListener(UpdateROIFromPVListener())
| epl-1.0 |
felixbb/forseti-security | google/cloud/security/common/gcp_type/resource.py | 1 | 4605 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GCP Resource.
For now, this only represents Organization resources. In the future, we may
need to separate the classes depending on implementation.
"""
import abc
from google.cloud.security.common.gcp_type import errors
class ResourceType(object):
"""Resource types."""
ORGANIZATION = 'organization'
FOLDER = 'folder'
PROJECT = 'project'
GROUP = 'group'
FORWARDING_RULE = 'forwarding_rule'
BUCKETS_ACL = 'buckets_acl'
resource_types = frozenset([
ORGANIZATION,
FOLDER,
PROJECT,
GROUP,
FORWARDING_RULE,
])
@classmethod
def verify(cls, resource_type):
"""Verify if the resource type is recognized.
Args:
resource_type: The string resource type.
Returns:
The resource type if it is recognized in the resource_types.
Raises:
InvalidResourceTypeError if resource type is not recognized.
"""
if resource_type not in cls.resource_types:
raise errors.InvalidResourceTypeError(
'Invalid resource type: {}'.format(resource_type))
return resource_type
# pylint: disable=too-few-public-methods
class LifecycleState(object):
"""Resource lifecycle state."""
ACTIVE = 'ACTIVE'
DELETED = 'DELETED'
UNSPECIFIED = 'LIFECYCLE_STATE_UNSPECIFIED'
class Resource(object):
"""Represents a GCP resource."""
__metaclass__ = abc.ABCMeta
def __init__(
self,
resource_id,
resource_type,
name=None,
display_name=None,
parent=None,
lifecycle_state=LifecycleState.UNSPECIFIED):
"""Initialize.
Args:
resource_id: The resource's unique id (string) in GCP.
resource_type: The resource type.
name: The resource unique name,
e.g. "<resource type>/{id}".
display_name: The resource display name.
parent: The parent Resource object.
lifecycle_state: The lifecycle state of the Resource.
"""
self._resource_id = str(resource_id)
self._resource_type = resource_type
if name:
self._name = name
else:
self._name = self.RESOURCE_NAME_FMT % resource_id
self._display_name = display_name
# TODO: maybe need assertion for parent type, e.g. assert that
# organization has no parent, whereas projects and folders can
# have either another folder or organization as a parent.
self._parent = parent
self._lifecycle_state = lifecycle_state
def __eq__(self, other):
"""Test equality of Resource."""
if not isinstance(other, type(self)):
return NotImplemented
return (self.id == other.id and
                self.type == other.type)
def __ne__(self, other):
"""Test inequality of Resource."""
return not self == other
def __hash__(self):
"""Create a hash on the resource type and id."""
return hash((self.type, self.id))
def __repr__(self):
"""String representation of the Resource."""
return '{}<id={},parent={}>'.format(
self.type, self.id, self.parent)
@property
def id(self):
"""Resource id."""
return self._resource_id
@property
def type(self):
"""Resource type."""
return self._resource_type
@property
def name(self):
"""GCP name."""
return self._name
@property
def display_name(self):
"""Display name."""
return self._display_name
@property
def parent(self):
"""Resource parent."""
return self._parent
@property
def lifecycle_state(self):
"""Lifecycle state."""
return self._lifecycle_state
@abc.abstractmethod
def exists(self):
"""Verify that the resource exists in GCP."""
raise NotImplementedError('Implement exists() in subclass')
| apache-2.0 |
shivam1111/odoo | addons/auth_signup/controllers/main.py | 144 | 6049 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import logging
import werkzeug
import openerp
from openerp.addons.auth_signup.res_users import SignupError
from openerp.addons.web.controllers.main import ensure_db
from openerp import http
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class AuthSignupHome(openerp.addons.web.controllers.main.Home):
@http.route()
def web_login(self, *args, **kw):
ensure_db()
response = super(AuthSignupHome, self).web_login(*args, **kw)
response.qcontext.update(self.get_auth_signup_config())
if request.httprequest.method == 'GET' and request.session.uid and request.params.get('redirect'):
# Redirect if already logged in and redirect param is present
return http.redirect_with_hash(request.params.get('redirect'))
return response
@http.route('/web/signup', type='http', auth='public', website=True)
def web_auth_signup(self, *args, **kw):
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('signup_enabled'):
raise werkzeug.exceptions.NotFound()
if 'error' not in qcontext and request.httprequest.method == 'POST':
try:
self.do_signup(qcontext)
return super(AuthSignupHome, self).web_login(*args, **kw)
except (SignupError, AssertionError), e:
qcontext['error'] = _(e.message)
return request.render('auth_signup.signup', qcontext)
@http.route('/web/reset_password', type='http', auth='public', website=True)
def web_auth_reset_password(self, *args, **kw):
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('reset_password_enabled'):
raise werkzeug.exceptions.NotFound()
if 'error' not in qcontext and request.httprequest.method == 'POST':
try:
if qcontext.get('token'):
self.do_signup(qcontext)
return super(AuthSignupHome, self).web_login(*args, **kw)
else:
login = qcontext.get('login')
assert login, "No login provided."
res_users = request.registry.get('res.users')
res_users.reset_password(request.cr, openerp.SUPERUSER_ID, login)
qcontext['message'] = _("An email has been sent with credentials to reset your password")
except SignupError:
qcontext['error'] = _("Could not reset your password")
_logger.exception('error when resetting password')
except Exception, e:
qcontext['error'] = _(e.message)
return request.render('auth_signup.reset_password', qcontext)
def get_auth_signup_config(self):
"""retrieve the module config (which features are enabled) for the login page"""
icp = request.registry.get('ir.config_parameter')
return {
'signup_enabled': icp.get_param(request.cr, openerp.SUPERUSER_ID, 'auth_signup.allow_uninvited') == 'True',
'reset_password_enabled': icp.get_param(request.cr, openerp.SUPERUSER_ID, 'auth_signup.reset_password') == 'True',
}
def get_auth_signup_qcontext(self):
""" Shared helper returning the rendering context for signup and reset password """
qcontext = request.params.copy()
qcontext.update(self.get_auth_signup_config())
if qcontext.get('token'):
try:
# retrieve the user info (name, login or email) corresponding to a signup token
res_partner = request.registry.get('res.partner')
token_infos = res_partner.signup_retrieve_info(request.cr, openerp.SUPERUSER_ID, qcontext.get('token'))
for k, v in token_infos.items():
qcontext.setdefault(k, v)
except:
qcontext['error'] = _("Invalid signup token")
return qcontext
def do_signup(self, qcontext):
""" Shared helper that creates a res.partner out of a token """
values = dict((key, qcontext.get(key)) for key in ('login', 'name', 'password'))
assert any([k for k in values.values()]), "The form was not properly filled in."
assert values.get('password') == qcontext.get('confirm_password'), "Passwords do not match; please retype them."
values['lang'] = request.lang
self._signup_with_values(qcontext.get('token'), values)
request.cr.commit()
def _signup_with_values(self, token, values):
db, login, password = request.registry['res.users'].signup(request.cr, openerp.SUPERUSER_ID, values, token)
request.cr.commit() # as authenticate will use its own cursor we need to commit the current transaction
uid = request.session.authenticate(db, login, password)
if not uid:
            raise SignupError(_('Authentication Failed.'))
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dmarteau/QGIS | tests/src/python/test_qgsserver_accesscontrol_wfs_transactional.py | 6 | 8590 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServer.
From build dir, run: ctest -R PyQgsServerAccessControlWFSTransactional -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Stephane Brunner'
__date__ = '28/08/2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
print('CTEST_FULL_OUTPUT')
from qgis.testing import unittest
from test_qgsserver_accesscontrol import TestQgsServerAccessControl, XML_NS
WFS_TRANSACTION_INSERT = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:Transaction {xml_ns}>
<wfs:Insert idgen="GenerateNew">
<qgs:db_point>
<qgs:geometry>
<gml:Point srsDimension="2" srsName="http://www.opengis.net/def/crs/EPSG/0/4326">
<gml:coordinates decimal="." cs="," ts=" ">{x},{y}</gml:coordinates>
</gml:Point>
</qgs:geometry>
<qgs:name>{name}</qgs:name>
<qgs:color>{color}</qgs:color>
</qgs:db_point>
</wfs:Insert>
</wfs:Transaction>""".format(x=1000, y=2000, name="test", color="{color}", xml_ns=XML_NS)
WFS_TRANSACTION_UPDATE = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:Transaction {xml_ns}>
<wfs:Update typeName="db_point">
<wfs:Property>
<wfs:Name>color</wfs:Name>
<wfs:Value>{color}</wfs:Value>
</wfs:Property>
<ogc:Filter>
<ogc:FeatureId fid="{id}"/>
</ogc:Filter>
</wfs:Update>
</wfs:Transaction>"""
WFS_TRANSACTION_DELETE = """<?xml version="1.0" encoding="UTF-8"?>
<wfs:Transaction {xml_ns}>
<wfs:Delete typeName="db_point">
<ogc:Filter>
<ogc:FeatureId fid="{id}"/>
</ogc:Filter>
</wfs:Delete>
</wfs:Transaction>"""
class TestQgsServerAccessControlWFSTransactional(TestQgsServerAccessControl):
def test_wfstransaction_insert(self):
data = WFS_TRANSACTION_INSERT.format(x=1000, y=2000, name="test", color="{color}", xml_ns=XML_NS)
self._test_colors({1: "blue"})
response, headers = self._post_fullaccess(data.format(color="red"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Insert is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Insert don't succeed\n%s" % response)
self._test_colors({2: "red"})
response, headers = self._post_restricted(data.format(color="blue"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Insert is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") == -1,
"WFS/Transactions Insert succeed\n%s" % response)
response, headers = self._post_restricted(data.format(color="red"), "LAYER_PERM=no")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Insert is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">No permissions to do WFS changes on layer \\\'db_point\\\'</ServiceException>') != -1,
"WFS/Transactions Insert succeed\n%s" % response)
response, headers = self._post_restricted(data.format(color="yellow"), "LAYER_PERM=yes")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Insert is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Insert don't succeed\n%s" % response)
self._test_colors({3: "yellow"})
def test_wfstransaction_update(self):
data = WFS_TRANSACTION_UPDATE.format(id="1", color="{color}", xml_ns=XML_NS)
self._test_colors({1: "blue"})
response, headers = self._post_restricted(data.format(color="yellow"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") == -1,
"WFS/Transactions Update succeed\n%s" % response)
self._test_colors({1: "blue"})
response, headers = self._post_fullaccess(data.format(color="red"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Update is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Update don't succeed\n%s" % response)
self._test_colors({1: "red"})
response, headers = self._post_restricted(data.format(color="blue"))
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Update is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") == -1,
"WFS/Transactions Update succeed\n%s" % response)
self._test_colors({1: "red"})
response, headers = self._post_restricted(data.format(color="yellow"), "LAYER_PERM=no")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Update is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">No permissions to do WFS changes on layer \\\'db_point\\\'</ServiceException>') != -1,
"WFS/Transactions Update succeed\n%s" % response)
self._test_colors({1: "red"})
response, headers = self._post_restricted(data.format(color="yellow"), "LAYER_PERM=yes")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for Update is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Update don't succeed\n%s" % response)
self._test_colors({1: "yellow"})
def test_wfstransaction_delete_fullaccess(self):
data = WFS_TRANSACTION_DELETE.format(id="1", xml_ns=XML_NS)
self._test_colors({1: "blue"})
response, headers = self._post_fullaccess(data)
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Delete don't succeed\n%s" % response)
def test_wfstransaction_delete_restricted(self):
data = WFS_TRANSACTION_DELETE.format(id="1", xml_ns=XML_NS)
self._test_colors({1: "blue"})
response, headers = self._post_restricted(data)
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") == -1,
"WFS/Transactions Delete succeed\n%s" % response)
data_update = WFS_TRANSACTION_UPDATE.format(id="1", color="red", xml_ns=XML_NS)
response, headers = self._post_fullaccess(data_update)
self._test_colors({1: "red"})
response, headers = self._post_restricted(data, "LAYER_PERM=no")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find(
'<ServiceException code="Security">No permissions to do WFS changes on layer \\\'db_point\\\'</ServiceException>') != -1,
"WFS/Transactions Delete succeed\n%s" % response)
response, headers = self._post_restricted(data, "LAYER_PERM=yes")
self.assertEqual(
headers.get("Content-Type"), "text/xml; charset=utf-8",
"Content type for GetMap is wrong: %s" % headers.get("Content-Type"))
self.assertTrue(
str(response).find("<SUCCESS/>") != -1,
"WFS/Transactions Delete don't succeed\n%s" % response)
if __name__ == "__main__":
unittest.main()
| gpl-2.0 |
danielvdao/facebookMacBot | venv/lib/python2.7/site-packages/pip/_vendor/distlib/manifest.py | 367 | 13497 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""
Class representing the list of files in a distribution.
Equivalent to distutils.filelist, but fixes some problems.
"""
import fnmatch
import logging
import os
import re
from . import DistlibException
from .compat import fsdecode
from .util import convert_path
__all__ = ['Manifest']
logger = logging.getLogger(__name__)
# a \ followed by some spaces + EOL
_COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M)
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
class Manifest(object):
"""A list of files built by on exploring the filesystem and filtered by
applying various patterns to what we find there.
"""
def __init__(self, base=None):
"""
Initialise an instance.
:param base: The base directory to explore under.
"""
self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
self.prefix = self.base + os.sep
self.allfiles = None
self.files = set()
#
# Public API
#
def findall(self):
"""Find all files under the base and set ``allfiles`` to the absolute
pathnames of files found.
"""
from stat import S_ISREG, S_ISDIR, S_ISLNK
self.allfiles = allfiles = []
root = self.base
stack = [root]
pop = stack.pop
push = stack.append
while stack:
root = pop()
names = os.listdir(root)
for name in names:
fullname = os.path.join(root, name)
# Avoid excess stat calls -- just one will do, thank you!
stat = os.stat(fullname)
mode = stat.st_mode
if S_ISREG(mode):
allfiles.append(fsdecode(fullname))
elif S_ISDIR(mode) and not S_ISLNK(mode):
push(fullname)
def add(self, item):
"""
Add a file to the manifest.
:param item: The pathname to add. This can be relative to the base.
"""
if not item.startswith(self.prefix):
item = os.path.join(self.base, item)
self.files.add(os.path.normpath(item))
def add_many(self, items):
"""
Add a list of files to the manifest.
:param items: The pathnames to add. These can be relative to the base.
"""
for item in items:
self.add(item)
def sorted(self, wantdirs=False):
"""
Return sorted files in directory order
"""
def add_dir(dirs, d):
dirs.add(d)
logger.debug('add_dir added %s', d)
if d != self.base:
parent, _ = os.path.split(d)
assert parent not in ('', '/')
add_dir(dirs, parent)
result = set(self.files) # make a copy!
if wantdirs:
dirs = set()
for f in result:
add_dir(dirs, os.path.dirname(f))
result |= dirs
return [os.path.join(*path_tuple) for path_tuple in
sorted(os.path.split(path) for path in result)]
def clear(self):
"""Clear all collected files."""
self.files = set()
self.allfiles = []
def process_directive(self, directive):
"""
Process a directive which either adds some files from ``allfiles`` to
``files``, or removes some files from ``files``.
:param directive: The directive to process. This should be in a format
compatible with distutils ``MANIFEST.in`` files:
http://docs.python.org/distutils/sourcedist.html#commands
"""
# Parse the line: split it up, make sure the right number of words
# is there, and return the relevant words. 'action' is always
# defined: it's the first word of the line. Which of the other
# three are defined depends on the action; it'll be either
# patterns, (dir and patterns), or (dirpattern).
action, patterns, thedir, dirpattern = self._parse_directive(directive)
# OK, now we know that the action is valid and we have the
# right number of words on the line for that action -- so we
# can proceed with minimal error-checking.
if action == 'include':
for pattern in patterns:
if not self._include_pattern(pattern, anchor=True):
logger.warning('no files found matching %r', pattern)
elif action == 'exclude':
for pattern in patterns:
if not self._exclude_pattern(pattern, anchor=True):
logger.warning('no previously-included files '
'found matching %r', pattern)
elif action == 'global-include':
for pattern in patterns:
if not self._include_pattern(pattern, anchor=False):
logger.warning('no files found matching %r '
'anywhere in distribution', pattern)
elif action == 'global-exclude':
for pattern in patterns:
if not self._exclude_pattern(pattern, anchor=False):
logger.warning('no previously-included files '
'matching %r found anywhere in '
'distribution', pattern)
elif action == 'recursive-include':
for pattern in patterns:
if not self._include_pattern(pattern, prefix=thedir):
logger.warning('no files found matching %r '
'under directory %r', pattern, thedir)
elif action == 'recursive-exclude':
for pattern in patterns:
if not self._exclude_pattern(pattern, prefix=thedir):
logger.warning('no previously-included files '
'matching %r found under directory %r',
pattern, thedir)
elif action == 'graft':
if not self._include_pattern(None, prefix=dirpattern):
logger.warning('no directories found matching %r',
dirpattern)
elif action == 'prune':
if not self._exclude_pattern(None, prefix=dirpattern):
logger.warning('no previously-included directories found '
'matching %r', dirpattern)
else: # pragma: no cover
# This should never happen, as it should be caught in
# _parse_template_line
raise DistlibException(
'invalid action %r' % action)
#
# Private API
#
def _parse_directive(self, directive):
"""
Validate a directive.
:param directive: The directive to validate.
:return: A tuple of action, patterns, thedir, dir_patterns
"""
words = directive.split()
if len(words) == 1 and words[0] not in ('include', 'exclude',
'global-include',
'global-exclude',
'recursive-include',
'recursive-exclude',
'graft', 'prune'):
# no action given, let's use the default 'include'
words.insert(0, 'include')
action = words[0]
patterns = thedir = dir_pattern = None
if action in ('include', 'exclude',
'global-include', 'global-exclude'):
if len(words) < 2:
raise DistlibException(
'%r expects <pattern1> <pattern2> ...' % action)
patterns = [convert_path(word) for word in words[1:]]
elif action in ('recursive-include', 'recursive-exclude'):
if len(words) < 3:
raise DistlibException(
'%r expects <dir> <pattern1> <pattern2> ...' % action)
thedir = convert_path(words[1])
patterns = [convert_path(word) for word in words[2:]]
elif action in ('graft', 'prune'):
if len(words) != 2:
raise DistlibException(
'%r expects a single <dir_pattern>' % action)
dir_pattern = convert_path(words[1])
else:
raise DistlibException('unknown action %r' % action)
return action, patterns, thedir, dir_pattern
def _include_pattern(self, pattern, anchor=True, prefix=None,
is_regex=False):
"""Select strings (presumably filenames) from 'self.files' that
match 'pattern', a Unix-style wildcard (glob) pattern.
Patterns are not quite the same as implemented by the 'fnmatch'
module: '*' and '?' match non-special characters, where "special"
is platform-dependent: slash on Unix; colon, slash, and backslash on
DOS/Windows; and colon on Mac OS.
If 'anchor' is true (the default), then the pattern match is more
stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
'anchor' is false, both of these will match.
If 'prefix' is supplied, then only filenames starting with 'prefix'
(itself a pattern) and ending with 'pattern', with anything in between
them, will match. 'anchor' is ignored in this case.
If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
'pattern' is assumed to be either a string containing a regex or a
regex object -- no translation is done, the regex is just compiled
and used as-is.
Selected strings will be added to self.files.
Return True if files are found.
"""
# XXX docstring lying about what the special chars are?
found = False
pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
# delayed loading of allfiles list
if self.allfiles is None:
self.findall()
for name in self.allfiles:
if pattern_re.search(name):
self.files.add(name)
found = True
return found
def _exclude_pattern(self, pattern, anchor=True, prefix=None,
is_regex=False):
"""Remove strings (presumably filenames) from 'files' that match
'pattern'.
Other parameters are the same as for 'include_pattern()', above.
The list 'self.files' is modified in place. Return True if files are
found.
This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
packaging source distributions
"""
found = False
pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
for f in list(self.files):
if pattern_re.search(f):
self.files.remove(f)
found = True
return found
def _translate_pattern(self, pattern, anchor=True, prefix=None,
is_regex=False):
"""Translate a shell-like wildcard pattern to a compiled regular
expression.
Return the compiled regex. If 'is_regex' true,
then 'pattern' is directly compiled to a regex (if it's a string)
or just returned as-is (assumes it's a regex object).
"""
if is_regex:
if isinstance(pattern, str):
return re.compile(pattern)
else:
return pattern
if pattern:
pattern_re = self._glob_to_re(pattern)
else:
pattern_re = ''
base = re.escape(os.path.join(self.base, ''))
if prefix is not None:
# ditch end of pattern character
empty_pattern = self._glob_to_re('')
prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
sep = os.sep
if os.sep == '\\':
sep = r'\\'
pattern_re = '^' + base + sep.join((prefix_re,
'.*' + pattern_re))
else: # no prefix -- respect anchor flag
if anchor:
pattern_re = '^' + base + pattern_re
return re.compile(pattern_re)
def _glob_to_re(self, pattern):
"""Translate a shell-like glob pattern to a regular expression.
Return a string containing the regex. Differs from
'fnmatch.translate()' in that '*' does not match "special characters"
(which are platform-specific).
"""
pattern_re = fnmatch.translate(pattern)
# '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
# IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
# and by extension they shouldn't match such "special characters" under
# any OS. So change all non-escaped dots in the RE to match any
# character except the special characters (currently: just os.sep).
sep = os.sep
if os.sep == '\\':
# we're using a regex to manipulate a regex, so we need
# to escape the backslash twice
sep = r'\\\\'
escaped = r'\1[^%s]' % sep
pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
return pattern_re
| mit |
uoaerg/linux-dccp | Documentation/conf.py | 60 | 19545 | # -*- coding: utf-8 -*-
#
# The Linux Kernel documentation build configuration file, created by
# sphinx-quickstart on Fri Feb 12 13:51:46 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import sphinx
# Get Sphinx version
major, minor, patch = sphinx.version_info[:3]
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('sphinx'))
from load_config import loadConfig
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.3'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['kerneldoc', 'rstFlatTable', 'kernel_include', 'cdomain', 'kfigure']
# The name of the math extension changed on Sphinx 1.4
if major == 1 and minor > 3:
extensions.append("sphinx.ext.imgmath")
else:
extensions.append("sphinx.ext.pngmath")
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'The Linux Kernel'
copyright = 'The kernel development community'
author = 'The kernel development community'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# In a normal build, version and release are set to KERNELVERSION and
# KERNELRELEASE, respectively, from the Makefile via Sphinx command line
# arguments.
#
# The following code tries to extract the information by reading the Makefile,
# when Sphinx is run directly (e.g. by Read the Docs).
try:
makefile_version = None
makefile_patchlevel = None
for line in open('../Makefile'):
        if '=' not in line:
            continue
        key, val = [x.strip() for x in line.split('=', 1)]
if key == 'VERSION':
makefile_version = val
elif key == 'PATCHLEVEL':
makefile_patchlevel = val
if makefile_version and makefile_patchlevel:
break
except:
pass
finally:
if makefile_version and makefile_patchlevel:
version = release = makefile_version + '.' + makefile_patchlevel
else:
sys.stderr.write('Warning: Could not extract kernel version\n')
version = release = "unknown version"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['output']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
primary_domain = 'c'
highlight_language = 'none'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# The Read the Docs theme is available from
# - https://github.com/snide/sphinx_rtd_theme
# - https://pypi.python.org/pypi/sphinx_rtd_theme
# - python-sphinx-rtd-theme package (on Debian)
try:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
sys.stderr.write('Warning: The Sphinx \'sphinx_rtd_theme\' HTML theme was not found. Make sure you have the theme installed to produce pretty HTML output. Falling back to the default theme.\n')
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['sphinx-static']
html_context = {
'css_files': [
'_static/theme_overrides.css',
],
}
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'TheLinuxKerneldoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '8pt',
# Latex figure (float) alignment
#'figure_align': 'htbp',
# Don't mangle with UTF-8 chars
'inputenc': '',
'utf8extra': '',
# Additional stuff for the LaTeX preamble.
'preamble': '''
% Use some font with UTF-8 support with XeLaTeX
\\usepackage{fontspec}
        \\setsansfont{DejaVu Sans}
        \\setromanfont{DejaVu Serif}
\\setmonofont{DejaVu Sans Mono}
'''
}
# Fix reference escape troubles with Sphinx 1.4.x
if major == 1 and minor > 3:
latex_elements['preamble'] += '\\renewcommand*{\\DUrole}[2]{ #2 }\n'
if major == 1 and minor <= 4:
latex_elements['preamble'] += '\\usepackage[margin=0.5in, top=1in, bottom=1in]{geometry}'
elif major == 1 and (minor > 5 or (minor == 5 and patch >= 3)):
latex_elements['sphinxsetup'] = 'hmargin=0.5in, vmargin=1in'
latex_elements['preamble'] += '\\fvset{fontsize=auto}\n'
# Customize notice background colors on Sphinx < 1.6:
if major == 1 and minor < 6:
latex_elements['preamble'] += '''
\\usepackage{ifthen}
% Put notes in color and let them be inside a table
\\definecolor{NoteColor}{RGB}{204,255,255}
\\definecolor{WarningColor}{RGB}{255,204,204}
\\definecolor{AttentionColor}{RGB}{255,255,204}
\\definecolor{ImportantColor}{RGB}{192,255,204}
\\definecolor{OtherColor}{RGB}{204,204,204}
\\newlength{\\mynoticelength}
\\makeatletter\\newenvironment{coloredbox}[1]{%
\\setlength{\\fboxrule}{1pt}
\\setlength{\\fboxsep}{7pt}
\\setlength{\\mynoticelength}{\\linewidth}
\\addtolength{\\mynoticelength}{-2\\fboxsep}
\\addtolength{\\mynoticelength}{-2\\fboxrule}
\\begin{lrbox}{\\@tempboxa}\\begin{minipage}{\\mynoticelength}}{\\end{minipage}\\end{lrbox}%
\\ifthenelse%
{\\equal{\\py@noticetype}{note}}%
{\\colorbox{NoteColor}{\\usebox{\\@tempboxa}}}%
{%
\\ifthenelse%
{\\equal{\\py@noticetype}{warning}}%
{\\colorbox{WarningColor}{\\usebox{\\@tempboxa}}}%
{%
\\ifthenelse%
{\\equal{\\py@noticetype}{attention}}%
{\\colorbox{AttentionColor}{\\usebox{\\@tempboxa}}}%
{%
\\ifthenelse%
{\\equal{\\py@noticetype}{important}}%
{\\colorbox{ImportantColor}{\\usebox{\\@tempboxa}}}%
{\\colorbox{OtherColor}{\\usebox{\\@tempboxa}}}%
}%
}%
}%
}\\makeatother
\\makeatletter
\\renewenvironment{notice}[2]{%
\\def\\py@noticetype{#1}
\\begin{coloredbox}{#1}
\\bf\\it
\\par\\strong{#2}
\\csname py@noticestart@#1\\endcsname
}
{
\\csname py@noticeend@\\py@noticetype\\endcsname
\\end{coloredbox}
}
\\makeatother
'''
# With Sphinx 1.6, it is possible to change the Bg color directly
# by using:
# \definecolor{sphinxnoteBgColor}{RGB}{204,255,255}
# \definecolor{sphinxwarningBgColor}{RGB}{255,204,204}
# \definecolor{sphinxattentionBgColor}{RGB}{255,255,204}
# \definecolor{sphinximportantBgColor}{RGB}{192,255,204}
#
# However, it require to use sphinx heavy box with:
#
# \renewenvironment{sphinxlightbox} {%
# \\begin{sphinxheavybox}
# }
# \\end{sphinxheavybox}
# }
#
# Unfortunately, the implementation is buggy: if a note is inside a
# table, it isn't displayed well. So, for now, let's use boring
# black and white notes.
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
# Sorted in alphabetical order
latex_documents = [
('admin-guide/index', 'linux-user.tex', 'Linux Kernel User Documentation',
'The kernel development community', 'manual'),
('core-api/index', 'core-api.tex', 'The kernel core API manual',
'The kernel development community', 'manual'),
('crypto/index', 'crypto-api.tex', 'Linux Kernel Crypto API manual',
'The kernel development community', 'manual'),
('dev-tools/index', 'dev-tools.tex', 'Development tools for the Kernel',
'The kernel development community', 'manual'),
('doc-guide/index', 'kernel-doc-guide.tex', 'Linux Kernel Documentation Guide',
'The kernel development community', 'manual'),
('driver-api/index', 'driver-api.tex', 'The kernel driver API manual',
'The kernel development community', 'manual'),
('filesystems/index', 'filesystems.tex', 'Linux Filesystems API',
'The kernel development community', 'manual'),
('gpu/index', 'gpu.tex', 'Linux GPU Driver Developer\'s Guide',
'The kernel development community', 'manual'),
('input/index', 'linux-input.tex', 'The Linux input driver subsystem',
'The kernel development community', 'manual'),
('kernel-hacking/index', 'kernel-hacking.tex', 'Unreliable Guide To Hacking The Linux Kernel',
'The kernel development community', 'manual'),
('media/index', 'media.tex', 'Linux Media Subsystem Documentation',
'The kernel development community', 'manual'),
('networking/index', 'networking.tex', 'Linux Networking Documentation',
'The kernel development community', 'manual'),
('process/index', 'development-process.tex', 'Linux Kernel Development Documentation',
'The kernel development community', 'manual'),
('security/index', 'security.tex', 'The kernel security subsystem manual',
'The kernel development community', 'manual'),
('sh/index', 'sh.tex', 'SuperH architecture implementation manual',
'The kernel development community', 'manual'),
('sound/index', 'sound.tex', 'Linux Sound Subsystem Documentation',
'The kernel development community', 'manual'),
('userspace-api/index', 'userspace-api.tex', 'The Linux kernel user-space API guide',
'The kernel development community', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'thelinuxkernel', 'The Linux Kernel Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'TheLinuxKernel', 'The Linux Kernel Documentation',
author, 'TheLinuxKernel', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to save
# visual space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
#=======
# rst2pdf
#
# Grouping the document tree into PDF files. List of tuples
# (source start file, target name, title, author, options).
#
# See the Sphinx chapter of http://ralsina.me/static/manual.pdf
#
# FIXME: Do not add the index file here; the result will be too big. Adding
# multiple PDF files here actually tries to get the cross-referencing right
# *between* PDF files.
pdf_documents = [
('kernel-documentation', u'Kernel', u'Kernel', u'J. Random Bozo'),
]
# kernel-doc extension configuration for running Sphinx directly (e.g. by Read
# the Docs). In a normal build, these are supplied from the Makefile via command
# line arguments.
kerneldoc_bin = '../scripts/kernel-doc'
kerneldoc_srctree = '..'
# ------------------------------------------------------------------------------
# Since loadConfig overwrites settings from the global namespace, it has to be
# the last statement in the conf.py file
# ------------------------------------------------------------------------------
loadConfig(globals())
| gpl-2.0 |
vfulco/scalpel | test/gravity/tae/pos/test_pos.py | 2 | 1712 |
from gravity.tae.tokenizer import Token, TokenSet
from gravity.tae.pos.pos import POS
from gravity import GRAVITY_LIB_HOME
from gravity.tae.corpora.conll import CoNLL2002
import unittest, os
class TestPOS(unittest.TestCase):
def test_pos(self):
self.assertEqual(len(POS.list()), 2)
for n in ('tnt', 'stanford'):
self.assertEqual(isinstance(POS.pos(n), POS), True)
txt = "This is center of Amsterdam !"
n = POS.pos("stanford", 'en')(txt)
self.assertEqual(len(n), len(txt.split()))
self.validate_tokens(txt, n)
self.assertEqual((n[-1][3] & Token.POS_PUNCT) > 0, True)
self.assertEqual((n[-2][3] & Token.POS_NOUN) > 0, True)
self.assertEqual((n[-2][3] & Token.POS_PUNCT) > 0, False)
import platform
if platform.platform().lower().find("linux") >= 0:
txt = "Dat is de center."
def test_lang(): POS.pos("tnt", 'en')
self.assertRaises(AssertionError, test_lang)
n = POS.pos("tnt", 'nl')(txt)
self.assertEqual(len(n), 5)
self.validate_tokens(txt, n)
self.assertEqual((n[-1][3] & Token.POS_PUNCT) > 0, True)
self.assertEqual((n[-2][3] & Token.POS_NOUN) > 0, True)
self.assertEqual((n[-2][3] & Token.POS_PUNCT) > 0, False)
self.assertEqual((n[-3][3] & Token.POS_ART) > 0, True)
else:
print "Skip testing TNT POS tagger. It is possible on Linux platform only."
def validate_tokens(self, txt, tokens):
assert len(tokens) > 0
for t in tokens: self.assertEqual(t[0], txt[t[1]:t[1] + t[2]])
if __name__ == '__main__':
unittest.main()
| lgpl-3.0 |
FlowFX/unkenmathe.de | src/um/exercises/views.py | 1 | 3723 | """Views for exercise app."""
from django.contrib.auth.mixins import LoginRequiredMixin, UserPassesTestMixin
from django.shortcuts import reverse
from django.urls import reverse_lazy
from django.views.generic import CreateView, DeleteView, DetailView, ListView, UpdateView
from .forms import ExerciseForm
from .models import Exercise, ExerciseExample
from ..core.jinja2 import jinja2_latex_env
from ..core.utils import pdflatex
from ..core.views import UserCanEditMixin, UserFormKwargsMixin, SaveAndContinueMixin, TestFuncMixin
from django.http import HttpResponse
class HowtoView(ListView):
"""View a number of example exercises."""
model = ExerciseExample
context_object_name = 'examples'
template_name = 'exercises/howto.html'
def exercise_pdf_view(request, slug):
"""Return PDF version of the single exercise."""
obj = Exercise.objects.get(slug=slug)
if not obj.text_tex: # pragma: no cover
obj.render_tex()
# Prepare LaTeX template
env = jinja2_latex_env
template = env.get_template('exercise_detail.j2.tex')
context = {'exercise': obj, }
rendered_template = template.render(context)
# Generate PDF from template
pdf = pdflatex(rendered_template)
# HTTP response
response = HttpResponse(content=pdf)
response['Content-Type'] = 'application/pdf'
filename = 'exercise.pdf'
response['Content-Disposition'] = f'inline; filename={filename}'
return response
class ExcerciseListView(ListView):
"""List all exercises."""
model = Exercise
context_object_name = 'exercises'
template_name = 'exercises/exercise_list.html'
class ExerciseCreateView(LoginRequiredMixin, UserFormKwargsMixin, CreateView):
"""Create view for a new exercise."""
model = Exercise
form_class = ExerciseForm
context_object_name = 'exercise'
def form_valid(self, form):
"""Add the current user as the author of object."""
self.object = form.save(commit=False)
self.object.author = self.get_form_kwargs()['user']
self.object.save()
return super(ExerciseCreateView, self).form_valid(form)
def get_context_data(self, **kwargs):
"""Add data to the template context."""
context = super(ExerciseCreateView, self).get_context_data(**kwargs)
# When exercise template is given on GET request
if context['form']['text'].initial:
context.update({
'exercise': {'text': context['form']['text'].initial},
})
return context
def get_initial(self):
"""Return initial data to use for forms on this view."""
initial = self.initial.copy()
template = self.request.GET.get('template')
if template:
template_exercise = Exercise.objects.get(slug=template)
initial.update({
'text': template_exercise.text,
})
return initial
class ExerciseDetailView(UserCanEditMixin, DetailView):
"""Detail view for an exercise."""
model = Exercise
success_url = reverse_lazy('index')
context_object_name = 'exercise'
class ExerciseUpdateView(TestFuncMixin, UserPassesTestMixin, SaveAndContinueMixin, UserFormKwargsMixin, UpdateView):
"""Update view for an exercise."""
model = Exercise
form_class = ExerciseForm
context_object_name = 'exercise'
def get_update_url(self):
return reverse('exercises:update', kwargs={'slug': self.object.slug})
class ExerciseDeleteView(TestFuncMixin, UserPassesTestMixin, DeleteView):
"""Delete view for an exercise."""
model = Exercise
success_url = reverse_lazy('index')
| agpl-3.0 |
willemt/yabtorrent | clib.py | 1 | 5248 | #!/usr/bin/env python
# encoding: utf-8
# Willem-Hendrik Thiart, 2014
from waflib.Configure import conf
import simplejson as json
import os
import itertools
# TODO
# Add exception for instances where there are multiple packages with same name
class PackageNotFoundException(Exception):
pass
class ClibPackage(object):
pass
def build(ctx):
ctx.clib_index()
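# Decorator: when `package` is a list, apply the wrapped function to each
# entry and return the union of all the results.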
def unionsets_if_list(func):
def func_wrapper(self, package, **kwargs):
if isinstance(package, list):
s = set()
for p in package:
s.update(func(self, p, **kwargs))
return list(s)
else:
return func(self, package, **kwargs)
return func_wrapper
@unionsets_if_list
def _clib_h_files(self, package, include_deps=True):
files = filter(lambda x: x.endswith(".h"), self.clib_manifest(package)['src'])
files = map(lambda x: '{0}{1}'.format(self.clib_path(package), os.path.basename(x)), files)
if include_deps:
deps = self.clib_dependencies(package)
files.extend(itertools.chain.from_iterable([self.clib_h_files(pkg) for pkg in deps]))
return list(set(files))
@unionsets_if_list
def _clib_c_files(self, package, include_deps=True):
files = filter(lambda x: x.endswith(".c"), self.clib_manifest(package)['src'])
files = map(lambda x: '{0}{1}'.format(self.clib_path(package), os.path.basename(x)), files)
if include_deps:
deps = self.clib_dependencies(package)
files.extend(itertools.chain.from_iterable([self.clib_c_files(pkg) for pkg in deps]))
return list(set(files))
@conf
def clib_h_files(self, package, include_deps=True):
""" Return all header files from package
Parameters
----------
package : string or list of strings
The package (repo or name) to get header files from.
This can be a list of packages.
include_deps: boolean
        Whether or not to include package dependencies
"""
return _clib_h_files(self, package, include_deps=include_deps)
@conf
def clib_c_files(self, package, include_deps=True):
""" Return all c files from package
Parameters
----------
package : string or list of strings
The package (repo or name) to get c files from.
This can be a list of packages.
include_deps: boolean
        Whether or not to include package dependencies
"""
return _clib_c_files(self, package, include_deps=include_deps)
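# Illustrative wscript usage (the package name 'clibs/list' is hypothetical):
#   def build(bld):
#       bld.stlib(source=bld.clib_c_files('clibs/list'),
#                 includes=bld.clib_h_paths('clibs/list'),
#                 target='deps')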
@unionsets_if_list
def _clib_h_paths(self, package, include_deps=True):
paths = set([self.clib_path(package)])
if include_deps:
deps = self.clib_dependencies(package)
paths.update(itertools.chain.from_iterable([self.clib_paths(pkg) for pkg in deps]))
return paths
@conf
def clib_h_paths(self, package, include_deps=True):
""" Return all paths that contain h files from package
Parameters
----------
package : string or list of strings
The package (repo or name) to get h paths from.
This can be a list of packages.
include_deps: boolean
        Whether or not to include package dependencies
"""
return list(set([h[:h.rfind('/')]
for h in self.clib_h_files(package, include_deps=include_deps)]))
@conf
def clib_path(self, package):
""" Return package path
Parameters
----------
package : string
The package (repo or name) to get the path from.
"""
#return '{0}/{1}/'.format(os.getcwd(), self.clib_get(package).path)
return '{0}/'.format(self.clib_get(package).path)
@conf
def clib_index(self):
""" Read package.json files inside deps folder """
self.packages_by_name = {}
self.packages_by_repo = {}
for dirname, dirnames, filenames in os.walk('deps/'):
if 'package.json' in filenames:
pkg = ClibPackage()
pkg.path = dirname
json_data = open("{0}/package.json".format(pkg.path))
pkg.manifest = json.load(json_data)
json_data.close()
self.packages_by_repo[pkg.manifest['repo']] = pkg
self.packages_by_name[pkg.manifest['name']] = pkg
@conf
def clib_manifest(self, package):
""" Return the dictionary contents of package.json file
Parameters
----------
package : string
The package (repo or name) to get the manifset from.
"""
return self.clib_get(package).manifest
@conf
def clib_dependencies(self, package):
""" Return a package's dependecies (repo name)
Parameters
----------
package : string
        The package (repo or name) to get the dependencies from.
"""
deps = set()
for dep in self.clib_manifest(package).get('dependencies', {}).iterkeys():
deps.add(dep)
for d in self.clib_dependencies(dep):
deps.add(d)
return deps
@conf
def clib_get(self, package):
""" Return package object """
if package in self.packages_by_name:
return self.packages_by_name[package]
elif package in self.packages_by_repo:
return self.packages_by_repo[package]
raise PackageNotFoundException(package)
| bsd-3-clause |
cloakedcode/CouchPotatoServer | libs/requests/packages/chardet2/charsetgroupprober.py | 63 | 3636 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import sys
from .charsetprober import CharSetProber
class CharSetGroupProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mActiveNum = 0
self._mProbers = []
self._mBestGuessProber = None
def reset(self):
CharSetProber.reset(self)
self._mActiveNum = 0
for prober in self._mProbers:
if prober:
prober.reset()
prober.active = True
self._mActiveNum += 1
self._mBestGuessProber = None
def get_charset_name(self):
if not self._mBestGuessProber:
self.get_confidence()
if not self._mBestGuessProber: return None
# self._mBestGuessProber = self._mProbers[0]
return self._mBestGuessProber.get_charset_name()
def feed(self, aBuf):
for prober in self._mProbers:
if not prober: continue
if not prober.active: continue
st = prober.feed(aBuf)
if not st: continue
if st == constants.eFoundIt:
self._mBestGuessProber = prober
return self.get_state()
elif st == constants.eNotMe:
prober.active = False
self._mActiveNum -= 1
if self._mActiveNum <= 0:
self._mState = constants.eNotMe
return self.get_state()
return self.get_state()
def get_confidence(self):
st = self.get_state()
if st == constants.eFoundIt:
return 0.99
elif st == constants.eNotMe:
return 0.01
bestConf = 0.0
self._mBestGuessProber = None
for prober in self._mProbers:
if not prober: continue
if not prober.active:
if constants._debug:
sys.stderr.write(prober.get_charset_name() + ' not active\n')
continue
cf = prober.get_confidence()
if constants._debug:
sys.stderr.write('%s confidence = %s\n' % (prober.get_charset_name(), cf))
if bestConf < cf:
bestConf = cf
self._mBestGuessProber = prober
if not self._mBestGuessProber: return 0.0
return bestConf
# else:
# self._mBestGuessProber = self._mProbers[0]
# return self._mBestGuessProber.get_confidence()
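# Illustrative sketch (hypothetical; concrete subclasses normally populate
# self._mProbers with their child probers, so the names below are placeholders):
#
#   group = CharSetGroupProber()
#   group._mProbers = [SomeProber(), AnotherProber()]
#   group.reset()
#   group.feed(byte_buffer)
#   best_name = group.get_charset_name()   # name of highest-confidence child
#   confidence = group.get_confidence()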
| gpl-3.0 |
oangervuori/namubufferi | namubufferiapp/models.py | 2 | 5893 | from base64 import b64encode
from datetime import timedelta
from decimal import Decimal
from hashlib import sha256
from os import urandom
import uuid
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.db.models.signals import post_save
from django.conf import settings
from django.db import models
from django.utils import timezone
# For old migrations
def generate_magic_key():
system_check_removed_details = {
'msg': (
'generate_magic_key has been removed except for support in '
'historical migrations.'
),
'hint': 'Use generate_magic_token instead.',
}
def generate_magic_token():
random = str(uuid.uuid4())
random = random.upper()
random = random.replace("-","")
magic = random[0:5]
print(magic)
return magic
class Tag(models.Model):
"""
A tag that represents identifiers such as NFC tags, barcodes, etc.
"""
uid = models.CharField(max_length=128, blank=False, unique=True)
def __str__(self):
return self.uid
class Meta:
abstract = True
class UserTag(Tag):
"""
A tag representing user's identification info
"""
user = models.ForeignKey(User)
timestamp = models.DateTimeField(auto_now_add=True)
timestamp.editable = False
class Account(models.Model):
"""
Extending the built-in model 'User' using a one-to-one relationship to
the built-in model.
https://docs.djangoproject.com/en/1.7/topics/auth/customizing/#extending-user
"""
user = models.OneToOneField(User)
magic_token = models.CharField(max_length=44, null=True, blank=True)
magic_token_ttl = models.DateTimeField(default=(timezone.now() + timedelta(minutes=15)))  # TODO: default is evaluated once at import time; should be a callable
tos_accepted = models.BooleanField(default=False)
@property
def balance(self):
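"""Sum of the amounts of all non-canceled transactions."""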
cur_balance = Decimal(0)
transactions = Transaction.objects.filter(customer=self).filter(canceled=False)
for transaction in transactions:
cur_balance += transaction.amount
return cur_balance
"""
Magic token allows user to login by email with unique
link that is alive only for 15 minutes
"""
def update_magic_token(self):
self.magic_token_ttl = timezone.now() + timedelta(minutes=15)
magic_token = generate_magic_token()
while (len(Account.objects.filter(magic_token=str(magic_token))) != 0):
magic_token = generate_magic_token()
self.magic_token = magic_token
self.save()
return self.magic_token
def deactivate_magic_token(self):
self.magic_token = None
self.magic_token_ttl = timezone.now()
self.save()
def magic_token_is_alive(self):
return timezone.now() < self.magic_token_ttl
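# Hypothetical lifecycle sketch (e.g. from a login view):
#
#   token = account.update_magic_token()      # emailed to the user
#   # ...later, when the emailed link is clicked...
#   if account.magic_token_is_alive() and account.magic_token == token:
#       account.deactivate_magic_token()      # tokens are single-use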
def __str__(self):
return self.user.username
@receiver(post_save, sender=User)
def handle_user_save(sender, instance, created, **kwargs):
"""
If a user were created directly via User.objects, it would not get
an Account instance without this signal handler.
"""
if created:
acc = Account.objects.create(user=instance)
acc.save()
acc.update_magic_token()
class Category(models.Model):
"""
Mainly a category for products, but could also be used
for something else
"""
name = models.CharField(max_length=30, unique=True)
def __str__(self):
return self.name
class Meta:
ordering = ['name']
class Product(models.Model):
name = models.CharField(max_length=128, unique=True)
category = models.ForeignKey(Category, related_name='products')
price = models.DecimalField(max_digits=5,
decimal_places=2,
default=1,
)
inventory = models.IntegerField(default=1)
hidden = models.BooleanField(default=False)
def make_sale(self):
self.inventory += -1
self.save()
def cancel_sale(self):
self.inventory += 1
self.save()
def __str__(self):
return self.name
class Meta:
ordering = ['name']
class ProductTag(Tag):
"""
A tag representing product's identity (like barcode)
"""
product = models.ForeignKey(Product)
class Transaction(models.Model):
"""
One transaction.
A positive amount means money going into the user's account; a
negative amount means money leaving the user's account.
The amount can't be derived from the product, as products might have
different prices at different times.
The canceled flag must be taken into account, for example when
calculating the balance from all transactions.
"""
amount = models.DecimalField(max_digits=5,
decimal_places=2,
default=0,
)
timestamp = models.DateTimeField(auto_now_add=True)
timestamp.editable = False
customer = models.ForeignKey(Account, null=True)
product = models.ForeignKey(Product, null=True)
canceled = models.BooleanField(default=False)
comment = models.CharField(max_length=256, null=True)
def get_date_string(self):
DATE_FORMAT = "%Y-%m-%d"
TIME_FORMAT = "%H:%M:%S"
if self.timestamp:
return self.timestamp.strftime("%s %s" %
(DATE_FORMAT, TIME_FORMAT))
def cancel(self):
if not self.canceled:
self.canceled = True
self.save()
if self.product:
self.product.cancel_sale()
def __str__(self):
if self.customer is not None:
return "%s, %s, %s" % (self.get_date_string(), self.customer.user.username, self.amount)
else:
return "%s, %s" % (self.get_date_string(), self.amount)
class Meta:
ordering = ["-timestamp"]
| mit |
TeamEOS/external_chromium_org | third_party/simplejson/scanner.py | 674 | 2560 | """JSON token scanner
"""
import re
def _import_c_make_scanner():
try:
from simplejson._speedups import make_scanner
return make_scanner
except ImportError:
return None
c_make_scanner = _import_c_make_scanner()
__all__ = ['make_scanner']
NUMBER_RE = re.compile(
r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
(re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
parse_object = context.parse_object
parse_array = context.parse_array
parse_string = context.parse_string
match_number = NUMBER_RE.match
encoding = context.encoding
strict = context.strict
parse_float = context.parse_float
parse_int = context.parse_int
parse_constant = context.parse_constant
object_hook = context.object_hook
object_pairs_hook = context.object_pairs_hook
memo = context.memo
def _scan_once(string, idx):
try:
nextchar = string[idx]
except IndexError:
raise StopIteration
if nextchar == '"':
return parse_string(string, idx + 1, encoding, strict)
elif nextchar == '{':
return parse_object((string, idx + 1), encoding, strict,
_scan_once, object_hook, object_pairs_hook, memo)
elif nextchar == '[':
return parse_array((string, idx + 1), _scan_once)
elif nextchar == 'n' and string[idx:idx + 4] == 'null':
return None, idx + 4
elif nextchar == 't' and string[idx:idx + 4] == 'true':
return True, idx + 4
elif nextchar == 'f' and string[idx:idx + 5] == 'false':
return False, idx + 5
m = match_number(string, idx)
if m is not None:
integer, frac, exp = m.groups()
if frac or exp:
res = parse_float(integer + (frac or '') + (exp or ''))
else:
res = parse_int(integer)
return res, m.end()
elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
return parse_constant('NaN'), idx + 3
elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
return parse_constant('Infinity'), idx + 8
elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
return parse_constant('-Infinity'), idx + 9
else:
raise StopIteration
def scan_once(string, idx):
try:
return _scan_once(string, idx)
finally:
memo.clear()
return scan_once
make_scanner = c_make_scanner or py_make_scanner
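# Rough usage sketch (assuming the simplejson JSONDecoder, which builds its
# scanner via make_scanner internally):
#
#   import simplejson
#   decoder = simplejson.JSONDecoder()
#   value, end = decoder.scan_once('[1, 2.5, true]', 0)
#   # value == [1, 2.5, True], end == 14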
| bsd-3-clause |
kemcoi/libfacebookcpp | src/external-lib/jsoncpp-src-0.5.0/devtools/fixeol.py | 247 | 1941 | import os.path
import sys
def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
"""Makes sure that all sources have the specified eol sequence (default: unix)."""
if not os.path.isfile( path ):
raise ValueError( 'Path "%s" is not a file' % path )
try:
f = open(path, 'rb')
except IOError, msg:
print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
return False
try:
raw_lines = f.readlines()
finally:
f.close()
fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
if raw_lines != fixed_lines:
print '%s =>' % path,
if not is_dry_run:
f = open(path, "wb")
try:
f.writelines(fixed_lines)
finally:
f.close()
if verbose:
print is_dry_run and ' NEED FIX' or ' FIXED'
return True
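## Example (hypothetical) invocation:
##   fix_source_eol('src/lib_json/json_reader.cpp', is_dry_run=False, eol='\n')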
##
##
##
##def _do_fix( is_dry_run = True ):
## from waftools import antglob
## python_sources = antglob.glob( '.',
## includes = '**/*.py **/wscript **/wscript_build',
## excludes = antglob.default_excludes + './waf.py',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## for path in python_sources:
## _fix_python_source( path, is_dry_run )
##
## cpp_sources = antglob.glob( '.',
## includes = '**/*.cpp **/*.h **/*.inl',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## for path in cpp_sources:
## _fix_source_eol( path, is_dry_run )
##
##
##def dry_fix(context):
## _do_fix( is_dry_run = True )
##
##def fix(context):
## _do_fix( is_dry_run = False )
##
##def shutdown():
## pass
##
##def check(context):
## # Unit tests are run when "check" target is used
## ut = UnitTest.unit_test()
## ut.change_to_testfile_dir = True
## ut.want_to_see_test_output = True
## ut.want_to_see_test_error = True
## ut.run()
## ut.print_results()
| lgpl-2.1 |
heri16/AutobahnPython | examples/websocket/echo_httpheaders/server.py | 17 | 2267 | ###############################################################################
##
## Copyright 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from autobahn.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS
class EchoServerProtocol(WebSocketServerProtocol):
def onConnect(self, connectionRequest):
headers = {'MyCustomDynamicServerHeader1': 'Hello'}
## Note: HTTP header field names are case-insensitive,
## hence AutobahnPython will normalize header field names to
## lower case.
##
if connectionRequest.headers.has_key('mycustomclientheader'):
headers['MyCustomDynamicServerHeader2'] = connectionRequest.headers['mycustomclientheader']
## return a pair with WS protocol spoken (or None for any) and
## custom headers to send in initial WS opening handshake HTTP response
##
return (None, headers)
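## (Hypothetical client side: a WebSocket client factory constructed with
## headers = {'MyCustomClientHeader': '...'} would surface here as
## connectionRequest.headers['mycustomclientheader'].)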
def onMessage(self, msg, binary):
self.sendMessage(msg, binary)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'debug':
log.startLogging(sys.stdout)
debug = True
else:
debug = False
headers = {'MyCustomServerHeader': 'Foobar'}
factory = WebSocketServerFactory("ws://localhost:9000",
headers = headers,
debug = debug,
debugCodePaths = debug)
factory.protocol = EchoServerProtocol
listenWS(factory)
reactor.run()
| apache-2.0 |
sysalexis/kbengine | kbe/src/lib/python/Lib/distutils/command/install_scripts.py | 204 | 2017 | """distutils.command.install_scripts
Implements the Distutils 'install_scripts' command, for installing
Python scripts."""
# contributed by Bastian Kleineidam
import os
from distutils.core import Command
from distutils import log
from stat import ST_MODE
class install_scripts(Command):
description = "install scripts (Python or otherwise)"
user_options = [
('install-dir=', 'd', "directory to install scripts to"),
('build-dir=','b', "build directory (where to install from)"),
('force', 'f', "force installation (overwrite existing files)"),
('skip-build', None, "skip the build steps"),
]
boolean_options = ['force', 'skip-build']
def initialize_options(self):
self.install_dir = None
self.force = 0
self.build_dir = None
self.skip_build = None
def finalize_options(self):
self.set_undefined_options('build', ('build_scripts', 'build_dir'))
self.set_undefined_options('install',
('install_scripts', 'install_dir'),
('force', 'force'),
('skip_build', 'skip_build'),
)
def run(self):
if not self.skip_build:
self.run_command('build_scripts')
self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
if os.name == 'posix':
# Set the executable bits (owner, group, and world) on
# all the scripts we just installed.
for file in self.get_outputs():
if self.dry_run:
log.info("changing mode of %s", file)
else:
mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777
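# e.g. a regular file with st_mode 0o100644 becomes
# (0o100644 | 0o555) & 0o7777 == 0o755 (rwxr-xr-x); the
# & 0o7777 mask drops the file-type bits from st_mode.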
log.info("changing mode of %s to %o", file, mode)
os.chmod(file, mode)
def get_inputs(self):
return self.distribution.scripts or []
def get_outputs(self):
return self.outfiles or []
| lgpl-3.0 |
Allend575/opencog | opencog/python/embodiment/situation.py | 34 | 13777 | from utility.generic import subsets_of_len_two
__author__ = 'keyvan'
from opencog.atomspace import AtomSpace, types, TruthValue
from random import randrange
_default_tv = TruthValue(1, 1)
class CombinationDescriptor(object):
pass
class TypeDescriptor(CombinationDescriptor):
def __init__(self, weight, object_type, block_type_descriptions=None):
self.weight = weight
self.object_type = object_type
self.block_type_descriptions = block_type_descriptions
class SpatialRelationDescriptor(CombinationDescriptor):
def __init__(self, weight, type_descriptors, relation):
self.weight = weight
self.type_descriptors = type_descriptors
self.relation = relation
class SituationGenerator(object):
def __init__(self, atomspace):
self.atomspace = atomspace
near_predicate_node = self.atomspace.add_node(types.PredicateNode, 'near', _default_tv)
on_top_of_predicate_node = self.atomspace.add_node(types.PredicateNode, 'on-top-of', _default_tv)
underneath_predicate_node = self.atomspace.add_node(types.PredicateNode, 'underneath', _default_tv)
inside_predicate_node = self.atomspace.add_node(types.PredicateNode, 'inside', _default_tv)
adjacent_to_predicate_node = self.atomspace.add_node(types.PredicateNode, 'adjacent-to', _default_tv)
self.spatial_relations = [near_predicate_node, on_top_of_predicate_node,
underneath_predicate_node, inside_predicate_node, adjacent_to_predicate_node]
self._counter_by_type = {}
def _generate_unique_name(self, object_type):
if object_type in self._counter_by_type:
self._counter_by_type[object_type] += 1
else:
self._counter_by_type[object_type] = 0
return object_type + '_' + str(self._counter_by_type[object_type])
def _generate_block_entity_nodes_from_description(self, type_descriptor):
nodes = []
for i in xrange(type_descriptor.weight):
entity_node = self.atomspace.add_node(types.BlockEntityNode,
self._generate_unique_name(type_descriptor.object_type), _default_tv)
if type_descriptor.block_type_descriptions is not None:
for property, predicate_type, predicate_value in type_descriptor.block_type_descriptions:
for i in range(randrange(1, 10)):
block = self.atomspace.add_node(types.StructureNode,
self._generate_unique_name('CHUNK_BLOCK'), _default_tv)
predicate_node = self.atomspace.add_node(types.PredicateNode, property, _default_tv)
value_node = self.atomspace.add_node(types.__dict__[predicate_type],
predicate_value, _default_tv)
list_link = self.atomspace.add_link(types.ListLink, [block, value_node])
self.atomspace.add_link(types.EvaluationLink, [predicate_node, list_link], _default_tv)
part_of_predicate = self.atomspace.add_node(types.PredicateNode, 'part-of', _default_tv)
list_link = self.atomspace.add_link(types.ListLink, [block, entity_node])
self.atomspace.add_link(types.EvaluationLink, [part_of_predicate, list_link], _default_tv)
nodes.append(entity_node)
return nodes
def generate_situation(self, **kwargs):
if 'randomness' not in kwargs:
randomness = 0
else:
randomness = kwargs['randomness']
if 'type_descriptors' not in kwargs:
type_descriptors = []
else:
type_descriptors = kwargs['type_descriptors']
if 'spatial_relation_descriptors' not in kwargs:
spatial_relation_descriptors = []
else:
spatial_relation_descriptors = kwargs['spatial_relation_descriptors']
entity_nodes = []
for descriptor in spatial_relation_descriptors:
for i in range(descriptor.weight):
for first_descriptor, second_descriptor in subsets_of_len_two(descriptor.type_descriptors):
for first_node in self._generate_block_entity_nodes_from_description(first_descriptor):
entity_nodes.append(first_node)
for second_node in self._generate_block_entity_nodes_from_description(second_descriptor):
entity_nodes.append(second_node)
list_link = self.atomspace.add_link(types.ListLink, [first_node, second_node])
predicate_node = self.atomspace.add_node(types.PredicateNode, descriptor.relation,
_default_tv)
self.atomspace.add_link(types.EvaluationLink, [predicate_node, list_link], _default_tv)
for descriptor in type_descriptors:
entity_nodes += self._generate_block_entity_nodes_from_description(descriptor)
number_of_entities = len(entity_nodes)
for i in range(int(round(randomness * number_of_entities))):
# select two random entities
index_of_first_entity = randrange(number_of_entities)
while True:
index_of_second_entity = randrange(number_of_entities)
if index_of_first_entity != index_of_second_entity:
break
list_link = self.atomspace.add_link(types.ListLink,
[entity_nodes[index_of_first_entity],
entity_nodes[index_of_second_entity]])
predicate_node = self.spatial_relations[randrange(len(self.spatial_relations))]
self.atomspace.add_link(types.EvaluationLink, [predicate_node, list_link], _default_tv)
def generate_sample_situation(atomspace):
garden_descriptor = TypeDescriptor(5, 'tree', [('color', 'ConceptNode', 'green'),
('color', 'ConceptNode', 'brown')])
# Describes 10 instances following Scheme definition of a tree:
#
# (EvaluationLink (stv 1 0.0012484394)
# (PredicateNode 'block-list')
# (ListLink
# (BlockEntityNode 'tree' (av 1000 0 0))
# (ListLink
# (StructureNode 'CHUNK_BLOCK_0')
# (StructureNode 'CHUNK_BLOCK_1')
# )
# )
#
# (EvaluationLink (stv 1 0.0012484394)
# (PredicateNode 'color')
# (ListLink
# (StructureNode 'CHUNK_BLOCK_0')
# (ConceptNode 'green')
# )
# )
#
# (EvaluationLink (stv 1 0.0012484394)
# (PredicateNode 'color')
# (ListLink
# (StructureNode 'CHUNK_BLOCK_1')
# (ConceptNode 'brown')
# )
# )
#
# Note: a random number of blocks (between 1 and 9) with the given
# description is generated, all bound to the given
# 'block_type_descriptions', which is a list of tuples of the form:
# (property_name, predicate_type, predicate_value)
# e.g. for tree, block_type_descriptions would be:
# [('color', 'ConceptNode', 'green'), ('color', 'ConceptNode', 'brown')]
house_descriptor = TypeDescriptor(1, 'house')
village_descriptor = SpatialRelationDescriptor(2, [house_descriptor, garden_descriptor], 'adjacent-to')
SituationGenerator(atomspace).generate_situation(spatial_relation_descriptors=[village_descriptor], randomness=0.2)
# Following code is commented out due to having errors,
# I'd fix it, but don't know where 'fishgram' is located nowadays...
#from logic import *
#from fishgram import *
#
#
#def test(atomspace):
# generate_sample_situation(atomspace)
# atomspace.print_list()
#
# print '\n==========================================='
# print 'Fishgram preprocessing'
# print '===========================================\n'
# chainer = Chainer(atomspace)
# target = T('EvaluationLink',
# atomspace.add(t.PredicateNode, 'contains-block-of-color'),
# new_var()
# )
# chainer.bc(target, nsteps=5000, nresults=20)
#
# import pdb;
#
# pdb.set_trace()
#
# print '\n==========================================='
# print 'Fishgram'
# print '===========================================\n'
# fishAndChips = Fishgram(atomspace)
# notice_changes(atomspace)
# fishAndChips.forest.extractForest()
# layers = fishAndChips.run()
#
# print 'concept nodes'
# fishAndChips.outputConceptNodes(layers)
#
# print '\n==========================================='
# print 'PLN - all subsets'
# print '===========================================\n'
#
# chainer = Chainer(atomspace)
#
# concept_nodes = (atomspace.get_atoms_by_type(types.ConceptNode, False) +
# atomspace.get_atoms_by_type(types.PredicateNode))
# concept_nodes = [n for n in concept_nodes if n.type_name in ['ConceptNode', 'PredicateNode']]
# concept_nodes = map(Tree, concept_nodes)
#
# print len(concept_nodes), 'concepts'
#
# for A in concept_nodes:
# for B in concept_nodes:
# target = T('SubsetLink', A, B)
#
# print target
# results = chainer.bc(target)
# print results
if __name__ == '__main__':
atomspace = AtomSpace()
# jade.ATOMS
ENTITY = atomspace.add_node('VariableNode', '$ENTITY')
BLOCK = atomspace.add_node('VariableNode', '$BLOCK')
COLOR = atomspace.add_node('VariableNode', '$COLOR')
atomspace.add_link('ForAllLink',
[atomspace.add_link('ListLink', [ENTITY, BLOCK, COLOR]),
atomspace.add_link('ImplicationLink',
[atomspace.add_link('AndLink',
[atomspace.add_link('EvaluationLink',
[atomspace.add_node('PredicateNode',
'part-of'),
atomspace.add_link('ListLink',
[BLOCK,
ENTITY])]),
atomspace.add_link('EvaluationLink',
[atomspace.add_node('PredicateNode',
'color'),
atomspace.add_link('ListLink',
[BLOCK,
COLOR])])]),
atomspace.add_link('EvaluationLink',
[atomspace.add_node('PredicateNode',
'contains-block-of-color'),
atomspace.add_link('ListLink', [ENTITY, COLOR])
])
])
], _default_tv)
ENTITY1 = atomspace.add_node('VariableNode', '$ENTITY1')
ENTITY2 = atomspace.add_node('VariableNode', '$ENTITY2')
CATEGORY = atomspace.add_node('VariableNode', '$CATEGORY')
atomspace.add_link('ForAllLink',
[atomspace.add_link('ListLink', [ENTITY1, ENTITY2, CATEGORY]),
atomspace.add_link('ImplicationLink',
[atomspace.add_link('AndLink',
[atomspace.add_link('EvaluationLink',
[atomspace.add_node('PredicateNode',
'adjacent-to'),
atomspace.add_link('ListLink',
[ENTITY1,
ENTITY2])]),
atomspace.add_link('MemberLink', [ENTITY2, CATEGORY])]),
atomspace.add_link('EvaluationLink',
[atomspace.add_node('PredicateNode',
'near-object-of-type'),
atomspace.add_link('ListLink', [ENTITY1, CATEGORY])
])
])
], _default_tv)
#test(atomspace) | agpl-3.0 |
dionyziz/rupture | backend/breach/tests/test_views.py | 1 | 8744 | from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from breach.models import Target, Victim, Round, SampleSet
import json
from binascii import hexlify
from mock import patch
class ViewsTestCase(TestCase):
def setUp(self):
self.client = Client()
self.target1 = Target.objects.create(
name='ruptureit',
endpoint='https://ruptureit.com/test.php?reflection=%s',
prefix='imper',
alphabet='abcdefghijklmnopqrstuvwxyz',
secretlength=9,
alignmentalphabet='ABCDEFGHIJKLMNOPQRSTUVWXYZ',
recordscardinality=1,
method=1
)
self.target2 = Target.objects.create(
name='ruptureit2',
endpoint='https://ruptureit.com/test.php?reflection=%s',
prefix='imper',
alphabet='abcdefghijklmnopqrstuvwxyz',
secretlength=9,
alignmentalphabet='ABCDEFGHIJKLMNOPQRSTUVWXYZ',
recordscardinality=1,
method=2
)
self.target1_data = {
'name': 'ruptureit',
'endpoint': 'https://ruptureit.com/test.php?reflection=%s',
'prefix': 'imper',
'alphabet': 'abcdefghijklmnopqrstuvwxyz',
'secretlength': 9,
'alignmentalphabet': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'recordscardinality': 1,
'method': 1
}
self.target2_data = {
'name': 'ruptureit2',
'endpoint': 'https://ruptureit.com/test.php?reflection=%s',
'prefix': 'imper',
'alphabet': 'abcdefghijklmnopqrstuvwxyz',
'secretlength': 9,
'alignmentalphabet': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'recordscardinality': 1,
'method': 2
}
def test_target_post(self):
"""
Test post requests for /target
"""
# Create the request
data = {
'name': 'ruptureit3',
'endpoint': 'https://ruptureit.com/test.php?reflection=%s',
'prefix': 'imper',
'alphabet': 'abcdefghijklmnopqrstuvwxyz',
'secretlength': 9,
'alignmentalphabet': 'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'recordscardinality': 1,
'method': 1
}
response = self.client.post(reverse('TargetView'), json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)['target_name'], 'ruptureit3')
def test_target_get(self):
response = self.client.get(reverse('TargetView'))
response_dict1 = {key: json.loads(response.content)['targets'][0][key] for key in self.target1_data}
response_dict2 = {key: json.loads(response.content)['targets'][1][key] for key in self.target2_data}
self.assertEqual(response.status_code, 200)
self.assertEqual(response_dict1, self.target1_data)
self.assertEqual(response_dict2, self.target2_data)
def test_victim_post(self):
"""
Test post requests for /victim
"""
# Create the request
data = {
'sourceip': '192.168.1.5',
}
response = self.client.post(reverse('VictimListView'), json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)['victim_id'], 1)
def test_victim_get(self):
victim = Victim.objects.create(
sourceip='192.168.1.5',
target=self.target1
)
round_data = {
'victim': victim,
'index': 1,
'amount': self.target1.samplesize,
'knownalphabet': 'abcdefghijklmnopqrstuvxyz',
'knownsecret': 'imper'
}
new_round = Round(**round_data)
new_round.save()
response = self.client.get(reverse('VictimListView'))
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)['victims'][0]['sourceip'], '192.168.1.5')
@patch('breach.models.Victim.attack')
def test_attack_post_noID(self, attack):
"""
Test post requests for /victim
"""
# Create the request
data = {
'sourceip': '192.168.1.6',
'target': self.target1.name
}
response = self.client.post(reverse('AttackView'), json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)['victim_id'], 1)
@patch('breach.models.Victim.attack')
def test_attack_post_ID(self, attack):
"""
Test post requests for /victim
"""
victim = Victim.objects.create(
sourceip='192.168.1.5'
)
# Create the request
data = {
'id': victim.id,
'target': self.target1.name
}
response = self.client.post(reverse('AttackView'), json.dumps(data), content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)['victim_id'], victim.id)
def test_victimID_get(self):
victim = Victim.objects.create(
sourceip='192.168.1.5',
target=self.target1
)
Victim.objects.create(
sourceip='192.168.1.6',
target=self.target2
)
round_data = {
'victim': victim,
'index': 1,
'amount': victim.target.samplesize,
'knownalphabet': 'abcdefghijklmnopqrstuvxyz',
'knownsecret': 'imper'
}
new_round = Round(**round_data)
new_round.save()
sampleset1_data = {
'round': new_round,
'candidatealphabet': 'a',
'datalength': len(hexlify('length')),
'success': True,
'alignmentalphabet': 'ABCDEFGHIJKLMNOPQRSTUVXYZ'
}
sampleset = SampleSet(**sampleset1_data)
sampleset.save()
sampleset2_data = {
'round': new_round,
'candidatealphabet': 'b',
'datalength': len(hexlify('length2')),
'success': True,
'alignmentalphabet': 'ABCDEFGHIJKLMNOPQRSTUVXYZ'
}
sampleset2 = SampleSet(**sampleset2_data)
sampleset2.save()
response = self.client.get(reverse('VictimDetailView', kwargs={'victim_id': victim.id}))
self.assertEqual(json.loads(response.content)['victim_ip'], '192.168.1.5')
self.assertEqual(json.loads(response.content)['target_name'], 'ruptureit')
self.assertEqual(json.loads(response.content)['attack_details'][0]['batch'], 0)
def test_victimID_patch_state(self):
victim = Victim.objects.create(
sourceip='192.168.1.5',
target=self.target1,
)
data1 = {'state': 'paused'}
data2 = {'state': 'running'}
response = self.client.patch(reverse('VictimDetailView', kwargs={'victim_id': victim.id}), json.dumps(data1), content_type='application/json', )
self.assertEqual(response.status_code, 200)
paused_victim = Victim.objects.get(pk=victim.id)
self.assertEqual(paused_victim.state, 'paused')
response = self.client.patch(reverse('VictimDetailView', kwargs={'victim_id': victim.id}), json.dumps(data2), content_type='application/json', )
restarted_victim = Victim.objects.get(pk=victim.id)
self.assertEqual(restarted_victim.state, 'running')
def test_victimID_patch_delete(self):
victim = Victim.objects.create(
sourceip='192.168.1.5',
target=self.target1,
)
data1 = {'deleted': True}
data2 = {'deleted': False}
response = self.client.patch(reverse('VictimDetailView', kwargs={'victim_id': victim.id}), json.dumps(data1), content_type='application/json', )
self.assertEqual(response.status_code, 200)
deleted_victim = Victim.objects.get(pk=victim.id)
self.assertNotEqual(deleted_victim.trashed_at, None)
response = self.client.patch(reverse('VictimDetailView', kwargs={'victim_id': victim.id}), json.dumps(data2), content_type='application/json', )
restored_victim = Victim.objects.get(pk=victim.id)
self.assertEqual(response.status_code, 200)
self.assertEqual(restored_victim.trashed_at, None)
@patch('breach.helpers.network.scan_network')
def test_victim_notstarted(self, scan_network):
response = self.client.get(reverse('DiscoveredVictimsView'))
self.assertEqual(response.status_code, 200)
| mit |