| repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
|---|---|---|---|---|---|---|---|---|---|---|
| string (length 5–100) | string (length 4–299) | categorical (990 values) | string (length 4–7) | string (length 666–1.03M) | categorical (15 values) | int64 (-9,223,351,895,964,839,000 to 9,223,297,778B) | float64 (3.17–100) | int64 (7–1k) | float64 (0.25–0.98) | bool (1 class) |
repo_name: Jozhogg/iris | path: docs/iris/example_tests/test_anomaly_log_colouring.py | copies: 1 | size: 1304

# (C) British Crown Copyright 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
# Import Iris tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from . import extest_util
with extest_util.add_examples_to_path():
import anomaly_log_colouring
class TestAnomalyLogColouring(tests.GraphicsTest):
"""Test the anomaly colouring example code."""
def test_anomaly_log_colouring(self):
with extest_util.show_replaced_by_check_graphic(self):
anomaly_log_colouring.main()
if __name__ == '__main__':
tests.main()
license: lgpl-3.0 | hash: 4,757,317,864,431,538,000 | line_mean: 33.315789 | line_max: 74 | alpha_frac: 0.736963 | autogenerated: false
repo_name: dgsantana/arsenalsuite | path: cpp/lib/PyQt4/examples/opengl/samplebuffers.py | copies: 20 | size: 6279

#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
import sys
import math
from PyQt4 import QtCore, QtGui, QtOpenGL
try:
from OpenGL import GL
except ImportError:
app = QtGui.QApplication(sys.argv)
QtGui.QMessageBox.critical(None, "OpenGL samplebuffers",
"PyOpenGL must be installed to run this example.")
sys.exit(1)
class GLWidget(QtOpenGL.QGLWidget):
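    # Raw OpenGL enum value for GL_MULTISAMPLE (multisample rasterization).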
GL_MULTISAMPLE = 0x809D
rot = 0.0
def __init__(self, parent):
super(GLWidget, self).__init__(QtOpenGL.QGLFormat(QtOpenGL.QGL.SampleBuffers), parent)
self.list_ = []
self.startTimer(40)
self.setWindowTitle("Sample Buffers")
def initializeGL(self):
GL.glMatrixMode(GL.GL_PROJECTION)
GL.glLoadIdentity()
GL.glOrtho( -.5, .5, .5, -.5, -1000, 1000)
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glLoadIdentity()
GL.glClearColor(1.0, 1.0, 1.0, 1.0)
self.makeObject()
def resizeGL(self, w, h):
GL.glViewport(0, 0, w, h)
def paintGL(self):
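        # Draw the display list twice: the left copy with multisampling
        # enabled, the right copy with it disabled, for side-by-side comparison.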
GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glPushMatrix()
GL.glEnable(GLWidget.GL_MULTISAMPLE)
GL.glTranslatef( -0.25, -0.10, 0.0)
GL.glScalef(0.75, 1.15, 0.0)
GL.glRotatef(GLWidget.rot, 0.0, 0.0, 1.0)
GL.glCallList(self.list_)
GL.glPopMatrix()
GL.glPushMatrix()
GL.glDisable(GLWidget.GL_MULTISAMPLE)
GL.glTranslatef(0.25, -0.10, 0.0)
GL.glScalef(0.75, 1.15, 0.0)
GL.glRotatef(GLWidget.rot, 0.0, 0.0, 1.0)
GL.glCallList(self.list_)
GL.glPopMatrix()
GLWidget.rot += 0.2
self.qglColor(QtCore.Qt.black)
self.renderText(-0.35, 0.4, 0.0, "Multisampling enabled")
self.renderText(0.15, 0.4, 0.0, "Multisampling disabled")
def timerEvent(self, event):
self.update()
def makeObject(self):
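        # Build a display list: a ring of NumSectors green quads and two
        # diagonal bars, each outlined in black.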
trolltechGreen = QtGui.QColor.fromCmykF(0.40, 0.0, 1.0, 0.0)
NumSectors = 15
x1 = +0.06
y1 = -0.14
x2 = +0.14
y2 = -0.06
x3 = +0.08
y3 = +0.00
x4 = +0.30
y4 = +0.22
self.list_ = GL.glGenLists(1)
GL.glNewList(self.list_, GL.GL_COMPILE)
for i in range(NumSectors):
angle1 = float((i * 2 * math.pi) / NumSectors)
x5 = 0.30 * math.sin(angle1)
y5 = 0.30 * math.cos(angle1)
x6 = 0.20 * math.sin(angle1)
y6 = 0.20 * math.cos(angle1)
angle2 = float(((i + 1) * 2 * math.pi) / NumSectors)
x7 = 0.20 * math.sin(angle2)
y7 = 0.20 * math.cos(angle2)
x8 = 0.30 * math.sin(angle2)
y8 = 0.30 * math.cos(angle2)
self.qglColor(trolltechGreen)
self.quad(GL.GL_QUADS, x5, y5, x6, y6, x7, y7, x8, y8)
self.qglColor(QtCore.Qt.black)
self.quad(GL.GL_LINE_LOOP, x5, y5, x6, y6, x7, y7, x8, y8)
self.qglColor(trolltechGreen)
self.quad(GL.GL_QUADS, x1, y1, x2, y2, y2, x2, y1, x1)
self.quad(GL.GL_QUADS, x3, y3, x4, y4, y4, x4, y3, x3)
self.qglColor(QtCore.Qt.black)
self.quad(GL.GL_LINE_LOOP, x1, y1, x2, y2, y2, x2, y1, x1)
self.quad(GL.GL_LINE_LOOP, x3, y3, x4, y4, y4, x4, y3, x3)
GL.glEndList()
def quad(self, primitive, x1, y1, x2, y2, x3, y3, x4, y4):
GL.glBegin(primitive)
GL.glVertex2d(x1, y1)
GL.glVertex2d(x2, y2)
GL.glVertex2d(x3, y3)
GL.glVertex2d(x4, y4)
GL.glEnd()
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
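    # Request sample buffers in the default GL format before creating widgets.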
f = QtOpenGL.QGLFormat.defaultFormat()
f.setSampleBuffers(True)
QtOpenGL.QGLFormat.setDefaultFormat(f)
if not QtOpenGL.QGLFormat.hasOpenGL():
QtGui.QMessageBox.information(None, "OpenGL samplebuffers",
"This system does not support OpenGL.")
sys.exit(0)
widget = GLWidget(None)
if not widget.format().sampleBuffers():
QtGui.QMessageBox.information(None, "OpenGL samplebuffers",
"This system does not have sample buffer support.")
sys.exit(0)
widget.resize(640, 480)
widget.show()
sys.exit(app.exec_())
license: gpl-2.0 | hash: -3,656,334,288,124,663,300 | line_mean: 32.047368 | line_max: 94 | alpha_frac: 0.607899 | autogenerated: false
repo_name: asnorkin/sentiment_analysis | path: site/lib/python2.7/site-packages/sklearn/ensemble/tests/test_weight_boosting.py | copies: 23 | size: 17698

"""Testing for the boost module (sklearn.ensemble.boost)."""
import numpy as np
from sklearn.utils.testing import assert_array_equal, assert_array_less
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_equal, assert_true, assert_greater
from sklearn.utils.testing import assert_raises, assert_raises_regexp
from sklearn.base import BaseEstimator
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import AdaBoostRegressor
from sklearn.ensemble import weight_boosting
from scipy.sparse import csc_matrix
from scipy.sparse import csr_matrix
from scipy.sparse import coo_matrix
from scipy.sparse import dok_matrix
from scipy.sparse import lil_matrix
from sklearn.svm import SVC, SVR
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
from sklearn.utils import shuffle
from sklearn import datasets
# Common random state
rng = np.random.RandomState(0)
# Toy sample
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y_class = ["foo", "foo", "foo", 1, 1, 1] # test string class labels
y_regr = [-1, -1, -1, 1, 1, 1]
T = [[-1, -1], [2, 2], [3, 2]]
y_t_class = ["foo", 1, 1]
y_t_regr = [-1, 1, 1]
# Load the iris dataset and randomly permute it
iris = datasets.load_iris()
perm = rng.permutation(iris.target.size)
iris.data, iris.target = shuffle(iris.data, iris.target, random_state=rng)
# Load the boston dataset and randomly permute it
boston = datasets.load_boston()
boston.data, boston.target = shuffle(boston.data, boston.target,
random_state=rng)
def test_samme_proba():
# Test the `_samme_proba` helper function.
# Define some example (bad) `predict_proba` output.
probs = np.array([[1, 1e-6, 0],
[0.19, 0.6, 0.2],
[-999, 0.51, 0.5],
[1e-6, 1, 1e-9]])
probs /= np.abs(probs.sum(axis=1))[:, np.newaxis]
# _samme_proba calls estimator.predict_proba.
# Make a mock object so I can control what gets returned.
class MockEstimator(object):
def predict_proba(self, X):
assert_array_equal(X.shape, probs.shape)
return probs
mock = MockEstimator()
samme_proba = weight_boosting._samme_proba(mock, 3, np.ones_like(probs))
assert_array_equal(samme_proba.shape, probs.shape)
assert_true(np.isfinite(samme_proba).all())
# Make sure that the correct elements come out as smallest --
# `_samme_proba` should preserve the ordering in each example.
assert_array_equal(np.argmin(samme_proba, axis=1), [2, 0, 0, 2])
assert_array_equal(np.argmax(samme_proba, axis=1), [0, 1, 1, 1])
def test_classification_toy():
# Check classification on a toy dataset.
for alg in ['SAMME', 'SAMME.R']:
clf = AdaBoostClassifier(algorithm=alg, random_state=0)
clf.fit(X, y_class)
assert_array_equal(clf.predict(T), y_t_class)
assert_array_equal(np.unique(np.asarray(y_t_class)), clf.classes_)
assert_equal(clf.predict_proba(T).shape, (len(T), 2))
assert_equal(clf.decision_function(T).shape, (len(T),))
def test_regression_toy():
# Check classification on a toy dataset.
clf = AdaBoostRegressor(random_state=0)
clf.fit(X, y_regr)
assert_array_equal(clf.predict(T), y_t_regr)
def test_iris():
# Check consistency on dataset iris.
classes = np.unique(iris.target)
clf_samme = prob_samme = None
for alg in ['SAMME', 'SAMME.R']:
clf = AdaBoostClassifier(algorithm=alg)
clf.fit(iris.data, iris.target)
assert_array_equal(classes, clf.classes_)
proba = clf.predict_proba(iris.data)
if alg == "SAMME":
clf_samme = clf
prob_samme = proba
assert_equal(proba.shape[1], len(classes))
assert_equal(clf.decision_function(iris.data).shape[1], len(classes))
score = clf.score(iris.data, iris.target)
assert score > 0.9, "Failed with algorithm %s and score = %f" % \
(alg, score)
# Check we used multiple estimators
assert_greater(len(clf.estimators_), 1)
# Check for distinct random states (see issue #7408)
assert_equal(len(set(est.random_state for est in clf.estimators_)),
len(clf.estimators_))
# Somewhat hacky regression test: prior to
# ae7adc880d624615a34bafdb1d75ef67051b8200,
# predict_proba returned SAMME.R values for SAMME.
clf_samme.algorithm = "SAMME.R"
assert_array_less(0,
np.abs(clf_samme.predict_proba(iris.data) - prob_samme))
def test_boston():
# Check consistency on dataset boston house prices.
reg = AdaBoostRegressor(random_state=0)
reg.fit(boston.data, boston.target)
score = reg.score(boston.data, boston.target)
assert score > 0.85
# Check we used multiple estimators
assert_true(len(reg.estimators_) > 1)
# Check for distinct random states (see issue #7408)
assert_equal(len(set(est.random_state for est in reg.estimators_)),
len(reg.estimators_))
def test_staged_predict():
# Check staged predictions.
rng = np.random.RandomState(0)
iris_weights = rng.randint(10, size=iris.target.shape)
boston_weights = rng.randint(10, size=boston.target.shape)
# AdaBoost classification
for alg in ['SAMME', 'SAMME.R']:
clf = AdaBoostClassifier(algorithm=alg, n_estimators=10)
clf.fit(iris.data, iris.target, sample_weight=iris_weights)
predictions = clf.predict(iris.data)
staged_predictions = [p for p in clf.staged_predict(iris.data)]
proba = clf.predict_proba(iris.data)
staged_probas = [p for p in clf.staged_predict_proba(iris.data)]
score = clf.score(iris.data, iris.target, sample_weight=iris_weights)
staged_scores = [
s for s in clf.staged_score(
iris.data, iris.target, sample_weight=iris_weights)]
assert_equal(len(staged_predictions), 10)
assert_array_almost_equal(predictions, staged_predictions[-1])
assert_equal(len(staged_probas), 10)
assert_array_almost_equal(proba, staged_probas[-1])
assert_equal(len(staged_scores), 10)
assert_array_almost_equal(score, staged_scores[-1])
# AdaBoost regression
clf = AdaBoostRegressor(n_estimators=10, random_state=0)
clf.fit(boston.data, boston.target, sample_weight=boston_weights)
predictions = clf.predict(boston.data)
staged_predictions = [p for p in clf.staged_predict(boston.data)]
score = clf.score(boston.data, boston.target, sample_weight=boston_weights)
staged_scores = [
s for s in clf.staged_score(
boston.data, boston.target, sample_weight=boston_weights)]
assert_equal(len(staged_predictions), 10)
assert_array_almost_equal(predictions, staged_predictions[-1])
assert_equal(len(staged_scores), 10)
assert_array_almost_equal(score, staged_scores[-1])
def test_gridsearch():
# Check that base trees can be grid-searched.
# AdaBoost classification
boost = AdaBoostClassifier(base_estimator=DecisionTreeClassifier())
parameters = {'n_estimators': (1, 2),
'base_estimator__max_depth': (1, 2),
'algorithm': ('SAMME', 'SAMME.R')}
clf = GridSearchCV(boost, parameters)
clf.fit(iris.data, iris.target)
# AdaBoost regression
boost = AdaBoostRegressor(base_estimator=DecisionTreeRegressor(),
random_state=0)
parameters = {'n_estimators': (1, 2),
'base_estimator__max_depth': (1, 2)}
clf = GridSearchCV(boost, parameters)
clf.fit(boston.data, boston.target)
def test_pickle():
# Check pickability.
import pickle
# Adaboost classifier
for alg in ['SAMME', 'SAMME.R']:
obj = AdaBoostClassifier(algorithm=alg)
obj.fit(iris.data, iris.target)
score = obj.score(iris.data, iris.target)
s = pickle.dumps(obj)
obj2 = pickle.loads(s)
assert_equal(type(obj2), obj.__class__)
score2 = obj2.score(iris.data, iris.target)
assert_equal(score, score2)
# Adaboost regressor
obj = AdaBoostRegressor(random_state=0)
obj.fit(boston.data, boston.target)
score = obj.score(boston.data, boston.target)
s = pickle.dumps(obj)
obj2 = pickle.loads(s)
assert_equal(type(obj2), obj.__class__)
score2 = obj2.score(boston.data, boston.target)
assert_equal(score, score2)
def test_importances():
# Check variable importances.
X, y = datasets.make_classification(n_samples=2000,
n_features=10,
n_informative=3,
n_redundant=0,
n_repeated=0,
shuffle=False,
random_state=1)
for alg in ['SAMME', 'SAMME.R']:
clf = AdaBoostClassifier(algorithm=alg)
clf.fit(X, y)
importances = clf.feature_importances_
assert_equal(importances.shape[0], 10)
assert_equal((importances[:3, np.newaxis] >= importances[3:]).all(),
True)
def test_error():
# Test that it gives proper exception on deficient input.
assert_raises(ValueError,
AdaBoostClassifier(learning_rate=-1).fit,
X, y_class)
assert_raises(ValueError,
AdaBoostClassifier(algorithm="foo").fit,
X, y_class)
assert_raises(ValueError,
AdaBoostClassifier().fit,
X, y_class, sample_weight=np.asarray([-1]))
def test_base_estimator():
# Test different base estimators.
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
# XXX doesn't work with y_class because RF doesn't support classes_
# Shouldn't AdaBoost run a LabelBinarizer?
clf = AdaBoostClassifier(RandomForestClassifier())
clf.fit(X, y_regr)
clf = AdaBoostClassifier(SVC(), algorithm="SAMME")
clf.fit(X, y_class)
from sklearn.ensemble import RandomForestRegressor
from sklearn.svm import SVR
clf = AdaBoostRegressor(RandomForestRegressor(), random_state=0)
clf.fit(X, y_regr)
clf = AdaBoostRegressor(SVR(), random_state=0)
clf.fit(X, y_regr)
# Check that an empty discrete ensemble fails in fit, not predict.
X_fail = [[1, 1], [1, 1], [1, 1], [1, 1]]
y_fail = ["foo", "bar", 1, 2]
clf = AdaBoostClassifier(SVC(), algorithm="SAMME")
assert_raises_regexp(ValueError, "worse than random",
clf.fit, X_fail, y_fail)
def test_sample_weight_missing():
from sklearn.linear_model import LogisticRegression
from sklearn.cluster import KMeans
clf = AdaBoostClassifier(KMeans(), algorithm="SAMME")
assert_raises(ValueError, clf.fit, X, y_regr)
clf = AdaBoostRegressor(KMeans())
assert_raises(ValueError, clf.fit, X, y_regr)
def test_sparse_classification():
# Check classification with sparse input.
class CustomSVC(SVC):
"""SVC variant that records the nature of the training set."""
        def fit(self, X, y, sample_weight=None):
            """Modification on fit carries data type for later verification."""
super(CustomSVC, self).fit(X, y, sample_weight=sample_weight)
self.data_type_ = type(X)
return self
X, y = datasets.make_multilabel_classification(n_classes=1, n_samples=15,
n_features=5,
random_state=42)
# Flatten y to a 1d array
y = np.ravel(y)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
for sparse_format in [csc_matrix, csr_matrix, lil_matrix, coo_matrix,
dok_matrix]:
X_train_sparse = sparse_format(X_train)
X_test_sparse = sparse_format(X_test)
# Trained on sparse format
sparse_classifier = AdaBoostClassifier(
base_estimator=CustomSVC(probability=True),
random_state=1,
algorithm="SAMME"
).fit(X_train_sparse, y_train)
# Trained on dense format
dense_classifier = AdaBoostClassifier(
base_estimator=CustomSVC(probability=True),
random_state=1,
algorithm="SAMME"
).fit(X_train, y_train)
# predict
sparse_results = sparse_classifier.predict(X_test_sparse)
dense_results = dense_classifier.predict(X_test)
assert_array_equal(sparse_results, dense_results)
# decision_function
sparse_results = sparse_classifier.decision_function(X_test_sparse)
dense_results = dense_classifier.decision_function(X_test)
assert_array_equal(sparse_results, dense_results)
# predict_log_proba
sparse_results = sparse_classifier.predict_log_proba(X_test_sparse)
dense_results = dense_classifier.predict_log_proba(X_test)
assert_array_equal(sparse_results, dense_results)
# predict_proba
sparse_results = sparse_classifier.predict_proba(X_test_sparse)
dense_results = dense_classifier.predict_proba(X_test)
assert_array_equal(sparse_results, dense_results)
# score
sparse_results = sparse_classifier.score(X_test_sparse, y_test)
dense_results = dense_classifier.score(X_test, y_test)
assert_array_equal(sparse_results, dense_results)
# staged_decision_function
sparse_results = sparse_classifier.staged_decision_function(
X_test_sparse)
dense_results = dense_classifier.staged_decision_function(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
# staged_predict
sparse_results = sparse_classifier.staged_predict(X_test_sparse)
dense_results = dense_classifier.staged_predict(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
# staged_predict_proba
sparse_results = sparse_classifier.staged_predict_proba(X_test_sparse)
dense_results = dense_classifier.staged_predict_proba(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
# staged_score
sparse_results = sparse_classifier.staged_score(X_test_sparse,
y_test)
dense_results = dense_classifier.staged_score(X_test, y_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
# Verify sparsity of data is maintained during training
types = [i.data_type_ for i in sparse_classifier.estimators_]
assert all([(t == csc_matrix or t == csr_matrix)
for t in types])
def test_sparse_regression():
# Check regression with sparse input.
class CustomSVR(SVR):
"""SVR variant that records the nature of the training set."""
        def fit(self, X, y, sample_weight=None):
            """Modification on fit carries data type for later verification."""
super(CustomSVR, self).fit(X, y, sample_weight=sample_weight)
self.data_type_ = type(X)
return self
X, y = datasets.make_regression(n_samples=15, n_features=50, n_targets=1,
random_state=42)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
for sparse_format in [csc_matrix, csr_matrix, lil_matrix, coo_matrix,
dok_matrix]:
X_train_sparse = sparse_format(X_train)
X_test_sparse = sparse_format(X_test)
# Trained on sparse format
sparse_classifier = AdaBoostRegressor(
base_estimator=CustomSVR(),
random_state=1
).fit(X_train_sparse, y_train)
# Trained on dense format
        dense_classifier = AdaBoostRegressor(
base_estimator=CustomSVR(),
random_state=1
).fit(X_train, y_train)
# predict
sparse_results = sparse_classifier.predict(X_test_sparse)
dense_results = dense_classifier.predict(X_test)
assert_array_equal(sparse_results, dense_results)
# staged_predict
sparse_results = sparse_classifier.staged_predict(X_test_sparse)
dense_results = dense_classifier.staged_predict(X_test)
        for sparse_res, dense_res in zip(sparse_results, dense_results):
            assert_array_equal(sparse_res, dense_res)
types = [i.data_type_ for i in sparse_classifier.estimators_]
assert all([(t == csc_matrix or t == csr_matrix)
for t in types])
def test_sample_weight_adaboost_regressor():
"""
AdaBoostRegressor should work without sample_weights in the base estimator
The random weighted sampling is done internally in the _boost method in
AdaBoostRegressor.
"""
class DummyEstimator(BaseEstimator):
def fit(self, X, y):
pass
def predict(self, X):
return np.zeros(X.shape[0])
boost = AdaBoostRegressor(DummyEstimator(), n_estimators=3)
boost.fit(X, y_regr)
assert_equal(len(boost.estimator_weights_), len(boost.estimator_errors_))
license: mit | hash: 1,253,682,346,333,298,700 | line_mean: 36.102725 | line_max: 79 | alpha_frac: 0.632162 | autogenerated: false
repo_name: delectable/DIGITS | path: tools/test_analyze_db.py | copies: 3 | size: 1449

# Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.
import os.path
import shutil
import tempfile
import lmdb
import numpy as np
from . import analyze_db as _
import caffe.io
import caffe_pb2
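# Base test case: builds a temporary LMDB with two images (equal or differing
# shapes depending on SAME_SHAPE) and checks analyze_db against the PASS_*
# expectations.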
class BaseTestWithDB(object):
SAME_SHAPE = True
PASS_DEFAULTS = True
PASS_FORCE = True
PASS_COUNT = True
@classmethod
def setUpClass(cls):
cls._data_dir = tempfile.mkdtemp()
cls.db = lmdb.open(os.path.join(cls._data_dir, 'db'))
for i in xrange(2):
if cls.SAME_SHAPE:
width = 10
else:
width = 10+i
datum = cls.create_datum(10,width,3)
with cls.db.begin(write=True) as txn:
txn.put(str(i), datum.SerializeToString())
@classmethod
def tearDownClass(cls):
cls.db.close()
shutil.rmtree(cls._data_dir)
@staticmethod
def create_datum(*shape):
"""
Creates a datum with an image of the given shape
"""
image = np.ones(shape, dtype='uint8')
return caffe.io.array_to_datum(image)
def test_defaults(self):
assert _.analyze_db(self.db.path()) == self.PASS_DEFAULTS
def test_force_shape(self):
assert _.analyze_db(self.db.path(), force_same_shape=True) == self.PASS_FORCE
class TestSameShape(BaseTestWithDB):
pass
class TestDifferentShape(BaseTestWithDB):
SAME_SHAPE = False
PASS_FORCE = False
license: bsd-3-clause | hash: -5,870,222,944,748,095,000 | line_mean: 23.15 | line_max: 85 | alpha_frac: 0.610766 | autogenerated: false
repo_name: michaelrosejr/pyaos6 | path: netmiko/pluribus/pluribus_ssh.py | copies: 1 | size: 1221

from __future__ import unicode_literals
from netmiko.base_connection import BaseConnection
class PluribusSSH(BaseConnection):
'''Common methods for Pluribus.'''
def __init__(self, *args, **kwargs):
super(PluribusSSH, self).__init__(*args, **kwargs)
self._config_mode = False
def disable_paging(self, command="pager off", delay_factor=1):
'''Make sure paging is disabled.'''
return super(PluribusSSH, self).disable_paging(command=command, delay_factor=delay_factor)
def session_preparation(self):
'''Prepare the netmiko session.'''
self._test_channel_read()
self.set_base_prompt()
self.disable_paging()
def check_config_mode(self, *args, **kwargs):
'''
Pluribus devices don't have a config mode.
Therefore it can be considered as always in config mode.
'''
return self._config_mode
def config_mode(self, *args, **kwargs):
'''No special actions to enter in config mode.'''
self._config_mode = True
return ''
def exit_config_mode(self, *args, **kwargs):
'''No special actions to exit config mode.'''
self._config_mode = False
return ''
license: mit | hash: 7,345,471,844,455,937,000 | line_mean: 33 | line_max: 98 | alpha_frac: 0.620803 | autogenerated: false
repo_name: centricular/meson | path: mesonbuild/modules/gnome.py | copies: 1 | size: 45416

# Copyright 2015-2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''This module provides helper functions for Gnome/GLib related
functionality such as gobject-introspection and gresources.'''
from .. import build
import os
import sys
import copy
import subprocess
from ..mesonlib import MesonException
from .. import dependencies
from .. import mlog
from .. import mesonlib
from .. import compilers  # needed by the sanitizer handling in generate_gir()
from .. import interpreter
# gresource compilation is broken due to the way
# the resource compiler and Ninja clash about it
#
# https://github.com/ninja-build/ninja/issues/1184
# https://bugzilla.gnome.org/show_bug.cgi?id=774368
gresource_dep_needed_version = '>9.99.99'
native_glib_version = None
girwarning_printed = False
gresource_warning_printed = False
class GnomeModule:
@staticmethod
def _get_native_glib_version(state):
global native_glib_version
if native_glib_version is None:
glib_dep = dependencies.PkgConfigDependency(
'glib-2.0', state.environment, {'native': True})
native_glib_version = glib_dep.get_modversion()
return native_glib_version
def __print_gresources_warning(self, state):
global gresource_warning_printed
if not gresource_warning_printed:
if not mesonlib.version_compare(self._get_native_glib_version(state), gresource_dep_needed_version):
mlog.warning('''GLib compiled dependencies do not work reliably with
the current version of GLib. See the following upstream issue:''',
mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))
gresource_warning_printed = True
return []
def compile_resources(self, state, args, kwargs):
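        # Runs glib-compile-resources via two custom targets, producing the
        # .c source and the matching .h header for a GResource bundle.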
self.__print_gresources_warning(state)
cmd = ['glib-compile-resources', '@INPUT@']
source_dirs = kwargs.pop('source_dir', [])
if not isinstance(source_dirs, list):
source_dirs = [source_dirs]
if len(args) < 2:
raise MesonException('Not enough arguments; The name of the resource and the path to the XML file are required')
dependencies = kwargs.pop('dependencies', [])
if not isinstance(dependencies, list):
dependencies = [dependencies]
glib_version = self._get_native_glib_version(state)
if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
if len(dependencies) > 0:
raise MesonException('''The "dependencies" argument of gnome.compile_resources()
can not be used with the current version of glib-compiled-resources, due to
<https://bugzilla.gnome.org/show_bug.cgi?id=774368>''')
ifile = args[1]
if isinstance(ifile, mesonlib.File):
ifile = os.path.join(ifile.subdir, ifile.fname)
elif isinstance(ifile, str):
ifile = os.path.join(state.subdir, ifile)
else:
raise RuntimeError('Unreachable code.')
depend_files, depends, subdirs = self._get_gresource_dependencies(
state, ifile, source_dirs, dependencies)
# Make source dirs relative to build dir now
source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
# Always include current directory, but after paths set by user
source_dirs.append(os.path.join(state.build_to_src, state.subdir))
# Ensure build directories of generated deps are included
source_dirs += subdirs
for source_dir in set(source_dirs):
cmd += ['--sourcedir', source_dir]
if 'c_name' in kwargs:
cmd += ['--c-name', kwargs.pop('c_name')]
cmd += ['--generate', '--target', '@OUTPUT@']
cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))
kwargs['input'] = args[1]
kwargs['output'] = args[0] + '.c'
kwargs['depends'] = depends
if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
# This will eventually go out of sync if dependencies are added
kwargs['depend_files'] = depend_files
kwargs['command'] = cmd
else:
depfile = kwargs['output'] + '.d'
kwargs['depfile'] = depfile
kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
target_c = build.CustomTarget(args[0] + '_c', state.subdir, kwargs)
h_kwargs = {
'command': cmd,
'input': args[1],
'output': args[0] + '.h',
            # The header doesn't actually use the files, yet it errors if they are missing
'depends': depends
}
target_h = build.CustomTarget(args[0] + '_h', state.subdir, h_kwargs)
return [target_c, target_h]
def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
for dep in dependencies:
if not isinstance(dep, interpreter.CustomTargetHolder) and not \
isinstance(dep, mesonlib.File):
raise MesonException(
'Unexpected dependency type for gnome.compile_resources() '
'"dependencies" argument. Please pass the output of '
'custom_target() or configure_file().')
cmd = ['glib-compile-resources',
input_file,
'--generate-dependencies']
for source_dir in source_dirs:
cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]
cmd += ['--sourcedir', state.subdir] # Current dir
pc = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True,
cwd=state.environment.get_source_dir())
(stdout, _) = pc.communicate()
if pc.returncode != 0:
mlog.warning('glib-compile-resources has failed to get the dependencies for {}'.format(cmd[1]))
raise subprocess.CalledProcessError(pc.returncode, cmd)
dep_files = stdout.split('\n')[:-1]
# In generate-dependencies mode, glib-compile-resources doesn't raise
# an error for missing resources but instead prints whatever filename
# was listed in the input file. That's good because it means we can
# handle resource files that get generated as part of the build, as
# follows.
#
# If there are multiple generated resource files with the same basename
# then this code will get confused.
def exists_in_srcdir(f):
return os.path.exists(os.path.join(state.environment.get_source_dir(), f))
missing_dep_files = [f for f in dep_files if not exists_in_srcdir(f)]
depends = []
subdirs = []
for missing in missing_dep_files:
found = False
missing_basename = os.path.basename(missing)
for dep in dependencies:
if isinstance(dep, mesonlib.File):
if dep.fname == missing_basename:
found = True
dep_files.remove(missing)
dep_files.append(dep)
subdirs.append(dep.subdir)
break
elif isinstance(dep, interpreter.CustomTargetHolder):
if dep.held_object.get_basename() == missing_basename:
found = True
dep_files.remove(missing)
dep_files.append(
mesonlib.File(
is_built=True,
subdir=dep.held_object.get_subdir(),
fname=dep.held_object.get_basename()))
depends.append(dep.held_object)
subdirs.append(dep.held_object.get_subdir())
break
if not found:
raise MesonException(
'Resource "%s" listed in "%s" was not found. If this is a '
'generated file, pass the target that generates it to '
'gnome.compile_resources() using the "dependencies" '
'keyword argument.' % (missing, input_file))
return dep_files, depends, subdirs
@staticmethod
def _get_link_args(state, lib, depends=None):
link_command = ['-l%s' % lib.name]
if isinstance(lib, build.SharedLibrary):
link_command += ['-L%s' %
os.path.join(state.environment.get_build_dir(),
lib.subdir)]
if depends:
depends.append(lib)
return link_command
@staticmethod
def _get_include_args(state, include_dirs, prefix='-I'):
if not include_dirs:
return []
dirs_str = []
for incdirs in include_dirs:
if hasattr(incdirs, "held_object"):
dirs = incdirs.held_object
else:
dirs = incdirs
if isinstance(dirs, str):
dirs_str += ['%s%s' % (prefix, dirs)]
continue
# Should be build.IncludeDirs object.
basedir = dirs.get_curdir()
for d in dirs.get_incdirs():
expdir = os.path.join(basedir, d)
srctreedir = os.path.join(state.environment.get_source_dir(), expdir)
buildtreedir = os.path.join(state.environment.get_build_dir(), expdir)
dirs_str += ['%s%s' % (prefix, buildtreedir),
'%s%s' % (prefix, srctreedir)]
for d in dirs.get_extra_build_dirs():
dirs_str += ['%s%s' % (prefix, d)]
return dirs_str
def _get_dependencies_flags(self, deps, state, depends=None):
cflags = set()
ldflags = set()
gi_includes = set()
if not isinstance(deps, list):
deps = [deps]
for dep in deps:
if hasattr(dep, 'held_object'):
dep = dep.held_object
if isinstance(dep, dependencies.InternalDependency):
cflags.update(self._get_include_args(state, dep.include_directories))
for lib in dep.libraries:
ldflags.update(self._get_link_args(state, lib.held_object, depends))
libdepflags = self._get_dependencies_flags(lib.held_object.get_external_deps(), state, depends)
cflags.update(libdepflags[0])
ldflags.update(libdepflags[1])
gi_includes.update(libdepflags[2])
extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends)
cflags.update(extdepflags[0])
ldflags.update(extdepflags[1])
gi_includes.update(extdepflags[2])
for source in dep.sources:
if hasattr(source, 'held_object') and isinstance(source.held_object, GirTarget):
gi_includes.update([os.path.join(state.environment.get_build_dir(),
source.held_object.get_subdir())])
# This should be any dependency other than an internal one.
elif isinstance(dep, dependencies.Dependency):
cflags.update(dep.get_compile_args())
for lib in dep.get_link_args():
if (os.path.isabs(lib) and
# For PkgConfigDependency only:
getattr(dep, 'is_libtool', False)):
ldflags.update(["-L%s" % os.path.dirname(lib)])
libname = os.path.basename(lib)
if libname.startswith("lib"):
libname = libname[3:]
libname = libname.split(".so")[0]
lib = "-l%s" % libname
# Hack to avoid passing some compiler options in
if lib.startswith("-W"):
continue
ldflags.update([lib])
if isinstance(dep, dependencies.PkgConfigDependency):
girdir = dep.get_pkgconfig_variable("girdir")
if girdir:
gi_includes.update([girdir])
elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
for incd in dep.get_include_dirs():
cflags.update(incd.get_incdirs())
else:
mlog.log('dependency %s not handled to build gir files' % dep)
continue
return cflags, ldflags, gi_includes
def generate_gir(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Gir takes one argument')
if kwargs.get('install_dir'):
raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"')
girtarget = args[0]
while hasattr(girtarget, 'held_object'):
girtarget = girtarget.held_object
if not isinstance(girtarget, (build.Executable, build.SharedLibrary)):
raise MesonException('Gir target must be an executable or shared library')
try:
pkgstr = subprocess.check_output(['pkg-config', '--cflags', 'gobject-introspection-1.0'])
except Exception:
global girwarning_printed
if not girwarning_printed:
mlog.warning('gobject-introspection dependency was not found, disabling gir generation.')
girwarning_printed = True
return []
pkgargs = pkgstr.decode().strip().split()
ns = kwargs.pop('namespace')
nsversion = kwargs.pop('nsversion')
libsources = kwargs.pop('sources')
girfile = '%s-%s.gir' % (ns, nsversion)
depends = [girtarget]
gir_inc_dirs = []
scan_command = ['g-ir-scanner', '@INPUT@']
scan_command += pkgargs
scan_command += ['--no-libtool', '--namespace='+ns, '--nsversion=' + nsversion, '--warn-all',
'--output', '@OUTPUT@']
extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', []))
scan_command += extra_args
scan_command += ['-I' + os.path.join(state.environment.get_source_dir(), state.subdir),
'-I' + os.path.join(state.environment.get_build_dir(), state.subdir)]
scan_command += self._get_include_args(state, girtarget.get_include_dirs())
if 'link_with' in kwargs:
link_with = kwargs.pop('link_with')
if not isinstance(link_with, list):
link_with = [link_with]
for link in link_with:
scan_command += self._get_link_args(state, link.held_object, depends)
if 'includes' in kwargs:
includes = kwargs.pop('includes')
if not isinstance(includes, list):
includes = [includes]
for inc in includes:
if hasattr(inc, 'held_object'):
inc = inc.held_object
if isinstance(inc, str):
scan_command += ['--include=%s' % (inc, )]
elif isinstance(inc, GirTarget):
gir_inc_dirs += [
os.path.join(state.environment.get_build_dir(),
inc.get_subdir()),
]
scan_command += [
"--include=%s" % (inc.get_basename()[:-4], ),
]
depends += [inc]
else:
raise MesonException(
'Gir includes must be str, GirTarget, or list of them')
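        # Collect global and per-project C flags (plus any sanitizer flags)
        # and pass them through to g-ir-scanner.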
cflags = []
if state.global_args.get('c'):
cflags += state.global_args['c']
if state.project_args.get('c'):
cflags += state.project_args['c']
for compiler in state.compilers:
if compiler.get_language() == 'c':
sanitize = compiler.get_options().get('b_sanitize')
if sanitize:
cflags += compilers.sanitizer_compile_args(sanitize)
if cflags:
scan_command += ['--cflags-begin']
scan_command += cflags
scan_command += ['--cflags-end']
if kwargs.get('symbol_prefix'):
sym_prefix = kwargs.pop('symbol_prefix')
if not isinstance(sym_prefix, str):
raise MesonException('Gir symbol prefix must be str')
scan_command += ['--symbol-prefix=%s' % sym_prefix]
if kwargs.get('identifier_prefix'):
identifier_prefix = kwargs.pop('identifier_prefix')
if not isinstance(identifier_prefix, str):
raise MesonException('Gir identifier prefix must be str')
scan_command += ['--identifier-prefix=%s' % identifier_prefix]
if kwargs.get('export_packages'):
pkgs = kwargs.pop('export_packages')
if isinstance(pkgs, str):
scan_command += ['--pkg-export=%s' % pkgs]
elif isinstance(pkgs, list):
scan_command += ['--pkg-export=%s' % pkg for pkg in pkgs]
else:
raise MesonException('Gir export packages must be str or list')
deps = kwargs.pop('dependencies', [])
if not isinstance(deps, list):
deps = [deps]
deps = (girtarget.get_all_link_deps() + girtarget.get_external_deps() +
deps)
# Need to recursively add deps on GirTarget sources from our
# dependencies and also find the include directories needed for the
# typelib generation custom target below.
typelib_includes = []
for dep in deps:
if hasattr(dep, 'held_object'):
dep = dep.held_object
# Add a dependency on each GirTarget listed in dependencies and add
# the directory where it will be generated to the typelib includes
if isinstance(dep, dependencies.InternalDependency):
for source in dep.sources:
if hasattr(source, 'held_object'):
source = source.held_object
if isinstance(source, GirTarget) and source not in depends:
depends.append(source)
subdir = os.path.join(state.environment.get_build_dir(),
source.get_subdir())
if subdir not in typelib_includes:
typelib_includes.append(subdir)
# Do the same, but for dependencies of dependencies. These are
# stored in the list of generated sources for each link dep (from
# girtarget.get_all_link_deps() above).
# FIXME: Store this in the original form from declare_dependency()
# so it can be used here directly.
elif isinstance(dep, build.SharedLibrary):
for source in dep.generated:
if isinstance(source, GirTarget):
subdir = os.path.join(state.environment.get_build_dir(),
source.get_subdir())
if subdir not in typelib_includes:
typelib_includes.append(subdir)
elif isinstance(dep, dependencies.PkgConfigDependency):
girdir = dep.get_pkgconfig_variable("girdir")
if girdir and girdir not in typelib_includes:
typelib_includes.append(girdir)
# ldflags will be misinterpreted by gir scanner (showing
# spurious dependencies) but building GStreamer fails if they
# are not used here.
cflags, ldflags, gi_includes = self._get_dependencies_flags(deps, state, depends)
scan_command += list(cflags)
# need to put our output directory first as we need to use the
# generated libraries instead of any possibly installed system/prefix
# ones.
if isinstance(girtarget, build.SharedLibrary):
scan_command += ["-L@PRIVATE_OUTDIR_ABS_%s@" % girtarget.get_id()]
scan_command += list(ldflags)
for i in gi_includes:
scan_command += ['--add-include-path=%s' % i]
inc_dirs = kwargs.pop('include_directories', [])
if not isinstance(inc_dirs, list):
inc_dirs = [inc_dirs]
for incd in inc_dirs:
if not isinstance(incd.held_object, (str, build.IncludeDirs)):
raise MesonException(
'Gir include dirs should be include_directories().')
scan_command += self._get_include_args(state, inc_dirs)
scan_command += self._get_include_args(state, gir_inc_dirs + inc_dirs,
prefix='--add-include-path=')
if isinstance(girtarget, build.Executable):
scan_command += ['--program', girtarget]
elif isinstance(girtarget, build.SharedLibrary):
libname = girtarget.get_basename()
scan_command += ['--library', libname]
scankwargs = {'output' : girfile,
'input' : libsources,
'command' : scan_command,
'depends' : depends,
}
if kwargs.get('install'):
scankwargs['install'] = kwargs['install']
scankwargs['install_dir'] = kwargs.get('install_dir_gir',
os.path.join(state.environment.get_datadir(), 'gir-1.0'))
scan_target = GirTarget(girfile, state.subdir, scankwargs)
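        # Second step: compile the generated .gir into a binary .typelib.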
typelib_output = '%s-%s.typelib' % (ns, nsversion)
typelib_cmd = ['g-ir-compiler', scan_target, '--output', '@OUTPUT@']
typelib_cmd += self._get_include_args(state, gir_inc_dirs,
prefix='--includedir=')
for incdir in typelib_includes:
typelib_cmd += ["--includedir=" + incdir]
typelib_kwargs = {
'output': typelib_output,
'command': typelib_cmd,
}
if kwargs.get('install'):
typelib_kwargs['install'] = kwargs['install']
typelib_kwargs['install_dir'] = kwargs.get('install_dir_typelib',
os.path.join(state.environment.get_libdir(), 'girepository-1.0'))
typelib_target = TypelibTarget(typelib_output, state.subdir, typelib_kwargs)
return [scan_target, typelib_target]
def compile_schemas(self, state, args, kwargs):
if len(args) != 0:
raise MesonException('Compile_schemas does not take positional arguments.')
srcdir = os.path.join(state.build_to_src, state.subdir)
outdir = state.subdir
cmd = ['glib-compile-schemas', '--targetdir', outdir, srcdir]
kwargs['command'] = cmd
kwargs['input'] = []
kwargs['output'] = 'gschemas.compiled'
if state.subdir == '':
targetname = 'gsettings-compile'
else:
targetname = 'gsettings-compile-' + state.subdir
target_g = build.CustomTarget(targetname, state.subdir, kwargs)
return target_g
def yelp(self, state, args, kwargs):
if len(args) < 1:
raise MesonException('Yelp requires a project id')
project_id = args[0]
sources = mesonlib.stringlistify(kwargs.pop('sources', []))
if not sources:
if len(args) > 1:
sources = mesonlib.stringlistify(args[1:])
if not sources:
raise MesonException('Yelp requires a list of sources')
source_str = '@@'.join(sources)
langs = mesonlib.stringlistify(kwargs.pop('languages', []))
media = mesonlib.stringlistify(kwargs.pop('media', []))
symlinks = kwargs.pop('symlink_media', False)
if not isinstance(symlinks, bool):
raise MesonException('symlink_media must be a boolean')
if kwargs:
raise MesonException('Unknown arguments passed: {}'.format(', '.join(kwargs.keys())))
install_cmd = [
sys.executable,
state.environment.get_build_command(),
'--internal',
'yelphelper',
'install',
'--subdir=' + state.subdir,
'--id=' + project_id,
'--installdir=' + os.path.join(state.environment.get_datadir(), 'help'),
'--sources=' + source_str,
]
if symlinks:
install_cmd.append('--symlinks=true')
if media:
install_cmd.append('--media=' + '@@'.join(media))
if langs:
install_cmd.append('--langs=' + '@@'.join(langs))
inscript = build.InstallScript(install_cmd)
potargs = [state.environment.get_build_command(), '--internal', 'yelphelper', 'pot',
'--subdir=' + state.subdir,
'--id=' + project_id,
'--sources=' + source_str]
pottarget = build.RunTarget('help-' + project_id + '-pot', sys.executable,
potargs, [], state.subdir)
poargs = [state.environment.get_build_command(), '--internal', 'yelphelper', 'update-po',
'--subdir=' + state.subdir,
'--id=' + project_id,
'--sources=' + source_str,
'--langs=' + '@@'.join(langs)]
potarget = build.RunTarget('help-' + project_id + '-update-po', sys.executable,
poargs, [], state.subdir)
return [inscript, pottarget, potarget]
def gtkdoc(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('Gtkdoc must have one positional argument.')
modulename = args[0]
if not isinstance(modulename, str):
raise MesonException('Gtkdoc arg must be string.')
if not 'src_dir' in kwargs:
raise MesonException('Keyword argument src_dir missing.')
main_file = kwargs.get('main_sgml', '')
if not isinstance(main_file, str):
raise MesonException('Main sgml keyword argument must be a string.')
main_xml = kwargs.get('main_xml', '')
if not isinstance(main_xml, str):
raise MesonException('Main xml keyword argument must be a string.')
if main_xml != '':
if main_file != '':
raise MesonException('You can only specify main_xml or main_sgml, not both.')
main_file = main_xml
src_dir = kwargs['src_dir']
targetname = modulename + '-doc'
command = [state.environment.get_build_command(), '--internal', 'gtkdoc']
if hasattr(src_dir, 'held_object'):
            src_dir = src_dir.held_object
if not isinstance(src_dir, build.IncludeDirs):
raise MesonException('Invalid keyword argument for src_dir.')
incdirs = src_dir.get_incdirs()
if len(incdirs) != 1:
raise MesonException('Argument src_dir has more than one directory specified.')
header_dir = os.path.join(state.environment.get_source_dir(), src_dir.get_curdir(), incdirs[0])
else:
header_dir = os.path.normpath(os.path.join(state.subdir, src_dir))
args = ['--sourcedir=' + state.environment.get_source_dir(),
'--builddir=' + state.environment.get_build_dir(),
'--subdir=' + state.subdir,
'--headerdir=' + header_dir,
'--mainfile=' + main_file,
'--modulename=' + modulename]
args += self._unpack_args('--htmlargs=', 'html_args', kwargs)
args += self._unpack_args('--scanargs=', 'scan_args', kwargs)
args += self._unpack_args('--scanobjsargs=', 'scanobjs_args', kwargs)
args += self._unpack_args('--gobjects-types-file=', 'gobject_typesfile', kwargs, state)
args += self._unpack_args('--fixxrefargs=', 'fixxref_args', kwargs)
args += self._unpack_args('--html-assets=', 'html_assets', kwargs, state)
args += self._unpack_args('--content-files=', 'content_files', kwargs, state)
args += self._unpack_args('--ignore-headers=', 'ignore_headers', kwargs)
args += self._unpack_args('--installdir=', 'install_dir', kwargs, state)
args += self._get_build_args(kwargs, state)
res = [build.RunTarget(targetname, command[0], command[1:] + args, [], state.subdir)]
if kwargs.get('install', True):
res.append(build.InstallScript(command + args))
return res
def _get_build_args(self, kwargs, state):
args = []
cflags, ldflags, gi_includes = self._get_dependencies_flags(kwargs.get('dependencies', []), state)
inc_dirs = kwargs.get('include_directories', [])
if not isinstance(inc_dirs, list):
inc_dirs = [inc_dirs]
for incd in inc_dirs:
if not isinstance(incd.held_object, (str, build.IncludeDirs)):
raise MesonException(
'Gir include dirs should be include_directories().')
cflags.update(self._get_include_args(state, inc_dirs))
if cflags:
args += ['--cflags=%s' % ' '.join(cflags)]
if ldflags:
args += ['--ldflags=%s' % ' '.join(ldflags)]
compiler = state.environment.coredata.compilers.get('c')
if compiler:
args += ['--cc=%s' % ' '.join(compiler.get_exelist())]
args += ['--ld=%s' % ' '.join(compiler.get_linker_exelist())]
return args
def gtkdoc_html_dir(self, state, args, kwarga):
if len(args) != 1:
raise MesonException('Must have exactly one argument.')
modulename = args[0]
if not isinstance(modulename, str):
raise MesonException('Argument must be a string')
return os.path.join('share/gtkdoc/html', modulename)
@staticmethod
def _unpack_args(arg, kwarg_name, kwargs, expend_file_state=None):
if kwarg_name not in kwargs:
return []
new_args = kwargs[kwarg_name]
if not isinstance(new_args, list):
new_args = [new_args]
args = []
for i in new_args:
if expend_file_state and isinstance(i, mesonlib.File):
i = os.path.join(expend_file_state.environment.get_build_dir(), i.subdir, i.fname)
elif not isinstance(i, str):
raise MesonException(kwarg_name + ' values must be strings.')
args.append(i)
if args:
return [arg + '@@'.join(args)]
return []
def gdbus_codegen(self, state, args, kwargs):
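        # Wraps gdbus-codegen to generate <name>.c and <name>.h from a D-Bus
        # introspection XML file.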
if len(args) != 2:
raise MesonException('Gdbus_codegen takes two arguments, name and xml file.')
namebase = args[0]
xml_file = args[1]
cmd = ['gdbus-codegen']
if 'interface_prefix' in kwargs:
cmd += ['--interface-prefix', kwargs.pop('interface_prefix')]
if 'namespace' in kwargs:
cmd += ['--c-namespace', kwargs.pop('namespace')]
cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
outputs = [namebase + '.c', namebase + '.h']
custom_kwargs = {'input' : xml_file,
'output' : outputs,
'command' : cmd
}
return build.CustomTarget(namebase + '-gdbus', state.subdir, custom_kwargs)
def mkenums(self, state, args, kwargs):
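        # Wraps glib-mkenums: emits a C target, a header target, or a single
        # generic target depending on which templates are supplied.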
if len(args) != 1:
raise MesonException('Mkenums requires one positional argument.')
basename = args[0]
if 'sources' not in kwargs:
raise MesonException('Missing keyword argument "sources".')
sources = kwargs.pop('sources')
if isinstance(sources, str):
sources = [sources]
elif not isinstance(sources, list):
raise MesonException(
'Sources keyword argument must be a string or array.')
cmd = []
known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail',
'identifier_prefix', 'symbol_prefix', 'template',
'vhead', 'vprod', 'vtail']
known_custom_target_kwargs = ['install', 'install_dir', 'build_always',
'depends', 'depend_files']
c_template = h_template = None
install_header = False
for arg, value in kwargs.items():
if arg == 'sources':
sources = [value] + sources
elif arg == 'c_template':
c_template = value
elif arg == 'h_template':
h_template = value
elif arg == 'install_header':
install_header = value
elif arg in known_kwargs:
cmd += ['--' + arg.replace('_', '-'), value]
elif arg not in known_custom_target_kwargs:
raise MesonException(
'Mkenums does not take a %s keyword argument.' % (arg, ))
cmd = ['glib-mkenums'] + cmd
custom_kwargs = {}
for arg in known_custom_target_kwargs:
if arg in kwargs:
custom_kwargs[arg] = kwargs[arg]
targets = []
if h_template is not None:
h_output = os.path.splitext(h_template)[0]
# We always set template as the first element in the source array
# so --template consumes it.
h_cmd = cmd + ['--template', '@INPUT@']
h_sources = [h_template] + sources
custom_kwargs['install'] = install_header
if 'install_dir' not in custom_kwargs:
custom_kwargs['install_dir'] = \
state.environment.coredata.get_builtin_option('includedir')
h_target = self._make_mkenum_custom_target(state, h_sources,
h_output, h_cmd,
custom_kwargs)
targets.append(h_target)
if c_template is not None:
c_output = os.path.splitext(c_template)[0]
# We always set template as the first element in the source array
# so --template consumes it.
c_cmd = cmd + ['--template', '@INPUT@']
c_sources = [c_template] + sources
# Never install the C file. Complain on bug tracker if you need it.
custom_kwargs['install'] = False
if h_template is not None:
if 'depends' in custom_kwargs:
custom_kwargs['depends'] += [h_target]
else:
custom_kwargs['depends'] = h_target
c_target = self._make_mkenum_custom_target(state, c_sources,
c_output, c_cmd,
custom_kwargs)
targets.insert(0, c_target)
if c_template is None and h_template is None:
generic_cmd = cmd + ['@INPUT@']
custom_kwargs['install'] = install_header
if 'install_dir' not in custom_kwargs:
custom_kwargs['install_dir'] = \
state.environment.coredata.get_builtin_option('includedir')
target = self._make_mkenum_custom_target(state, sources, basename,
generic_cmd, custom_kwargs)
return target
elif len(targets) == 1:
return targets[0]
else:
return targets
@staticmethod
def _make_mkenum_custom_target(state, sources, output, cmd, kwargs):
custom_kwargs = {
'input': sources,
'output': output,
'capture': True,
'command': cmd
}
custom_kwargs.update(kwargs)
return build.CustomTarget(output, state.subdir, custom_kwargs)
def genmarshal(self, state, args, kwargs):
if len(args) != 1:
raise MesonException(
'Genmarshal requires one positional argument.')
output = args[0]
if 'sources' not in kwargs:
raise MesonException('Missing keyword argument "sources".')
sources = kwargs.pop('sources')
if isinstance(sources, str):
sources = [sources]
elif not isinstance(sources, list):
raise MesonException(
'Sources keyword argument must be a string or array.')
cmd = ['glib-genmarshal']
known_kwargs = ['internal', 'nostdinc', 'skip_source', 'stdinc',
'valist_marshallers']
known_custom_target_kwargs = ['build_always', 'depends',
'depend_files', 'install_dir',
'install_header']
for arg, value in kwargs.items():
if arg == 'prefix':
cmd += ['--prefix', value]
elif arg in known_kwargs and value:
cmd += ['--' + arg.replace('_', '-')]
elif arg not in known_custom_target_kwargs:
raise MesonException(
'Genmarshal does not take a %s keyword argument.' % (
arg, ))
install_header = kwargs.pop('install_header', False)
install_dir = kwargs.pop('install_dir', None)
custom_kwargs = {
'input': sources,
'capture': True,
}
for arg in known_custom_target_kwargs:
if arg in kwargs:
custom_kwargs[arg] = kwargs[arg]
custom_kwargs['command'] = cmd + ['--header', '--body', '@INPUT@']
custom_kwargs['output'] = output + '.c'
body = build.CustomTarget(output + '_c', state.subdir, custom_kwargs)
custom_kwargs['install'] = install_header
if install_dir is not None:
custom_kwargs['install_dir'] = install_dir
custom_kwargs['command'] = cmd + ['--header', '@INPUT@']
custom_kwargs['output'] = output + '.h'
header = build.CustomTarget(output + '_h', state.subdir, custom_kwargs)
return [body, header]
@staticmethod
def _vapi_args_to_command(prefix, variable, kwargs, accept_vapi=False):
arg_list = kwargs.get(variable)
if not arg_list:
return []
ret = []
if not isinstance(arg_list, list):
arg_list = [arg_list]
for arg in arg_list:
if not isinstance(arg, str):
                types = 'strings' + (' or InternalDependencies' if accept_vapi else '')
raise MesonException('All {} must be {}'.format(variable, types))
ret.append(prefix + arg)
return ret
def _extract_vapi_packages(self, state, kwargs):
'''
Packages are special because we need to:
- Get a list of packages for the .deps file
- Get a list of depends for any VapiTargets
- Get package name from VapiTargets
- Add include dirs for any VapiTargets
'''
arg_list = kwargs.get('packages')
if not arg_list:
return [], [], [], []
if not isinstance(arg_list, list):
arg_list = [arg_list]
vapi_depends = []
vapi_packages = []
vapi_includes = []
ret = []
remaining_args = []
for arg in arg_list:
if hasattr(arg, 'held_object'):
arg = arg.held_object
if isinstance(arg, dependencies.InternalDependency):
targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
for target in targets:
srcdir = os.path.join(state.environment.get_source_dir(),
target.get_subdir())
outdir = os.path.join(state.environment.get_build_dir(),
target.get_subdir())
outfile = target.output[0][:-5] # Strip .vapi
ret.append('--vapidir=' + outdir)
ret.append('--girdir=' + outdir)
ret.append('--pkg=' + outfile)
vapi_depends.append(target)
vapi_packages.append(outfile)
vapi_includes.append(srcdir)
else:
vapi_packages.append(arg)
remaining_args.append(arg)
kwargs['packages'] = remaining_args
vapi_args = ret + self._vapi_args_to_command('--pkg=', 'packages', kwargs, accept_vapi=True)
return vapi_args, vapi_depends, vapi_packages, vapi_includes
def _generate_deps(self, state, library, packages, indir):
outdir = state.environment.scratch_dir
fname = os.path.join(outdir, library + '.deps')
with open(fname, 'w') as ofile:
for package in packages:
ofile.write(package + '\n')
return build.Data(False, outdir, [fname], indir)
def _get_vapi_link_with(self, target):
link_with = []
for dep in target.get_target_dependencies():
if isinstance(dep, build.SharedLibrary):
link_with.append(dep)
elif isinstance(dep, GirTarget):
link_with += self._get_vapi_link_with(dep)
return link_with
def generate_vapi(self, state, args, kwargs):
if len(args) != 1:
raise MesonException('The library name is required')
if not isinstance(args[0], str):
raise MesonException('The first argument must be the name of the library')
library = args[0]
build_dir = os.path.join(state.environment.get_build_dir(), state.subdir)
source_dir = os.path.join(state.environment.get_source_dir(), state.subdir)
pkg_cmd, vapi_depends, vapi_packages, vapi_includes = self._extract_vapi_packages(state, kwargs)
cmd = ['vapigen', '--quiet', '--library=' + library, '--directory=' + build_dir]
cmd += self._vapi_args_to_command('--vapidir=', 'vapi_dirs', kwargs)
cmd += self._vapi_args_to_command('--metadatadir=', 'metadata_dirs', kwargs)
cmd += self._vapi_args_to_command('--girdir=', 'gir_dirs', kwargs)
cmd += pkg_cmd
cmd += ['--metadatadir=' + source_dir]
inputs = kwargs.get('sources')
if not inputs:
raise MesonException('sources are required to generate the vapi file')
if not isinstance(inputs, list):
inputs = [inputs]
link_with = []
for i in inputs:
if isinstance(i, str):
cmd.append(os.path.join(source_dir, i))
elif hasattr(i, 'held_object') \
and isinstance(i.held_object, GirTarget):
link_with += self._get_vapi_link_with(i.held_object)
subdir = os.path.join(state.environment.get_build_dir(),
i.held_object.get_subdir())
gir_file = os.path.join(subdir, i.held_object.output[0])
cmd.append(gir_file)
else:
raise MesonException('Input must be a str or GirTarget')
vapi_output = library + '.vapi'
custom_kwargs = {
'command': cmd,
'input': inputs,
'output': vapi_output,
'depends': vapi_depends,
}
install_dir = kwargs.get('install_dir',
os.path.join(state.environment.coredata.get_builtin_option('datadir'),
'vala', 'vapi'))
if kwargs.get('install'):
custom_kwargs['install'] = kwargs['install']
custom_kwargs['install_dir'] = install_dir
# We shouldn't need this locally but we install it
deps_target = self._generate_deps(state, library, vapi_packages, install_dir)
state.data.append(deps_target)
vapi_target = VapiTarget(vapi_output, state.subdir, custom_kwargs)
# So to try our best to get this to just work we need:
        # - link with the correct library
# - include the vapi and dependent vapi files in sources
# - add relevant directories to include dirs
includes = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
sources = [vapi_target] + vapi_depends
return dependencies.InternalDependency(
None, includes, [], [], link_with, sources, []
)
def initialize():
return GnomeModule()
class GirTarget(build.CustomTarget):
def __init__(self, name, subdir, kwargs):
super().__init__(name, subdir, kwargs)
class TypelibTarget(build.CustomTarget):
def __init__(self, name, subdir, kwargs):
super().__init__(name, subdir, kwargs)
class VapiTarget(build.CustomTarget):
def __init__(self, name, subdir, kwargs):
super().__init__(name, subdir, kwargs)
| apache-2.0 | 4,630,229,603,091,870,000 | 43.788955 | 133 | 0.550489 | false |
openstack-packages/rdopkg | rdopkg/actionmods/rdoinfo.py | 2 | 4098 | from __future__ import print_function
import distroinfo
import distroinfo.query
from distroinfo.info import DistroInfo
from rdopkg import exception, helpers
from rdopkg.utils import log
from rdopkg.conf import cfg
def print_releases(info):
print("{t.bold}releases & repos:{t.normal}".format(t=log.term))
for rls in info['releases']:
s = " {t.bold}{rls}{t.normal}".format(t=log.term, rls=rls['name'])
if 'fedora' in rls:
s += ' (Fedora %s)' % rls['fedora']
print(s)
for repo in rls['repos']:
if 'special' in repo:
print(" {t.bold}{name}{t.normal}: "
"{t.yellow}{special}{t.normal}".format(
t=log.term,
name=repo['name'],
special=repo['special']))
else:
print(" {t.bold}{name}{t.normal} built in"
" {t.bold}{bs}{t.normal} from"
" {t.bold}{branch}{t.normal} branch".format(
t=log.term,
name=repo['name'],
bs=repo.get('buildsys', '??'),
branch=repo['branch']))
def print_pkg_summary(info):
pkgs = info['packages']
n = len(pkgs)
print("{t.bold}{n} packages defined:{t.normal}".format(
t=log.term, n=n))
confs = {}
for pkg in pkgs:
conf = pkg.get('conf')
confs[conf] = confs.get(conf, 0) + 1
for conf, n in confs.items():
if not conf:
continue
print(" {t.bold}{n}{t.normal} using {t.bold}{conf}{t.normal} "
"config".format(t=log.term, conf=conf, n=n))
n = confs.get(None)
if n:
print(" {t.bold}{n}{t.normal} configured ad-hoc".format(
t=log.term, n=n))
def print_summary(info):
print_releases(info)
print('')
print_pkg_summary(info)
def print_pkgs(info, filters=None):
pkgs = info['packages']
if filters:
pkgs = distroinfo.query.filter_pkgs(pkgs, rexen=filters)
if not pkgs:
print("No packages match your filter.")
return
print("{t.bold}{n} packages found:{t.normal}".format(t=log.term,
n=len(pkgs)))
for pkg in pkgs:
print("")
print_pkg(pkg)
def print_pkg(pkg):
dp = helpers.DictPrinter(
header='name',
        first=['project', 'conf', 'upstream', 'patches', 'distgit', 'master'],
last=['maintainers'])
dp(pkg)
def info_file(distro=None):
"""Return default distroinfo info file"""
if not distro:
distro = cfg['DISTRO']
info_file_conf = distro.upper() + 'INFO_FILE'
try:
return cfg[info_file_conf]
except KeyError:
raise exception.InvalidUsage(
why="Couldn't find config option %s for distro: %s"
% (info_file_conf, distro))
def get_distroinfo(distro=None):
"""Get DistroInfo initialized from configuration"""
if not distro:
distro = cfg['DISTRO']
_info_file = info_file(distro)
# prefer git fetcher if available
git_info_url_conf = distro.upper() + 'INFO_REPO'
try:
remote_git_info = cfg[git_info_url_conf]
return DistroInfo(_info_file, remote_git_info=remote_git_info)
except KeyError:
pass
# try raw remote fetcher
remote_info_url_conf = distro.upper() + 'INFO_RAW_URL'
try:
remote_info = cfg[remote_info_url_conf]
return DistroInfo(_info_file, remote_info=remote_info)
except KeyError:
raise exception.InvalidUsage(
why="Couldn't find config option %s or %s for distro: %s"
% (git_info_url_conf, remote_info_url_conf, distro))
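# Note (derived from the code above): for distro='rdo' the config keys
# consulted are RDOINFO_FILE, RDOINFO_REPO and RDOINFO_RAW_URL.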
def get_rdoinfo():
"""Compat function
"""
return get_distroinfo(distro='rdo')
def get_default_inforepo(apply_tag=None, include_fns=None):
raise DeprecationWarning("rdopkg >= 0.47.0 uses distroinfo, please use "
"`get_distroinfo` instead.")
| apache-2.0 | 9,216,457,177,252,371,000 | 30.523077 | 77 | 0.546852 | false |
peragro/peragro-at | src/damn_at/damnfs/path.py | 1 | 5453 | """
Path utilities for DAMN FS
"""
from __future__ import absolute_import
from __future__ import print_function
import os
import six
from six.moves import range
FILE_MARKER = '<children>'
def attach(file_id, trunk, branch=None):
"""Insert a branch of directories on its trunk."""
if branch is None:
branch = file_id.filename
parts = branch.split('/', 1)
if len(parts) == 1: # branch is a file
if file_id:
found = False
for _, fid in trunk[FILE_MARKER]:
if fid.hash == file_id.hash:
found = True
break
if not found:
trunk[FILE_MARKER].append((parts[0], file_id))
else:
node, others = parts
if node not in trunk:
trunk[node] = {FILE_MARKER: []}
attach(file_id, trunk[node], others)
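# Worked sketch: attaching a FileId whose filename is 'a/b.txt' to an empty
# trunk {FILE_MARKER: []} yields:
#   {'<children>': [], 'a': {'<children>': [('b.txt', <FileId>)]}}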
def prettify(tree, indent=0):
"""Print the file tree structure with proper indentation."""
for key, value in six.iteritems(tree):
if key == FILE_MARKER:
if value:
print((' ' * indent + str(value)))
else:
print((' ' * indent + str(key)))
if isinstance(value, dict):
prettify(value, indent+1)
else:
print((' ' * (indent+1) + str(value)))
def normdirpath(path):
"""Make a directory path end with /"""
if not path.endswith('/') and path != '':
path += '/'
return path
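# e.g. normdirpath('textures') -> 'textures/', while normdirpath('') stays ''.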
def path_depth(path):
"""Give the depth, the number of directories deep, of a path"""
parts = os.path.dirname(path).split('/')
parts = [part for part in parts if part != '']
length = len(parts)
return length
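# e.g. path_depth('a/b/c.txt') -> 2 (directories 'a' and 'b').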
def expand_path(path, start_path, base_depth):
"""Make a path relative to the given starting path and make the path
    at least base_depth deep by prepending '_' directories.
:param path: :py:class:`string`: the path to expand
    :param start_path: :py:class:`string`: the starting path
    :param base_depth: :py:class:`int`: how deep the base file should be
:rtype: :py:class:`string`
"""
path = path.replace(start_path, '')
path = os.path.normpath(path)
pathdepth = path_depth(path)
path = path.replace('../', '', base_depth)
if base_depth > pathdepth:
for _ in range(base_depth - path_depth(path)):
path = '_/' + path
return path
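# Worked examples (computed from the code above):
#   expand_path('../../x/y.txt', '', 2) -> 'x/y.txt'
#   expand_path('a.txt', '', 1) -> '_/a.txt'  (padded to base_depth)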
def file_ids_as_tree(file_ids, start_path):
"""Create a tree like structure using the filenames of the given FileIds.
:param file_ids: :py:func:`list` of :py:class:`damn_at.thrift.generated.damn_types.ttypes.FileId`
:param start_path: :py:class:`string`: the base path
:rtype: :py:class:`dict` {'adir':<>, '<files>':[...]}
"""
relative_path = normdirpath(start_path)
paths = {file_id.filename.replace(relative_path, '') for file_id in file_ids}
base_depth = max([path.count('../') for path in paths])
main_dict = {FILE_MARKER: []}
for file_id in file_ids:
attach(
file_id,
main_dict,
expand_path(file_id.filename, relative_path, base_depth)
)
return main_dict
def get_files_for_path(file_ids_tree, path):
"""Traverse the FileIds tree with the given path and return the sub-tree
at that level or the file if a leaf-node.
:param file_ids_tree: :py:class:`dict` {'adir':<>, '<files>':[...]}
:param path: :py:class:`string`: the path
:rtype: :py:class:`dict` {'adir':<>, '<files>':[...]}
"""
entry = file_ids_tree
parts = path.split('/')
for part in parts:
if part != '':
try:
entry = entry[part]
except KeyError as e:
files = entry[FILE_MARKER]
for entry in files:
if entry[0] == part:
return entry
raise e
return entry
def find_path_for_file_id(file_ids_tree, file_id):
"""Traverse the FileIds tree to construct a path for the given FileId
:param file_ids_tree: :py:class:`dict` {'adir':<>, '<files>':[...]}
:param file_id: :py:class:`damn_at.thrift.generated.damn_types.ttypes.FileId`: the fileId we're looking for
:rtype: :py:class:`string`: the path
"""
for key, value in six.iteritems(file_ids_tree):
if key == FILE_MARKER:
for name, fid in value:
if fid.hash == file_id.hash:
return name
else:
if isinstance(value, dict):
ret = find_path_for_file_id(value, file_id)
if ret:
return key + '/' + ret
def parse_path(path):
"""Parse a path of /hash/action/my/path returning a tuple of
    ('hash', 'action', 'my/path') or None values if a shorter path is
given.
:param path: :py:class:`string`: the path
:rtype: :py:func:`tuple`
"""
if path == '/':
return None, None, None
paths = path[1:].split('/', 1)
#Filter Empty strings
paths = [p for p in paths if p != '']
if len(paths) == 1:
return paths[0], None, None
else:
file_hash, rest = paths
paths = rest.split('/', 1)
#Filter Empty strings
paths = [p for p in paths if p != '']
if len(paths) == 1:
return file_hash, paths[0], None
else:
action, rest = paths
return file_hash, action, rest
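# e.g. parse_path('/abc/read/my/path') -> ('abc', 'read', 'my/path'),
# parse_path('/abc') -> ('abc', None, None), parse_path('/') -> (None, None, None)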
| bsd-3-clause | 5,155,483,631,779,823,000 | 30.33908 | 111 | 0.557491 | false |
plotly/plotly.py | packages/python/plotly/plotly/validators/histogram2dcontour/_hoverlabel.py | 2 | 2081 | import _plotly_utils.basevalidators
class HoverlabelValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name="hoverlabel", parent_name="histogram2dcontour", **kwargs
):
super(HoverlabelValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Hoverlabel"),
data_docs=kwargs.pop(
"data_docs",
"""
align
Sets the horizontal alignment of the text
content within hover label box. Has an effect
only if the hover label text spans more two or
more lines
alignsrc
Sets the source reference on Chart Studio Cloud
for align .
bgcolor
Sets the background color of the hover labels
for this trace
bgcolorsrc
Sets the source reference on Chart Studio Cloud
for bgcolor .
bordercolor
Sets the border color of the hover labels for
this trace.
bordercolorsrc
Sets the source reference on Chart Studio Cloud
for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of
characters) of the trace name in the hover
labels for all traces. -1 shows the whole name
regardless of length. 0-3 shows the first 0-3
characters, and an integer >3 will show the
whole name if it is less than that many
characters, but if it is longer, will truncate
to `namelength - 3` characters and add an
ellipsis.
namelengthsrc
Sets the source reference on Chart Studio Cloud
for namelength .
""",
),
**kwargs
)
| mit | -950,340,162,685,917,800 | 38.264151 | 82 | 0.534839 | false |
JEStaubach/playrestapi | webapp.py | 1 | 17812 | import cherrypy
import json
from collections import OrderedDict
import mysql.connector
import db_conf
import sys
import atexit
import os
import os.path
from oauth2client import client, crypt
import urllib2
from urlparse import urlparse
#sys.stdout = sys.stderr
#cherrypy.config.update({'environment': 'embedded'})
client_id = '105600165694-08orfb5k9o0tit237hnohila4m694ufu.apps.googleusercontent.com'
if cherrypy.__version__.startswith('3.0') and cherrypy.engine.state == 0:
cherrypy.engine.start(blocking=False)
atexit.register(cherrypy.engine.stop)
def get_list(args):
list_name = args[0]
return_vals = []
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor(dictionary=True)
query = ("SELECT * FROM " + list_name)
cursor.execute(query)
for row in cursor:
return_vals.append(dict(row))
cursor.close()
cnx.close()
return return_vals
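# Sketch of intended use: get_list(['users']) issues "SELECT * FROM users"
# and returns the rows as a list of dicts.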
def remove_row(args):
list_name = args[0]
id = args[1]
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor(dictionary=True)
cmd = "DELETE FROM " + list_name + "_tbl WHERE " + list_name + "_tbl." + list_name[:-1] + "_id = " + id
query = cmd
cursor.execute(query)
cursor.close()
cnx.commit()
cnx.close()
return {'method': 'DELETE', 'status': 'success'}
def create_row(args):
new_data = args[0]
list_name = args[1]
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor(dictionary=True)
cmd = "INSERT INTO " + list_name + "_tbl (" + ",".join(new_data.keys())+") VALUES (" + ",".join([ "'" + new_data[key] + "'" for key in new_data]) + ")"
query = cmd
cursor.execute(query)
cursor.close()
cnx.commit()
cnx.close()
return {'method': 'POST', 'status': 'success'}
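# Sketch: create_row([{'user_email': 'a@b.c'}, 'users']) builds
# "INSERT INTO users_tbl (user_email) VALUES ('a@b.c')" -- values are
# interpolated directly into the SQL string, so inputs must be trusted.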
def update_row(args):
new_data = args[0]
list_name = args[1]
id = args[2]
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor(dictionary=True)
cmd = "UPDATE " + list_name + "_tbl SET " + ','.join([key + " = '" + new_data[key] + "'" for key in new_data.keys()]) + " WHERE " + list_name + "_tbl." + list_name[:-1] + "_id = " + id
print(cmd)
query = cmd
cursor.execute(query)
cursor.close()
cnx.commit()
cnx.close()
return {'method': 'UPDATE', 'status': 'success'}
def verify(token):
print('signin')
try:
idinfo = client.verify_id_token(token, None)
if idinfo['aud'] not in [client_id]:
raise crypt.AppIdentityError("Unrecognized client.")
if idinfo['iss'] not in ['accounts.google.com', 'https://accounts.google.com']:
raise crypt.AppIdentityError("Wrong issuer.")
except crypt.AppIdentityError:
return {'status': 'token validation failed'}
email = idinfo['email']
print(email)
return_vals = []
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor(dictionary=True)
query = ("SELECT * FROM users WHERE user_email = '" + email + "'")
cursor.execute(query)
for row in cursor:
return_vals.append(dict(row))
cursor.close()
cnx.close()
if len(return_vals) > 0:
login_succeeded(email)
return {'status': 'success', 'permissions': return_vals[0]['user_permissions']}
else:
login_failed(email)
return {'status': 'user not registered'}
def login_failed(email):
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor(dictionary=True)
query = ("SELECT * FROM failedlogins WHERE failedlogin_email = '" + email + "'")
cursor.execute(query)
rows = []
for row in cursor:
rows.append(dict(row))
cursor.close()
cnx.close()
fail_count = 1
fail_id = None
if len(rows) > 0:
fail_count = rows[0]['failedlogin_count'] + 1
fail_id = rows[0]['failedlogin_id']
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor(dictionary=True)
if fail_count == 1:
query = "INSERT INTO failedlogins_tbl ( failedlogin_email, failedlogin_count, failedlogin_lastdate, failedlogin_lasttime ) VALUES ( '" + email + "'," + str(fail_count) + ", CURDATE(), CURTIME() )"
else:
query = "UPDATE failedlogins_tbl SET failedlogin_count=" + str(fail_count) + ", failedlogin_lastdate=CURDATE(), failedlogin_lasttime=CURTIME() WHERE failedlogin_id = " + str(fail_id)
cursor.execute(query)
cursor.close()
cnx.commit()
cnx.close()
def login_succeeded(email):
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB']['db_name'])
cursor = cnx.cursor(dictionary=True)
query = "INSERT INTO logins_tbl ( login_email, login_date, login_time ) VALUES ( '" + email + "', CURDATE(), CURTIME() )"
cursor.execute(query)
cursor.close()
cnx.commit()
cnx.close()
class HoppersWebService(object):
exposed = True
exposed_views = {}
def __init__(self):
print('init called')
self.get_exposed_views()
print(str(self.exposed_views))
def get_exposed_views(self):
cnx = mysql.connector.connect(user=db_conf.settings['DB']['db_user'],
password=db_conf.settings['DB']['db_pass'],
host=db_conf.settings['DB']['db_host'],
database=db_conf.settings['DB']['db_user'] + '$' + db_conf.settings['DB'][
'db_name'])
cursor = cnx.cursor(dictionary=True)
query = ("SELECT TABLE_NAME, COLUMN_NAME, DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME IN (SELECT exposedview_name FROM exposedviews)")
cursor.execute(query)
self.exposed_views = {}
for row in cursor:
row_dict = dict(row)
if row_dict['TABLE_NAME'] not in self.exposed_views.keys():
self.exposed_views[str(row_dict['TABLE_NAME'])] = []
self.exposed_views[str(row_dict['TABLE_NAME'])].append({'column_name': row_dict['COLUMN_NAME'],
'column_type': row_dict['DATA_TYPE']})
cursor.close()
cnx.close()
def unhandled(self, method, url, args):
cherrypy.response.status = 404
return json.dumps({'method': method,
'resource': url,
'status': 'Unhandled resource location ' + str(args)})
def bad_fields(self, method, url, args, field_errors):
cherrypy.response.status = 400
return json.dumps({'method': method,
'resource': url,
'status': 'Unknown resource attributes: ' + str(field_errors)})
def collection_exposed(self, collection):
if collection in self.exposed_views.keys():
return True
else:
return False
def field_mismatches(self, collection, new_data):
allowed_fields = [x['column_name'] for x in self.exposed_views[collection]]
print('collection: ' + collection)
print('allowed_fields: ' + str(allowed_fields))
additional_supplied_fields = [x for x in new_data.keys() if x not in allowed_fields]
unsupplied_fields = [x for x in allowed_fields if x not in new_data.keys() and x != collection[:-1] + '_id']
tables_with_unsupplied_ids = [x[:-3] for x in unsupplied_fields if x[-3:] == '_id']
missing_fields = []
for table in tables_with_unsupplied_ids:
for field in new_data.keys():
if table in field:
missing_fields.append(table + '_id')
return {'additional_supplied_fields': additional_supplied_fields,
'unsupplied_fields': unsupplied_fields,
'missing_fields': missing_fields}
def check_token(self, token, method, url, cb, args):
if not token:
# token required in order to be verified
cherrypy.response.status = 401
return json.dumps({'method': method,
'resource': url,
'status': 'missing token'})
else:
crud = {'POST': 'C',
'GET': 'R',
'PUT': 'U',
'DELETE': 'D',}
authorization = verify(token)
if authorization['status'] == 'success':
# token is authentic and user is registered.
if crud[method] in authorization['permissions']:
# user has required permissions
return json.dumps(cb(args))
else:
                    # User lacks the required permission for this method
cherrypy.response.status = 403
return json.dumps({'method': method,
'resource': url,
'status': 'Insufficient privileges'})
elif authorization['status'] == 'token validation failed':
# bad token.
cherrypy.response.status = 401
cherrypy.response.headers['Location'] = url
return json.dumps({'method': method,
'resource': url,
'status': authorization['status']})
elif authorization['status'] == 'user not registered':
# token OK, but user not registered.
cherrypy.response.status = 401
cherrypy.response.headers['Location'] = url
return json.dumps({'method': method,
'resource': url,
'status': authorization['status']})
else:
# token verification - unhandled response
cherrypy.response.status = 401
cherrypy.response.headers['Location'] = url
return json.dumps({'method': method,
'resource': url,
'status': authorization['status']})
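    # Behaviour sketch: 401 for a missing/invalid token or an unregistered
    # user, 403 when the token is valid but lacks the CRUD permission mapped
    # from the HTTP method above.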
def GET(self, *args, **kwargs):
print('GET:'+str(args)+cherrypy.request.scheme)
token = cherrypy.request.headers.get('Authorization')
url = urlparse(cherrypy.url()).path
if not args:
args = [None, None]
if args[0] == 'hoppers' and args[1] == 'manage':
return self.manage()
elif args[0] == 'hoppers' and args[1] == 'rest':
if not token:
# Attempt to access a resource or collection without including token.
# Redirect to login page, pass along the requested URL in Location header.
cherrypy.response.headers['Location'] = url
raise cherrypy.HTTPRedirect("/hoppers/manage/#/" + args[2])
else:
if not self.collection_exposed(args[2]):
return self.unhandled('GET', url, args[2:])
return self.check_token(token, 'GET', url, get_list, args[2:])
elif args[0] == 'hoppers' and args[1] == 'tokensignin':
def on_success(args=None):
return json.dumps({'method': 'GET',
'resource': url,
'status': 'success',})
return self.check_token(token, 'GET', url, on_success, None)
else:
return self.unhandled('GET', url, args)
def POST(self, *args):
print('POST '+str(args)+cherrypy.request.scheme)
token = cherrypy.request.headers.get('Authorization')
url = urlparse(cherrypy.url()).path
rawData = cherrypy.request.body.read(int(cherrypy.request.headers['Content-Length']))
new_data = json.loads(rawData)
print('post data: '+str(new_data))
if args[0] == 'hoppers' and args[1] == 'rest':
if not self.collection_exposed(args[2]):
return self.unhandled('POST', url, args[2:])
field_errors = self.field_mismatches(args[2], new_data)
if field_errors['additional_supplied_fields'] or field_errors['unsupplied_fields']:
return self.bad_fields('POST', url, args[2:], field_errors)
return self.check_token(token, 'POST', url, create_row, [new_data] + list(args[2:]))
else:
return self.unhandled('POST', url, args)
def PUT(self, *args):
print('PUT ' + str(args)+cherrypy.request.scheme)
token = cherrypy.request.headers.get('Authorization')
url = urlparse(cherrypy.url()).path
rawData = cherrypy.request.body.read(int(cherrypy.request.headers['Content-Length']))
new_data = json.loads(rawData)
print('put data: ' + str(new_data))
if args[0] == 'hoppers' and args[1] == 'rest':
if not self.collection_exposed(args[2]):
return self.unhandled('PUT', url, args[2:])
field_errors = self.field_mismatches(args[2], new_data)
if field_errors['additional_supplied_fields'] or field_errors['missing_fields']:
return self.bad_fields('PUT', url, args[2:], field_errors)
return self.check_token(token, 'PUT', url, update_row, [new_data] + list(args[2:]))
else:
return self.unhandled('PUT', url, args)
def DELETE(self, *args):
print('DELETE ' + str(args)+cherrypy.request.scheme)
token = cherrypy.request.headers.get('Authorization')
url = urlparse(cherrypy.url()).path
#rawData = cherrypy.request.body.read(int(cherrypy.request.headers['Content-Length']))
#new_data = json.loads(rawData)
#print('delete data: ' + str(new_data))
if args[0] == 'hoppers' and args[1] == 'rest':
if not self.collection_exposed(args[2]):
return self.unhandled('DELETE', url, args[2:])
return self.check_token(token, 'DELETE', url, remove_row, args[2:])
else:
return self.unhandled('DELETE', url, args)
def serve_index(self):
print('index'+cherrypy.request.scheme)
print(db_conf.settings['static']['path'])
index_file = os.path.abspath(db_conf.settings['static']['path'] + 'index.html')
f = open( index_file, 'r' )
return f.read()
def manage(self):
index_file = os.path.abspath(db_conf.settings['static']['path'] + 'manage.html')
f = open( index_file, 'r' )
return f.read()
if __name__ == '__main__':
print("name {}".format(db_conf.settings['DB']['db_name']))
print("user {}".format(db_conf.settings['DB']['db_user']))
path = None
cherrypy.tree.mount(
HoppersWebService(),
'/',
{
'/hoppers/rest': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher()
},
'/hoppers/tokensignin': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher()
},
'/hoppers/manage': {
'request.dispatch': cherrypy.dispatch.MethodDispatcher()
},
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.abspath(db_conf.settings['static']['path']),
'tools.staticdir.index': 'index.html',
}
}, )
cherrypy.server.ssl_module = 'builtin'
cherrypy.server.ssl_certificate = "cert.pem"
cherrypy.server.ssl_private_key = "privkey.pem"
cherrypy.engine.start()
cherrypy.engine.block()
| mit | 1,731,798,065,227,048,000 | 44.208122 | 204 | 0.542163 | false |
apyrgio/synnefo | snf-common/synnefo/lib/dictconfig.py | 9 | 22911 | # This is a copy of the Python logging.config.dictconfig module.
# It is provided here for backwards compatibility for Python versions
# prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import logging.handlers
import re
import sys
import types
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
def valid_ident(s):
m = IDENTIFIER.match(s)
if not m:
raise ValueError('Not a valid Python identifier: %r' % s)
return True
#
# This function is defined in logging only in recent versions of Python
#
try:
from logging import _checkLevel
except ImportError:
def _checkLevel(level):
if isinstance(level, int):
rv = level
elif str(level) == level:
if level not in logging._levelNames:
raise ValueError('Unknown level: %r' % level)
rv = logging._levelNames[level]
else:
raise TypeError('Level not an integer or a '
'valid string: %r' % level)
return rv
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
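# Hypothetical example: once wired to a configurator, a config value of
# 'ext://sys.stderr' converts to the sys.stderr object, and
# 'cfg://handlers.console' converts to config['handlers']['console'].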
class ConvertingDict(dict):
"""A converting dictionary wrapper."""
def __getitem__(self, key):
value = dict.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def get(self, key, default=None):
value = dict.get(self, key, default)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, key, default=None):
value = dict.pop(self, key, default)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class ConvertingList(list):
"""A converting list wrapper."""
def __getitem__(self, key):
value = list.__getitem__(self, key)
result = self.configurator.convert(value)
#If the converted value is different, save for next time
if value is not result:
self[key] = result
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
def pop(self, idx=-1):
value = list.pop(self, idx)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
return result
class ConvertingTuple(tuple):
"""A converting tuple wrapper."""
def __getitem__(self, key):
value = tuple.__getitem__(self, key)
result = self.configurator.convert(value)
if value is not result:
if type(result) in (ConvertingDict, ConvertingList,
ConvertingTuple):
result.parent = self
result.key = key
return result
class BaseConfigurator(object):
"""
The configurator base class which defines some useful defaults.
"""
CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
DIGIT_PATTERN = re.compile(r'^\d+$')
value_converters = {
'ext' : 'ext_convert',
'cfg' : 'cfg_convert',
}
# We might want to use a different one, e.g. importlib
importer = __import__
def __init__(self, config):
self.config = ConvertingDict(config)
self.config.configurator = self
def resolve(self, s):
"""
Resolve strings to objects using standard import and attribute
syntax.
"""
name = s.split('.')
used = name.pop(0)
try:
found = self.importer(used)
for frag in name:
used += '.' + frag
try:
found = getattr(found, frag)
except AttributeError:
self.importer(used)
found = getattr(found, frag)
return found
except ImportError:
e, tb = sys.exc_info()[1:]
v = ValueError('Cannot resolve %r: %s' % (s, e))
v.__cause__, v.__traceback__ = e, tb
raise v
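    # e.g. resolve('logging.handlers.RotatingFileHandler') imports
    # logging.handlers and returns the RotatingFileHandler class.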
def ext_convert(self, value):
"""Default converter for the ext:// protocol."""
return self.resolve(value)
def cfg_convert(self, value):
"""Default converter for the cfg:// protocol."""
rest = value
m = self.WORD_PATTERN.match(rest)
if m is None:
raise ValueError("Unable to convert %r" % value)
else:
rest = rest[m.end():]
d = self.config[m.groups()[0]]
#print d, rest
while rest:
m = self.DOT_PATTERN.match(rest)
if m:
d = d[m.groups()[0]]
else:
m = self.INDEX_PATTERN.match(rest)
if m:
idx = m.groups()[0]
if not self.DIGIT_PATTERN.match(idx):
d = d[idx]
else:
try:
n = int(idx) # try as number first (most likely)
d = d[n]
except TypeError:
d = d[idx]
if m:
rest = rest[m.end():]
else:
raise ValueError('Unable to convert '
'%r at %r' % (value, rest))
#rest should be empty
return d
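    # e.g. cfg_convert('handlers.email[0]') walks
    # self.config['handlers']['email'][0] (sketch; the cfg:// prefix is
    # stripped by convert() before this method is called).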
def convert(self, value):
"""
Convert values to an appropriate type. dicts, lists and tuples are
replaced by their converting alternatives. Strings are checked to
see if they have a conversion format and are converted if they do.
"""
if not isinstance(value, ConvertingDict) and isinstance(value, dict):
value = ConvertingDict(value)
value.configurator = self
elif not isinstance(value, ConvertingList) and isinstance(value, list):
value = ConvertingList(value)
value.configurator = self
elif not isinstance(value, ConvertingTuple) and\
isinstance(value, tuple):
value = ConvertingTuple(value)
value.configurator = self
elif isinstance(value, basestring): # str for py3k
m = self.CONVERT_PATTERN.match(value)
if m:
d = m.groupdict()
prefix = d['prefix']
converter = self.value_converters.get(prefix, None)
if converter:
suffix = d['suffix']
converter = getattr(self, converter)
value = converter(suffix)
return value
def configure_custom(self, config):
"""Configure an object with a user-supplied factory."""
c = config.pop('()')
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
c = self.resolve(c)
props = config.pop('.', None)
# Check for valid identifiers
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
result = c(**kwargs)
if props:
for name, value in props.items():
setattr(result, name, value)
return result
def as_tuple(self, value):
"""Utility function which converts lists to tuples."""
if isinstance(value, list):
value = tuple(value)
return value
class DictConfigurator(BaseConfigurator):
"""
Configure logging using a dictionary-like object to describe the
configuration.
"""
def configure(self):
"""Do the configuration."""
config = self.config
if 'version' not in config:
raise ValueError("dictionary doesn't specify a version")
if config['version'] != 1:
raise ValueError("Unsupported version: %s" % config['version'])
incremental = config.pop('incremental', False)
EMPTY_DICT = {}
logging._acquireLock()
try:
if incremental:
handlers = config.get('handlers', EMPTY_DICT)
# incremental handler config only if handler name
# ties in to logging._handlers (Python 2.7)
if sys.version_info[:2] == (2, 7):
for name in handlers:
if name not in logging._handlers:
raise ValueError('No handler found with '
'name %r' % name)
else:
try:
handler = logging._handlers[name]
handler_config = handlers[name]
level = handler_config.get('level', None)
if level:
handler.setLevel(_checkLevel(level))
except StandardError, e:
raise ValueError('Unable to configure handler '
'%r: %s' % (name, e))
loggers = config.get('loggers', EMPTY_DICT)
for name in loggers:
try:
self.configure_logger(name, loggers[name], True)
except StandardError, e:
raise ValueError('Unable to configure logger '
'%r: %s' % (name, e))
root = config.get('root', None)
if root:
try:
self.configure_root(root, True)
except StandardError, e:
raise ValueError('Unable to configure root '
'logger: %s' % e)
else:
disable_existing = config.pop('disable_existing_loggers', True)
logging._handlers.clear()
del logging._handlerList[:]
# Do formatters first - they don't refer to anything else
formatters = config.get('formatters', EMPTY_DICT)
for name in formatters:
try:
formatters[name] = self.configure_formatter(
formatters[name])
except StandardError, e:
raise ValueError('Unable to configure '
'formatter %r: %s' % (name, e))
# Next, do filters - they don't refer to anything else, either
filters = config.get('filters', EMPTY_DICT)
for name in filters:
try:
filters[name] = self.configure_filter(filters[name])
except StandardError, e:
raise ValueError('Unable to configure '
'filter %r: %s' % (name, e))
# Next, do handlers - they refer to formatters and filters
# As handlers can refer to other handlers, sort the keys
# to allow a deterministic order of configuration
handlers = config.get('handlers', EMPTY_DICT)
for name in sorted(handlers):
try:
handler = self.configure_handler(handlers[name])
handler.name = name
handlers[name] = handler
except StandardError, e:
raise ValueError('Unable to configure handler '
'%r: %s' % (name, e))
# Next, do loggers - they refer to handlers and filters
#we don't want to lose the existing loggers,
#since other threads may have pointers to them.
#existing is set to contain all existing loggers,
#and as we go through the new configuration we
#remove any which are configured. At the end,
#what's left in existing is the set of loggers
#which were in the previous configuration but
#which are not in the new configuration.
root = logging.root
existing = root.manager.loggerDict.keys()
#The list needs to be sorted so that we can
#avoid disabling child loggers of explicitly
#named loggers. With a sorted list it is easier
#to find the child loggers.
existing.sort()
#We'll keep the list of existing loggers
#which are children of named loggers here...
child_loggers = []
#now set up the new ones...
loggers = config.get('loggers', EMPTY_DICT)
for name in loggers:
if name in existing:
i = existing.index(name)
prefixed = name + "."
pflen = len(prefixed)
num_existing = len(existing)
i = i + 1 # look at the entry after name
while (i < num_existing) and\
(existing[i][:pflen] == prefixed):
child_loggers.append(existing[i])
i = i + 1
existing.remove(name)
try:
self.configure_logger(name, loggers[name])
except StandardError, e:
raise ValueError('Unable to configure logger '
'%r: %s' % (name, e))
#Disable any old loggers. There's no point deleting
#them as other threads may continue to hold references
#and by disabling them, you stop them doing any logging.
#However, don't disable children of named loggers, as that's
#probably not what was intended by the user.
for log in existing:
logger = root.manager.loggerDict[log]
if log in child_loggers:
logger.level = logging.NOTSET
logger.handlers = []
logger.propagate = True
elif disable_existing:
logger.disabled = True
# And finally, do the root logger
root = config.get('root', None)
if root:
try:
self.configure_root(root)
except StandardError, e:
raise ValueError('Unable to configure root '
'logger: %s' % e)
finally:
logging._releaseLock()
def configure_formatter(self, config):
"""Configure a formatter from a dictionary."""
if '()' in config:
factory = config['()'] # for use in exception handler
try:
result = self.configure_custom(config)
except TypeError, te:
if "'format'" not in str(te):
raise
#Name of parameter changed from fmt to format.
#Retry with old name.
#This is so that code can be used with older Python versions
#(e.g. by Django)
config['fmt'] = config.pop('format')
config['()'] = factory
result = self.configure_custom(config)
else:
fmt = config.get('format', None)
dfmt = config.get('datefmt', None)
result = logging.Formatter(fmt, dfmt)
return result
def configure_filter(self, config):
"""Configure a filter from a dictionary."""
if '()' in config:
result = self.configure_custom(config)
else:
name = config.get('name', '')
result = logging.Filter(name)
return result
def add_filters(self, filterer, filters):
"""Add filters to a filterer from a list of names."""
for f in filters:
try:
filterer.addFilter(self.config['filters'][f])
except StandardError, e:
raise ValueError('Unable to add filter %r: %s' % (f, e))
def configure_handler(self, config):
"""Configure a handler from a dictionary."""
formatter = config.pop('formatter', None)
if formatter:
try:
formatter = self.config['formatters'][formatter]
except StandardError, e:
raise ValueError('Unable to set formatter '
'%r: %s' % (formatter, e))
level = config.pop('level', None)
filters = config.pop('filters', None)
if '()' in config:
c = config.pop('()')
if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
c = self.resolve(c)
factory = c
else:
klass = self.resolve(config.pop('class'))
#Special case for handler which refers to another handler
if issubclass(klass, logging.handlers.MemoryHandler) and\
'target' in config:
try:
config['target'] = self.config['handlers'][config['target']]
except StandardError, e:
raise ValueError('Unable to set target handler '
'%r: %s' % (config['target'], e))
elif issubclass(klass, logging.handlers.SMTPHandler) and\
'mailhost' in config:
config['mailhost'] = self.as_tuple(config['mailhost'])
elif issubclass(klass, logging.handlers.SysLogHandler) and\
'address' in config:
config['address'] = self.as_tuple(config['address'])
factory = klass
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
try:
result = factory(**kwargs)
except TypeError, te:
if "'stream'" not in str(te):
raise
#The argument name changed from strm to stream
#Retry with old name.
#This is so that code can be used with older Python versions
#(e.g. by Django)
kwargs['strm'] = kwargs.pop('stream')
result = factory(**kwargs)
if formatter:
result.setFormatter(formatter)
if level is not None:
result.setLevel(_checkLevel(level))
if filters:
self.add_filters(result, filters)
return result
def add_handlers(self, logger, handlers):
"""Add handlers to a logger from a list of names."""
for h in handlers:
try:
logger.addHandler(self.config['handlers'][h])
except StandardError, e:
raise ValueError('Unable to add handler %r: %s' % (h, e))
def common_logger_config(self, logger, config, incremental=False):
"""
Perform configuration which is common to root and non-root loggers.
"""
level = config.get('level', None)
if level is not None:
logger.setLevel(_checkLevel(level))
if not incremental:
#Remove any existing handlers
for h in logger.handlers[:]:
logger.removeHandler(h)
handlers = config.get('handlers', None)
if handlers:
self.add_handlers(logger, handlers)
filters = config.get('filters', None)
if filters:
self.add_filters(logger, filters)
def configure_logger(self, name, config, incremental=False):
"""Configure a non-root logger from a dictionary."""
logger = logging.getLogger(name)
self.common_logger_config(logger, config, incremental)
propagate = config.get('propagate', None)
if propagate is not None:
logger.propagate = propagate
def configure_root(self, config, incremental=False):
"""Configure a root logger from a dictionary."""
root = logging.getLogger()
self.common_logger_config(root, config, incremental)
dictConfigClass = DictConfigurator
def dictConfig(config):
"""Configure logging using a dictionary."""
dictConfigClass(config).configure()
| gpl-3.0 | 1,480,099,828,052,946,400 | 40.43038 | 105 | 0.524508 | false |
ity/pants | src/python/pants/backend/codegen/tasks/apache_thrift_gen.py | 1 | 6470 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import re
import shutil
import subprocess
from twitter.common.collections import OrderedSet
from pants.backend.codegen.subsystems.thrift_defaults import ThriftDefaults
from pants.backend.codegen.targets.java_thrift_library import JavaThriftLibrary
from pants.backend.codegen.tasks.simple_codegen_task import SimpleCodegenTask
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TargetDefinitionException, TaskError
from pants.base.workunit import WorkUnitLabel
from pants.binaries.thrift_binary import ThriftBinary
from pants.util.memo import memoized_property
class ApacheThriftGen(SimpleCodegenTask):
_COMPILER = 'thrift'
_LANG = 'java'
_RPC_STYLE = 'sync'
@classmethod
def register_options(cls, register):
super(ApacheThriftGen, cls).register_options(register)
# NB: As of thrift 0.9.2 there is 1 warning that -strict promotes to an error - missing a
# struct field id. If an artifact was cached with strict off, we must re-gen with strict on
# since this case may be present and need to generate a thrift compile error.
register('--strict', default=True, fingerprint=True, type=bool,
help='Run thrift compiler with strict warnings.')
register('--gen-options', advanced=True, fingerprint=True,
help='Use these apache thrift java gen options.')
register('--deps', advanced=True, type=list,
help='A list of specs pointing to dependencies of thrift generated java code.')
register('--service-deps', advanced=True, type=list,
help='A list of specs pointing to dependencies of thrift generated java service '
'code. If not supplied, then --deps will be used for service deps.')
@classmethod
def global_subsystems(cls):
return super(ApacheThriftGen, cls).global_subsystems() + (ThriftDefaults,)
@classmethod
def task_subsystems(cls):
return super(ApacheThriftGen, cls).task_subsystems() + (ThriftBinary.Factory,)
@classmethod
def subsystem_dependencies(cls):
return (super(ApacheThriftGen, cls).subsystem_dependencies() +
(ThriftDefaults, ThriftBinary.Factory.scoped(cls)))
@classmethod
def implementation_version(cls):
return super(ApacheThriftGen, cls).implementation_version() + [('ApacheThriftGen', 2)]
def __init__(self, *args, **kwargs):
super(ApacheThriftGen, self).__init__(*args, **kwargs)
self._thrift_defaults = ThriftDefaults.global_instance()
@memoized_property
def _thrift_binary(self):
thrift_binary = ThriftBinary.Factory.scoped_instance(self).create()
return thrift_binary.path
@memoized_property
def _deps(self):
deps = self.get_options().deps
return list(self.resolve_deps(deps)) if deps else []
@memoized_property
def _service_deps(self):
service_deps = self.get_options().service_deps
return list(self.resolve_deps(service_deps)) if service_deps else self._deps
SERVICE_PARSER = re.compile(r'^\s*service\s+(?:[^\s{]+)')
def _declares_service(self, source):
with open(source) as thrift:
return any(line for line in thrift if self.SERVICE_PARSER.search(line))
def synthetic_target_extra_dependencies(self, target, target_workdir):
for source in target.sources_relative_to_buildroot():
if self._declares_service(os.path.join(get_buildroot(), source)):
return self._service_deps
return self._deps
def synthetic_target_type(self, target):
return JavaLibrary
def is_gentarget(self, target):
return (isinstance(target, JavaThriftLibrary) and
self._COMPILER == self._thrift_defaults.compiler(target))
def _validate(self, target):
if self._thrift_defaults.language(target) != self._LANG:
raise TargetDefinitionException(
target,
'Compiler {} supports only language={}.'.format(self._COMPILER, self._LANG))
if self._thrift_defaults.rpc_style(target) != self._RPC_STYLE:
raise TargetDefinitionException(
target,
'Compiler {} supports only rpc_style={}.'.format(self._COMPILER, self._RPC_STYLE))
@memoized_property
def _thrift_cmd(self):
cmd = [self._thrift_binary]
gen_options = self.get_options().gen_options
cmd.extend(('--gen', 'java:{}'.format(gen_options) if gen_options else self._LANG))
if self.get_options().strict:
cmd.append('-strict')
if self.get_options().level == 'debug':
cmd.append('-verbose')
return cmd
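  # Example of the resulting invocation (paths are illustrative):
  #   thrift --gen java -strict -I <base> -o <target_workdir> <src>.thrift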
def _generate_thrift(self, target, target_workdir):
target_cmd = self._thrift_cmd[:]
bases = OrderedSet(tgt.target_base for tgt in target.closure() if self.is_gentarget(tgt))
for base in bases:
target_cmd.extend(('-I', base))
target_cmd.extend(('-o', target_workdir))
for source in target.sources_relative_to_buildroot():
cmd = target_cmd[:]
cmd.append(os.path.join(get_buildroot(), source))
with self.context.new_workunit(name=source,
labels=[WorkUnitLabel.TOOL],
cmd=' '.join(cmd)) as workunit:
result = subprocess.call(cmd,
stdout=workunit.output('stdout'),
stderr=workunit.output('stderr'))
if result != 0:
raise TaskError('{} ... exited non-zero ({})'.format(self._thrift_binary, result))
# The thrift compiler generates sources to a gen-[lang] subdir of the `-o` argument. We
# relocate the generated java sources to the root of the `target_workdir` so that our base class
# maps them properly for source jars and the like.
gen_dir = os.path.join(target_workdir, 'gen-java')
for path in os.listdir(gen_dir):
shutil.move(os.path.join(gen_dir, path), target_workdir)
os.rmdir(gen_dir)
def execute_codegen(self, target, target_workdir):
self._validate(target)
self._generate_thrift(target, target_workdir)
@property
def _copy_target_attributes(self):
"""Propagate the provides attribute to the synthetic java_library() target for publishing."""
return ['provides']
| apache-2.0 | -5,773,585,216,296,320,000 | 38.45122 | 100 | 0.687172 | false |
bgarrels/sky | sky/standalone/lxmlTree.py | 2 | 2580 | #!/usr/bin/env python3
import asciitree
class Node(object):
def __init__(self, name, children):
self.name = name
self.children = children
def __str__(self):
return(self.name)
def lineage(self):
total = [self.name] + [y.name for y in self.children]
for x in self.children:
total += x.lineage()
return(total)
def lxml_get_name(x, namedAttrs):
my_string = x.tag
if not isinstance(my_string, str):
my_string = 'comment'
if namedAttrs:
for key, value in x.items():
if key in namedAttrs:
my_string += ', ' + key[0] + '=' + value
return(my_string)
def lxml_traverser(parent, graph, simplify, namedAttrs):
graph = []
for x in parent:
my_string = lxml_get_name(x, namedAttrs)
graph.append(Node(my_string, lxml_traverser(x, graph, simplify, namedAttrs)))
if not simplify:
return(graph)
pruned_graph = []
watcher = {}
for x in graph:
blood = "".join(x.lineage())
if blood not in watcher:
watcher[blood] = 1
else:
watcher[blood] += 1
new_watcher = []
for x in graph:
blood = "".join(x.lineage())
if blood not in new_watcher:
new_watcher.append(blood)
pruned_graph.append(Node(x.name + " (" + str(watcher[blood]) + ")", x.children))
return(pruned_graph)
def lxmlTree(lxmls, returning=False, printing=True, simplify=True, namedAttrs=None):
if namedAttrs is None:
namedAttrs = ['class', 'id']
outps = []
max_lens = 0
if not isinstance(lxmls, list):
lxmls = [lxmls]
for num, lxml in enumerate(lxmls):
z = lxml_traverser(lxml, [], simplify, namedAttrs)
#outp = pinpoint(lxmls, num)
outp = asciitree.draw_tree(Node(lxml_get_name(lxml, namedAttrs), z))
max_lens = max(max_lens, max([len(x) for x in outp.split('\n')]))
num += 1
if num * (max_lens + 10) > 270:
print('can only fit', num - 1, 'out of', len(lxmls))
break
outps.append(outp.split('\n'))
newoutps = []
max_lines = max([len(x) for x in outps])
for i in range(max_lines):
tmp = ""
for x in outps:
try:
tmp += '{:<{}}'.format(x[i], max_lens + 10)
except IndexError:
tmp += '{:<{}}'.format('', max_lens + 10)
newoutps.append(tmp)
if printing:
print('\n', "\n".join(newoutps))
if returning:
return("\n".join(newoutps))
| bsd-3-clause | 5,129,618,601,137,610,000 | 29 | 92 | 0.540698 | false |
jjas0nn/solvem | tensorflow/lib/python2.7/site-packages/numpy/polynomial/tests/test_hermite.py | 58 | 18297 | """Tests for hermite module.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
import numpy.polynomial.hermite as herm
from numpy.polynomial.polynomial import polyval
from numpy.testing import (
TestCase, assert_almost_equal, assert_raises,
assert_equal, assert_, run_module_suite)
H0 = np.array([1])
H1 = np.array([0, 2])
H2 = np.array([-2, 0, 4])
H3 = np.array([0, -12, 0, 8])
H4 = np.array([12, 0, -48, 0, 16])
H5 = np.array([0, 120, 0, -160, 0, 32])
H6 = np.array([-120, 0, 720, 0, -480, 0, 64])
H7 = np.array([0, -1680, 0, 3360, 0, -1344, 0, 128])
H8 = np.array([1680, 0, -13440, 0, 13440, 0, -3584, 0, 256])
H9 = np.array([0, 30240, 0, -80640, 0, 48384, 0, -9216, 0, 512])
Hlist = [H0, H1, H2, H3, H4, H5, H6, H7, H8, H9]
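# The arrays above are the physicists' Hermite polynomials in the power
# basis, e.g. H2 = [-2, 0, 4] encodes H_2(x) = 4*x**2 - 2.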
def trim(x):
return herm.hermtrim(x, tol=1e-6)
class TestConstants(TestCase):
def test_hermdomain(self):
assert_equal(herm.hermdomain, [-1, 1])
def test_hermzero(self):
assert_equal(herm.hermzero, [0])
def test_hermone(self):
assert_equal(herm.hermone, [1])
def test_hermx(self):
assert_equal(herm.hermx, [0, .5])
class TestArithmetic(TestCase):
x = np.linspace(-3, 3, 100)
def test_hermadd(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
tgt = np.zeros(max(i, j) + 1)
tgt[i] += 1
tgt[j] += 1
res = herm.hermadd([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_hermsub(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
tgt = np.zeros(max(i, j) + 1)
tgt[i] += 1
tgt[j] -= 1
res = herm.hermsub([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_hermmulx(self):
assert_equal(herm.hermmulx([0]), [0])
assert_equal(herm.hermmulx([1]), [0, .5])
for i in range(1, 5):
ser = [0]*i + [1]
tgt = [0]*(i - 1) + [i, 0, .5]
assert_equal(herm.hermmulx(ser), tgt)
def test_hermmul(self):
# check values of result
for i in range(5):
pol1 = [0]*i + [1]
val1 = herm.hermval(self.x, pol1)
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
pol2 = [0]*j + [1]
val2 = herm.hermval(self.x, pol2)
pol3 = herm.hermmul(pol1, pol2)
val3 = herm.hermval(self.x, pol3)
assert_(len(pol3) == i + j + 1, msg)
assert_almost_equal(val3, val1*val2, err_msg=msg)
def test_hermdiv(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
ci = [0]*i + [1]
cj = [0]*j + [1]
tgt = herm.hermadd(ci, cj)
quo, rem = herm.hermdiv(tgt, ci)
res = herm.hermadd(herm.hermmul(quo, ci), rem)
assert_equal(trim(res), trim(tgt), err_msg=msg)
class TestEvaluation(TestCase):
# coefficients of 1 + 2*x + 3*x**2
c1d = np.array([2.5, 1., .75])
c2d = np.einsum('i,j->ij', c1d, c1d)
c3d = np.einsum('i,j,k->ijk', c1d, c1d, c1d)
# some random values in [-1, 1)
x = np.random.random((3, 5))*2 - 1
y = polyval(x, [1., 2., 3.])
def test_hermval(self):
#check empty input
assert_equal(herm.hermval([], [1]).size, 0)
#check normal input)
x = np.linspace(-1, 1)
y = [polyval(x, c) for c in Hlist]
for i in range(10):
msg = "At i=%d" % i
tgt = y[i]
res = herm.hermval(x, [0]*i + [1])
assert_almost_equal(res, tgt, err_msg=msg)
#check that shape is preserved
for i in range(3):
dims = [2]*i
x = np.zeros(dims)
assert_equal(herm.hermval(x, [1]).shape, dims)
assert_equal(herm.hermval(x, [1, 0]).shape, dims)
assert_equal(herm.hermval(x, [1, 0, 0]).shape, dims)
def test_hermval2d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test exceptions
assert_raises(ValueError, herm.hermval2d, x1, x2[:2], self.c2d)
#test values
tgt = y1*y2
res = herm.hermval2d(x1, x2, self.c2d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2, 3))
res = herm.hermval2d(z, z, self.c2d)
assert_(res.shape == (2, 3))
def test_hermval3d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test exceptions
assert_raises(ValueError, herm.hermval3d, x1, x2, x3[:2], self.c3d)
#test values
tgt = y1*y2*y3
res = herm.hermval3d(x1, x2, x3, self.c3d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2, 3))
res = herm.hermval3d(z, z, z, self.c3d)
assert_(res.shape == (2, 3))
def test_hermgrid2d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test values
tgt = np.einsum('i,j->ij', y1, y2)
res = herm.hermgrid2d(x1, x2, self.c2d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2, 3))
res = herm.hermgrid2d(z, z, self.c2d)
assert_(res.shape == (2, 3)*2)
def test_hermgrid3d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test values
tgt = np.einsum('i,j,k->ijk', y1, y2, y3)
res = herm.hermgrid3d(x1, x2, x3, self.c3d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2, 3))
res = herm.hermgrid3d(z, z, z, self.c3d)
assert_(res.shape == (2, 3)*3)
class TestIntegral(TestCase):
def test_hermint(self):
# check exceptions
assert_raises(ValueError, herm.hermint, [0], .5)
assert_raises(ValueError, herm.hermint, [0], -1)
assert_raises(ValueError, herm.hermint, [0], 1, [0, 0])
# test integration of zero polynomial
for i in range(2, 5):
k = [0]*(i - 2) + [1]
res = herm.hermint([0], m=i, k=k)
assert_almost_equal(res, [0, .5])
# check single integration with integration constant
for i in range(5):
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [1/scl]
hermpol = herm.poly2herm(pol)
hermint = herm.hermint(hermpol, m=1, k=[i])
res = herm.herm2poly(hermint)
assert_almost_equal(trim(res), trim(tgt))
# check single integration with integration constant and lbnd
for i in range(5):
scl = i + 1
pol = [0]*i + [1]
hermpol = herm.poly2herm(pol)
hermint = herm.hermint(hermpol, m=1, k=[i], lbnd=-1)
assert_almost_equal(herm.hermval(-1, hermint), i)
# check single integration with integration constant and scaling
for i in range(5):
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [2/scl]
hermpol = herm.poly2herm(pol)
hermint = herm.hermint(hermpol, m=1, k=[i], scl=2)
res = herm.herm2poly(hermint)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with default k
for i in range(5):
for j in range(2, 5):
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j):
tgt = herm.hermint(tgt, m=1)
res = herm.hermint(pol, m=j)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with defined k
for i in range(5):
for j in range(2, 5):
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j):
tgt = herm.hermint(tgt, m=1, k=[k])
res = herm.hermint(pol, m=j, k=list(range(j)))
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with lbnd
for i in range(5):
for j in range(2, 5):
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j):
tgt = herm.hermint(tgt, m=1, k=[k], lbnd=-1)
res = herm.hermint(pol, m=j, k=list(range(j)), lbnd=-1)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with scaling
for i in range(5):
for j in range(2, 5):
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j):
tgt = herm.hermint(tgt, m=1, k=[k], scl=2)
res = herm.hermint(pol, m=j, k=list(range(j)), scl=2)
assert_almost_equal(trim(res), trim(tgt))
def test_hermint_axis(self):
# check that axis keyword works
c2d = np.random.random((3, 4))
tgt = np.vstack([herm.hermint(c) for c in c2d.T]).T
res = herm.hermint(c2d, axis=0)
assert_almost_equal(res, tgt)
tgt = np.vstack([herm.hermint(c) for c in c2d])
res = herm.hermint(c2d, axis=1)
assert_almost_equal(res, tgt)
tgt = np.vstack([herm.hermint(c, k=3) for c in c2d])
res = herm.hermint(c2d, k=3, axis=1)
assert_almost_equal(res, tgt)
class TestDerivative(TestCase):
def test_hermder(self):
# check exceptions
assert_raises(ValueError, herm.hermder, [0], .5)
assert_raises(ValueError, herm.hermder, [0], -1)
# check that zeroth derivative does nothing
for i in range(5):
tgt = [0]*i + [1]
res = herm.hermder(tgt, m=0)
assert_equal(trim(res), trim(tgt))
# check that derivation is the inverse of integration
for i in range(5):
for j in range(2, 5):
tgt = [0]*i + [1]
res = herm.hermder(herm.hermint(tgt, m=j), m=j)
assert_almost_equal(trim(res), trim(tgt))
# check derivation with scaling
for i in range(5):
for j in range(2, 5):
tgt = [0]*i + [1]
res = herm.hermder(herm.hermint(tgt, m=j, scl=2), m=j, scl=.5)
assert_almost_equal(trim(res), trim(tgt))
def test_hermder_axis(self):
# check that axis keyword works
c2d = np.random.random((3, 4))
tgt = np.vstack([herm.hermder(c) for c in c2d.T]).T
res = herm.hermder(c2d, axis=0)
assert_almost_equal(res, tgt)
tgt = np.vstack([herm.hermder(c) for c in c2d])
res = herm.hermder(c2d, axis=1)
assert_almost_equal(res, tgt)
class TestVander(TestCase):
# some random values in [-1, 1)
x = np.random.random((3, 5))*2 - 1
def test_hermvander(self):
# check for 1d x
x = np.arange(3)
v = herm.hermvander(x, 3)
assert_(v.shape == (3, 4))
for i in range(4):
coef = [0]*i + [1]
assert_almost_equal(v[..., i], herm.hermval(x, coef))
# check for 2d x
x = np.array([[1, 2], [3, 4], [5, 6]])
v = herm.hermvander(x, 3)
assert_(v.shape == (3, 2, 4))
for i in range(4):
coef = [0]*i + [1]
assert_almost_equal(v[..., i], herm.hermval(x, coef))
def test_hermvander2d(self):
# also tests hermval2d for non-square coefficient array
x1, x2, x3 = self.x
c = np.random.random((2, 3))
van = herm.hermvander2d(x1, x2, [1, 2])
tgt = herm.hermval2d(x1, x2, c)
res = np.dot(van, c.flat)
assert_almost_equal(res, tgt)
# check shape
van = herm.hermvander2d([x1], [x2], [1, 2])
assert_(van.shape == (1, 5, 6))
def test_hermvander3d(self):
# also tests hermval3d for non-square coefficient array
x1, x2, x3 = self.x
c = np.random.random((2, 3, 4))
van = herm.hermvander3d(x1, x2, x3, [1, 2, 3])
tgt = herm.hermval3d(x1, x2, x3, c)
res = np.dot(van, c.flat)
assert_almost_equal(res, tgt)
# check shape
van = herm.hermvander3d([x1], [x2], [x3], [1, 2, 3])
assert_(van.shape == (1, 5, 24))
class TestFitting(TestCase):
def test_hermfit(self):
def f(x):
return x*(x - 1)*(x - 2)
def f2(x):
return x**4 + x**2 + 1
# Test exceptions
assert_raises(ValueError, herm.hermfit, [1], [1], -1)
assert_raises(TypeError, herm.hermfit, [[1]], [1], 0)
assert_raises(TypeError, herm.hermfit, [], [1], 0)
assert_raises(TypeError, herm.hermfit, [1], [[[1]]], 0)
assert_raises(TypeError, herm.hermfit, [1, 2], [1], 0)
assert_raises(TypeError, herm.hermfit, [1], [1, 2], 0)
assert_raises(TypeError, herm.hermfit, [1], [1], 0, w=[[1]])
assert_raises(TypeError, herm.hermfit, [1], [1], 0, w=[1, 1])
assert_raises(ValueError, herm.hermfit, [1], [1], [-1,])
assert_raises(ValueError, herm.hermfit, [1], [1], [2, -1, 6])
assert_raises(TypeError, herm.hermfit, [1], [1], [])
# Test fit
x = np.linspace(0, 2)
y = f(x)
#
coef3 = herm.hermfit(x, y, 3)
assert_equal(len(coef3), 4)
assert_almost_equal(herm.hermval(x, coef3), y)
coef3 = herm.hermfit(x, y, [0, 1, 2, 3])
assert_equal(len(coef3), 4)
assert_almost_equal(herm.hermval(x, coef3), y)
#
coef4 = herm.hermfit(x, y, 4)
assert_equal(len(coef4), 5)
assert_almost_equal(herm.hermval(x, coef4), y)
coef4 = herm.hermfit(x, y, [0, 1, 2, 3, 4])
assert_equal(len(coef4), 5)
assert_almost_equal(herm.hermval(x, coef4), y)
# check things still work if deg is not in strict increasing
coef4 = herm.hermfit(x, y, [2, 3, 4, 1, 0])
assert_equal(len(coef4), 5)
assert_almost_equal(herm.hermval(x, coef4), y)
#
coef2d = herm.hermfit(x, np.array([y, y]).T, 3)
assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
coef2d = herm.hermfit(x, np.array([y, y]).T, [0, 1, 2, 3])
assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
# test weighting
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
        yw[0::2] = 0
wcoef3 = herm.hermfit(x, yw, 3, w=w)
assert_almost_equal(wcoef3, coef3)
wcoef3 = herm.hermfit(x, yw, [0, 1, 2, 3], w=w)
assert_almost_equal(wcoef3, coef3)
#
wcoef2d = herm.hermfit(x, np.array([yw, yw]).T, 3, w=w)
assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
wcoef2d = herm.hermfit(x, np.array([yw, yw]).T, [0, 1, 2, 3], w=w)
assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
        # test scaling with complex-valued x points whose squares
        # sum to zero
x = [1, 1j, -1, -1j]
assert_almost_equal(herm.hermfit(x, x, 1), [0, .5])
assert_almost_equal(herm.hermfit(x, x, [0, 1]), [0, .5])
        # test fitting only even Hermite polynomials
x = np.linspace(-1, 1)
y = f2(x)
coef1 = herm.hermfit(x, y, 4)
assert_almost_equal(herm.hermval(x, coef1), y)
coef2 = herm.hermfit(x, y, [0, 2, 4])
assert_almost_equal(herm.hermval(x, coef2), y)
assert_almost_equal(coef1, coef2)
class TestCompanion(TestCase):
def test_raises(self):
assert_raises(ValueError, herm.hermcompanion, [])
assert_raises(ValueError, herm.hermcompanion, [1])
def test_dimensions(self):
for i in range(1, 5):
coef = [0]*i + [1]
assert_(herm.hermcompanion(coef).shape == (i, i))
def test_linear_root(self):
assert_(herm.hermcompanion([1, 2])[0, 0] == -.25)
class TestGauss(TestCase):
def test_100(self):
x, w = herm.hermgauss(100)
# test orthogonality. Note that the results need to be normalized,
# otherwise the huge values that can arise from fast growing
# functions like Laguerre can be very confusing.
v = herm.hermvander(x, 99)
vv = np.dot(v.T * w, v)
vd = 1/np.sqrt(vv.diagonal())
vv = vd[:, None] * vv * vd
assert_almost_equal(vv, np.eye(100))
# check that the integral of 1 is correct
tgt = np.sqrt(np.pi)
assert_almost_equal(w.sum(), tgt)
class TestMisc(TestCase):
def test_hermfromroots(self):
res = herm.hermfromroots([])
assert_almost_equal(trim(res), [1])
for i in range(1, 5):
roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
pol = herm.hermfromroots(roots)
res = herm.hermval(roots, pol)
tgt = 0
assert_(len(pol) == i + 1)
assert_almost_equal(herm.herm2poly(pol)[-1], 1)
assert_almost_equal(res, tgt)
def test_hermroots(self):
assert_almost_equal(herm.hermroots([1]), [])
assert_almost_equal(herm.hermroots([1, 1]), [-.5])
for i in range(2, 5):
tgt = np.linspace(-1, 1, i)
res = herm.hermroots(herm.hermfromroots(tgt))
assert_almost_equal(trim(res), trim(tgt))
def test_hermtrim(self):
coef = [2, -1, 1, 0]
# Test exceptions
assert_raises(ValueError, herm.hermtrim, coef, -1)
# Test results
assert_equal(herm.hermtrim(coef), coef[:-1])
assert_equal(herm.hermtrim(coef, 1), coef[:-3])
assert_equal(herm.hermtrim(coef, 2), [0])
def test_hermline(self):
assert_equal(herm.hermline(3, 4), [3, 2])
def test_herm2poly(self):
for i in range(10):
assert_almost_equal(herm.herm2poly([0]*i + [1]), Hlist[i])
def test_poly2herm(self):
for i in range(10):
assert_almost_equal(herm.poly2herm(Hlist[i]), [0]*i + [1])
def test_weight(self):
x = np.linspace(-5, 5, 11)
tgt = np.exp(-x**2)
res = herm.hermweight(x)
assert_almost_equal(res, tgt)
if __name__ == "__main__":
run_module_suite()
| mit | 8,856,769,253,943,782,000 | 32.449726 | 78 | 0.513691 | false |
indygreg/pymake | pymake/process.py | 1 | 11645 | """
Skipping shell invocations is good, when possible. This wrapper around subprocess does dirty work of
parsing command lines into argv and making sure that no shell magic is being used.
"""
#TODO: ship pyprocessing?
import multiprocessing, multiprocessing.dummy
import subprocess, shlex, re, logging, sys, traceback, os, imp, glob
# XXXkhuey Work around http://bugs.python.org/issue1731717
subprocess._cleanup = lambda: None
import command, util
if sys.platform=='win32':
import win32process
_log = logging.getLogger('pymake.process')
_escapednewlines = re.compile(r'\\\n')
_blacklist = re.compile(r'[$><;[{~`|&]')
_needsglob = re.compile(r'[\*\?]')
def clinetoargv(cline):
"""
If this command line can safely skip the shell, return an argv array.
@returns argv, badchar
"""
str = _escapednewlines.sub('', cline)
m = _blacklist.search(str)
if m is not None:
return None, m.group(0)
args = shlex.split(str, comments=True)
if len(args) and args[0].find('=') != -1:
return None, '='
return args, None
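# Illustrative behavior of clinetoargv (hypothetical inputs, not taken from
# this module's callers): plain commands parse into an argv list, while any
# shell-special character aborts the fast path.
#   clinetoargv('gcc -o foo foo.c')   -> (['gcc', '-o', 'foo', 'foo.c'], None)
#   clinetoargv('cat foo | grep bar') -> (None, '|')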
def doglobbing(args, cwd):
"""
Perform any needed globbing on the argument list passed in
"""
globbedargs = []
for arg in args:
if _needsglob.search(arg):
globbedargs.extend(glob.glob(os.path.join(cwd, arg)))
else:
globbedargs.append(arg)
return globbedargs
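# Sketch of doglobbing, assuming cwd '/src' contains a.c and b.c:
#   doglobbing(['gcc', '*.c'], '/src') -> ['gcc', '/src/a.c', '/src/b.c']
# Globbed arguments come back joined onto cwd (in whatever order glob
# returns them); literal arguments pass through untouched.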
shellwords = (':', '.', 'break', 'cd', 'continue', 'exec', 'exit', 'export',
'getopts', 'hash', 'pwd', 'readonly', 'return', 'shift',
'test', 'times', 'trap', 'umask', 'unset', 'alias',
'set', 'bind', 'builtin', 'caller', 'command', 'declare',
'echo', 'enable', 'help', 'let', 'local', 'logout',
'printf', 'read', 'shopt', 'source', 'type', 'typeset',
'ulimit', 'unalias', 'set')
def call(cline, env, cwd, loc, cb, context, echo, justprint=False):
#TODO: call this once up-front somewhere and save the result?
shell, msys = util.checkmsyscompat()
shellreason = None
if msys and cline.startswith('/'):
shellreason = "command starts with /"
else:
argv, badchar = clinetoargv(cline)
if argv is None:
shellreason = "command contains shell-special character '%s'" % (badchar,)
elif len(argv) and argv[0] in shellwords:
shellreason = "command starts with shell primitive '%s'" % (argv[0],)
else:
argv = doglobbing(argv, cwd)
if shellreason is not None:
_log.debug("%s: using shell: %s: '%s'", loc, shellreason, cline)
if msys:
if len(cline) > 3 and cline[1] == ':' and cline[2] == '/':
cline = '/' + cline[0] + cline[2:]
cline = [shell, "-c", cline]
context.call(cline, shell=not msys, env=env, cwd=cwd, cb=cb, echo=echo,
justprint=justprint)
return
if not len(argv):
cb(res=0)
return
if argv[0] == command.makepypath:
command.main(argv[1:], env, cwd, cb)
return
if argv[0:2] == [sys.executable.replace('\\', '/'),
command.makepypath.replace('\\', '/')]:
command.main(argv[2:], env, cwd, cb)
return
if argv[0].find('/') != -1:
executable = util.normaljoin(cwd, argv[0])
else:
executable = None
context.call(argv, executable=executable, shell=False, env=env, cwd=cwd, cb=cb,
echo=echo, justprint=justprint)
def call_native(module, method, argv, env, cwd, loc, cb, context, echo, justprint=False,
pycommandpath=None):
argv = doglobbing(argv, cwd)
context.call_native(module, method, argv, env=env, cwd=cwd, cb=cb,
echo=echo, justprint=justprint, pycommandpath=pycommandpath)
def statustoresult(status):
"""
Convert the status returned from waitpid into a prettier numeric result.
"""
sig = status & 0xFF
if sig:
return -sig
    return status >> 8
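# Worked examples of the decoding above (standard waitpid encoding assumed:
# low byte = terminating signal, high byte = exit code):
#   statustoresult(0x0200) ->  2    (normal exit with code 2)
#   statustoresult(0x0009) -> -9    (killed by SIGKILL)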
class Job(object):
"""
A single job to be executed on the process pool.
"""
done = False # set to true when the job completes
def __init__(self):
self.exitcode = -127
def notify(self, condition, result):
condition.acquire()
self.done = True
self.exitcode = result
condition.notify()
condition.release()
def get_callback(self, condition):
return lambda result: self.notify(condition, result)
class PopenJob(Job):
"""
A job that executes a command using subprocess.Popen.
"""
def __init__(self, argv, executable, shell, env, cwd):
Job.__init__(self)
self.argv = argv
self.executable = executable
self.shell = shell
self.env = env
self.cwd = cwd
def run(self):
try:
p = subprocess.Popen(self.argv, executable=self.executable, shell=self.shell, env=self.env, cwd=self.cwd)
return p.wait()
except OSError, e:
print >>sys.stderr, e
return -127
class PythonException(Exception):
def __init__(self, message, exitcode):
Exception.__init__(self)
self.message = message
self.exitcode = exitcode
def __str__(self):
return self.message
def load_module_recursive(module, path):
"""
Emulate the behavior of __import__, but allow
passing a custom path to search for modules.
"""
bits = module.split('.')
for i, bit in enumerate(bits):
dotname = '.'.join(bits[:i+1])
try:
f, path, desc = imp.find_module(bit, path)
m = imp.load_module(dotname, f, path, desc)
if f is None:
path = m.__path__
except ImportError:
return
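# For illustration (hypothetical module name and path): calling
# load_module_recursive('foo.bar', ['/custom/commands']) imports 'foo' and
# then 'foo.bar', searching only the supplied path list, registering each
# under its dotted name in sys.modules; a missing module returns silently.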
class PythonJob(Job):
"""
A job that calls a Python method.
"""
def __init__(self, module, method, argv, env, cwd, pycommandpath=None):
self.module = module
self.method = method
self.argv = argv
self.env = env
self.cwd = cwd
self.pycommandpath = pycommandpath or []
def run(self):
oldenv = os.environ
try:
os.chdir(self.cwd)
os.environ = self.env
if self.module not in sys.modules:
load_module_recursive(self.module,
sys.path + self.pycommandpath)
if self.module not in sys.modules:
print >>sys.stderr, "No module named '%s'" % self.module
return -127
m = sys.modules[self.module]
if self.method not in m.__dict__:
print >>sys.stderr, "No method named '%s' in module %s" % (method, module)
return -127
m.__dict__[self.method](self.argv)
except PythonException, e:
print >>sys.stderr, e
return e.exitcode
except:
print >>sys.stderr, sys.exc_info()[1]
print >>sys.stderr, traceback.print_exc()
return -127
finally:
os.environ = oldenv
return 0
def job_runner(job):
"""
Run a job. Called in a Process pool.
"""
return job.run()
class ParallelContext(object):
"""
Manages the parallel execution of processes.
"""
_allcontexts = set()
_condition = multiprocessing.Condition()
def __init__(self, jcount):
self.jcount = jcount
self.exit = False
self.processpool = multiprocessing.Pool(processes=jcount)
self.threadpool = multiprocessing.dummy.Pool(processes=jcount)
self.pending = [] # list of (cb, args, kwargs)
self.running = [] # list of (subprocess, cb)
self._allcontexts.add(self)
def finish(self):
assert len(self.pending) == 0 and len(self.running) == 0, "pending: %i running: %i" % (len(self.pending), len(self.running))
self.processpool.close()
self.threadpool.close()
self.processpool.join()
self.threadpool.join()
self._allcontexts.remove(self)
def run(self):
while len(self.pending) and len(self.running) < self.jcount:
cb, args, kwargs = self.pending.pop(0)
cb(*args, **kwargs)
def defer(self, cb, *args, **kwargs):
assert self.jcount > 1 or not len(self.pending), "Serial execution error defering %r %r %r: currently pending %r" % (cb, args, kwargs, self.pending)
self.pending.append((cb, args, kwargs))
def _docall_generic(self, pool, job, cb, echo, justprint):
if echo is not None:
print echo
processcb = job.get_callback(ParallelContext._condition)
if justprint:
processcb(0)
else:
pool.apply_async(job_runner, args=(job,), callback=processcb)
self.running.append((job, cb))
def call(self, argv, shell, env, cwd, cb, echo, justprint=False, executable=None):
"""
Asynchronously call the process
"""
job = PopenJob(argv, executable=executable, shell=shell, env=env, cwd=cwd)
self.defer(self._docall_generic, self.threadpool, job, cb, echo, justprint)
def call_native(self, module, method, argv, env, cwd, cb,
echo, justprint=False, pycommandpath=None):
"""
Asynchronously call the native function
"""
job = PythonJob(module, method, argv, env, cwd, pycommandpath)
self.defer(self._docall_generic, self.processpool, job, cb, echo, justprint)
@staticmethod
def _waitany(condition):
def _checkdone():
jobs = []
for c in ParallelContext._allcontexts:
for i in xrange(0, len(c.running)):
if c.running[i][0].done:
jobs.append(c.running[i])
for j in jobs:
if j in c.running:
c.running.remove(j)
return jobs
# We must acquire the lock, and then check to see if any jobs have
# finished. If we don't check after acquiring the lock it's possible
# that all outstanding jobs will have completed before we wait and we'll
# wait for notifications that have already occurred.
condition.acquire()
jobs = _checkdone()
if jobs == []:
condition.wait()
jobs = _checkdone()
condition.release()
return jobs
@staticmethod
def spin():
"""
Spin the 'event loop', and never return.
"""
while True:
clist = list(ParallelContext._allcontexts)
for c in clist:
c.run()
dowait = util.any((len(c.running) for c in ParallelContext._allcontexts))
if dowait:
# Wait on local jobs first for perf
for job, cb in ParallelContext._waitany(ParallelContext._condition):
cb(job.exitcode)
else:
assert any(len(c.pending) for c in ParallelContext._allcontexts)
def makedeferrable(usercb, **userkwargs):
def cb(*args, **kwargs):
kwargs.update(userkwargs)
return usercb(*args, **kwargs)
return cb
_serialContext = None
_parallelContext = None
def getcontext(jcount):
global _serialContext, _parallelContext
if jcount == 1:
if _serialContext is None:
_serialContext = ParallelContext(1)
return _serialContext
else:
if _parallelContext is None:
_parallelContext = ParallelContext(jcount)
return _parallelContext
| mit | 4,634,528,347,883,023,000 | 31.257618 | 156 | 0.573207 | false |
willthames/ansible | lib/ansible/modules/monitoring/pagerduty.py | 77 | 9529 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: pagerduty
short_description: Create PagerDuty maintenance windows
description:
- This module will let you create PagerDuty maintenance windows
version_added: "1.2"
author:
- "Andrew Newdigate (@suprememoocow)"
- "Dylan Silva (@thaumos)"
- "Justin Johns"
- "Bruce Pennypacker"
requirements:
- PagerDuty API access
options:
state:
description:
- Create a maintenance window or get a list of ongoing windows.
required: true
default: null
choices: [ "running", "started", "ongoing", "absent" ]
aliases: []
name:
description:
- PagerDuty unique subdomain.
required: true
default: null
choices: []
aliases: []
user:
description:
- PagerDuty user ID.
required: true
default: null
choices: []
aliases: []
passwd:
description:
- PagerDuty user password.
required: true
default: null
choices: []
aliases: []
token:
description:
- A pagerduty token, generated on the pagerduty site. Can be used instead of
user/passwd combination.
required: true
default: null
choices: []
aliases: []
version_added: '1.8'
requester_id:
description:
- ID of user making the request. Only needed when using a token and creating a maintenance_window.
required: true
default: null
choices: []
aliases: []
version_added: '1.8'
service:
description:
- A comma separated list of PagerDuty service IDs.
required: false
default: null
choices: []
aliases: [ services ]
hours:
description:
- Length of maintenance window in hours.
required: false
default: 1
choices: []
aliases: []
minutes:
description:
- Maintenance window in minutes (this is added to the hours).
required: false
default: 0
choices: []
aliases: []
version_added: '1.8'
desc:
description:
- Short description of maintenance window.
required: false
default: Created by Ansible
choices: []
aliases: []
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
'''
EXAMPLES='''
# List ongoing maintenance windows using a user/passwd
- pagerduty:
name: companyabc
user: [email protected]
passwd: password123
state: ongoing
# List ongoing maintenance windows using a token
- pagerduty:
name: companyabc
token: xxxxxxxxxxxxxx
state: ongoing
# Create a 1 hour maintenance window for service FOO123, using a user/passwd
- pagerduty:
name: companyabc
user: [email protected]
passwd: password123
state: running
service: FOO123
# Create a 5 minute maintenance window for service FOO123, using a token
- pagerduty:
name: companyabc
token: xxxxxxxxxxxxxx
hours: 0
minutes: 5
state: running
service: FOO123
# Create a 4 hour maintenance window for service FOO123 with the description "deployment".
- pagerduty:
name: companyabc
user: [email protected]
passwd: password123
state: running
service: FOO123
hours: 4
desc: deployment
register: pd_window
# Delete the previous maintenance window
- pagerduty:
name: companyabc
user: [email protected]
passwd: password123
state: absent
service: '{{ pd_window.result.maintenance_window.id }}'
'''
import datetime
import base64
def auth_header(user, passwd, token):
if token:
return "Token token=%s" % token
auth = base64.encodestring('%s:%s' % (user, passwd)).replace('\n', '')
return "Basic %s" % auth
def ongoing(module, name, user, passwd, token):
url = "https://" + name + ".pagerduty.com/api/v1/maintenance_windows/ongoing"
headers = {"Authorization": auth_header(user, passwd, token)}
response, info = fetch_url(module, url, headers=headers)
if info['status'] != 200:
module.fail_json(msg="failed to lookup the ongoing window: %s" % info['msg'])
try:
json_out = json.loads(response.read())
except:
json_out = ""
return False, json_out, False
def create(module, name, user, passwd, token, requester_id, service, hours, minutes, desc):
now = datetime.datetime.utcnow()
later = now + datetime.timedelta(hours=int(hours), minutes=int(minutes))
start = now.strftime("%Y-%m-%dT%H:%M:%SZ")
end = later.strftime("%Y-%m-%dT%H:%M:%SZ")
url = "https://" + name + ".pagerduty.com/api/v1/maintenance_windows"
headers = {
'Authorization': auth_header(user, passwd, token),
'Content-Type' : 'application/json',
}
request_data = {'maintenance_window': {'start_time': start, 'end_time': end, 'description': desc, 'service_ids': service}}
if requester_id:
request_data['requester_id'] = requester_id
else:
if token:
module.fail_json(msg="requester_id is required when using a token")
data = json.dumps(request_data)
response, info = fetch_url(module, url, data=data, headers=headers, method='POST')
if info['status'] != 201:
module.fail_json(msg="failed to create the window: %s" % info['msg'])
try:
json_out = json.loads(response.read())
except:
json_out = ""
return False, json_out, True
def absent(module, name, user, passwd, token, requester_id, service):
url = "https://" + name + ".pagerduty.com/api/v1/maintenance_windows/" + service[0]
headers = {
'Authorization': auth_header(user, passwd, token),
'Content-Type' : 'application/json',
}
request_data = {}
if requester_id:
request_data['requester_id'] = requester_id
else:
if token:
module.fail_json(msg="requester_id is required when using a token")
data = json.dumps(request_data)
response, info = fetch_url(module, url, data=data, headers=headers, method='DELETE')
if info['status'] != 204:
module.fail_json(msg="failed to delete the window: %s" % info['msg'])
try:
json_out = json.loads(response.read())
except:
json_out = ""
return False, json_out, True
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(required=True, choices=['running', 'started', 'ongoing', 'absent']),
name=dict(required=True),
user=dict(required=False),
passwd=dict(required=False, no_log=True),
token=dict(required=False, no_log=True),
service=dict(required=False, type='list', aliases=["services"]),
requester_id=dict(required=False),
hours=dict(default='1', required=False),
minutes=dict(default='0', required=False),
desc=dict(default='Created by Ansible', required=False),
validate_certs = dict(default='yes', type='bool'),
)
)
state = module.params['state']
name = module.params['name']
user = module.params['user']
passwd = module.params['passwd']
token = module.params['token']
service = module.params['service']
hours = module.params['hours']
minutes = module.params['minutes']
desc = module.params['desc']
requester_id = module.params['requester_id']
if not token and not (user or passwd):
module.fail_json(msg="neither user and passwd nor token specified")
if state == "running" or state == "started":
if not service:
module.fail_json(msg="service not specified")
(rc, out, changed) = create(module, name, user, passwd, token, requester_id, service, hours, minutes, desc)
if rc == 0:
changed=True
if state == "ongoing":
(rc, out, changed) = ongoing(module, name, user, passwd, token)
if state == "absent":
(rc, out, changed) = absent(module, name, user, passwd, token, requester_id, service)
if rc != 0:
module.fail_json(msg="failed", result=out)
module.exit_json(msg="success", result=out, changed=changed)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 | 3,604,723,450,411,811,000 | 29.155063 | 126 | 0.615804 | false |
threemeninaboat3247/raipy | raipy/Examples/GPIB_query.py | 1 | 1639 | # -*- coding: utf-8 -*-
import PyQt5.QtCore
from PyQt5.QtGui import QColor
import time
import numpy as np
import raipy.UserClassBase as UserClassBase
from datetime import datetime
class programThread(UserClassBase.ThreadBase):
def run(self):
###The main body of your program
###example: Call self.lcdSignal.emit({'Temperature':1,'Voltage':2}) if you want to display 1 on 'Temperature' display and 2 on 'Voltage' display.
pass
class Output(UserClassBase.OutputBase):
###Write the labels of your measured values with dimensions
#format:[[label],[unit]] example:graph_outputs=[['Temperature','K'],['Voltage','V']]
outputs=[]
###Write the settings you want to show up in Graphs in advance
#format:[[x axis label,[y axis label 1,y axis label 2],[color 1,color 2]],[settings for the second graph]] example:[['Time',['Voltage','Current'],[QColor(255,255,0),QColor(0,255,255)]]]
graph_settings=[]
class Control(UserClassBase.ControlBase):
###Write your control parameters with initial values
bools=[['Halt',False]] #format:bools=[['label',bool]] example:bools=[['flag_A',True],['flag_B',False]]
    sliders=[['slider',0,100,0]] #format:sliders=[['label',minimum(int),maximum(int),initial value(int)]] example:sliders=[['slider_A',0,10,5],['slider_B',0,200,0]]
    dials=[['dial',0,100,50]] #same format as sliders
floats=[['test',0]]
# floats=[['Kei_2400',24],['Kei_2182',8],['Kei_2000',4],['LakeShore',14],['time interval(s)',0.05]] #format:floats=[['label',initial value(float)]] example:floats=[['param_PI',3.14159265],[param_E,2.71828]] | mit | 4,396,178,931,229,026,300 | 46.352941 | 211 | 0.668738 | false |
vhernandez/jwsProcessor | src/jwsprocessor/selection_page.py | 1 | 13774 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import pygtk
pygtk.require("2.0")
import gtk
import gobject
import os, sys
from tools import SpectraDB, _
from smoothing_selection import ShowSpectrumPanel
from jws_filechooser import JwsFileChooserDialog
class SelectionPage(gtk.HPaned):
def __init__(self, assistant, current_folder=os.getcwd()):
self.assistant = assistant
self.removing_multi = False
self.create_widgets()
        # The SpectraDB object ensures that there are no duplicates in the
        # spectra list. It also stores the header of every listed spectrum.
        # IMPORTANT NOTE: the spectra view wraps spectra_list (the ListStore
        # managed by spectra_db) in a TreeModelSort. Therefore, any path or
        # iter must be converted from the TreeModelSort to the ListStore
        # before being passed to spectra_db.
self.spectra_db = SpectraDB(self.spectra_list)
self.current_folder = current_folder
def create_widgets(self):
gtk.HPaned.__init__(self)
self.create_spectralist()
self.sl_add_action = gtk.Action("sl_add",
None,
_("Add spectra"),
gtk.STOCK_ADD)
self.sl_add_action.connect("activate", self.sl_add_button_cb)
self.sl_remove_action = gtk.Action("sl_remove",
None,
_("Eliminate selected spectra"),
gtk.STOCK_REMOVE)
self.sl_remove_action.connect("activate", self.sl_remove_button_cb)
self.sl_remove_action.set_sensitive(False)
self.dil_paste_action = gtk.Action( "dil_paste",
None,
_("Paste dilution data"),
gtk.STOCK_PASTE)
self.dil_paste_action.connect("activate", self._request_targets)
self.dil_paste_action.set_sensitive(False)
self.dil_reset_action = gtk.Action("dil_reset",
None,
_("Erase dilution data"),
gtk.STOCK_CLEAR)
self.dil_reset_action.connect("activate", self.reset_dilution_button_cb)
self.show_preview_action = gtk.ToggleAction("show_preview_action",
_("Preview"),
_("Show a plot of the selected spectrum"),
gtk.STOCK_PRINT_PREVIEW)
self.show_preview_action.connect("toggled", self._show_preview_toggle_cb)
dil_menu = gtk.Menu()
dil_menu.insert(self.dil_paste_action.create_menu_item(),-1)
dil_menu.insert(self.dil_reset_action.create_menu_item(),-1)
dil_toolmenu = gtk.MenuToolButton( \
icon_widget = gtk.image_new_from_stock(gtk.STOCK_PASTE,
gtk.ICON_SIZE_SMALL_TOOLBAR),
label=_("Dilution data"))
dil_toolmenu.set_menu(dil_menu)
dil_toolmenu.connect("clicked", self.dilution_menutoolbutton_cb)
dil_toolmenu.set_tooltip(gtk.Tooltips(), _("Paste dilution factors"))
dil_toolmenu.set_is_important(True)
toolbar = gtk.Toolbar()
toolbar.insert(self.sl_add_action.create_tool_item(),-1)
toolbar.insert(self.sl_remove_action.create_tool_item(),-1)
toolbar.insert(gtk.SeparatorToolItem(), -1)
toolbar.insert(dil_toolmenu,-1)
toolbar.insert(gtk.SeparatorToolItem(), -1)
toolbar.insert(self.show_preview_action.create_tool_item(), -1)
toolbar.set_style(gtk.TOOLBAR_BOTH_HORIZ)
tv_vbox = gtk.VBox()
tv_vbox.pack_start(toolbar, False, False, 4)
tv_vbox.pack_start(self.spectra_scroll, True, True, 0)
tv_frame = gtk.Frame(_("Spectra to be processed:"))
tv_frame.add(tv_vbox)
self.spectrum_panel = ShowSpectrumPanel()
self.pack1(tv_frame, True, True)
self.clipboard = gtk.clipboard_get(selection="CLIPBOARD")
def _show_preview_toggle_cb(self, widget,data=None):
if self.show_preview_action.props.active:
self._spectra_sel_changed_cb(self.spectra_sel)
else:
if self.spectrum_panel.props.visible:
self.spectrum_panel.hide()
def dilution_menutoolbutton_cb(self, widget, data=None):
if self.dil_paste_action.props.sensitive:
self._request_targets(widget)
def reset_dilution_button_cb(self, widget, data=None):
_iter = self.spectra_list.get_iter_first()
while _iter is not None:
self.spectra_list.set_value(_iter, 7, 1.0)
_iter = self.spectra_list.iter_next(_iter)
def _request_targets(self, widget, data=None):
self.clipboard.request_text(self._request_text_cb)
return True
def _request_text_cb(self, clipboard, text, user_data = None):
if text == "": return
        # First break the data into a list. Note: we must check how the data
        # is separated: usually by "\n", but sometimes by \r or by \n\r.
if "\n\r" in text:
datalist = text.split("\n\r")
elif "\n" in text:
datalist = text.split("\n")
elif "\r" in text:
datalist = text.split("\r")
else:
datalist = [text]
        # Check that the data to be pasted is valid (contains a float)
nlist = []
for elem in datalist:
if "\t" in elem:
elem_s = elem.split("\t")[0]
else:
elem_s = elem
try:
n = float(elem_s)
except:
pass
else:
nlist.append(n)
        # Put the data into the corresponding column, starting at the
        # selected row:
if len(nlist)>0:
(model, piter_list) = self.spectra_sel.get_selected_rows()
start_path = min(piter_list)
_iter = model.get_iter(start_path)
while (_iter is not None) and (len(nlist) > 0):
value = nlist.pop(0)
model.set_value(_iter, 7, value)
_iter = model.iter_next(_iter)
def create_spectralist(self):
def new_column(name, data_attr, column_id, editable=False):
col = gtk.TreeViewColumn(name)
col.set_resizable(True)
col.set_clickable(True)
self.spectra_view.append_column(col)
            # identifies each column when sorting:
col.set_sort_column_id(column_id)
cell = gtk.CellRendererText()
cell.set_property("editable", editable)
col.pack_start(cell, True)
col.add_attribute(cell, data_attr, column_id)
return col, cell
        # [0=name (str), 1=point count (int), 2=initial lambda (float),
        #  3=final lambda (float), 4=data pitch (float), 5=path (str),
        #  6=full name (str), 7=dilution (float)]
self.spectra_list = gtk.ListStore(str, int, float, float, float, str, str, float)
self.spectra_view = gtk.TreeView(self.spectra_list)
(col1, cell1) = new_column(_("Name"), "text", 0)
(dilution_column, dilution_cell) = new_column(_("Dilution"), "text", 7,
editable=True)
(col2, cell2) = new_column(_("Point no."), "text", 1)
(col3, cell3) = new_column(_("Initial λ"), "text", 2)
(col4, cell4) = new_column(_("Final λ"), "text", 3)
(col5, cell5) = new_column(_("Data pitch"), "text", 4)
(col6, cell6) = new_column(_("File path"), "text", 5)
dilution_cell.connect("edited", self._dilution_edited_cb)
self.spectra_view.props.reorderable = True
self.spectra_view.set_border_width(5)
self.spectra_sel = self.spectra_view.get_selection()
self.spectra_sel.set_mode(gtk.SELECTION_MULTIPLE)
self.spectra_sel.connect("changed", self._spectra_sel_changed_cb)
self.spectra_scroll = gtk.ScrolledWindow()
self.spectra_scroll.set_policy( gtk.POLICY_AUTOMATIC,
gtk.POLICY_AUTOMATIC )
self.spectra_scroll.add(self.spectra_view)
self.spectra_list.connect("row-inserted", self._sl_rowinserted_cb)
self.spectra_list.connect("row-deleted", self._sl_rowdeleted_cb)
def _spectra_sel_changed_cb(self, widget, data=None):
(model, piter_list) = widget.get_selected_rows()
        # enable all widgets that depend on something being selected:
something_selected = len(piter_list) > 0
self.sl_remove_action.set_sensitive(something_selected)
self.dil_paste_action.set_sensitive(something_selected)
if self.show_preview_action.props.active:
if something_selected and (not self.removing_multi):
fn = model.get_value(model.get_iter(piter_list[0]), 6).decode('utf-8')
if fn in self.spectra_db:
self.spectrum_panel.show_spectrum_file(fn)
if not self.get_child2():
self.pack2(self.spectrum_panel, True, True)
self.spectrum_panel.show()
else:
                    raise Exception, \
                        'SelectionPage._spectra_sel_changed_cb(): value ' \
                        'from treeview list not in spectra database'
else:
self.spectrum_panel.hide()
def _blank_entry_changed_cb(self, entry, data=None):
self.__check_model_empty(self.spectra_list)
def _sl_rowinserted_cb(self, model, path, iter, data=None):
self.__check_model_empty(model)
def _sl_rowdeleted_cb(self, model, path, data=None):
self.__check_model_empty(model)
def __check_model_empty(self, model):
model_empty = model.get_iter_first() is None
#self.set_response_sensitive(gtk.RESPONSE_ACCEPT, not model_empty)
self.assistant.set_page_complete(self, not model_empty)
def _dilution_edited_cb(self, cell, path, new_text, data=None):
try:
new_value = float(new_text)
except:
new_value = None
if new_value != None:
_iter = self.spectra_list.get_iter(path)
self.spectra_list.set_value(_iter, 7, new_value)
def sl_add_button_cb(self, button, user_data=None):
fs_dialog = JwsFileChooserDialog( parent= self.assistant,
current_folder = self.current_folder,
title=_("Open spectra..."))
response = fs_dialog.run()
if response == gtk.RESPONSE_OK:
            # On Windows the file names must be re-encoded!
fns = [fn.decode('utf-8') for fn in fs_dialog.get_filenames()]
else:
fns = None
if fns:
self.spectra_db.append_files(fns)
self.current_folder = fs_dialog.get_current_folder().decode('utf-8')
fs_dialog.destroy()
def sl_remove_button_cb(self, button, user_data=None):
(model, path_list) = self.spectra_sel.get_selected_rows()
if len(path_list)>1:
self.removing_multi = True
self.spectra_db.remove_pathlist(path_list)
self.removing_multi = False
def _namecells_datafunc(self, column, cell, model, iter, user_data=None):
path = model.get_value(iter, 0).decode('utf-8')
if path:
# head = (path); tail = (nombre de archivo)
(head, tail) = os.path.split(path)
cell.set_property('text', tail)
def _pathcells_datafunc(self, column, cell, model, iter, user_data=None):
path = model.get_value(iter, 0).decode('utf-8')
if path:
(head, tail) = os.path.split(path)
cell.set_property('text', head)
def get_ordered_data(self):
        ''' Return a list of (filename, dilution) tuples for the spectra in
        the spectra list, in display order.
        '''
_iter = self.spectra_list.get_iter_first()
result = []
while _iter is not None:
fn = self.spectra_list.get_value(_iter, 6).decode('utf-8')
dilution = self.spectra_list.get_value(_iter, 7)
result.append((fn, dilution))
_iter = self.spectra_list.iter_next(_iter)
return result
def get_ordered_list(self):
        ''' Return the file names of the spectra in the spectra list, in
        display order.
        '''
_iter = self.spectra_list.get_iter_first()
result = []
while _iter is not None:
result.append(self.spectra_list.get_value(_iter, 6).decode('utf-8'))
_iter = self.spectra_list.iter_next(_iter)
return result
def get_selected_spectra(self):
(model, path_list) = self.spectra_sel.get_selected_rows()
selected_spectra = []
for path in path_list:
selected_spectra.append(model.get_value(model.get_iter(path),6).decode('utf-8'))
return selected_spectra
def reset(self):
self.spectra_list.clear()
del self.spectra_db
self.spectra_db = SpectraDB(self.spectra_list)
if __name__=="__main__":
ssd = SelectionDialog()
ssd.run()
| gpl-2.0 | -401,158,781,915,181,600 | 42.968051 | 92 | 0.556678 | false |
wxiang7/airflow | airflow/example_dags/example_trigger_target_dag.py | 6 | 1702 | from airflow.operators import BashOperator, PythonOperator
from airflow.models import DAG
from datetime import datetime
import pprint
pp = pprint.PrettyPrinter(indent=4)
# This example illustrates the use of the TriggerDagRunOperator. There are 2
# entities at work in this scenario:
# 1. The Controller DAG - the DAG that conditionally executes the trigger
# (in example_trigger_controller.py)
# 2. The Target DAG - DAG being triggered
#
# This example illustrates the following features :
# 1. A TriggerDagRunOperator that takes:
# a. A python callable that decides whether or not to trigger the Target DAG
# b. An optional params dict passed to the python callable to help in
# evaluating whether or not to trigger the Target DAG
# c. The id (name) of the Target DAG
# d. The python callable can add contextual info to the DagRun created by
# way of adding a Pickleable payload (e.g. dictionary of primitives). This
# state is then made available to the TargetDag
# 2. A Target DAG : c.f. example_trigger_target_dag.py
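# For reference, the Controller DAG side (not in this file) conditionally
# triggers this DAG via a python callable roughly like the sketch below;
# the names here are illustrative, and the payload keys must match what
# run_this_func and bash_task read from dag_run.conf:
#
#   def conditionally_trigger(context, dag_run_obj):
#       if context['params']['condition_param']:
#           dag_run_obj.payload = {'message': context['params']['message']}
#           return dag_run_obj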
args = {
'start_date': datetime.now(),
'owner': 'airflow',
}
dag = DAG(
dag_id='example_trigger_target_dag',
default_args=args,
schedule_interval=None)
def run_this_func(ds, **kwargs):
print("Remotely received value of {} for key=message".format(kwargs['dag_run'].conf['message']))
run_this = PythonOperator(
task_id='run_this',
provide_context=True,
python_callable=run_this_func,
dag=dag)
# You can also access the DagRun object in templates
bash_task = BashOperator(
task_id="bash_task",
bash_command='echo "Here is the message: {{ dag_run.conf["message"] if dag_run else "" }}" ',
dag=dag)
| apache-2.0 | 2,461,589,530,892,142,000 | 33.734694 | 100 | 0.716216 | false |
kieselai/bashForEach.py | commandBuilder.py | 1 | 4749 | # Written by: Anthony Kiesel
# URL: https://github.com/kieselai/bashForEach.py
import re
# Local imports
from simpleLogger import SimpleLogger
from formatter import Formatter
class CommandBuilder:
@staticmethod
def InsertCommandArguments(commandStr, params):
""" Builds commands using a template string and a list of arguments
Parameters:
commandStr (string): A template for the command to be created
            params (list(string, string...)): positional parameters; the nth
                parameter replaces the "{n}" placeholder in the template.
EX:
commandStr="openssl dgst -{1} {0}"
params=['testFile.txt','sha256']
final_command="openssl dgst -sha256 testFile.txt"
"""
SimpleLogger.outputVerbose(
["Command template is: ", commandStr],
["Arguments are: ", Formatter.ListAsString(params, True)]
)
# For each parameter index, replace all "{i}" placeholders
# in the commandString with the parameter value
for i in range(0, len(params)):
commandStr = commandStr.replace("{" + str(i) + "}", params[i])
# Replace any parameter values that were not provided with an empty string
commandStr = re.sub(r'\{[0-9]+\}', '', commandStr)
SimpleLogger.outputVerbose(["Final command is: ", commandStr])
return commandStr
@staticmethod
def InsertCommandArgumentsForEach(commandStr, parameterLists):
""" For each argument list in parameterLists, build a command string using those options.
Parameters:
commandStr (string): Template for the command to be executed
parameterLists (list(list(string, string...))): Each list contains the parameters to
replace the template placeholders in the command and represents the options of a distinct command.
"""
cmds = []
for idx in range(0, len(parameterLists)):
# The nth command
SimpleLogger.outputVerbose(["\n", "Command number: ", str(idx)])
# Append the nth command to the return array
cmds.append(CommandBuilder.InsertCommandArguments(commandStr, parameterLists[idx]))
return cmds
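    # Sketch (assumed inputs): InsertCommandArgumentsForEach("echo {0}",
    # [["a"], ["b"]]) builds one command per parameter list, yielding
    # ["echo a", "echo b"].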
@staticmethod
def CreateCommandsFromDistinctArgumentIndices(commandStr, argLists):
""" Create n commands from argLists, pairing command parameters by sub-list index.
The ith list coorelates to the ith parameter
The nth option in each list is used to build the nth command.
Parameters:
commandStr (string): Template for the command to be created
argLists (list(list(string, string...))): contains options for each of the parameters to be used
"""
if isinstance(argLists, list) and len(argLists) > 0:
pairedArgs = [[lst[i] for lst in argLists] for i in range(0, len(argLists[0]))]
SimpleLogger.outputVerbose(["Index Paired Arguments: ", Formatter.ListAsString(pairedArgs, True)])
return CommandBuilder.InsertCommandArgumentsForEach(commandStr, pairedArgs)
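    # Sketch (assumed inputs): with commandStr = "cp {0} {1}" and
    # argLists = [["a.txt", "b.txt"], ["dst1", "dst2"]], the ith sub-list
    # feeds placeholder {i}, and pairing by index yields
    # ["cp a.txt dst1", "cp b.txt dst2"].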
@staticmethod
def GetAllArgumentCombinations(argLists):
""" Takes several lists with at least one element and returns a list containing
possible combinations between all of the lists.
The ith list can be used only in the ith position of all possible combinations
Parameters:
argLists (list(list(string, string, ...))):
Each sub-list (the ith list) contains the possible options to be used in the ith place of a combination list.
"""
if isinstance(argLists, list) and len(argLists) > 0:
combinations = [[c] for c in argLists[0]]
for lst in argLists[1:]:
combinations = [existingCombination + [option] for existingCombination in combinations for option in lst]
SimpleLogger.outputVerbose(["Combinations: ", Formatter.ListAsString(combinations, True)])
return combinations
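    # Sketch: GetAllArgumentCombinations([["a", "b"], ["1", "2"]]) returns
    # [["a", "1"], ["a", "2"], ["b", "1"], ["b", "2"]] -- the ith input list
    # only ever occupies the ith position of each combination.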
@staticmethod
def CreateCommandsFromAllArgumentCombinations(commandStr, argLists):
""" Create commands representing every combination of the supplied options of sub-lists in argLists
Parameters:
commandStr (string): Template for the command to be created
argLists (list(list(string, string...))): contains options for each of the parameters to be used
"""
if not isinstance(argLists, list): return
comboArgs = CommandBuilder.GetAllArgumentCombinations(argLists)
return CommandBuilder.InsertCommandArgumentsForEach(commandStr, comboArgs)
| apache-2.0 | 50,734,568,756,427,970 | 52.359551 | 125 | 0.657191 | false |
marcuschia/ShaniXBMCWork | other/livestreamchanges/myFunctions.py | 12 | 1537 | import traceback,sys
def addme(page_data,a,b):
return a+b
def call_site(Cookie_Jar,url_to_call):
try:
import urllib2
import base64
import uuid
req = urllib2.Request(url_to_call)
str_guid=str(uuid.uuid1()).upper()
str_guid=base64.b64encode(str_guid)
req.add_header('Connection', 'Upgrade')
req.add_header('Upgrade', 'websocket')
req.add_header('Sec-WebSocket-Key', str_guid)
req.add_header('Origin','http://www.streamafrik.com')
req.add_header('Pragma','no-cache')
req.add_header('Cache-Control','no-cache')
req.add_header('Sec-WebSocket-Version', '13')
req.add_header('Sec-WebSocket-Extensions', 'permessage-deflate; client_max_window_bits, x-webkit-deflate-frame')
req.add_header('User-Agent','Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53')
cookie_handler = urllib2.HTTPCookieProcessor(Cookie_Jar)
opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
opener = urllib2.install_opener(opener)
from keepalive import HTTPHandler
keepalive_handler = HTTPHandler()
opener = urllib2.build_opener(keepalive_handler)
urllib2.install_opener(opener)
        # keep a handle on the response so the connection can be released
        response = urllib2.urlopen(req)
        response.close()
return ''
except: traceback.print_exc(file=sys.stdout)
return '' | gpl-2.0 | -5,156,817,942,796,603,000 | 41.971429 | 177 | 0.641509 | false |
bigzz/autotest | server/hosts/base_classes.py | 6 | 2668 | #
# Copyright 2007 Google Inc. Released under the GPL v2
"""
This module defines the base classes for the server Host hierarchy.
Implementation details:
You should import the "hosts" package instead of importing each type of host.
Host: a machine on which you can run programs
RemoteHost: a remote machine on which you can run programs
"""
__author__ = """
[email protected] (Martin J. Bligh),
[email protected] (Benjamin Poirier),
[email protected] (Ryan Stutsman)
"""
import os
from autotest.client.shared import hosts
from autotest.server import utils
from autotest.server.hosts import bootloader
class Host(hosts.Host):
"""
This class represents a machine on which you can run programs.
It may be a local machine, the one autoserv is running on, a remote
machine or a virtual machine.
Implementation details:
This is an abstract class, leaf subclasses must implement the methods
listed here. You must not instantiate this class but should
instantiate one of those leaf subclasses.
When overriding methods that raise NotImplementedError, the leaf class
is fully responsible for the implementation and should not chain calls
to super. When overriding methods that are a NOP in Host, the subclass
should chain calls to super(). The criteria for fitting a new method into
one category or the other should be:
1. If two separate generic implementations could reasonably be
concatenated, then the abstract implementation should pass and
subclasses should chain calls to super.
2. If only one class could reasonably perform the stated function
(e.g. two separate run() implementations cannot both be executed)
then the method should raise NotImplementedError in Host, and
the implementor should NOT chain calls to super, to ensure that
only one implementation ever gets executed.
"""
bootloader = None
def __init__(self, *args, **dargs):
super(Host, self).__init__(*args, **dargs)
self.start_loggers()
if self.job:
self.job.hosts.add(self)
def _initialize(self, target_file_owner=None,
*args, **dargs):
super(Host, self)._initialize(*args, **dargs)
self.serverdir = utils.get_server_dir()
self.monitordir = os.path.join(os.path.dirname(__file__), "monitors")
self.bootloader = bootloader.Bootloader(self)
self.env = {}
self.target_file_owner = target_file_owner
def close(self):
super(Host, self).close()
if self.job:
self.job.hosts.discard(self)
| gpl-2.0 | -4,457,147,430,933,432,300 | 33.205128 | 77 | 0.685532 | false |
dakcarto/QGIS | tests/src/python/test_syntactic_sugar.py | 7 | 2532 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for some syntactic sugar in python
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Matthias Kuhn'
__date__ = '12.8.2015'
__copyright__ = 'Copyright 2015, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis
from utilities import (unittest,
TestCase,
getQgisTestApp
)
from qgis.core import (edit,
QgsFeature,
QgsGeometry,
QgsVectorLayer,
QgsEditError
)
getQgisTestApp()
class TestSyntacticSugar(TestCase):
def testEdit(self):
"""Test `with edit(layer):` code"""
ml = QgsVectorLayer("Point?crs=epsg:4236&field=id:integer&field=value:double",
"test_data", "memory")
# Data as list of x, y, id, value
assert ml.isValid()
fields = ml.fields()
# Check insert
with edit(ml):
feat = QgsFeature(fields)
feat['id'] = 1
feat['value'] = 0.9
assert ml.addFeature(feat)
assert ml.dataProvider().getFeatures().next()['value'] == 0.9
# Check update
with edit(ml):
f = ml.getFeatures().next()
f['value'] = 9.9
assert ml.updateFeature(f)
assert ml.dataProvider().getFeatures().next()['value'] == 9.9
# Check for rollBack after exceptions
with self.assertRaises(NameError):
with edit(ml):
f = ml.getFeatures().next()
f['value'] = 3.8
crashycrash()
assert ml.dataProvider().getFeatures().next()['value'] == 9.9
assert ml.getFeatures().next()['value'] == 9.9
# Check for `as`
with edit(ml) as l:
f = l.getFeatures().next()
f['value'] = 10
assert l.updateFeature(f)
assert ml.dataProvider().getFeatures().next()['value'] == 10
# Check that we get a QgsEditError exception when the commit fails
with self.assertRaises(QgsEditError):
with edit(ml) as l:
l.rollBack()
if __name__ == "__main__":
unittest.main()
| gpl-2.0 | 9,139,700,097,430,051,000 | 29.506024 | 86 | 0.540284 | false |
michaupl/materialsapp | materials/migrations/0002_set_type_on_detail.py | 1 | 5253 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
from .. import DETAIL_TYPE
for detail in orm.MaterialDetail.objects.all():
detail.type = DETAIL_TYPE
detail.save()
def backwards(self, orm):
"Write your backwards methods here."
pass
models = {
u'core.category': {
'Meta': {'object_name': 'Category'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['core.Image']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'core.detail': {
'Meta': {'object_name': 'Detail'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'caption': ('django.db.models.fields.CharField', [], {'max_length': '2000'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'facts': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Subcategory']"}),
'title_image': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['core.Image']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
'video_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'core.image': {
'Meta': {'object_name': 'Image'},
'alt_text': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'figcaption': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '255'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'core.subcategory': {
'Meta': {'object_name': 'Subcategory'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'caption': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Category']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'})
},
u'materials.material': {
'Meta': {'object_name': 'Material', '_ormbases': [u'core.Category']},
u'category_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['core.Category']", 'unique': 'True', 'primary_key': 'True'})
},
u'materials.materialdetail': {
'Meta': {'object_name': 'MaterialDetail', '_ormbases': [u'core.Detail']},
u'detail_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['core.Detail']", 'unique': 'True', 'primary_key': 'True'})
},
u'materials.materialsubcategory': {
'Meta': {'object_name': 'MaterialSubcategory', '_ormbases': [u'core.Subcategory']},
u'subcategory_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['core.Subcategory']", 'unique': 'True', 'primary_key': 'True'})
}
}
complete_apps = ['materials']
symmetrical = True
| apache-2.0 | 2,335,361,993,696,155,600 | 64.6625 | 194 | 0.562155 | false |
zerc/nimply | comments/views.py | 1 | 3254 | # coding: utf-8
from datetime import datetime
from collections import OrderedDict
from flask import request, Blueprint
from flask_restful import Resource, Api
from app import app, mongo
comments_app = Blueprint('comments', __name__)
api = Api(comments_app)
class CommentsWrapper(object):
def __init__(self, db, *args, **kwargs):
self.db = db
self.collection = db.comments
def fetch(self, uuid):
""" Returns all comments for selected file.
"""
return map(self.post_read, self.collection.find({'uuid': uuid}))
def post_read(self, c):
c.pop('_id')
c['date'] = c['date'].strftime('%d-%m-%Y %H:%M')
return c
def add(self, uuid, data):
comment = self.pre_save(data)
comment['uuid'] = uuid
self.collection.insert(comment)
return self.post_read(comment)
def pre_save(self, data):
# TODO: add schema validation
default = {'date': datetime.now()}
default['line'] = [data['line']]
default['message'] = data['message']
default['author'] = data['author']
return default
def get_for_line(self, uuid, line):
return self.grouped_by_line(uuid)(str(line))
def grouped_by_line(self, uuid):
        # Python name-mangles '__grouped_by_line_cache', so a plain getattr
        # with the unmangled string can never find it; use the mangled name.
        if getattr(self, '_CommentsWrapper__grouped_by_line_cache', None) is None:
self.__grouped_by_line_cache = {}
if uuid not in self.__grouped_by_line_cache:
self.__grouped_by_line_cache[uuid] = {}
comments = self.fetch(uuid)
for c in comments:
for l in c['line']:
self.__grouped_by_line_cache[uuid].setdefault(
l, []).append(c)
def _get(line):
comments = self.__grouped_by_line_cache[uuid].setdefault(line, [])
return sorted(comments, key=lambda c: c['date'], reverse=True)
return _get
def get_grouped_counts(self, uuids, limit=20):
""" Aggregate comments counts for selected `uuids`.
"""
pipeline = [
{'$match': {'uuid': {'$in': uuids}}},
{'$group': {'_id': '$uuid', 'count': {'$sum': 1}}},
{'$sort': {'count': -1}},
{'$limit': limit},
]
result = self.collection.aggregate(pipeline)
result = OrderedDict((r['_id'], r['count']) for r in
result.get('result', []))
for u in uuids:
result.setdefault(u, 0)
return result.items()[:limit]
class CommentsResource(Resource):
def get(self, uuid):
""" Fetch comments for file.
"""
wrapper = CommentsWrapper(mongo.db)
return wrapper.fetch(uuid)
def post(self, uuid):
"""
Add comment to selected to `filename`.
Expected POST params:
:line: souce line (str)
:author: author's name (str)
:message: message (str)
"""
wrapper = CommentsWrapper(mongo.db)
comment = wrapper.add(uuid, {
'line': request.form['line'],
'author': request.form['author'],
'message': request.form['message']
})
return comment
api.add_resource(CommentsResource, '/api/comments/<string:uuid>')
| gpl-2.0 | -4,950,754,413,283,852,000 | 28.853211 | 78 | 0.544868 | false |
orlenko/bccf | src/mezzanine/pages/tests.py | 3 | 8807 |
from django.db import connection
from django.template import Context, Template
from mezzanine.conf import settings
from mezzanine.core.models import CONTENT_STATUS_PUBLISHED
from mezzanine.core.request import current_request
from mezzanine.pages.models import Page, RichTextPage
from mezzanine.urls import PAGES_SLUG
from mezzanine.utils.tests import TestCase
class PagesTests(TestCase):
def test_page_ascendants(self):
"""
        Test that the methods for looking up ascendants efficiently
        behave as expected.
"""
# Create related pages.
primary, created = RichTextPage.objects.get_or_create(title="Primary")
secondary, created = primary.children.get_or_create(title="Secondary")
tertiary, created = secondary.children.get_or_create(title="Tertiary")
# Force a site ID to avoid the site query when measuring queries.
setattr(current_request(), "site_id", settings.SITE_ID)
# Test that get_ascendants() returns the right thing.
page = Page.objects.get(id=tertiary.id)
ascendants = page.get_ascendants()
self.assertEqual(ascendants[0].id, secondary.id)
self.assertEqual(ascendants[1].id, primary.id)
# Test ascendants are returned in order for slug, using
# a single DB query.
connection.queries = []
pages_for_slug = Page.objects.with_ascendants_for_slug(tertiary.slug)
self.assertEqual(len(connection.queries), 1)
self.assertEqual(pages_for_slug[0].id, tertiary.id)
self.assertEqual(pages_for_slug[1].id, secondary.id)
self.assertEqual(pages_for_slug[2].id, primary.id)
# Test page.get_ascendants uses the cached attribute,
# without any more queries.
connection.queries = []
ascendants = pages_for_slug[0].get_ascendants()
self.assertEqual(len(connection.queries), 0)
self.assertEqual(ascendants[0].id, secondary.id)
self.assertEqual(ascendants[1].id, primary.id)
# Use a custom slug in the page path, and test that
# Page.objects.with_ascendants_for_slug fails, but
# correctly falls back to recursive queries.
secondary.slug += "custom"
secondary.save()
pages_for_slug = Page.objects.with_ascendants_for_slug(tertiary.slug)
self.assertEquals(len(pages_for_slug[0]._ascendants), 0)
connection.queries = []
ascendants = pages_for_slug[0].get_ascendants()
self.assertEqual(len(connection.queries), 2) # 2 parent queries
self.assertEqual(pages_for_slug[0].id, tertiary.id)
self.assertEqual(ascendants[0].id, secondary.id)
self.assertEqual(ascendants[1].id, primary.id)
def test_set_parent(self):
old_parent, _ = RichTextPage.objects.get_or_create(title="Old parent")
new_parent, _ = RichTextPage.objects.get_or_create(title="New parent")
child, _ = RichTextPage.objects.get_or_create(title="Child",
slug="kid")
self.assertTrue(child.parent is None)
self.assertTrue(child.slug == "kid")
child.set_parent(old_parent)
child.save()
self.assertEqual(child.parent_id, old_parent.id)
self.assertTrue(child.slug == "old-parent/kid")
child = RichTextPage.objects.get(id=child.id)
self.assertEqual(child.parent_id, old_parent.id)
self.assertTrue(child.slug == "old-parent/kid")
child.set_parent(new_parent)
child.save()
self.assertEqual(child.parent_id, new_parent.id)
self.assertTrue(child.slug == "new-parent/kid")
child = RichTextPage.objects.get(id=child.id)
self.assertEqual(child.parent_id, new_parent.id)
self.assertTrue(child.slug == "new-parent/kid")
child.set_parent(None)
child.save()
self.assertTrue(child.parent is None)
self.assertTrue(child.slug == "kid")
child = RichTextPage.objects.get(id=child.id)
self.assertTrue(child.parent is None)
self.assertTrue(child.slug == "kid")
child = RichTextPage(title="child2")
child.set_parent(new_parent)
self.assertEqual(child.slug, "new-parent/child2")
# Assert that cycles are detected.
p1, _ = RichTextPage.objects.get_or_create(title="p1")
p2, _ = RichTextPage.objects.get_or_create(title="p2")
p2.set_parent(p1)
with self.assertRaises(AttributeError):
p1.set_parent(p1)
with self.assertRaises(AttributeError):
p1.set_parent(p2)
p2c = RichTextPage.objects.get(title="p2")
with self.assertRaises(AttributeError):
p1.set_parent(p2c)
def test_set_slug(self):
parent, _ = RichTextPage.objects.get_or_create(title="Parent",
slug="parent")
child, _ = RichTextPage.objects.get_or_create(title="Child",
slug="parent/child",
parent_id=parent.id)
parent.set_slug("new-parent-slug")
parent.save()
self.assertTrue(parent.slug == "new-parent-slug")
parent = RichTextPage.objects.get(id=parent.id)
self.assertTrue(parent.slug == "new-parent-slug")
child = RichTextPage.objects.get(id=child.id)
self.assertTrue(child.slug == "new-parent-slug/child")
def test_page_menu_queries(self):
"""
Test that rendering a page menu executes the same number of
queries regardless of the number of pages or levels of
children.
"""
template = ('{% load pages_tags %}'
'{% page_menu "pages/menus/tree.html" %}')
before = self.queries_used_for_template(template)
self.assertTrue(before > 0)
self.create_recursive_objects(RichTextPage, "parent", title="Page",
status=CONTENT_STATUS_PUBLISHED)
after = self.queries_used_for_template(template)
self.assertEquals(before, after)
def test_page_menu_flags(self):
"""
Test that pages only appear in the menu templates they've been
assigned to show in.
"""
menus = []
pages = []
template = "{% load pages_tags %}"
for i, label, path in settings.PAGE_MENU_TEMPLATES:
menus.append(i)
pages.append(RichTextPage.objects.create(in_menus=list(menus),
title="Page for %s" % unicode(label),
status=CONTENT_STATUS_PUBLISHED))
template += "{%% page_menu '%s' %%}" % path
rendered = Template(template).render(Context({}))
for page in pages:
self.assertEquals(rendered.count(page.title), len(page.in_menus))
def test_page_menu_default(self):
"""
Test that the default value for the ``in_menus`` field is used
and that it doesn't get forced to unicode.
"""
old_menu_temp = settings.PAGE_MENU_TEMPLATES
old_menu_temp_def = settings.PAGE_MENU_TEMPLATES_DEFAULT
try:
# MenusField initializes choices and default during model
# loading, so we can't just override settings.
from mezzanine.pages.models import BasePage
from mezzanine.pages.fields import MenusField
settings.PAGE_MENU_TEMPLATES = ((8, 'a', 'a'), (9, 'b', 'b'))
settings.PAGE_MENU_TEMPLATES_DEFAULT = None
class P1(BasePage):
in_menus = MenusField(blank=True, null=True)
self.assertEqual(P1().in_menus[0], 8)
settings.PAGE_MENU_TEMPLATES_DEFAULT = tuple()
class P2(BasePage):
in_menus = MenusField(blank=True, null=True)
self.assertEqual(P2().in_menus, None)
settings.PAGE_MENU_TEMPLATES_DEFAULT = [9]
class P3(BasePage):
in_menus = MenusField(blank=True, null=True)
self.assertEqual(P3().in_menus[0], 9)
finally:
settings.PAGE_MENU_TEMPLATES = old_menu_temp
settings.PAGE_MENU_TEMPLATES_DEFAULT = old_menu_temp_def
def test_overridden_page(self):
"""
Test that a page with a slug matching a non-page urlpattern
        returns ``True`` for its overridden property.
"""
        # If BLOG_SLUG is empty then urlpatterns for pages are prefixed
        # with PAGES_SLUG, and generally won't be overridden. In this
        # case, there aren't any overriding URLs by default, so bail
        # on the test.
if PAGES_SLUG:
return
page, created = RichTextPage.objects.get_or_create(slug="edit")
self.assertTrue(page.overridden())
| unlicense | -6,495,044,293,988,946,000 | 40.739336 | 78 | 0.612808 | false |
bruceyou/NewsBlur | apps/profile/views.py | 1 | 20939 | # -*- encoding: utf-8 -*-
import stripe
import datetime
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_protect
from django.contrib.auth import logout as logout_user
from django.contrib.auth import login as login_user
from django.db.models.aggregates import Sum
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
from django.contrib.admin.views.decorators import staff_member_required
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.core.mail import mail_admins
from django.conf import settings
from apps.profile.models import Profile, PaymentHistory, RNewUserQueue, MRedeemedCode
from apps.reader.models import UserSubscription, UserSubscriptionFolders, RUserStory
from apps.profile.forms import StripePlusPaymentForm, PLANS, DeleteAccountForm
from apps.profile.forms import ForgotPasswordForm, ForgotPasswordReturnForm, AccountSettingsForm
from apps.profile.forms import RedeemCodeForm
from apps.reader.forms import SignupForm, LoginForm
from apps.rss_feeds.models import MStarredStory, MStarredStoryCounts
from apps.social.models import MSocialServices, MActivity, MSocialProfile
from utils import json_functions as json
from utils.user_functions import ajax_login_required
from utils.view_functions import render_to
from utils.user_functions import get_user
from utils import log as logging
from vendor.paypalapi.exceptions import PayPalAPIResponseError
from vendor.paypal.standard.forms import PayPalPaymentsForm
SINGLE_FIELD_PREFS = ('timezone','feed_pane_size','hide_mobile','send_emails',
'hide_getting_started', 'has_setup_feeds', 'has_found_friends',
'has_trained_intelligence',)
SPECIAL_PREFERENCES = ('old_password', 'new_password', 'autofollow_friends', 'dashboard_date',)
@ajax_login_required
@require_POST
@json.json_view
def set_preference(request):
code = 1
message = ''
new_preferences = request.POST
preferences = json.decode(request.user.profile.preferences)
for preference_name, preference_value in new_preferences.items():
if preference_value in ['true','false']: preference_value = True if preference_value == 'true' else False
if preference_name in SINGLE_FIELD_PREFS:
setattr(request.user.profile, preference_name, preference_value)
elif preference_name in SPECIAL_PREFERENCES:
if preference_name == 'autofollow_friends':
social_services, _ = MSocialServices.objects.get_or_create(user_id=request.user.pk)
social_services.autofollow = preference_value
social_services.save()
elif preference_name == 'dashboard_date':
request.user.profile.dashboard_date = datetime.datetime.utcnow()
else:
if preference_value in ["true", "false"]:
preference_value = True if preference_value == "true" else False
preferences[preference_name] = preference_value
if preference_name == 'intro_page':
logging.user(request, "~FBAdvancing intro to page ~FM~SB%s" % preference_value)
request.user.profile.preferences = json.encode(preferences)
request.user.profile.save()
logging.user(request, "~FMSaving preference: %s" % new_preferences)
response = dict(code=code, message=message, new_preferences=new_preferences)
return response
@ajax_login_required
@json.json_view
def get_preference(request):
code = 1
preference_name = request.POST.get('preference')
preferences = json.decode(request.user.profile.preferences)
payload = preferences
if preference_name:
payload = preferences.get(preference_name)
response = dict(code=code, payload=payload)
return response
@csrf_protect
def login(request):
form = LoginForm()
if request.method == "POST":
form = LoginForm(data=request.POST)
if form.is_valid():
login_user(request, form.get_user())
logging.user(form.get_user(), "~FG~BBOAuth Login~FW")
return HttpResponseRedirect(request.POST['next'] or reverse('index'))
return render_to_response('accounts/login.html', {
'form': form,
'next': request.REQUEST.get('next', "")
}, context_instance=RequestContext(request))
@csrf_protect
def signup(request):
form = SignupForm()
if request.method == "POST":
form = SignupForm(data=request.POST)
if form.is_valid():
new_user = form.save()
login_user(request, new_user)
logging.user(new_user, "~FG~SB~BBNEW SIGNUP~FW")
new_user.profile.activate_free()
return HttpResponseRedirect(request.POST['next'] or reverse('index'))
return render_to_response('accounts/signup.html', {
'form': form,
'next': request.REQUEST.get('next', "")
}, context_instance=RequestContext(request))
@login_required
@csrf_protect
def redeem_code(request):
code = request.GET.get('code', None)
form = RedeemCodeForm(initial={'gift_code': code})
if request.method == "POST":
form = RedeemCodeForm(data=request.POST)
if form.is_valid():
gift_code = request.POST['gift_code']
PaymentHistory.objects.create(user=request.user,
payment_date=datetime.datetime.now(),
payment_amount=12,
payment_provider='good-web-bundle')
MRedeemedCode.record(request.user.pk, gift_code)
request.user.profile.activate_premium()
logging.user(request.user, "~FG~BBRedeeming gift code: %s~FW" % gift_code)
return render_to_response('reader/paypal_return.xhtml',
{}, context_instance=RequestContext(request))
return render_to_response('accounts/redeem_code.html', {
'form': form,
'code': request.REQUEST.get('code', ""),
'next': request.REQUEST.get('next', "")
}, context_instance=RequestContext(request))
@ajax_login_required
@require_POST
@json.json_view
def set_account_settings(request):
code = -1
message = 'OK'
form = AccountSettingsForm(user=request.user, data=request.POST)
if form.is_valid():
form.save()
code = 1
else:
message = form.errors[form.errors.keys()[0]][0]
payload = {
"username": request.user.username,
"email": request.user.email,
"social_profile": MSocialProfile.profile(request.user.pk)
}
return dict(code=code, message=message, payload=payload)
@ajax_login_required
@require_POST
@json.json_view
def set_view_setting(request):
code = 1
feed_id = request.POST['feed_id']
feed_view_setting = request.POST.get('feed_view_setting')
feed_order_setting = request.POST.get('feed_order_setting')
feed_read_filter_setting = request.POST.get('feed_read_filter_setting')
view_settings = json.decode(request.user.profile.view_settings)
setting = view_settings.get(feed_id, {})
if isinstance(setting, basestring): setting = {'v': setting}
if feed_view_setting: setting['v'] = feed_view_setting
if feed_order_setting: setting['o'] = feed_order_setting
if feed_read_filter_setting: setting['r'] = feed_read_filter_setting
view_settings[feed_id] = setting
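    # e.g. (illustrative) profile.view_settings now serializes to something like:
    #   {"42": {"v": "feed", "o": "newest", "r": "unread"}}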
request.user.profile.view_settings = json.encode(view_settings)
request.user.profile.save()
logging.user(request, "~FMView settings: %s/%s/%s" % (feed_view_setting,
feed_order_setting, feed_read_filter_setting))
response = dict(code=code)
return response
@ajax_login_required
@json.json_view
def get_view_setting(request):
code = 1
feed_id = request.POST['feed_id']
view_settings = json.decode(request.user.profile.view_settings)
response = dict(code=code, payload=view_settings.get(feed_id))
return response
@ajax_login_required
@require_POST
@json.json_view
def set_collapsed_folders(request):
code = 1
collapsed_folders = request.POST['collapsed_folders']
request.user.profile.collapsed_folders = collapsed_folders
request.user.profile.save()
logging.user(request, "~FMCollapsing folder: %s" % collapsed_folders)
response = dict(code=code)
return response
@ajax_login_required
def paypal_form(request):
domain = Site.objects.get_current().domain
paypal_dict = {
"cmd": "_xclick-subscriptions",
"business": "[email protected]",
"a3": "12.00", # price
"p3": 1, # duration of each unit (depends on unit)
"t3": "Y", # duration unit ("M for Month")
"src": "1", # make payments recur
"sra": "1", # reattempt payment on payment error
"no_note": "1", # remove extra notes (optional)
"item_name": "NewsBlur Premium Account",
"notify_url": "http://%s%s" % (domain, reverse('paypal-ipn')),
"return_url": "http://%s%s" % (domain, reverse('paypal-return')),
"cancel_return": "http://%s%s" % (domain, reverse('index')),
"custom": request.user.username,
}
# Create the instance.
form = PayPalPaymentsForm(initial=paypal_dict, button_type="subscribe")
logging.user(request, "~FBLoading paypal/feedchooser")
# Output the button.
return HttpResponse(form.render(), mimetype='text/html')
def paypal_return(request):
return render_to_response('reader/paypal_return.xhtml', {
}, context_instance=RequestContext(request))
@login_required
def activate_premium(request):
return HttpResponseRedirect(reverse('index'))
@ajax_login_required
@json.json_view
def profile_is_premium(request):
# Check tries
code = 0
retries = int(request.GET['retries'])
profile = Profile.objects.get(user=request.user)
subs = UserSubscription.objects.filter(user=request.user)
total_subs = subs.count()
activated_subs = subs.filter(active=True).count()
if retries >= 30:
code = -1
if not request.user.profile.is_premium:
            subject = "Premium account activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs)
message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user.pk, request.user.email)
mail_admins(subject, message, fail_silently=True)
request.user.profile.is_premium = True
request.user.profile.save()
return {
'is_premium': profile.is_premium,
'code': code,
'activated_subs': activated_subs,
'total_subs': total_subs,
}
@login_required
def stripe_form(request):
user = request.user
success_updating = False
stripe.api_key = settings.STRIPE_SECRET
plan = int(request.GET.get('plan', 2))
plan = PLANS[plan-1][0]
error = None
if request.method == 'POST':
zebra_form = StripePlusPaymentForm(request.POST, email=user.email)
if zebra_form.is_valid():
user.email = zebra_form.cleaned_data['email']
user.save()
current_premium = (user.profile.is_premium and
user.profile.premium_expire and
user.profile.premium_expire > datetime.datetime.now())
# Are they changing their existing card?
if user.profile.stripe_id and current_premium:
customer = stripe.Customer.retrieve(user.profile.stripe_id)
try:
card = customer.cards.create(card=zebra_form.cleaned_data['stripe_token'])
except stripe.CardError:
error = "This card was declined."
else:
customer.default_card = card.id
customer.save()
success_updating = True
else:
try:
customer = stripe.Customer.create(**{
'card': zebra_form.cleaned_data['stripe_token'],
'plan': zebra_form.cleaned_data['plan'],
'email': user.email,
'description': user.username,
})
except stripe.CardError:
error = "This card was declined."
else:
                user.profile.stripe_4_digits = zebra_form.cleaned_data['last_4_digits']
user.profile.stripe_id = customer.id
user.profile.save()
user.profile.activate_premium() # TODO: Remove, because webhooks are slow
success_updating = True
else:
zebra_form = StripePlusPaymentForm(email=user.email, plan=plan)
if success_updating:
return render_to_response('reader/paypal_return.xhtml',
{}, context_instance=RequestContext(request))
new_user_queue_count = RNewUserQueue.user_count()
new_user_queue_position = RNewUserQueue.user_position(request.user.pk)
new_user_queue_behind = 0
if new_user_queue_position >= 0:
new_user_queue_behind = new_user_queue_count - new_user_queue_position
new_user_queue_position -= 1
logging.user(request, "~BM~FBLoading Stripe form")
return render_to_response('profile/stripe_form.xhtml',
{
'zebra_form': zebra_form,
'publishable': settings.STRIPE_PUBLISHABLE,
'success_updating': success_updating,
'new_user_queue_count': new_user_queue_count - 1,
'new_user_queue_position': new_user_queue_position,
'new_user_queue_behind': new_user_queue_behind,
'error': error,
},
context_instance=RequestContext(request)
)
@render_to('reader/activities_module.xhtml')
def load_activities(request):
user = get_user(request)
page = max(1, int(request.REQUEST.get('page', 1)))
activities, has_next_page = MActivity.user(user.pk, page=page)
return {
'activities': activities,
'page': page,
'has_next_page': has_next_page,
        'username': 'You',
}
@ajax_login_required
@json.json_view
def payment_history(request):
user = request.user
if request.user.is_staff:
user_id = request.REQUEST.get('user_id', request.user.pk)
user = User.objects.get(pk=user_id)
history = PaymentHistory.objects.filter(user=user)
statistics = {
"last_seen_date": user.profile.last_seen_on,
"timezone": unicode(user.profile.timezone),
"stripe_id": user.profile.stripe_id,
"profile": user.profile,
"feeds": UserSubscription.objects.filter(user=user).count(),
"email": user.email,
"read_story_count": RUserStory.read_story_count(user.pk),
"feed_opens": UserSubscription.objects.filter(user=user).aggregate(sum=Sum('feed_opens'))['sum'],
}
return {
'is_premium': user.profile.is_premium,
'premium_expire': user.profile.premium_expire,
'payments': history,
'statistics': statistics,
}
@ajax_login_required
@json.json_view
def cancel_premium(request):
canceled = request.user.profile.cancel_premium()
return {
'code': 1 if canceled else -1,
}
@staff_member_required
@ajax_login_required
@json.json_view
def refund_premium(request):
user_id = request.REQUEST.get('user_id')
partial = request.REQUEST.get('partial', False)
user = User.objects.get(pk=user_id)
try:
refunded = user.profile.refund_premium(partial=partial)
except stripe.InvalidRequestError, e:
refunded = e
except PayPalAPIResponseError, e:
refunded = e
return {'code': 1 if refunded else -1, 'refunded': refunded}
@staff_member_required
@ajax_login_required
@json.json_view
def upgrade_premium(request):
user_id = request.REQUEST.get('user_id')
user = User.objects.get(pk=user_id)
upgraded = user.profile.activate_premium()
return {'code': 1 if upgraded else -1}
@staff_member_required
@ajax_login_required
@json.json_view
def update_payment_history(request):
user_id = request.REQUEST.get('user_id')
user = User.objects.get(pk=user_id)
user.profile.setup_premium_history()
return {'code': 1}
@login_required
@render_to('profile/delete_account.xhtml')
def delete_account(request):
if request.method == 'POST':
form = DeleteAccountForm(request.POST, user=request.user)
if form.is_valid():
logging.user(request.user, "~SK~BC~FRDeleting ~SB%s~SN's account." %
request.user.username)
request.user.profile.delete_user(confirm=True)
logout_user(request)
return HttpResponseRedirect(reverse('index'))
else:
logging.user(request.user, "~BC~FRFailed attempt to delete ~SB%s~SN's account." %
request.user.username)
else:
logging.user(request.user, "~BC~FRAttempting to delete ~SB%s~SN's account." %
request.user.username)
form = DeleteAccountForm(user=request.user)
return {
'delete_form': form,
}
@render_to('profile/forgot_password.xhtml')
def forgot_password(request):
if request.method == 'POST':
form = ForgotPasswordForm(request.POST)
if form.is_valid():
logging.user(request.user, "~BC~FRForgot password: ~SB%s" % request.POST['email'])
try:
user = User.objects.get(email__iexact=request.POST['email'])
except User.MultipleObjectsReturned:
user = User.objects.filter(email__iexact=request.POST['email'])[0]
user.profile.send_forgot_password_email()
return HttpResponseRedirect(reverse('index'))
else:
logging.user(request.user, "~BC~FRFailed forgot password: ~SB%s~SN" %
request.POST['email'])
else:
logging.user(request.user, "~BC~FRAttempting to retrieve forgotton password.")
form = ForgotPasswordForm()
return {
'forgot_password_form': form,
}
@login_required
@render_to('profile/forgot_password_return.xhtml')
def forgot_password_return(request):
if request.method == 'POST':
logging.user(request.user, "~BC~FRReseting ~SB%s~SN's password." %
request.user.username)
new_password = request.POST.get('password', '')
request.user.set_password(new_password)
request.user.save()
return HttpResponseRedirect(reverse('index'))
else:
logging.user(request.user, "~BC~FRAttempting to reset ~SB%s~SN's password." %
request.user.username)
form = ForgotPasswordReturnForm()
return {
'forgot_password_return_form': form,
}
@ajax_login_required
@json.json_view
def delete_starred_stories(request):
timestamp = request.POST.get('timestamp', None)
if timestamp:
delete_date = datetime.datetime.fromtimestamp(int(timestamp))
else:
delete_date = datetime.datetime.now()
starred_stories = MStarredStory.objects.filter(user_id=request.user.pk,
starred_date__lte=delete_date)
stories_deleted = starred_stories.count()
starred_stories.delete()
MStarredStoryCounts.count_for_user(request.user.pk, total_only=True)
starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True)
logging.user(request.user, "~BC~FRDeleting %s/%s starred stories (%s)" % (stories_deleted,
stories_deleted+starred_count, delete_date))
return dict(code=1, stories_deleted=stories_deleted, starred_counts=starred_counts,
starred_count=starred_count)
@ajax_login_required
@json.json_view
def delete_all_sites(request):
request.user.profile.send_opml_export_email()
subs = UserSubscription.objects.filter(user=request.user)
sub_count = subs.count()
subs.delete()
usf = UserSubscriptionFolders.objects.get(user=request.user)
usf.folders = '[]'
usf.save()
logging.user(request.user, "~BC~FRDeleting %s sites" % sub_count)
return dict(code=1)
@login_required
@render_to('profile/email_optout.xhtml')
def email_optout(request):
user = request.user
user.profile.send_emails = False
user.profile.save()
return {
"user": user,
}
| mit | 7,787,415,308,863,567,000 | 36.158082 | 117 | 0.633061 | false |
bmya/server-tools | sql_request_abstract/models/sql_request_mixin.py | 2 | 9036 | # Copyright (C) 2015 Akretion (<http://www.akretion.com>)
# Copyright (C) 2017 - Today: GRAP (http://www.grap.coop)
# @author: Sylvain LE GAL (https://twitter.com/legalsylvain)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import re
import uuid
from io import StringIO
import base64
from psycopg2 import ProgrammingError
from odoo import _, api, fields, models
from odoo.exceptions import UserError
class SQLRequestMixin(models.AbstractModel):
_name = 'sql.request.mixin'
_clean_query_enabled = True
_check_prohibited_words_enabled = True
_check_execution_enabled = True
_sql_request_groups_relation = False
_sql_request_users_relation = False
STATE_SELECTION = [
('draft', 'Draft'),
('sql_valid', 'SQL Valid'),
]
PROHIBITED_WORDS = [
'delete',
'drop',
'insert',
'alter',
'truncate',
'execute',
'create',
'update',
'ir_config_parameter',
]
# Default Section
@api.model
def _default_group_ids(self):
ir_model_obj = self.env['ir.model.data']
return [ir_model_obj.xmlid_to_res_id(
'sql_request_abstract.group_sql_request_user')]
@api.model
def _default_user_ids(self):
return []
# Columns Section
name = fields.Char('Name', required=True)
query = fields.Text(
string='Query', required=True, help="You can't use the following words"
": DELETE, DROP, CREATE, INSERT, ALTER, TRUNCATE, EXECUTE, UPDATE.")
state = fields.Selection(
string='State', selection=STATE_SELECTION, default='draft',
help="State of the Request:\n"
" * 'Draft': Not tested\n"
" * 'SQL Valid': SQL Request has been checked and is valid")
group_ids = fields.Many2many(
comodel_name='res.groups', string='Allowed Groups',
relation=_sql_request_groups_relation,
column1='sql_id', column2='group_id',
default=_default_group_ids)
user_ids = fields.Many2many(
comodel_name='res.users', string='Allowed Users',
relation=_sql_request_users_relation,
column1='sql_id', column2='user_id',
default=_default_user_ids)
# Action Section
@api.multi
def button_validate_sql_expression(self):
for item in self:
if item._clean_query_enabled:
item._clean_query()
if item._check_prohibited_words_enabled:
item._check_prohibited_words()
if item._check_execution_enabled:
item._check_execution()
item.state = 'sql_valid'
@api.multi
def button_set_draft(self):
self.write({'state': 'draft'})
# API Section
@api.multi
def _execute_sql_request(
self, params=None, mode='fetchall', rollback=True,
view_name=False, copy_options="CSV HEADER DELIMITER ';'"):
"""Execute a SQL request on the current database.
        Before execution, this method checks that the request has been
        validated (its ``state`` must not be ``'draft'``).
:param params: (dict) of keys / values that will be replaced in
the sql query, before executing it.
        :param mode: (str) result type expected. Available settings:
* 'view': create a view with the select query. Extra param
required 'view_name'.
* 'materialized_view': create a MATERIALIZED VIEW with the
select query. Extra parameter required 'view_name'.
* 'fetchall': execute the select request, and return the
result of 'cr.fetchall()'.
* 'fetchone' : execute the select request, and return the
result of 'cr.fetchone()'
        :param rollback: (boolean) whether a rollback should be performed
            after executing the query. Keep this enabled for security
            reasons, unless persisting the changes is really required.
(Ignored if @mode in ('view', 'materialized_view'))
:param view_name: (str) name of the view.
(Ignored if @mode not in ('view', 'materialized_view'))
:param copy_options: (str) mentions extra options for
"COPY request STDOUT WITH xxx" request.
(Ignored if @mode != 'stdout')
        .. note:: The following exceptions could be raised:
psycopg2.ProgrammingError: Error in the SQL Request.
odoo.exceptions.UserError:
* 'mode' is not implemented.
* materialized view is not supported by the Postgresql Server.
"""
self.ensure_one()
res = False
# Check if the request is in a valid state
if self.state == 'draft':
raise UserError(_(
"It is not allowed to execute a not checked request."))
# Disable rollback if a creation of a view is asked
if mode in ('view', 'materialized_view'):
rollback = False
# pylint: disable=sql-injection
if params:
query = self.query % params
else:
query = self.query
        if mode in ('fetchone', 'fetchall'):
            pass
        elif mode == 'stdout':
            query = "COPY (%s) TO STDOUT WITH %s" % (query, copy_options)
        elif mode == 'view':
            query = "CREATE VIEW %s AS (%s);" % (view_name, query)
        elif mode == 'materialized_view':
            self._check_materialized_view_available()
            query = "CREATE MATERIALIZED VIEW %s AS (%s);" % (view_name, query)
        else:
            raise UserError(_("Unimplemented mode: '%s'") % mode)
if rollback:
rollback_name = self._create_savepoint()
try:
if mode == 'stdout':
                output = StringIO()
                self.env.cr.copy_expert(query, output)
                res = base64.b64encode(output.getvalue().encode('utf-8'))
                output.close()
else:
self.env.cr.execute(query)
if mode == 'fetchall':
res = self.env.cr.fetchall()
elif mode == 'fetchone':
res = self.env.cr.fetchone()
finally:
self._rollback_savepoint(rollback_name)
return res
# Private Section
@api.model
def _create_savepoint(self):
rollback_name = '%s_%s' % (
self._name.replace('.', '_'), uuid.uuid1().hex)
# pylint: disable=sql-injection
req = "SAVEPOINT %s" % (rollback_name)
self.env.cr.execute(req)
return rollback_name
@api.model
def _rollback_savepoint(self, rollback_name):
# pylint: disable=sql-injection
req = "ROLLBACK TO SAVEPOINT %s" % (rollback_name)
self.env.cr.execute(req)
@api.model
def _check_materialized_view_available(self):
self.env.cr.execute("SHOW server_version;")
res = self.env.cr.fetchone()[0].split('.')
minor_version = float('.'.join(res[:2]))
if minor_version < 9.3:
raise UserError(_(
"Materialized View requires PostgreSQL 9.3 or greater but"
" PostgreSQL %s is currently installed.") % (minor_version))
@api.multi
def _clean_query(self):
self.ensure_one()
query = self.query.strip()
while query[-1] == ';':
query = query[:-1]
self.query = query
@api.multi
def _check_prohibited_words(self):
"""Check if the query contains prohibited words, to avoid maliscious
SQL requests"""
self.ensure_one()
query = self.query.lower()
for word in self.PROHIBITED_WORDS:
expr = r'\b%s\b' % word
is_not_safe = re.search(expr, query)
if is_not_safe:
raise UserError(_(
"The query is not allowed because it contains unsafe word"
" '%s'") % (word))
@api.multi
def _check_execution(self):
"""Ensure that the query is valid, trying to execute it. A rollback
is done after."""
self.ensure_one()
query = self._prepare_request_check_execution()
rollback_name = self._create_savepoint()
res = False
try:
self.env.cr.execute(query)
res = self._hook_executed_request()
except ProgrammingError as e:
raise UserError(
_("The SQL query is not valid:\n\n %s") % e)
finally:
self._rollback_savepoint(rollback_name)
return res
@api.multi
def _prepare_request_check_execution(self):
"""Overload me to replace some part of the query, if it contains
parameters"""
self.ensure_one()
return self.query
def _hook_executed_request(self):
"""Overload me to insert custom code, when the SQL request has
been executed, before the rollback.
"""
self.ensure_one()
return False
| agpl-3.0 | -8,622,797,667,682,290,000 | 33.357414 | 79 | 0.570938 | false |
OmeGak/indico | indico/modules/events/layout/util.py | 2 | 13982 | # This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from collections import OrderedDict, defaultdict
from itertools import chain, count
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import joinedload, load_only
from werkzeug.urls import url_parse
import indico
from indico.core import signals
from indico.core.config import config
from indico.core.db import db
from indico.core.plugins import url_for_plugin
from indico.legacy.common.cache import GenericCache
from indico.modules.events.layout import layout_settings
from indico.modules.events.layout.models.menu import MenuEntry, MenuEntryType, TransientMenuEntry
from indico.util.caching import memoize_request
from indico.util.signals import named_objects_from_signal, values_from_signal
from indico.util.string import crc32, return_ascii
from indico.web.flask.util import url_for
_cache = GenericCache('updated-menus')
def _menu_entry_key(entry_data):
return entry_data.position == -1, entry_data.position, entry_data.name
@memoize_request
def get_menu_entries_from_signal():
return named_objects_from_signal(signals.event.sidemenu.send(), plugin_attr='plugin')
def build_menu_entry_name(name, plugin=None):
""" Builds the proper name for a menu entry.
Given a menu entry's name and optionally a plugin, returns the
correct name of the menu entry.
:param name: str -- The name of the menu entry.
:param plugin: IndicoPlugin or str -- The plugin (or the name of the
plugin) which created the entry.
"""
if plugin:
plugin = getattr(plugin, 'name', plugin)
return '{}:{}'.format(plugin, name)
else:
return name
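# e.g. build_menu_entry_name('timetable', 'my_plugin') -> 'my_plugin:timetable'
#      build_menu_entry_name('timetable') -> 'timetable'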
class MenuEntryData(object):
"""Container to transmit menu entry-related data via signals
The data contained is transmitted via the `sidemenu` signal and used
to build the side menu of an event.
:param title: str -- The title of the menu, displayed to the user.
The title should be translated using the normal gettext
function, i.e. ``_('...')``, or the plugin's bound gettext
function.
:param name: str -- Name used to refer to the entry internally.
This is never shown to the user. The name must be unique,
names from plugins are automatically prefixed with the plugin
name and a colon and therefore have to be unique only within the
plugin. To mark the entry as active, its name must be specified
in the `menu_entry_name` class attribute of the WP class. For
plugins, the plugin name must be specified via the
`menu_entry_plugin` attribute as well.
:param endpoint: str -- The endpoint the entry will point to.
:param position: int -- The desired position of the menu entry.
the position is indicative only, relative to the other entries
and not the exact position. Entries with the same position will
be sorted alphanumerically on their name. A position of `-1`
will append the entry at the end of the menu.
:param is_enabled: bool -- Whether the entry should be enabled by
default (Default: `True`).
:param visible: function -- Determines if the entry should be
visible. This is a simple function which takes only the `event`
as parameter and returns a boolean to indicate if the entry is
visible or not. It is called whenever the menu is displayed, so
the current state of the event/user can be taken into account.
:param parent: str -- The name of the parent entry (None for root
entries).
:param static_site: bool or str -- If True, this menu item should
be shown in the menu of a static site. When set to a string,
the string will be used instead of a mangled version of the
endpoint's URL.
"""
plugin = None
def __init__(self, title, name, endpoint=None, position=-1, is_enabled=True, visible=None, parent=None,
static_site=False):
self.title = title
self._name = name
self.endpoint = endpoint
self.position = position
self._visible = visible
self.is_enabled = is_enabled
self.parent = parent
self.static_site = static_site
@property
def name(self):
return build_menu_entry_name(self._name, self.plugin)
def visible(self, event):
return self._visible(event) if self._visible else True
@return_ascii
def __repr__(self):
parent = ''
if self.parent:
parent = ', parent={}'.format(self.parent)
return '<MenuEntryData({}{}): "{}">'.format(self.name, parent, self.title)
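# Sketch of how a module or plugin would typically expose such an entry
# through the ``sidemenu`` signal (the name and endpoint below are
# hypothetical):
#
#     @signals.event.sidemenu.connect
#     def _extend_event_menu(sender, **kwargs):
#         yield MenuEntryData(title=_('My Page'), name='my_page',
#                             endpoint='my_plugin.page', position=10)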
def _get_split_signal_entries():
"""Get the top-level and child menu entry data"""
signal_entries = get_menu_entries_from_signal()
top_data = OrderedDict((name, data)
for name, data in sorted(signal_entries.iteritems(),
key=lambda name_data: _menu_entry_key(name_data[1]))
if not data.parent)
child_data = defaultdict(list)
for name, data in signal_entries.iteritems():
if data.parent is not None:
child_data[data.parent].append(data)
for parent, entries in child_data.iteritems():
entries.sort(key=_menu_entry_key)
return top_data, child_data
def _get_menu_cache_data(event):
from indico.core.plugins import plugin_engine
cache_key = unicode(event.id)
plugin_hash = crc32(','.join(sorted(plugin_engine.get_active_plugins())))
cache_version = '{}:{}'.format(indico.__version__, plugin_hash)
return cache_key, cache_version
def _menu_needs_recheck(event):
"""Check whether the menu needs to be checked for missing items"""
cache_key, cache_version = _get_menu_cache_data(event)
return _cache.get(cache_key) != cache_version
def _set_menu_checked(event):
"""Mark the menu as up to date"""
cache_key, cache_version = _get_menu_cache_data(event)
_cache.set(cache_key, cache_version)
def _save_menu_entries(entries):
"""Save new menu entries using a separate SA session"""
with db.tmp_session() as sess:
sess.add_all(entries)
try:
sess.commit()
except IntegrityError as e:
# If there are two parallel requests trying to insert a new menu
# item one of them will fail with an error due to the unique index.
# If the IntegrityError involves that index, we assume it's just the
# race condition and ignore it.
sess.rollback()
if 'ix_uq_menu_entries_event_id_name' not in unicode(e.message):
raise
return False
else:
return True
def _rebuild_menu(event):
"""Create all menu entries in the database"""
top_data, child_data = _get_split_signal_entries()
pos_gen = count()
entries = [_build_menu_entry(event, True, data, next(pos_gen), children=child_data.get(data.name))
for name, data in top_data.iteritems()]
return _save_menu_entries(entries)
def _check_menu(event):
"""Create missing menu items in the database"""
top_data, child_data = _get_split_signal_entries()
query = (MenuEntry.query
.filter(MenuEntry.event_id == int(event.id))
.options(load_only('id', 'parent_id', 'name', 'position'),
joinedload('parent').load_only('id', 'parent_id', 'name', 'position'),
joinedload('children').load_only('id', 'parent_id', 'name', 'position')))
existing = {entry.name: entry for entry in query}
    pos_gen = count(start=(max(x.position for x in existing.itervalues() if not x.parent) + 1))
entries = []
top_created = set()
for name, data in top_data.iteritems():
if name in existing:
continue
entries.append(_build_menu_entry(event, True, data, next(pos_gen), child_data.get(name)))
top_created.add(name)
child_pos_gens = {}
for name, entry in existing.iteritems():
if entry.parent is not None:
continue
child_pos_gens[name] = count(start=(max(x.position for x in entry.children) + 1 if entry.children else 0))
for parent_name, data_list in child_data.iteritems():
if parent_name in top_created:
# adding a missing parent element also adds its children
continue
for data in data_list:
if data.name in existing:
continue
parent = existing[parent_name]
# use the parent id, not the object itself since we don't want to
# connect the new objects here to the main sqlalchemy session
entries.append(_build_menu_entry(event, True, data, next(child_pos_gens[parent.name]), parent_id=parent.id))
return _save_menu_entries(entries)
def _build_menu(event):
"""Fetch the customizable menu data from the database."""
entries = MenuEntry.get_for_event(event)
if not entries:
# empty menu, just build the whole structure without checking
# for existing menu entries
if _rebuild_menu(event):
_set_menu_checked(event)
return MenuEntry.get_for_event(event)
elif _menu_needs_recheck(event):
# menu items found, but maybe something new has been added
if _check_menu(event):
_set_menu_checked(event)
# For some reason SQLAlchemy uses old data for the children
# relationships even when querying the entries again below.
# Expire them explicitly to avoid having to reload the page
# after missing menu items have been created.
for entry in entries:
db.session.expire(entry, ('children',))
return MenuEntry.get_for_event(event)
else:
# menu is assumed up to date
return entries
def _build_transient_menu(event):
"""Build the transient event menu from the signal data.
This is used to check for missing items if customization is
    enabled, or for the actual menu if no customization is used.
"""
top_data, child_data = _get_split_signal_entries()
pos_gen = count()
return [_build_menu_entry(event, False, data, next(pos_gen), children=child_data.get(data.name))
for name, data in top_data.iteritems()
if data.parent is None]
@memoize_request
def menu_entries_for_event(event):
custom_menu_enabled = layout_settings.get(event, 'use_custom_menu')
return _build_menu(event) if custom_menu_enabled else _build_transient_menu(event)
def _build_menu_entry(event, custom_menu_enabled, data, position, children=None, parent_id=None):
entry_cls = MenuEntry if custom_menu_enabled else TransientMenuEntry
entry = entry_cls(
event=event,
is_enabled=data.is_enabled,
name=data.name,
position=position,
children=[_build_menu_entry(event, custom_menu_enabled, entry_data, i)
for i, entry_data in enumerate(sorted(children or [], key=_menu_entry_key))]
)
if parent_id is not None:
# only valid for non-transient menu entries
entry.parent_id = parent_id
if data.plugin:
entry.type = MenuEntryType.plugin_link
entry.plugin = data.plugin.name
else:
entry.type = MenuEntryType.internal_link
return entry
@memoize_request
def get_menu_entry_by_name(name, event):
entries = menu_entries_for_event(event)
return next((e for e in chain(entries, *(e.children for e in entries)) if e.name == name), None)
def is_menu_entry_enabled(entry_name, event):
"""Check whether the MenuEntry is enabled"""
return get_menu_entry_by_name(entry_name, event).is_enabled
def get_plugin_conference_themes():
data = values_from_signal(signals.plugin.get_conference_themes.send(), return_plugins=True)
return {':'.join((plugin.name, name)): (path, title) for plugin, (name, path, title) in data}
def _build_css_url(theme):
if ':' in theme:
try:
path = get_plugin_conference_themes()[theme][0]
except KeyError:
return None
plugin = theme.split(':', 1)[0]
return url_for_plugin(plugin + '.static', filename=path)
else:
css_base = url_parse(config.CONFERENCE_CSS_TEMPLATES_BASE_URL).path
return '{}/{}'.format(css_base, theme)
def get_css_url(event, force_theme=None, for_preview=False):
"""Builds the URL of a CSS resource.
:param event: The `Event` to get the CSS url for
:param force_theme: The ID of the theme to override the custom CSS resource
only if it exists
:param for_preview: Whether the URL is used in the CSS preview page
:return: The URL to the CSS resource
"""
from indico.modules.events.layout import layout_settings
if force_theme and force_theme != '_custom':
return _build_css_url(force_theme)
elif for_preview and force_theme is None:
return None
elif force_theme == '_custom' or layout_settings.get(event, 'use_custom_css'):
if not event.has_stylesheet:
return None
return url_for('event_layout.css_display', event, slug=event.stylesheet_metadata['hash'])
elif layout_settings.get(event, 'theme'):
return _build_css_url(layout_settings.get(event, 'theme'))
def get_logo_data(event):
return {
'url': event.logo_url,
'filename': event.logo_metadata['filename'],
'size': event.logo_metadata['size'],
'content_type': event.logo_metadata['content_type']
}
def get_css_file_data(event):
return {
'filename': event.stylesheet_metadata['filename'],
'size': event.stylesheet_metadata['size'],
'content_type': 'text/css'
}
| mit | 5,241,790,921,630,120,000 | 37.517906 | 120 | 0.653268 | false |
simfarm/ansible-playbooks | test-playbooks/inventories/more_inventories/dyn_inventory.py | 3 | 2333 | #!/usr/bin/env python
from argparse import ArgumentParser
import json
inventory = {'group_four': {'hosts': ['group_four_host_0{}'.format(i) for i in range(1, 6)]
+ ['group_four_and_five_host_0{}'.format(i) for i in range(1, 6)]
+ ['group_four_five_and_six_host_0{}'.format(i) for i in range(1, 6)],
'vars': {'is_in_group_four': True}},
'group_five': {'hosts': ['group_five_host_0{}'.format(i) for i in range(1, 6)]
+ ['group_four_and_five_host_0{}'.format(i) for i in range(1, 6)]
+ ['group_five_and_six_host_0{}'.format(i) for i in range(1, 6)]
+ ['group_four_five_and_six_host_0{}'.format(i) for i in range(1, 6)],
'vars': {'is_in_group_five': True}},
'group_six': {'hosts': ['group_six_host_0{}'.format(i) for i in range(1, 6)]
+ ['group_five_and_six_host_0{}'.format(i) for i in range(1, 6)]
+ ['group_four_five_and_six_host_0{}'.format(i) for i in range(1, 6)],
'vars': {'is_in_group_six': True}},
'all': {'vars': {'ansible_connection': 'local',
'inventories_var': True}},
             'ungrouped': {'hosts': ['ungrouped_host_{:02d}'.format(i) for i in range(6, 11)]},
'_meta': {'hostvars': {'group_four_host_01': {'group_four_host_01_has_this_var': True},
'group_five_host_01': {'group_five_host_01_has_this_var': True},
'group_six_host_01': {'group_six_host_01_has_this_var': True}}}}
def parse_args():
parser = ArgumentParser()
parser.add_argument('--list', dest='list_instances', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', dest='requested_host', help='Get all the variables about a specific instance')
return parser.parse_args()
def load_inventory():
    args = parse_args()
    if args.requested_host:
        # Host variables are already served through '_meta', so a per-host
        # lookup simply returns that host's vars (or an empty dict).
        print(json.dumps(inventory['_meta']['hostvars'].get(args.requested_host, {})))
    elif args.list_instances:
        # Ansible consumes inventory output as JSON, so emit it with json.dumps.
        print(json.dumps(inventory, indent=4))
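# Illustrative invocation, mirroring how Ansible drives inventory scripts:
#   ./dyn_inventory.py --list
#   ./dyn_inventory.py --host group_four_host_01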
if __name__ == '__main__':
load_inventory()
| gpl-3.0 | -6,321,689,080,875,422,000 | 57.325 | 132 | 0.49207 | false |
uday1889/gensim | gensim/similarities/docsim.py | 38 | 27839 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Radim Rehurek <[email protected]>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
This module contains functions and classes for computing similarities across
a collection of documents in the Vector Space Model.
The main class is `Similarity`, which builds an index for a given set of documents.
Once the index is built, you can perform efficient queries like "Tell me how similar
is this query document to each document in the index?". The result is a vector
of numbers as large as the size of the initial set of documents, that is, one float
for each index document. Alternatively, you can also request only the top-N most
similar index documents to the query.
You can later add new documents to the index via `Similarity.add_documents()`.
How It Works
------------
The `Similarity` class splits the index into several smaller sub-indexes ("shards"),
which are disk-based. If your entire index fits in memory (~hundreds of thousands
documents for 1GB of RAM), you can also use the `MatrixSimilarity` or `SparseMatrixSimilarity`
classes directly. These are more simple but do not scale well (the entire index is
kept in RAM).
Once the index has been initialized, you can query for document similarity simply by:
>>> index = Similarity('/tmp/tst', corpus, num_features=12) # build the index
>>> similarities = index[query] # get similarities between the query and all index documents
If you have more query documents, you can submit them all at once, in a batch:
>>> for similarities in index[batch_of_documents]: # the batch is simply an iterable of documents (=gensim corpus)
>>> ...
The benefit of this batch (aka "chunked") querying is much better performance.
To see the speed-up on your machine, run ``python -m gensim.test.simspeed``
(compare to my results `here <http://groups.google.com/group/gensim/msg/4f6f171a869e4fca?>`_).
There is also a special syntax for when you need similarity of documents in the index
to the index itself (i.e. queries=indexed documents themselves). This special syntax
already uses the faster, batch queries internally:
>>> for similarities in index: # return all similarities of the 1st index document, then 2nd...
>>> ...
"""
import logging
import itertools
import os
import heapq
import numpy
import scipy.sparse
from gensim import interfaces, utils, matutils
from six.moves import map as imap, xrange, zip as izip
logger = logging.getLogger('gensim.similarities.docsim')
PARALLEL_SHARDS = False
try:
import multiprocessing
# by default, don't parallelize queries. uncomment the following line if you want that.
# PARALLEL_SHARDS = multiprocessing.cpu_count() # use #parallel processes = #CPus
except ImportError:
pass
class Shard(utils.SaveLoad):
"""
A proxy class that represents a single shard instance within a Similarity
index.
Basically just wraps (Sparse)MatrixSimilarity so that it mmaps from disk on
request (query).
"""
def __init__(self, fname, index):
self.dirname, self.fname = os.path.split(fname)
self.length = len(index)
self.cls = index.__class__
logger.info("saving index shard to %s" % self.fullname())
index.save(self.fullname())
self.index = self.get_index()
def fullname(self):
return os.path.join(self.dirname, self.fname)
def __len__(self):
return self.length
def __getstate__(self):
result = self.__dict__.copy()
# (S)MS objects must be loaded via load() because of mmap (simple pickle.load won't do)
if 'index' in result:
del result['index']
return result
def __str__(self):
return ("%s Shard(%i documents in %s)" % (self.cls.__name__, len(self), self.fullname()))
def get_index(self):
if not hasattr(self, 'index'):
logger.debug("mmaping index from %s" % self.fullname())
self.index = self.cls.load(self.fullname(), mmap='r')
return self.index
def get_document_id(self, pos):
"""Return index vector at position `pos`.
The vector is of the same type as the underlying index (ie., dense for
MatrixSimilarity and scipy.sparse for SparseMatrixSimilarity.
"""
assert 0 <= pos < len(self), "requested position out of range"
return self.get_index().index[pos]
def __getitem__(self, query):
index = self.get_index()
try:
index.num_best = self.num_best
index.normalize = self.normalize
        except AttributeError:
raise ValueError("num_best and normalize have to be set before querying a proxy Shard object")
return index[query]
def query_shard(args):
query, shard = args # simulate starmap (not part of multiprocessing in older Pythons)
logger.debug("querying shard %s num_best=%s in process %s" % (shard, shard.num_best, os.getpid()))
result = shard[query]
logger.debug("finished querying shard %s in process %s" % (shard, os.getpid()))
return result
class Similarity(interfaces.SimilarityABC):
"""
Compute cosine similarity of a dynamic query against a static corpus of documents
("the index").
Scalability is achieved by sharding the index into smaller pieces, each of which
fits into core memory (see the `(Sparse)MatrixSimilarity` classes in this module).
The shards themselves are simply stored as files to disk and mmap'ed back as needed.
"""
def __init__(self, output_prefix, corpus, num_features, num_best=None, chunksize=256, shardsize=32768):
"""
Construct the index from `corpus`. The index can be later extended by calling
the `add_documents` method. **Note**: documents are split (internally, transparently)
into shards of `shardsize` documents each, converted to a matrix, for faster BLAS calls.
Each shard is stored to disk under `output_prefix.shard_number` (=you need write
access to that location). If you don't specify an output prefix, a random
filename in temp will be used.
`shardsize` should be chosen so that a `shardsize x chunksize` matrix of floats
fits comfortably into main memory.
`num_features` is the number of features in the `corpus` (e.g. size of the
dictionary, or the number of latent topics for latent semantic models).
If `num_best` is left unspecified, similarity queries will return a full
vector with one float for every document in the index:
>>> index = Similarity('/path/to/index', corpus, num_features=400) # if corpus has 7 documents...
>>> index[query] # ... then result will have 7 floats
[0.0, 0.0, 0.2, 0.13, 0.8, 0.0, 0.1]
If `num_best` is set, queries return only the `num_best` most similar documents,
always leaving out documents for which the similarity is 0.
If the input vector itself only has features with zero values (=the sparse
representation is empty), the returned list will always be empty.
>>> index.num_best = 3
>>> index[query] # return at most "num_best" of `(index_of_document, similarity)` tuples
[(4, 0.8), (2, 0.13), (3, 0.13)]
You can also override `num_best` dynamically, simply by setting e.g.
`self.num_best = 10` before doing a query.
"""
if output_prefix is None:
# undocumented feature: set output_prefix=None to create the server in temp
self.output_prefix = utils.randfname(prefix='simserver')
else:
self.output_prefix = output_prefix
logger.info("starting similarity index under %s" % self.output_prefix)
self.num_features = num_features
self.num_best = num_best
self.normalize = True
self.chunksize = int(chunksize)
self.shardsize = shardsize
self.shards = []
self.fresh_docs, self.fresh_nnz = [], 0
if corpus is not None:
self.add_documents(corpus)
def __len__(self):
return len(self.fresh_docs) + sum([len(shard) for shard in self.shards])
def __str__(self):
return ("Similarity index with %i documents in %i shards (stored under %s)" %
(len(self), len(self.shards), self.output_prefix))
def add_documents(self, corpus):
"""
Extend the index with new documents.
Internally, documents are buffered and then spilled to disk when there's
`self.shardsize` of them (or when a query is issued).
"""
min_ratio = 1.0 # 0.5 to only reopen shards that are <50% complete
if self.shards and len(self.shards[-1]) < min_ratio * self.shardsize:
            # The last shard was incomplete (< min_ratio * shardsize documents); load it back and add the documents there, don't start a new shard
self.reopen_shard()
for doc in corpus:
if isinstance(doc, numpy.ndarray):
doclen = len(doc)
elif scipy.sparse.issparse(doc):
doclen = doc.nnz
            else:
                # plain gensim sparse document: normalize it and choose a
                # sparse or dense representation based on its density
                doclen = len(doc)
                if doclen < 0.3 * self.num_features:
                    doc = matutils.unitvec(matutils.corpus2csc([doc], self.num_features).T)
                else:
                    doc = matutils.unitvec(matutils.sparse2full(doc, self.num_features))
self.fresh_docs.append(doc)
self.fresh_nnz += doclen
if len(self.fresh_docs) >= self.shardsize:
self.close_shard()
if len(self.fresh_docs) % 10000 == 0:
logger.info("PROGRESS: fresh_shard size=%i" % len(self.fresh_docs))
def shardid2filename(self, shardid):
if self.output_prefix.endswith('.'):
return "%s%s" % (self.output_prefix, shardid)
else:
return "%s.%s" % (self.output_prefix, shardid)
def close_shard(self):
"""
Force the latest shard to close (be converted to a matrix and stored
to disk). Do nothing if no new documents added since last call.
**NOTE**: the shard is closed even if it is not full yet (its size is smaller
than `self.shardsize`). If documents are added later via `add_documents()`,
this incomplete shard will be loaded again and completed.
"""
if not self.fresh_docs:
return
shardid = len(self.shards)
# consider the shard sparse if its density is < 30%
issparse = 0.3 > 1.0 * self.fresh_nnz / (len(self.fresh_docs) * self.num_features)
if issparse:
index = SparseMatrixSimilarity(self.fresh_docs, num_terms=self.num_features,
num_docs=len(self.fresh_docs), num_nnz=self.fresh_nnz)
else:
index = MatrixSimilarity(self.fresh_docs, num_features=self.num_features)
logger.info("creating %s shard #%s" % ('sparse' if issparse else 'dense', shardid))
shard = Shard(self.shardid2filename(shardid), index)
shard.num_best = self.num_best
shard.num_nnz = self.fresh_nnz
self.shards.append(shard)
self.fresh_docs, self.fresh_nnz = [], 0
def reopen_shard(self):
assert self.shards
if self.fresh_docs:
raise ValueError("cannot reopen a shard with fresh documents in index")
last_shard = self.shards[-1]
last_index = last_shard.get_index()
logger.info("reopening an incomplete shard of %i documents" % len(last_shard))
self.fresh_docs = list(last_index.index)
self.fresh_nnz = last_shard.num_nnz
del self.shards[-1] # remove the shard from index, *but its file on disk is not deleted*
logger.debug("reopen complete")
def query_shards(self, query):
"""
Return the result of applying shard[query] for each shard in self.shards,
as a sequence.
If PARALLEL_SHARDS is set, the shards are queried in parallel, using
the multiprocessing module.
"""
args = zip([query] * len(self.shards), self.shards)
if PARALLEL_SHARDS and PARALLEL_SHARDS > 1:
logger.debug("spawning %i query processes" % PARALLEL_SHARDS)
pool = multiprocessing.Pool(PARALLEL_SHARDS)
result = pool.imap(query_shard, args, chunksize=1 + len(args) / PARALLEL_SHARDS)
else:
# serial processing, one shard after another
pool = None
result = imap(query_shard, args)
return pool, result
def __getitem__(self, query):
"""Get similarities of document `query` to all documents in the corpus.
**or**
If `query` is a corpus (iterable of documents), return a matrix of similarities
        of all query documents vs. all corpus documents. This batch query is more
efficient than computing the similarities one document after another.
"""
self.close_shard() # no-op if no documents added to index since last query
# reset num_best and normalize parameters, in case they were changed dynamically
for shard in self.shards:
shard.num_best = self.num_best
shard.normalize = self.normalize
# there are 4 distinct code paths, depending on whether input `query` is
# a corpus (or numpy/scipy matrix) or a single document, and whether the
# similarity result should be a full array or only num_best most similar
# documents.
pool, shard_results = self.query_shards(query)
if self.num_best is None:
# user asked for all documents => just stack the sub-results into a single matrix
# (works for both corpus / single doc query)
result = numpy.hstack(shard_results)
else:
# the following uses a lot of lazy evaluation and (optionally) parallel
# processing, to improve query latency and minimize memory footprint.
offsets = numpy.cumsum([0] + [len(shard) for shard in self.shards])
convert = lambda doc, shard_no: [(doc_index + offsets[shard_no], sim)
for doc_index, sim in doc]
is_corpus, query = utils.is_corpus(query)
is_corpus = is_corpus or hasattr(query, 'ndim') and query.ndim > 1 and query.shape[0] > 1
if not is_corpus:
# user asked for num_best most similar and query is a single doc
results = (convert(result, shard_no) for shard_no, result in enumerate(shard_results))
result = heapq.nlargest(self.num_best, itertools.chain(*results), key=lambda item: item[1])
else:
# the trickiest combination: returning num_best results when query was a corpus
results = []
for shard_no, result in enumerate(shard_results):
shard_result = [convert(doc, shard_no) for doc in result]
results.append(shard_result)
result = []
for parts in izip(*results):
merged = heapq.nlargest(self.num_best, itertools.chain(*parts), key=lambda item: item[1])
result.append(merged)
if pool:
# gc doesn't seem to collect the Pools, eventually leading to
# "IOError 24: too many open files". so let's terminate it manually.
pool.terminate()
return result
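    # Editor's note (illustrative): with two shards of sizes 3 and 2, the
    # `offsets` array above is [0, 3, 5]; a hit at local position 1 inside
    # shard #1 is reported with global document id 3 + 1 == 4.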
def vector_by_id(self, docpos):
"""
Return indexed vector corresponding to the document at position `docpos`.
"""
self.close_shard() # no-op if no documents added to index since last query
pos = 0
for shard in self.shards:
pos += len(shard)
if docpos < pos:
break
if not self.shards or docpos < 0 or docpos >= pos:
raise ValueError("invalid document position: %s (must be 0 <= x < %s)" %
(docpos, len(self)))
result = shard.get_document_id(docpos - pos + len(shard))
return result
def similarity_by_id(self, docpos):
"""
Return similarity of the given document only. `docpos` is the position
of the query document within index.
"""
query = self.vector_by_id(docpos)
norm, self.normalize = self.normalize, False
result = self[query]
self.normalize = norm
return result
def __iter__(self):
"""
For each index document, compute cosine similarity against all other
documents in the index and yield the result.
"""
# turn off query normalization (vectors in the index are already normalized, save some CPU)
norm, self.normalize = self.normalize, False
for chunk in self.iter_chunks():
if chunk.shape[0] > 1:
for sim in self[chunk]:
yield sim
else:
yield self[chunk]
self.normalize = norm # restore normalization
def iter_chunks(self, chunksize=None):
"""
Iteratively yield the index as chunks of documents, each of size <= chunksize.
The chunk is returned in its raw form (matrix or sparse matrix slice).
The size of the chunk may be smaller than requested; it is up to the caller
to check the result for real length, using `chunk.shape[0]`.
"""
self.close_shard()
if chunksize is None:
# if not explicitly specified, use the chunksize from the constructor
chunksize = self.chunksize
for shard in self.shards:
query = shard.get_index().index
for chunk_start in xrange(0, query.shape[0], chunksize):
# scipy.sparse doesn't allow slicing beyond real size of the matrix
# (unlike numpy). so, clip the end of the chunk explicitly to make
# scipy.sparse happy
chunk_end = min(query.shape[0], chunk_start + chunksize)
chunk = query[chunk_start: chunk_end] # create a view
yield chunk
def check_moved(self):
"""
Update shard locations, in case the server directory has moved on filesystem.
"""
dirname = os.path.dirname(self.output_prefix)
for shard in self.shards:
shard.dirname = dirname
def save(self, fname=None, *args, **kwargs):
"""
Save the object via pickling (also see load) under filename specified in
the constructor.
Calls `close_shard` internally to spill any unfinished shards to disk first.
"""
self.close_shard()
if fname is None:
fname = self.output_prefix
super(Similarity, self).save(fname, *args, **kwargs)
def destroy(self):
"""
        Delete all files under self.output_prefix. The object is not usable
        anymore after calling this method. Use with care!
"""
import glob
for fname in glob.glob(self.output_prefix + '*'):
logger.info("deleting %s" % fname)
os.remove(fname)
#endclass Similarity
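# Editor's sketch (not part of the original module): a minimal end-to-end use
# of the sharded Similarity index. It assumes the constructor signature
# Similarity(output_prefix, corpus, num_features, ...) defined earlier in this
# module; the output prefix and the toy bag-of-words corpus are hypothetical.
def _similarity_usage_sketch():
    corpus = [[(0, 1.0)], [(1, 1.0)], [(0, 0.5), (1, 0.5)]]  # 3 docs, 2 features
    index = Similarity('/tmp/gensim_shards', corpus, num_features=2)
    index.add_documents([[(0, 2.0), (1, 1.0)]])  # buffered; spilled on next query
    return index[corpus[0]]  # cosine sims of doc #0 against all 4 documents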
class MatrixSimilarity(interfaces.SimilarityABC):
"""
Compute similarity against a corpus of documents by storing the index matrix
in memory. The similarity measure used is cosine between two vectors.
Use this if your input corpus contains dense vectors (such as documents in LSI
space) and fits into RAM.
The matrix is internally stored as a *dense* numpy array. Unless the entire matrix
fits into main memory, use `Similarity` instead.
See also `Similarity` and `SparseMatrixSimilarity` in this module.
"""
def __init__(self, corpus, num_best=None, dtype=numpy.float32, num_features=None, chunksize=256, corpus_len=None):
"""
`num_features` is the number of features in the corpus (will be determined
automatically by scanning the corpus if not specified). See `Similarity`
class for description of the other parameters.
"""
if num_features is None:
logger.warning("scanning corpus to determine the number of features (consider setting `num_features` explicitly)")
num_features = 1 + utils.get_max_id(corpus)
self.num_features = num_features
self.num_best = num_best
self.normalize = True
self.chunksize = chunksize
if corpus_len is None:
corpus_len = len(corpus)
if corpus is not None:
if self.num_features <= 0:
raise ValueError("cannot index a corpus with zero features (you must specify either `num_features` or a non-empty corpus in the constructor)")
logger.info("creating matrix with %i documents and %i features", corpus_len, num_features)
self.index = numpy.empty(shape=(corpus_len, num_features), dtype=dtype)
# iterate over corpus, populating the numpy index matrix with (normalized)
# document vectors
for docno, vector in enumerate(corpus):
if docno % 1000 == 0:
logger.debug("PROGRESS: at document #%i/%i", docno, corpus_len)
# individual documents in fact may be in numpy.scipy.sparse format as well.
                # it's not documented because it's not fully supported throughout.
                # the user had better know what they're doing (no normalization, must
                # explicitly supply num_features, etc.).
if isinstance(vector, numpy.ndarray):
pass
elif scipy.sparse.issparse(vector):
vector = vector.toarray().flatten()
else:
vector = matutils.unitvec(matutils.sparse2full(vector, num_features))
self.index[docno] = vector
def __len__(self):
return self.index.shape[0]
def get_similarities(self, query):
"""
Return similarity of sparse vector `query` to all documents in the corpus,
as a numpy array.
If `query` is a collection of documents, return a 2D array of similarities
of each document in `query` to all documents in the corpus (=batch query,
faster than processing each document in turn).
**Do not use this function directly; use the self[query] syntax instead.**
"""
is_corpus, query = utils.is_corpus(query)
if is_corpus:
query = numpy.asarray(
[matutils.sparse2full(vec, self.num_features) for vec in query],
dtype=self.index.dtype)
else:
if scipy.sparse.issparse(query):
query = query.toarray() # convert sparse to dense
elif isinstance(query, numpy.ndarray):
pass
else:
# default case: query is a single vector in sparse gensim format
query = matutils.sparse2full(query, self.num_features)
query = numpy.asarray(query, dtype=self.index.dtype)
# do a little transposition dance to stop numpy from making a copy of
# self.index internally in numpy.dot (very slow).
result = numpy.dot(self.index, query.T).T # return #queries x #index
return result # XXX: removed casting the result from array to list; does anyone care?
def __str__(self):
return "%s<%i docs, %i features>" % (self.__class__.__name__, len(self), self.index.shape[1])
#endclass MatrixSimilarity
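# Editor's sketch (illustrative, not from the original source): querying a
# small dense in-memory index; the two-feature vectors below are made up.
def _matrix_similarity_usage_sketch():
    corpus = [[(0, 0.9), (1, 0.1)], [(0, 0.2), (1, 0.8)]]
    index = MatrixSimilarity(corpus, num_features=2)
    return index[[(0, 1.0)]]  # similarities of one query doc vs. both documents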
class SparseMatrixSimilarity(interfaces.SimilarityABC):
"""
Compute similarity against a corpus of documents by storing the sparse index
matrix in memory. The similarity measure used is cosine between two vectors.
Use this if your input corpus contains sparse vectors (such as documents in
bag-of-words format) and fits into RAM.
The matrix is internally stored as a `scipy.sparse.csr` matrix. Unless the entire
matrix fits into main memory, use `Similarity` instead.
See also `Similarity` and `MatrixSimilarity` in this module.
"""
def __init__(self, corpus, num_features=None, num_terms=None, num_docs=None, num_nnz=None,
num_best=None, chunksize=500, dtype=numpy.float32):
self.num_best = num_best
self.normalize = True
self.chunksize = chunksize
if corpus is not None:
logger.info("creating sparse index")
# iterate over input corpus, populating the sparse index matrix
try:
# use the more efficient corpus generation version, if the input
# `corpus` is MmCorpus-like (knows its shape and number of non-zeroes).
num_terms, num_docs, num_nnz = corpus.num_terms, corpus.num_docs, corpus.num_nnz
logger.debug("using efficient sparse index creation")
except AttributeError:
# no MmCorpus, use the slower version (or maybe user supplied the
# num_* params in constructor)
pass
if num_features is not None:
# num_terms is just an alias for num_features, for compatibility with MatrixSimilarity
num_terms = num_features
if num_terms is None:
raise ValueError("refusing to guess the number of sparse features: specify num_features explicitly")
corpus = (matutils.scipy2sparse(v) if scipy.sparse.issparse(v) else
(matutils.full2sparse(v) if isinstance(v, numpy.ndarray) else
matutils.unitvec(v)) for v in corpus)
self.index = matutils.corpus2csc(corpus, num_terms=num_terms, num_docs=num_docs, num_nnz=num_nnz,
dtype=dtype, printprogress=10000).T
# convert to Compressed Sparse Row for efficient row slicing and multiplications
self.index = self.index.tocsr() # currently no-op, CSC.T is already CSR
logger.info("created %r" % self.index)
def __len__(self):
return self.index.shape[0]
def get_similarities(self, query):
"""
Return similarity of sparse vector `query` to all documents in the corpus,
as a numpy array.
If `query` is a collection of documents, return a 2D array of similarities
of each document in `query` to all documents in the corpus (=batch query,
faster than processing each document in turn).
**Do not use this function directly; use the self[query] syntax instead.**
"""
is_corpus, query = utils.is_corpus(query)
if is_corpus:
query = matutils.corpus2csc(query, self.index.shape[1], dtype=self.index.dtype)
else:
if scipy.sparse.issparse(query):
query = query.T # convert documents=rows to documents=columns
elif isinstance(query, numpy.ndarray):
if query.ndim == 1:
query.shape = (1, len(query))
query = scipy.sparse.csr_matrix(query, dtype=self.index.dtype).T
else:
# default case: query is a single vector, in sparse gensim format
query = matutils.corpus2csc([query], self.index.shape[1], dtype=self.index.dtype)
# compute cosine similarity against every other document in the collection
result = self.index * query.tocsc() # N x T * T x C = N x C
if result.shape[1] == 1 and not is_corpus:
# for queries of one document, return a 1d array
result = result.toarray().flatten()
else:
# otherwise, return a 2d matrix (#queries x #index)
result = result.toarray().T
return result
#endclass SparseMatrixSimilarity
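# Editor's sketch (illustrative): a sparse index that returns only the single
# best match via `num_best`; the tiny corpus below is hypothetical.
def _sparse_similarity_usage_sketch():
    corpus = [[(0, 1.0)], [(1, 1.0)]]
    index = SparseMatrixSimilarity(corpus, num_features=2, num_best=1)
    return index[[(0, 1.0)]]  # -> [(0, 1.0)]: document #0 is the best match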
| gpl-3.0 | -6,361,021,586,887,310,000 | 41.180303 | 158 | 0.624304 | false |
midnightradio/gensim | gensim/test/test_big.py | 2 | 2434 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 Radim Rehurek <[email protected]>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking processing/storing large inputs.
"""
import logging
import unittest
import os
import numpy as np
import gensim
from gensim.test.utils import get_tmpfile
class BigCorpus:
"""A corpus of a large number of docs & large vocab"""
def __init__(self, words_only=False, num_terms=200000, num_docs=1000000, doc_len=100):
self.dictionary = gensim.utils.FakeDict(num_terms)
self.words_only = words_only
self.num_docs = num_docs
self.doc_len = doc_len
def __iter__(self):
for _ in range(self.num_docs):
doc_len = np.random.poisson(self.doc_len)
ids = np.random.randint(0, len(self.dictionary), doc_len)
if self.words_only:
yield [str(idx) for idx in ids]
else:
weights = np.random.poisson(3, doc_len)
yield sorted(zip(ids, weights))
if os.environ.get('GENSIM_BIG', False):
class TestLargeData(unittest.TestCase):
"""Try common operations, using large models. You'll need ~8GB RAM to run these tests"""
def testWord2Vec(self):
corpus = BigCorpus(words_only=True, num_docs=100000, num_terms=3000000, doc_len=200)
tmpf = get_tmpfile('gensim_big.tst')
model = gensim.models.Word2Vec(corpus, vector_size=300, workers=4)
model.save(tmpf, ignore=['syn1'])
del model
gensim.models.Word2Vec.load(tmpf)
def testLsiModel(self):
corpus = BigCorpus(num_docs=50000)
tmpf = get_tmpfile('gensim_big.tst')
model = gensim.models.LsiModel(corpus, num_topics=500, id2word=corpus.dictionary)
model.save(tmpf)
del model
gensim.models.LsiModel.load(tmpf)
def testLdaModel(self):
corpus = BigCorpus(num_docs=5000)
tmpf = get_tmpfile('gensim_big.tst')
model = gensim.models.LdaModel(corpus, num_topics=500, id2word=corpus.dictionary)
model.save(tmpf)
del model
gensim.models.LdaModel.load(tmpf)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
| gpl-3.0 | 8,973,228,674,481,336,000 | 32.342466 | 96 | 0.614215 | false |
vyscond/cocos | cocos/audio/SDL/dll.py | 4 | 10357 | #!/usr/bin/env python
"""
"""
import six
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
from ctypes import *
from ctypes.util import find_library
import sys
# Private version checking declared before SDL.version can be
# imported.
class _SDL_version(Structure):
_fields_ = [('major', c_ubyte),
('minor', c_ubyte),
('patch', c_ubyte)]
def __repr__(self):
return '%d.%d.%d' % \
(self.major, self.minor, self.patch)
def _version_parts(v):
"""Return a tuple (major, minor, patch) for `v`, which can be
an _SDL_version, string or tuple."""
if hasattr(v, 'major') and hasattr(v, 'minor') and hasattr(v, 'patch'):
return v.major, v.minor, v.patch
elif type(v) == tuple:
return v
elif type(v) == str:
return tuple([int(i) for i in v.split('.')])
else:
raise TypeError
def _version_string(v):
return '%d.%d.%d' % _version_parts(v)
def _platform_library_name(library):
if sys.platform[:5] == 'linux':
return 'lib%s.so' % library
elif sys.platform == 'darwin':
return '%s.framework' % library
elif sys.platform == 'win32':
return '%s.dll' % library
return library
class SDL_DLL:
def __init__(self, library_name, version_function_name, version=None):
self.library_name = library_name
if sys.platform == 'win32':
try:
self._load_library_win()
except WindowsError:
raise ImportError(('Dynamic library "%s" was not found' %
library_name))
else:
self._load_library_nix(version)
# Get the version of the DLL we're using
if version_function_name:
try:
version_function = getattr(self._dll, version_function_name)
version_function.restype = POINTER(_SDL_version)
self._version = _version_parts(version_function().contents)
except AttributeError:
self._version = (0, 0, 0)
else:
self._version = (0, 0, 0)
def _load_library_win(self):
"""
        Loads the library from the dir cocos.sdl_lib_path.
        Normally it is the path to the pygame package.
        If set to None, it will look first in the current working directory,
        then in system32; that can be handy when using py2exe.
"""
import os
import cocos
# we must change cwd because some .dll s will directly load other dlls
old_cwd = os.getcwd()
if cocos.sdl_lib_path is not None:
os.chdir(cocos.sdl_lib_path)
try:
self._dll = getattr(cdll, self.library_name)
finally:
os.chdir(old_cwd)
def _load_library_nix(self, version):
library = find_library(self.library_name)
if library is None and version is not None:
            # try to look it up with the version. This is useful on Linux:
            # sometimes there isn't a libSDL.so but a libSDL-1.2.so
library = find_library("%s-%s" % (self.library_name, version))
if not library:
raise ImportError('Dynamic library "%s" was not found' %
_platform_library_name(self.library_name))
try:
self._dll = getattr(cdll, library)
except OSError:
raise ImportError("Dynamic library not found")
def version_compatible(self, v):
"""Returns True iff `v` is equal to or later than the loaded library
version."""
v = _version_parts(v)
for i in range(3):
if self._version[i] < v[i]:
return False
return True
def assert_version_compatible(self, name, since):
"""Raises an exception if `since` is later than the loaded library."""
        if not self.version_compatible(since):
import cocos.audio.SDL.error
raise cocos.audio.SDL.error.SDL_NotImplementedError(
'%s requires SDL version %s; currently using version %s' %
(name, _version_string(since), _version_string(self._version)))
def private_function(self, name, **kwargs):
"""Construct a wrapper function for ctypes with internal documentation
and no argument names."""
kwargs['doc'] = 'Private wrapper for %s' % name
kwargs['args'] = []
return self.function(name, **kwargs)
def function(self, name, doc, args=[], arg_types=[],
return_type=None,
dereference_return=False,
require_return=False,
success_return=None,
error_return=None,
since=None):
"""Construct a wrapper function for ctypes.
:Parameters:
`name`
The name of the function as it appears in the shared library.
`doc`
Docstring to associate with the wrapper function.
`args`
List of strings giving the argument names.
`arg_types`
List of ctypes classes giving the argument types.
`return_type`
The ctypes class giving the wrapped function's native
return type.
`dereference_return`
If True, the return value is assumed to be a pointer and
will be dereferenced via ``.contents`` before being
returned to the user application.
`require_return`
Used in conjunction with `dereference_return`; if True, an
exception will be raised if the result is NULL; if False
None will be returned when the result is NULL.
`success_return`
If not None, the expected result of the wrapped function.
If the return value does not equal success_return, an
exception will be raised.
`error_return`
If not None, the error result of the wrapped function. If
the return value equals error_return, an exception will be
raised. Cannot be used in conjunction with
`success_return`.
`since`
Tuple (major, minor, patch) or string 'x.y.z' of the first
version of SDL in which this function appears. If the
loaded version predates it, a placeholder function that
raises `SDL_NotImplementedError` will be returned instead.
Set to None if the function is in all versions of SDL.
"""
# Check for version compatibility first
if since and not self.version_compatible(since):
def _f(*args, **kwargs):
import cocos.audio.SDL.error
raise cocos.audio.SDL.error.SDL_NotImplementedError(
'%s requires %s %s; currently using version %s' %
(name, self.library_name, _version_string(since),
_version_string(self._version)))
if args:
_f._args = args
_f.__doc__ = doc
if six.PY2:
_f.func_name = name
else:
_f.__name__ = name
return _f
# Ok, get function from ctypes
func = getattr(self._dll, name)
func.argtypes = arg_types
func.restype = return_type
if dereference_return:
if require_return:
# Construct a function which dereferences the pointer result,
# or raises an exception if NULL is returned.
def _f(*args, **kwargs):
result = func(*args, **kwargs)
if result:
return result.contents
import cocos.audio.SDL.error
raise cocos.audio.SDL.error.SDL_Exception(cocos.audio.SDL.error.SDL_GetError())
else:
# Construct a function which dereferences the pointer result,
# or returns None if NULL is returned.
def _f(*args, **kwargs):
result = func(*args, **kwargs)
if result:
return result.contents
return None
elif success_return is not None:
# Construct a function which returns None, but raises an exception
# if the C function returns a failure code.
def _f(*args, **kwargs):
result = func(*args, **kwargs)
if result != success_return:
import cocos.audio.SDL.error
raise cocos.audio.SDL.error.SDL_Exception(cocos.audio.SDL.error.SDL_GetError())
return result
elif error_return is not None:
# Construct a function which returns None, but raises an exception
# if the C function returns a failure code.
def _f(*args, **kwargs):
result = func(*args, **kwargs)
if result == error_return:
import cocos.audio.SDL.error
                    raise cocos.audio.SDL.error.SDL_Exception(cocos.audio.SDL.error.SDL_GetError())
return result
elif require_return:
# Construct a function which returns the usual result, or returns
# None if NULL is returned.
def _f(*args, **kwargs):
result = func(*args, **kwargs)
if not result:
import cocos.audio.SDL.error
raise cocos.audio.SDL.error.SDL_Exception(cocos.audio.SDL.error.SDL_GetError())
return result
else:
# Construct a function which returns the C function's return
# value.
def _f(*args, **kwargs):
return func(*args, **kwargs)
if args:
_f._args = args
_f.__doc__ = doc
if six.PY2:
_f.func_name = name
else:
_f.__name__ = name
return _f
# Shortcuts to the SDL core library
_dll = SDL_DLL('SDL', 'SDL_Linked_Version', '1.2')
version_compatible = _dll.version_compatible
assert_version_compatible = _dll.assert_version_compatible
private_function = _dll.private_function
function = _dll.function
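# Editor's sketch (not part of the original module): how a binding is
# typically declared with the `function` factory above. SDL_GetTicks is a
# real SDL 1.2 entry point returning Uint32; wrapping is deferred inside a
# helper so nothing touches the DLL at import time.
def _example_binding():
    return function('SDL_GetTicks',
        'Get the number of milliseconds since SDL library initialization.',
        args=[], arg_types=[], return_type=c_uint)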
| bsd-3-clause | 991,794,302,125,215,000 | 38.083019 | 99 | 0.548904 | false |
CXQERP/ODOOERP | addons/l10n_be_invoice_bba/invoice.py | 2 | 12421 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re, time, random
from openerp import api
from openerp.osv import fields, osv
from openerp.tools.translate import _
import logging
_logger = logging.getLogger(__name__)
"""
account.invoice object:
- Add support for Belgian structured communication
- Rename 'reference' field labels to 'Communication'
"""
class account_invoice(osv.osv):
_inherit = 'account.invoice'
@api.cr_uid_context
def _get_reference_type(self, cursor, user, context=None):
"""Add BBA Structured Communication Type and change labels from 'reference' into 'communication' """
res = super(account_invoice, self)._get_reference_type(cursor, user,
context=context)
res[[i for i,x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication')
res.append(('bba', 'BBA Structured Communication'))
        # _logger.warning('reference_type = %s' % res)
return res
def check_bbacomm(self, val):
supported_chars = '0-9+*/ '
pattern = re.compile('[^' + supported_chars + ']')
if pattern.findall(val or ''):
return False
bbacomm = re.sub('\D', '', val or '')
if len(bbacomm) == 12:
base = int(bbacomm[:10])
mod = base % 97 or 97
if mod == int(bbacomm[-2:]):
return True
return False
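    # Worked example (editor's note): for the digits '123456789002',
    # base = 1234567890 and 1234567890 % 97 == 2, which matches the final two
    # digits '02', so '+++123/4567/89002+++' passes check_bbacomm().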
def _check_communication(self, cr, uid, ids):
for inv in self.browse(cr, uid, ids):
if inv.reference_type == 'bba':
return self.check_bbacomm(inv.reference)
return True
def onchange_partner_id(self, cr, uid, ids, type, partner_id,
date_invoice=False, payment_term=False,
partner_bank_id=False, company_id=False,
context=None):
result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
date_invoice, payment_term, partner_bank_id, company_id, context)
# reference_type = self.default_get(cr, uid, ['reference_type'])['reference_type']
# _logger.warning('partner_id %s' % partner_id)
reference = False
reference_type = 'none'
if partner_id:
if (type == 'out_invoice'):
reference_type = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context).out_inv_comm_type
if reference_type:
reference = self.generate_bbacomm(cr, uid, ids, type, reference_type, partner_id, '', context=context)['value']['reference']
res_update = {
'reference_type': reference_type or 'none',
'reference': reference,
}
result['value'].update(res_update)
return result
def generate_bbacomm(self, cr, uid, ids, type, reference_type, partner_id, reference, context=None):
partner_obj = self.pool.get('res.partner')
reference = reference or ''
algorithm = False
if partner_id:
algorithm = partner_obj.browse(cr, uid, partner_id, context=context).out_inv_comm_algorithm
algorithm = algorithm or 'random'
if (type == 'out_invoice'):
if reference_type == 'bba':
if algorithm == 'date':
if not self.check_bbacomm(reference):
doy = time.strftime('%j')
year = time.strftime('%Y')
seq = '001'
seq_ids = self.search(cr, uid,
[('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
('reference', 'like', '+++%s/%s/%%' % (doy, year))], order='reference')
if seq_ids:
prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
if prev_seq < 999:
seq = '%03d' % (prev_seq + 1)
else:
raise osv.except_osv(_('Warning!'),
                            _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communication has been exceeded!' \
'\nPlease create manually a unique BBA Structured Communication.'))
bbacomm = doy + year + seq
base = int(bbacomm)
mod = base % 97 or 97
reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod)
elif algorithm == 'partner_ref':
if not self.check_bbacomm(reference):
partner_ref = self.pool.get('res.partner').browse(cr, uid, partner_id).ref
partner_ref_nr = re.sub('\D', '', partner_ref or '')
if (len(partner_ref_nr) < 3) or (len(partner_ref_nr) > 7):
raise osv.except_osv(_('Warning!'),
_('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!' \
'\nPlease correct the Partner record.'))
else:
partner_ref_nr = partner_ref_nr.ljust(7, '0')
seq = '001'
seq_ids = self.search(cr, uid,
[('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
('reference', 'like', '+++%s/%s/%%' % (partner_ref_nr[:3], partner_ref_nr[3:]))], order='reference')
if seq_ids:
prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
if prev_seq < 999:
seq = '%03d' % (prev_seq + 1)
else:
raise osv.except_osv(_('Warning!'),
                                _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communication has been exceeded!' \
'\nPlease create manually a unique BBA Structured Communication.'))
bbacomm = partner_ref_nr + seq
base = int(bbacomm)
mod = base % 97 or 97
reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod)
elif algorithm == 'random':
if not self.check_bbacomm(reference):
base = random.randint(1, 9999999999)
bbacomm = str(base).rjust(10, '0')
base = int(bbacomm)
mod = base % 97 or 97
mod = str(mod).rjust(2, '0')
reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod)
else:
raise osv.except_osv(_('Error!'),
_("Unsupported Structured Communication Type Algorithm '%s' !" \
"\nPlease contact your Odoo support channel.") % algorithm)
return {'value': {'reference': reference}}
def create(self, cr, uid, vals, context=None):
reference = vals.get('reference', False)
reference_type = vals.get('reference_type', False)
if vals.get('type') == 'out_invoice' and not reference_type:
# fallback on default communication type for partner
reference_type = self.pool.get('res.partner').browse(cr, uid, vals['partner_id']).out_inv_comm_type
if reference_type == 'bba':
reference = self.generate_bbacomm(cr, uid, [], vals['type'], reference_type, vals['partner_id'], '', context={})['value']['reference']
vals.update({
'reference_type': reference_type or 'none',
'reference': reference,
})
if reference_type == 'bba':
if not reference:
raise osv.except_osv(_('Warning!'),
_('Empty BBA Structured Communication!' \
'\nPlease fill in a unique BBA Structured Communication.'))
if self.check_bbacomm(reference):
reference = re.sub('\D', '', reference)
vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++'
same_ids = self.search(cr, uid,
[('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
('reference', '=', vals['reference'])])
if same_ids:
raise osv.except_osv(_('Warning!'),
_('The BBA Structured Communication has already been used!' \
'\nPlease create manually a unique BBA Structured Communication.'))
return super(account_invoice, self).create(cr, uid, vals, context=context)
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
for inv in self.browse(cr, uid, ids, context):
            if 'reference_type' in vals:
reference_type = vals['reference_type']
else:
reference_type = inv.reference_type or ''
if reference_type == 'bba' and 'reference' in vals:
if self.check_bbacomm(vals['reference']):
reference = re.sub('\D', '', vals['reference'])
vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++'
same_ids = self.search(cr, uid,
[('id', '!=', inv.id), ('type', '=', 'out_invoice'),
('reference_type', '=', 'bba'), ('reference', '=', vals['reference'])])
if same_ids:
raise osv.except_osv(_('Warning!'),
_('The BBA Structured Communication has already been used!' \
'\nPlease create manually a unique BBA Structured Communication.'))
return super(account_invoice, self).write(cr, uid, ids, vals, context)
def copy(self, cr, uid, id, default=None, context=None):
default = default or {}
invoice = self.browse(cr, uid, id, context=context)
if invoice.type in ['out_invoice']:
reference_type = invoice.reference_type or 'none'
default['reference_type'] = reference_type
if reference_type == 'bba':
partner = invoice.partner_id
default['reference'] = self.generate_bbacomm(cr, uid, id,
invoice.type, reference_type,
partner.id, '', context=context)['value']['reference']
return super(account_invoice, self).copy(cr, uid, id, default, context=context)
_columns = {
'reference': fields.char('Communication', help="The partner reference of this invoice."),
'reference_type': fields.selection(_get_reference_type, 'Communication Type',
required=True),
}
_constraints = [
(_check_communication, 'Invalid BBA Structured Communication !', ['Communication']),
]
account_invoice()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,143,271,697,212,474,400 | 51.855319 | 165 | 0.512117 | false |
Tejal011089/huntercamp_erpnext | erpnext/accounts/party.py | 17 | 8239 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import datetime
from frappe import _, msgprint, scrub
from frappe.defaults import get_user_permissions
from frappe.utils import add_days, getdate, formatdate, flt, get_first_day, date_diff, nowdate
from erpnext.utilities.doctype.address.address import get_address_display
from erpnext.utilities.doctype.contact.contact import get_contact_details
@frappe.whitelist()
def get_party_details(party=None, account=None, party_type="Customer", company=None,
posting_date=None, price_list=None, currency=None, doctype=None):
if not party:
return {}
if not frappe.db.exists(party_type, party):
frappe.throw(_("{0}: {1} does not exists").format(party_type, party))
return _get_party_details(party, account, party_type,
company, posting_date, price_list, currency, doctype)
def _get_party_details(party=None, account=None, party_type="Customer", company=None,
posting_date=None, price_list=None, currency=None, doctype=None, ignore_permissions=False):
out = frappe._dict(set_account_and_due_date(party, account, party_type, company, posting_date, doctype))
party = out[party_type.lower()]
if not ignore_permissions and not frappe.has_permission(party_type, "read", party):
frappe.throw(_("Not permitted"), frappe.PermissionError)
party = frappe.get_doc(party_type, party)
set_address_details(out, party, party_type)
set_contact_details(out, party, party_type)
set_other_values(out, party, party_type)
set_price_list(out, party, party_type, price_list)
if not out.get("currency"):
out["currency"] = currency
# sales team
if party_type=="Customer":
out["sales_team"] = [{
"sales_person": d.sales_person,
"sales_designation": d.sales_designation,
"allocated_percentage": d.allocated_percentage
} for d in party.get("sales_team")]
return out
def set_address_details(out, party, party_type):
billing_address_field = "customer_address" if party_type == "Lead" \
else party_type.lower() + "_address"
out[billing_address_field] = frappe.db.get_value("Address",
{party_type.lower(): party.name, "is_primary_address":1}, "name")
# address display
out.address_display = get_address_display(out[billing_address_field])
# shipping address
if party_type in ["Customer", "Lead"]:
out.shipping_address_name = frappe.db.get_value("Address",
{party_type.lower(): party.name, "is_shipping_address":1}, "name")
out.shipping_address = get_address_display(out["shipping_address_name"])
def set_contact_details(out, party, party_type):
out.contact_person = frappe.db.get_value("Contact",
{party_type.lower(): party.name, "is_primary_contact":1}, "name")
if not out.contact_person:
out.update({
"contact_person": None,
"contact_display": None,
"contact_email": None,
"contact_mobile": None,
"contact_phone": None,
"contact_designation": None,
"contact_department": None
})
else:
out.update(get_contact_details(out.contact_person))
def set_other_values(out, party, party_type):
# copy
if party_type=="Customer":
to_copy = ["customer_name", "customer_group", "territory"]
else:
to_copy = ["supplier_name", "supplier_type"]
for f in to_copy:
out[f] = party.get(f)
# fields prepended with default in Customer doctype
for f in ['currency', 'taxes_and_charges'] \
+ (['sales_partner', 'commission_rate'] if party_type=="Customer" else []):
if party.get("default_" + f):
out[f] = party.get("default_" + f)
def set_price_list(out, party, party_type, given_price_list):
# price list
price_list = filter(None, get_user_permissions().get("Price List", []))
if isinstance(price_list, list):
price_list = price_list[0] if len(price_list)==1 else None
if not price_list:
price_list = party.default_price_list
if not price_list and party_type=="Customer":
price_list = frappe.db.get_value("Customer Group",
party.customer_group, "default_price_list")
if not price_list:
price_list = given_price_list
if price_list:
out.price_list_currency = frappe.db.get_value("Price List", price_list, "currency")
out["selling_price_list" if party.doctype=="Customer" else "buying_price_list"] = price_list
def set_account_and_due_date(party, account, party_type, company, posting_date, doctype):
if doctype not in ["Sales Invoice", "Purchase Invoice"]:
# not an invoice
return {
party_type.lower(): party
}
if party:
account = get_party_account(company, party, party_type)
account_fieldname = "debit_to" if party_type=="Customer" else "credit_to"
out = {
party_type.lower(): party,
account_fieldname : account,
"due_date": get_due_date(posting_date, party_type, party, company)
}
return out
@frappe.whitelist()
def get_party_account(company, party, party_type):
"""Returns the account for the given `party`.
Will first search in party (Customer / Supplier) record, if not found,
will search in group (Customer Group / Supplier Type),
finally will return default."""
if not company:
frappe.throw(_("Please select company first."))
if party:
account = frappe.db.get_value("Party Account",
{"parenttype": party_type, "parent": party, "company": company}, "account")
if not account:
party_group_doctype = "Customer Group" if party_type=="Customer" else "Supplier Type"
group = frappe.db.get_value(party_type, party, scrub(party_group_doctype))
account = frappe.db.get_value("Party Account",
{"parenttype": party_group_doctype, "parent": group, "company": company}, "account")
if not account:
default_account_name = "default_receivable_account" if party_type=="Customer" else "default_payable_account"
account = frappe.db.get_value("Company", company, default_account_name)
return account
@frappe.whitelist()
def get_due_date(posting_date, party_type, party, company):
"""Set Due Date = Posting Date + Credit Days"""
due_date = None
if posting_date and party:
due_date = posting_date
if party_type=="Customer":
credit_days_based_on, credit_days = get_credit_days(party_type, party, company)
if credit_days_based_on == "Fixed Days" and credit_days:
due_date = add_days(posting_date, credit_days)
elif credit_days_based_on == "Last Day of the Next Month":
due_date = (get_first_day(posting_date, 0, 2) + datetime.timedelta(-1)).strftime("%Y-%m-%d")
else:
credit_days = get_credit_days(party_type, party, company)
if credit_days:
due_date = add_days(posting_date, credit_days)
return due_date
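# Editor's note (illustrative): with credit_days_based_on == "Fixed Days" and
# credit_days == 30, get_due_date("2015-06-01", ...) yields "2015-07-01"; with
# "Last Day of the Next Month" it yields "2015-07-31" regardless of credit_days.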
def get_credit_days(party_type, party, company):
if party_type and party:
if party_type == "Customer":
credit_days_based_on, credit_days, customer_group = \
frappe.db.get_value(party_type, party, ["credit_days_based_on", "credit_days", "customer_group"])
if not credit_days_based_on:
credit_days_based_on, credit_days = \
frappe.db.get_value("Customer Group", customer_group, ["credit_days_based_on", "credit_days"]) \
or frappe.db.get_value("Company", company, ["credit_days_based_on", "credit_days"])
return credit_days_based_on, credit_days
else:
credit_days, supplier_type = frappe.db.get_value(party_type, party, ["credit_days", "supplier_type"])
if not credit_days:
credit_days = frappe.db.get_value("Supplier Type", supplier_type, "credit_days") \
or frappe.db.get_value("Company", company, "credit_days")
return credit_days
def validate_due_date(posting_date, due_date, party_type, party, company):
if getdate(due_date) < getdate(posting_date):
frappe.throw(_("Due Date cannot be before Posting Date"))
else:
default_due_date = get_due_date(posting_date, party_type, party, company)
if default_due_date != posting_date and getdate(due_date) > getdate(default_due_date):
is_credit_controller = frappe.db.get_single_value("Accounts Settings", "credit_controller") in frappe.get_roles()
if is_credit_controller:
msgprint(_("Note: Due / Reference Date exceeds allowed customer credit days by {0} day(s)")
.format(date_diff(due_date, default_due_date)))
else:
frappe.throw(_("Due / Reference Date cannot be after {0}").format(formatdate(default_due_date))) | agpl-3.0 | -8,767,121,103,044,999,000 | 36.454545 | 116 | 0.70494 | false |
roopali8/tempest | tempest/stress/actions/ssh_floating.py | 8 | 7744 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import subprocess
from tempest_lib.common.utils import data_utils
from tempest import config
import tempest.stress.stressaction as stressaction
import tempest.test
CONF = config.CONF
class FloatingStress(stressaction.StressAction):
# from the scenario manager
def ping_ip_address(self, ip_address):
cmd = ['ping', '-c1', '-w1', ip_address]
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
proc.communicate()
success = proc.returncode == 0
return success
def tcp_connect_scan(self, addr, port):
        # basic TCP connect scan: try to open a connection to addr:port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((addr, port))
except socket.error as exc:
self.logger.info("%s(%s): %s", self.server_id, self.floating['ip'],
str(exc))
return False
self.logger.info("%s(%s): Connected :)", self.server_id,
self.floating['ip'])
s.close()
return True
def check_port_ssh(self):
def func():
return self.tcp_connect_scan(self.floating['ip'], 22)
if not tempest.test.call_until_true(func, self.check_timeout,
self.check_interval):
raise RuntimeError("Cannot connect to the ssh port.")
def check_icmp_echo(self):
self.logger.info("%s(%s): Pinging..",
self.server_id, self.floating['ip'])
def func():
return self.ping_ip_address(self.floating['ip'])
if not tempest.test.call_until_true(func, self.check_timeout,
self.check_interval):
raise RuntimeError("%s(%s): Cannot ping the machine.",
self.server_id, self.floating['ip'])
self.logger.info("%s(%s): pong :)",
self.server_id, self.floating['ip'])
def _create_vm(self):
self.name = name = data_utils.rand_name("instance")
servers_client = self.manager.servers_client
self.logger.info("creating %s" % name)
vm_args = self.vm_extra_args.copy()
vm_args['security_groups'] = [self.sec_grp]
server = servers_client.create_server(name, self.image,
self.flavor,
**vm_args)
self.server_id = server['id']
if self.wait_after_vm_create:
self.manager.servers_client.wait_for_server_status(self.server_id,
'ACTIVE')
def _destroy_vm(self):
self.logger.info("deleting %s" % self.server_id)
self.manager.servers_client.delete_server(self.server_id)
self.manager.servers_client.wait_for_server_termination(self.server_id)
self.logger.info("deleted %s" % self.server_id)
def _create_sec_group(self):
sec_grp_cli = self.manager.security_groups_client
s_name = data_utils.rand_name('sec_grp')
s_description = data_utils.rand_name('desc')
self.sec_grp = sec_grp_cli.create_security_group(s_name,
s_description)
create_rule = sec_grp_cli.create_security_group_rule
create_rule(self.sec_grp['id'], 'tcp', 22, 22)
create_rule(self.sec_grp['id'], 'icmp', -1, -1)
def _destroy_sec_grp(self):
sec_grp_cli = self.manager.security_groups_client
sec_grp_cli.delete_security_group(self.sec_grp['id'])
def _create_floating_ip(self):
floating_cli = self.manager.floating_ips_client
self.floating = floating_cli.create_floating_ip(self.floating_pool)
def _destroy_floating_ip(self):
cli = self.manager.floating_ips_client
cli.delete_floating_ip(self.floating['id'])
cli.wait_for_resource_deletion(self.floating['id'])
self.logger.info("Deleted Floating IP %s", str(self.floating['ip']))
def setUp(self, **kwargs):
self.image = CONF.compute.image_ref
self.flavor = CONF.compute.flavor_ref
self.vm_extra_args = kwargs.get('vm_extra_args', {})
self.wait_after_vm_create = kwargs.get('wait_after_vm_create',
True)
self.new_vm = kwargs.get('new_vm', False)
self.new_sec_grp = kwargs.get('new_sec_group', False)
self.new_floating = kwargs.get('new_floating', False)
self.reboot = kwargs.get('reboot', False)
self.floating_pool = kwargs.get('floating_pool', None)
self.verify = kwargs.get('verify', ('check_port_ssh',
'check_icmp_echo'))
self.check_timeout = kwargs.get('check_timeout', 120)
self.check_interval = kwargs.get('check_interval', 1)
self.wait_for_disassociate = kwargs.get('wait_for_disassociate',
True)
# allocate floating
if not self.new_floating:
self._create_floating_ip()
# add security group
if not self.new_sec_grp:
self._create_sec_group()
# create vm
if not self.new_vm:
self._create_vm()
def wait_disassociate(self):
cli = self.manager.floating_ips_client
def func():
floating = cli.show_floating_ip(self.floating['id'])
return floating['instance_id'] is None
if not tempest.test.call_until_true(func, self.check_timeout,
self.check_interval):
raise RuntimeError("IP disassociate timeout!")
def run_core(self):
cli = self.manager.floating_ips_client
cli.associate_floating_ip_to_server(self.floating['ip'],
self.server_id)
for method in self.verify:
m = getattr(self, method)
m()
cli.disassociate_floating_ip_from_server(self.floating['ip'],
self.server_id)
if self.wait_for_disassociate:
self.wait_disassociate()
def run(self):
if self.new_sec_grp:
self._create_sec_group()
if self.new_floating:
self._create_floating_ip()
if self.new_vm:
self._create_vm()
if self.reboot:
self.manager.servers_client.reboot(self.server_id, 'HARD')
self.manager.servers_client.wait_for_server_status(self.server_id,
'ACTIVE')
self.run_core()
if self.new_vm:
self._destroy_vm()
if self.new_floating:
self._destroy_floating_ip()
if self.new_sec_grp:
self._destroy_sec_grp()
def tearDown(self):
if not self.new_vm:
self._destroy_vm()
if not self.new_floating:
self._destroy_floating_ip()
if not self.new_sec_grp:
self._destroy_sec_grp()
| apache-2.0 | 533,804,512,134,053,950 | 38.917526 | 79 | 0.556302 | false |
awkspace/ansible | lib/ansible/modules/network/fortios/fortios_firewall_policy6.py | 24 | 37823 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2018 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_policy6
short_description: Configure IPv6 policies in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by
allowing the user to configure firewall feature and policy6 category.
      Examples include all options and need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: false
firewall_policy6:
description:
- Configure IPv6 policies.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
action:
description:
- Policy action (allow/deny/ipsec).
choices:
- accept
- deny
- ipsec
app-category:
description:
- Application category ID list.
suboptions:
id:
description:
- Category IDs.
required: true
app-group:
description:
- Application group names.
suboptions:
name:
description:
- Application group names. Source application.group.name.
required: true
application:
description:
- Application ID list.
suboptions:
id:
description:
- Application IDs.
required: true
application-list:
description:
- Name of an existing Application list. Source application.list.name.
av-profile:
description:
- Name of an existing Antivirus profile. Source antivirus.profile.name.
comments:
description:
- Comment.
custom-log-fields:
description:
- Log field index numbers to append custom log fields to log messages for this policy.
suboptions:
field-id:
description:
- Custom log field. Source log.custom-field.id.
required: true
devices:
description:
- Names of devices or device groups that can be matched by the policy.
suboptions:
name:
description:
- Device or group name. Source user.device.alias user.device-group.name user.device-category.name.
required: true
diffserv-forward:
description:
- Enable to change packet's DiffServ values to the specified diffservcode-forward value.
choices:
- enable
- disable
diffserv-reverse:
description:
- Enable to change packet's reverse (reply) DiffServ values to the specified diffservcode-rev value.
choices:
- enable
- disable
diffservcode-forward:
description:
- Change packet's DiffServ to this value.
diffservcode-rev:
description:
- Change packet's reverse (reply) DiffServ to this value.
dlp-sensor:
description:
- Name of an existing DLP sensor. Source dlp.sensor.name.
dscp-match:
description:
- Enable DSCP check.
choices:
- enable
- disable
dscp-negate:
description:
- Enable negated DSCP match.
choices:
- enable
- disable
dscp-value:
description:
- DSCP value.
dsri:
description:
- Enable DSRI to ignore HTTP server responses.
choices:
- enable
- disable
dstaddr:
description:
- Destination address and address group names.
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name firewall.vip6.name firewall.vipgrp6.name.
required: true
dstaddr-negate:
description:
- When enabled dstaddr specifies what the destination address must NOT be.
choices:
- enable
- disable
dstintf:
description:
- Outgoing (egress) interface.
suboptions:
name:
description:
- Interface name. Source system.interface.name system.zone.name.
required: true
firewall-session-dirty:
description:
- How to handle sessions if the configuration of this firewall policy changes.
choices:
- check-all
- check-new
fixedport:
description:
- Enable to prevent source NAT from changing a session's source port.
choices:
- enable
- disable
global-label:
description:
- Label for the policy that appears when the GUI is in Global View mode.
groups:
description:
- Names of user groups that can authenticate with this policy.
suboptions:
name:
description:
- Group name. Source user.group.name.
required: true
icap-profile:
description:
- Name of an existing ICAP profile. Source icap.profile.name.
inbound:
description:
- "Policy-based IPsec VPN: only traffic from the remote network can initiate a VPN."
choices:
- enable
- disable
ippool:
description:
- Enable to use IP Pools for source NAT.
choices:
- enable
- disable
ips-sensor:
description:
- Name of an existing IPS sensor. Source ips.sensor.name.
label:
description:
- Label for the policy that appears when the GUI is in Section View mode.
logtraffic:
description:
- Enable or disable logging. Log all sessions or security profile sessions.
choices:
- all
- utm
- disable
logtraffic-start:
description:
- Record logs when a session starts and ends.
choices:
- enable
- disable
name:
description:
- Policy name.
nat:
description:
- Enable/disable source NAT.
choices:
- enable
- disable
natinbound:
description:
- "Policy-based IPsec VPN: apply destination NAT to inbound traffic."
choices:
- enable
- disable
natoutbound:
description:
- "Policy-based IPsec VPN: apply source NAT to outbound traffic."
choices:
- enable
- disable
outbound:
description:
- "Policy-based IPsec VPN: only traffic from the internal network can initiate a VPN."
choices:
- enable
- disable
per-ip-shaper:
description:
- Per-IP traffic shaper. Source firewall.shaper.per-ip-shaper.name.
policyid:
description:
- Policy ID.
required: true
poolname:
description:
- IP Pool names.
suboptions:
name:
description:
- IP pool name. Source firewall.ippool6.name.
required: true
profile-group:
description:
- Name of profile group. Source firewall.profile-group.name.
profile-protocol-options:
description:
- Name of an existing Protocol options profile. Source firewall.profile-protocol-options.name.
profile-type:
description:
- Determine whether the firewall policy allows security profile groups or single profiles only.
choices:
- single
- group
replacemsg-override-group:
description:
- Override the default replacement message group for this policy. Source system.replacemsg-group.name.
rsso:
description:
- Enable/disable RADIUS single sign-on (RSSO).
choices:
- enable
- disable
schedule:
description:
- Schedule name. Source firewall.schedule.onetime.name firewall.schedule.recurring.name firewall.schedule.group.name.
send-deny-packet:
description:
- Enable/disable return of deny-packet.
choices:
- enable
- disable
service:
description:
- Service and service group names.
suboptions:
name:
description:
- Address name. Source firewall.service.custom.name firewall.service.group.name.
required: true
service-negate:
description:
- When enabled service specifies what the service must NOT be.
choices:
- enable
- disable
session-ttl:
description:
- Session TTL in seconds for sessions accepted by this policy. 0 means use the system default session TTL.
spamfilter-profile:
description:
- Name of an existing Spam filter profile. Source spamfilter.profile.name.
srcaddr:
description:
- Source address and address group names.
suboptions:
name:
description:
- Address name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
srcaddr-negate:
description:
- When enabled srcaddr specifies what the source address must NOT be.
choices:
- enable
- disable
srcintf:
description:
- Incoming (ingress) interface.
suboptions:
name:
description:
- Interface name. Source system.zone.name system.interface.name.
required: true
ssh-filter-profile:
description:
- Name of an existing SSH filter profile. Source ssh-filter.profile.name.
ssl-mirror:
description:
- Enable to copy decrypted SSL traffic to a FortiGate interface (called SSL mirroring).
choices:
- enable
- disable
ssl-mirror-intf:
description:
- SSL mirror interface name.
suboptions:
name:
description:
- Interface name. Source system.zone.name system.interface.name.
required: true
ssl-ssh-profile:
description:
- Name of an existing SSL SSH profile. Source firewall.ssl-ssh-profile.name.
status:
description:
- Enable or disable this policy.
choices:
- enable
- disable
tcp-mss-receiver:
description:
- Receiver TCP maximum segment size (MSS).
tcp-mss-sender:
description:
- Sender TCP maximum segment size (MSS).
tcp-session-without-syn:
description:
- Enable/disable creation of TCP session without SYN flag.
choices:
- all
- data-only
- disable
timeout-send-rst:
description:
- Enable/disable sending RST packets when TCP sessions expire.
choices:
- enable
- disable
traffic-shaper:
description:
- Reverse traffic shaper. Source firewall.shaper.traffic-shaper.name.
traffic-shaper-reverse:
description:
- Reverse traffic shaper. Source firewall.shaper.traffic-shaper.name.
url-category:
description:
- URL category ID list.
suboptions:
id:
description:
- URL category ID.
required: true
users:
description:
- Names of individual users that can authenticate with this policy.
suboptions:
name:
description:
- Names of individual users that can authenticate with this policy. Source user.local.name.
required: true
utm-status:
description:
- Enable AV/web/ips protection profile.
choices:
- enable
- disable
uuid:
description:
- Universally Unique Identifier (UUID; automatically assigned but can be manually reset).
vlan-cos-fwd:
description:
- "VLAN forward direction user priority: 255 passthrough, 0 lowest, 7 highest"
vlan-cos-rev:
description:
- "VLAN reverse direction user priority: 255 passthrough, 0 lowest, 7 highest"
vlan-filter:
description:
- Set VLAN filters.
voip-profile:
description:
- Name of an existing VoIP profile. Source voip.profile.name.
vpntunnel:
description:
- "Policy-based IPsec VPN: name of the IPsec VPN Phase 1. Source vpn.ipsec.phase1.name vpn.ipsec.manualkey.name."
webfilter-profile:
description:
- Name of an existing Web filter profile. Source webfilter.profile.name.
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure IPv6 policies.
fortios_firewall_policy6:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
firewall_policy6:
state: "present"
action: "accept"
app-category:
-
id: "5"
app-group:
-
name: "default_name_7 (source application.group.name)"
application:
-
id: "9"
application-list: "<your_own_value> (source application.list.name)"
av-profile: "<your_own_value> (source antivirus.profile.name)"
comments: "<your_own_value>"
custom-log-fields:
-
field-id: "<your_own_value> (source log.custom-field.id)"
devices:
-
name: "default_name_16 (source user.device.alias user.device-group.name user.device-category.name)"
diffserv-forward: "enable"
diffserv-reverse: "enable"
diffservcode-forward: "<your_own_value>"
diffservcode-rev: "<your_own_value>"
dlp-sensor: "<your_own_value> (source dlp.sensor.name)"
dscp-match: "enable"
dscp-negate: "enable"
dscp-value: "<your_own_value>"
dsri: "enable"
dstaddr:
-
name: "default_name_27 (source firewall.address6.name firewall.addrgrp6.name firewall.vip6.name firewall.vipgrp6.name)"
dstaddr-negate: "enable"
dstintf:
-
name: "default_name_30 (source system.interface.name system.zone.name)"
firewall-session-dirty: "check-all"
fixedport: "enable"
global-label: "<your_own_value>"
groups:
-
name: "default_name_35 (source user.group.name)"
icap-profile: "<your_own_value> (source icap.profile.name)"
inbound: "enable"
ippool: "enable"
ips-sensor: "<your_own_value> (source ips.sensor.name)"
label: "<your_own_value>"
logtraffic: "all"
logtraffic-start: "enable"
name: "default_name_43"
nat: "enable"
natinbound: "enable"
natoutbound: "enable"
outbound: "enable"
per-ip-shaper: "<your_own_value> (source firewall.shaper.per-ip-shaper.name)"
policyid: "49"
poolname:
-
name: "default_name_51 (source firewall.ippool6.name)"
profile-group: "<your_own_value> (source firewall.profile-group.name)"
profile-protocol-options: "<your_own_value> (source firewall.profile-protocol-options.name)"
profile-type: "single"
replacemsg-override-group: "<your_own_value> (source system.replacemsg-group.name)"
rsso: "enable"
schedule: "<your_own_value> (source firewall.schedule.onetime.name firewall.schedule.recurring.name firewall.schedule.group.name)"
send-deny-packet: "enable"
service:
-
name: "default_name_60 (source firewall.service.custom.name firewall.service.group.name)"
service-negate: "enable"
session-ttl: "62"
spamfilter-profile: "<your_own_value> (source spamfilter.profile.name)"
srcaddr:
-
name: "default_name_65 (source firewall.address6.name firewall.addrgrp6.name)"
srcaddr-negate: "enable"
srcintf:
-
name: "default_name_68 (source system.zone.name system.interface.name)"
ssh-filter-profile: "<your_own_value> (source ssh-filter.profile.name)"
ssl-mirror: "enable"
ssl-mirror-intf:
-
name: "default_name_72 (source system.zone.name system.interface.name)"
ssl-ssh-profile: "<your_own_value> (source firewall.ssl-ssh-profile.name)"
status: "enable"
tcp-mss-receiver: "75"
tcp-mss-sender: "76"
tcp-session-without-syn: "all"
timeout-send-rst: "enable"
traffic-shaper: "<your_own_value> (source firewall.shaper.traffic-shaper.name)"
traffic-shaper-reverse: "<your_own_value> (source firewall.shaper.traffic-shaper.name)"
url-category:
-
id: "82"
users:
-
name: "default_name_84 (source user.local.name)"
utm-status: "enable"
uuid: "<your_own_value>"
vlan-cos-fwd: "87"
vlan-cos-rev: "88"
vlan-filter: "<your_own_value>"
voip-profile: "<your_own_value> (source voip.profile.name)"
vpntunnel: "<your_own_value> (source vpn.ipsec.phase1.name vpn.ipsec.manualkey.name)"
webfilter-profile: "<your_own_value> (source webfilter.profile.name)"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "key1"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
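    """Open a session on the FortiGate unit described by ``data``
    (host/username/password), switching HTTPS on or off according to the
    optional ``https`` flag."""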
host = data['host']
username = data['username']
password = data['password']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password)
def filter_firewall_policy6_data(json):
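    """Return only the recognised firewall policy6 options that were
    actually supplied, dropping keys whose value is None."""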
option_list = ['action', 'app-category', 'app-group',
'application', 'application-list', 'av-profile',
'comments', 'custom-log-fields', 'devices',
'diffserv-forward', 'diffserv-reverse', 'diffservcode-forward',
'diffservcode-rev', 'dlp-sensor', 'dscp-match',
'dscp-negate', 'dscp-value', 'dsri',
'dstaddr', 'dstaddr-negate', 'dstintf',
'firewall-session-dirty', 'fixedport', 'global-label',
'groups', 'icap-profile', 'inbound',
'ippool', 'ips-sensor', 'label',
'logtraffic', 'logtraffic-start', 'name',
'nat', 'natinbound', 'natoutbound',
'outbound', 'per-ip-shaper', 'policyid',
'poolname', 'profile-group', 'profile-protocol-options',
'profile-type', 'replacemsg-override-group', 'rsso',
'schedule', 'send-deny-packet', 'service',
'service-negate', 'session-ttl', 'spamfilter-profile',
'srcaddr', 'srcaddr-negate', 'srcintf',
'ssh-filter-profile', 'ssl-mirror', 'ssl-mirror-intf',
'ssl-ssh-profile', 'status', 'tcp-mss-receiver',
'tcp-mss-sender', 'tcp-session-without-syn', 'timeout-send-rst',
'traffic-shaper', 'traffic-shaper-reverse', 'url-category',
'users', 'utm-status', 'uuid',
'vlan-cos-fwd', 'vlan-cos-rev', 'vlan-filter',
'voip-profile', 'vpntunnel', 'webfilter-profile']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def firewall_policy6(data, fos):
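    """Create or update the IPv6 policy when state is 'present', or delete
    it by its policyid mkey when state is 'absent'."""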
vdom = data['vdom']
firewall_policy6_data = data['firewall_policy6']
filtered_data = filter_firewall_policy6_data(firewall_policy6_data)
if firewall_policy6_data['state'] == "present":
return fos.set('firewall',
'policy6',
data=filtered_data,
vdom=vdom)
elif firewall_policy6_data['state'] == "absent":
return fos.delete('firewall',
'policy6',
mkey=filtered_data['policyid'],
vdom=vdom)
def fortios_firewall(data, fos):
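    """Log in, dispatch to the first requested configuration method and
    return the (is_error, has_changed, result) triple consumed by main()."""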
login(data)
methodlist = ['firewall_policy6']
for method in methodlist:
if data[method]:
resp = eval(method)(data, fos)
break
fos.logout()
return not resp['status'] == "success", resp['status'] == "success", resp
def main():
fields = {
"host": {"required": True, "type": "str"},
"username": {"required": True, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": "False"},
"firewall_policy6": {
"required": False, "type": "dict",
"options": {
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"action": {"required": False, "type": "str",
"choices": ["accept", "deny", "ipsec"]},
"app-category": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"}
}},
"app-group": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"application": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"}
}},
"application-list": {"required": False, "type": "str"},
"av-profile": {"required": False, "type": "str"},
"comments": {"required": False, "type": "str"},
"custom-log-fields": {"required": False, "type": "list",
"options": {
"field-id": {"required": True, "type": "str"}
}},
"devices": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"diffserv-forward": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"diffserv-reverse": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"diffservcode-forward": {"required": False, "type": "str"},
"diffservcode-rev": {"required": False, "type": "str"},
"dlp-sensor": {"required": False, "type": "str"},
"dscp-match": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dscp-negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dscp-value": {"required": False, "type": "str"},
"dsri": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dstaddr": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"dstaddr-negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dstintf": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"firewall-session-dirty": {"required": False, "type": "str",
"choices": ["check-all", "check-new"]},
"fixedport": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"global-label": {"required": False, "type": "str"},
"groups": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"icap-profile": {"required": False, "type": "str"},
"inbound": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ippool": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ips-sensor": {"required": False, "type": "str"},
"label": {"required": False, "type": "str"},
"logtraffic": {"required": False, "type": "str",
"choices": ["all", "utm", "disable"]},
"logtraffic-start": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"name": {"required": False, "type": "str"},
"nat": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"natinbound": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"natoutbound": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"outbound": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"per-ip-shaper": {"required": False, "type": "str"},
"policyid": {"required": True, "type": "int"},
"poolname": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"profile-group": {"required": False, "type": "str"},
"profile-protocol-options": {"required": False, "type": "str"},
"profile-type": {"required": False, "type": "str",
"choices": ["single", "group"]},
"replacemsg-override-group": {"required": False, "type": "str"},
"rsso": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"schedule": {"required": False, "type": "str"},
"send-deny-packet": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"service": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"service-negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"session-ttl": {"required": False, "type": "int"},
"spamfilter-profile": {"required": False, "type": "str"},
"srcaddr": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"srcaddr-negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"srcintf": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"ssh-filter-profile": {"required": False, "type": "str"},
"ssl-mirror": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"ssl-mirror-intf": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"ssl-ssh-profile": {"required": False, "type": "str"},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"tcp-mss-receiver": {"required": False, "type": "int"},
"tcp-mss-sender": {"required": False, "type": "int"},
"tcp-session-without-syn": {"required": False, "type": "str",
"choices": ["all", "data-only", "disable"]},
"timeout-send-rst": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"traffic-shaper": {"required": False, "type": "str"},
"traffic-shaper-reverse": {"required": False, "type": "str"},
"url-category": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"}
}},
"users": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"utm-status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"uuid": {"required": False, "type": "str"},
"vlan-cos-fwd": {"required": False, "type": "int"},
"vlan-cos-rev": {"required": False, "type": "int"},
"vlan-filter": {"required": False, "type": "str"},
"voip-profile": {"required": False, "type": "str"},
"vpntunnel": {"required": False, "type": "str"},
"webfilter-profile": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
global fos
fos = FortiOSAPI()
is_error, has_changed, result = fortios_firewall(module.params, fos)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 | -6,296,476,045,752,827,000 | 40.246456 | 138 | 0.474235 | false |
wasade/qiime | scripts/count_seqs.py | 1 | 3088 | #!/usr/bin/env python
# File created on 29 May 2011
from __future__ import division
__author__ = "Greg Caporaso"
__copyright__ = "Copyright 2011, The QIIME project"
__credits__ = ["Greg Caporaso", "Jose Antonio Navas Molina"]
__license__ = "GPL"
__version__ = "1.8.0-dev"
__maintainer__ = "Greg Caporaso"
__email__ = "[email protected]"
from qiime.util import (parse_command_line_parameters,
make_option,
count_seqs_in_filepaths)
script_info = {}
script_info['brief_description'] = ""
script_info['script_description'] = ""
script_info['script_usage'] = [
("",
"Count the sequences in a fasta file and write results to stdout.",
"%prog -i in.fasta"),
("",
"Count the sequences in a fasta file and a fastq file and write results to file. Note that fastq files can only be processed if they end with .fastq -- all other files are assumed to be fasta.",
"%prog -i in1.fasta,in2.fastq -o seq_counts.txt"),
("",
"Count the sequences all .fasta files in current directory and write results to stdout. Note that -i option must be quoted.",
"%prog -i \"*.fasta\"")]
script_info['output_description'] = ""
script_info['required_options'] = [
make_option('-i', '--input_fps', type='existing_filepaths',
help='the input filepaths (comma-separated)'),
]
script_info['optional_options'] = [
# Example optional option
make_option('-o', '--output_fp', type="new_filepath",
help='the output filepath [default: write to stdout]'),
make_option('--suppress_errors', action='store_true',
help='Suppress warnings about missing files [default: %default]',
default=False)
]
script_info['version'] = __version__
def format_output(count_data, total, inaccessible_filepaths,
suppress_errors=False):
""" Output formatter """
lines = ['']
count_data.sort()
for c in count_data:
lines.append(
'%d : %s (Sequence lengths (mean +/- std): %1.4f +/- %1.4f)' %
(c[0][0], c[1], c[0][1], c[0][2]))
lines.append('%d : Total' % total)
if inaccessible_filepaths and not suppress_errors:
lines.append('')
lines.append(
'Some files were not accessible. Do they exist? Do you have read permission?')
for inaccessible_filepath in inaccessible_filepaths:
lines.append(' %s' % inaccessible_filepath)
lines.append('')
return '\n'.join(lines)
def main():
option_parser, opts, args =\
parse_command_line_parameters(**script_info)
suppress_errors = opts.suppress_errors
input_fps = opts.input_fps
output_fp = opts.output_fp
count_data, total, inaccessible_filepaths = count_seqs_in_filepaths(
input_fps)
r = format_output(
count_data,
total,
inaccessible_filepaths,
suppress_errors)
if opts.output_fp:
f = open(output_fp, 'w')
f.write(r)
f.close()
else:
print r
if __name__ == "__main__":
main()
| gpl-2.0 | 6,489,183,005,794,179,000 | 32.565217 | 199 | 0.602655 | false |
styra/sharedmem | tests/test_sharedmem.py | 1 | 4768 | import numpy
import sharedmem
import time
from numpy.testing import (assert_equal, assert_array_equal,
assert_almost_equal, assert_array_almost_equal, assert_, run_module_suite)
import sys
def run_idle(pool):
def work(i):
time.sleep(0.4)
with pool:
now = time.time()
pool.map(work, range(pool.np))
return time.time() - now
def test_parallel_process():
pool = sharedmem.MapReduce()
assert run_idle(pool) < 1.0
def test_parallel_thread():
pool = sharedmem.MapReduceByThread()
assert run_idle(pool) < 1.0
from sharedmem import background
def test_background():
def function1():
time.sleep(2)
return True
re = background(function1)
now = time.time()
assert re.wait() == True
assert int(time.time() - now + 0.5) == 2
def test_background_raise():
def function2():
raise Exception('test exception')
time.sleep(2)
re = background(function2)
now = time.time()
try:
assert re.wait() == True
except Exception as e:
return
raise AssertionError("Shall not reach here")
def test_killed():
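    # A worker killed by SIGKILL mid-map must surface as a SlaveException
    # in the parent process.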
import os
import signal
with sharedmem.MapReduce() as pool:
def work(i):
time.sleep(0.1 * numpy.random.uniform())
if i == 10:
os.kill(os.getpid(), signal.SIGKILL)
try:
pool.map(work, range(100))
except sharedmem.SlaveException:
return
raise AssertionError("Shall not reach here")
class UnpicklableException(Exception):
def __reduce__(self):
raise Exception("This pickle is not supposed to be pickled")
import warnings
def test_unpicklable_raise():
with sharedmem.MapReduce() as pool:
def work(i):
time.sleep(0.1 * numpy.random.uniform())
if i == 10:
raise UnpicklableException("Raise an exception")
try:
with warnings.catch_warnings(record=True) as w:
pool.map(work, range(100))
# except an warning here
assert len(w) == 1
except Exception as e:
assert not isinstance(e.reason, UnpicklableException)
return
raise AssertionError("Shall not reach here")
class PicklableException(Exception):
pass
def test_picklable_raise():
with sharedmem.MapReduce() as pool:
def work(i):
time.sleep(0.1 * numpy.random.uniform())
if i == 10:
raise PicklableException("Raise an exception")
try:
pool.map(work, range(100))
except sharedmem.SlaveException as e:
assert isinstance(e.reason, PicklableException)
return
raise AssertionError("Shall not reach here")
def test_memory_pickle():
import pickle
a = sharedmem.empty(100)
a[:] = range(100)
s = pickle.dumps(a)
b = pickle.loads(s)
assert isinstance(b, type(a))
b[:] += 10
assert (a == b).all()
def test_memory_type():
a = sharedmem.empty(100)
b = sharedmem.empty(100)
assert isinstance(b, type(a))
assert not isinstance(a + 10, type(a))
assert not isinstance(numpy.sum(a), type(a))
assert not isinstance(a + b, type(a))
assert not isinstance(a * b, type(a))
def test_ordered():
t = sharedmem.empty(800)
with sharedmem.MapReduce(np=32) as pool:
def work(i):
time.sleep(0.1 * numpy.random.uniform())
with pool.ordered:
t[i] = time.time()
pool.map(work, range(800))
        # with pool.ordered, the completion times come out ordered
assert (t[1:] > t[:-1]).all()
def work(i):
time.sleep(0.1 * numpy.random.uniform())
t[i] = time.time()
pool.map(work, range(800))
# without ordered, the ordering is messy
assert not (t[1:] > t[:-1]).all()
def test_critical():
t = sharedmem.empty(1, dtype='i8')
t[:] = 0
# FIXME: if the system has one core then this will never fail,
    # even if the critical section is not actually enforced
with sharedmem.MapReduce(np=8) as pool:
def work(i):
with pool.critical:
t[:] = 1
if i != 30:
time.sleep(0.01)
assert t[:] == 1
t[:] = 0
pool.map(work, range(16))
def work(i):
t[:] = 1
if i != 30:
time.sleep(0.01)
assert t[:] == 1
t[:] = 0
try:
pool.map(work, range(16))
except sharedmem.SlaveException as e:
assert isinstance(e.reason, AssertionError)
return
raise AssertionError("Shall not reach here.")
if __name__ == "__main__":
import sys
run_module_suite()
| gpl-3.0 | 1,804,939,340,465,828,600 | 25.786517 | 78 | 0.566695 | false |
kylemsguy/limbo | test/test_plugins/test_stock.py | 15 | 1168 | # -*- coding: UTF-8 -*-
import os
import sys
from nose.tools import eq_
import vcr
DIR = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(DIR, '../../limbo/plugins'))
from stock import on_message, stockprice
def test_apple():
with vcr.use_cassette('test/fixtures/stock_apple.yaml'):
ret = on_message({"text": u"$aapl"}, None)
assert ':chart_with_upwards_trend:' in ret
assert 'Apple Inc.' in ret
assert '130.41' in ret
assert '+1.62' in ret
def test_nonexistent():
with vcr.use_cassette('test/fixtures/stock_none'):
ret = on_message({"text": u"bana"}, None)
eq_(ret, None)
def test_unicode():
with vcr.use_cassette('test/fixtures/stock_unicode.yaml'):
ret = on_message({"text": u"$äapl"}, None)
eq_(ret, None)
def test_multiple():
with vcr.use_cassette('test/fixtures/stock_multiple.yaml'):
ret = on_message({"text": u"$goog $aapl"}, None)
assert 'Google Inc' in ret
def test_price():
with vcr.use_cassette('test/fixtures/stock_none'):
ret = on_message({"text": u"the price is $12.43"}, None)
eq_(ret, None)
| mit | 6,204,202,997,615,499,000 | 28.923077 | 64 | 0.614396 | false |
rnestler/servo | tests/wpt/web-platform-tests/css/tools/w3ctestlib/OutputFormats.py | 80 | 7230 | #!/usr/bin/python
# CSS Test Source Manipulation Library
# Initial code by fantasai, joint copyright 2010 W3C and Microsoft
# Licensed under BSD 3-Clause: <http://www.w3.org/Consortium/Legal/2008/03-bsd-license>
import re
import os
from os.path import join, exists, splitext, dirname, basename
from Sources import XHTMLSource, HTMLSource, SVGSource, SourceTree
class ExtensionMap:
""" Given a file extension mapping (e.g. {'.xht' : '.htm'}), provides
a translate function for paths.
"""
def __init__(self, extMap):
self.extMap = extMap
def translate(self, path):
for ext in self.extMap:
if path.endswith(ext):
return splitext(path)[0] + self.extMap[ext]
return path
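# A minimal usage sketch of ExtensionMap (paths below are illustrative):
#   emap = ExtensionMap({'.xht': '.htm'})
#   emap.translate('suite/foo.xht')   # -> 'suite/foo.htm'
#   emap.translate('suite/foo.css')   # -> 'suite/foo.css' (unmapped paths pass through)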
class BasicFormat:
"""Base class. A Format manages all the conversions and location
transformations (e.g. subdirectory for all tests in that format)
associated with a test suite format.
The base class implementation performs no conversions or
format-specific location transformations."""
formatDirName = None
indexExt = '.htm'
  convert = True # XXX hack to suppress format conversion in support dirs; output code needs cleanup to make this cleaner
def __init__(self, destroot, sourceTree, extMap=None, outputDirName=None):
"""Creates format root of the output tree. `destroot` is the root path
of the output tree.
extMap provides a file extension mapping, e.g. {'.xht' : '.htm'}
"""
self.root = join(destroot, outputDirName) if outputDirName else destroot
self.sourceTree = sourceTree
self.formatDirName = outputDirName
if not exists(self.root):
os.makedirs(self.root)
self.extMap = ExtensionMap(extMap or {})
self.subdir = None
def setSubDir(self, name=None):
"""Sets format to write into group subdirectory `name`.
"""
self.subdir = name
def destDir(self):
return join(self.root, self.subdir) if self.subdir else self.root
def dest(self, relpath):
"""Returns final destination of relpath in this format and ensures that the
parent directory exists."""
# Translate path
if (self.convert):
relpath = self.extMap.translate(relpath)
if (self.sourceTree.isReferenceAnywhere(relpath)):
relpath = join('reference', basename(relpath))
# XXX when forcing support files into support path, need to account for support/support
dest = join(self.root, self.subdir, relpath) if self.subdir \
else join(self.root, relpath)
# Ensure parent
parent = dirname(dest)
if not exists(parent):
os.makedirs(parent)
return dest
def write(self, source):
"""Write FileSource to destination, following all necessary
conversion methods."""
source.write(self, source)
testTransform = False
# def testTransform(self, outputString, source) if needed
class XHTMLFormat(BasicFormat):
"""Base class for XHTML test suite format. Builds into 'xhtml1' subfolder
of root.
"""
indexExt = '.xht'
def __init__(self, destroot, sourceTree, extMap=None, outputDirName='xhtml1'):
if not extMap:
extMap = {'.htm' : '.xht', '.html' : '.xht', '.xhtml' : '.xht' }
BasicFormat.__init__(self, destroot, sourceTree, extMap, outputDirName)
def write(self, source):
# skip HTMLonly tests
if hasattr(source, 'hasFlag') and source.hasFlag('HTMLonly'):
return
if isinstance(source, HTMLSource) and self.convert:
source.write(self, source.serializeXHTML())
else:
source.write(self)
class HTMLFormat(BasicFormat):
"""Base class for HTML test suite format. Builds into 'html4' subfolder
of root.
"""
def __init__(self, destroot, sourceTree, extMap=None, outputDirName='html4'):
if not extMap:
extMap = {'.xht' : '.htm', '.xhtml' : '.htm', '.html' : '.htm' }
BasicFormat.__init__(self, destroot, sourceTree, extMap, outputDirName)
def write(self, source):
# skip nonHTML tests
if hasattr(source, 'hasFlag') and source.hasFlag('nonHTML'):
return
if isinstance(source, XHTMLSource) and self.convert:
source.write(self, source.serializeHTML())
else:
source.write(self)
class HTML5Format(HTMLFormat):
def __init__(self, destroot, sourceTree, extMap=None, outputDirName='html'):
HTMLFormat.__init__(self, destroot, sourceTree, extMap, outputDirName)
def write(self, source):
# skip nonHTML tests
if hasattr(source, 'hasFlag') and source.hasFlag('nonHTML'):
return
if isinstance(source, XHTMLSource) and self.convert:
source.write(self, source.serializeHTML())
else:
source.write(self)
class SVGFormat(BasicFormat):
def __init__(self, destroot, sourceTree, extMap=None, outputDirName='svg'):
if not extMap:
extMap = {'.svg' : '.svg' }
BasicFormat.__init__(self, destroot, sourceTree, extMap, outputDirName)
def write(self, source):
# skip non SVG tests
if isinstance(source, SVGSource):
source.write(self)
class XHTMLPrintFormat(XHTMLFormat):
"""Base class for XHTML Print test suite format. Builds into 'xhtml1print'
subfolder of root.
"""
def __init__(self, destroot, sourceTree, testSuiteName, extMap=None, outputDirName='xhtml1print'):
if not extMap:
extMap = {'.htm' : '.xht', '.html' : '.xht', '.xhtml' : '.xht' }
BasicFormat.__init__(self, destroot, sourceTree, extMap, outputDirName)
self.testSuiteName = testSuiteName
def write(self, source):
if (isinstance(source, XHTMLSource)):
if not source.hasFlag('HTMLonly'):
source.write(self, self.testTransform(source))
else:
XHTMLFormat.write(self, source)
def testTransform(self, source):
assert isinstance(source, XHTMLSource)
output = source.serializeXHTML('xhtml10')
headermeta = {'suitename' : self.testSuiteName,
'testid' : source.name(),
'margin' : '',
}
        if re.search(r'@page\s*{[^}]*@', output):
            # Don't use headers and footers when page tests margin boxes
            output = re.sub('(<body[^>]*>)',
                            '\\1\n' + self.__htmlstart % headermeta,
                            output)
            output = re.sub('(</body[^>]*>)',
                            '\\1\n' + self.__htmlend % headermeta,
                            output)
        else:
            # add margin rule only when @page statement does not exist
            if not re.search('@page', output):
                headermeta['margin'] = self.__margin
            output = re.sub('</title>',
                            '</title>\n  <style type="text/css">%s</style>' % \
                            (self.__css % headermeta),
                            output)
        return output
# template bits
  __margin = 'margin: 7%;'
__font = 'font: italic 8pt sans-serif; color: gray;'
__css = """
@page { %s
%%(margin)s
counter-increment: page;
@top-left { content: "%%(suitename)s"; }
@top-right { content: "Test %%(testid)s"; }
@bottom-right { content: counter(page); }
}
""" % __font
__htmlstart = '<p style="%s">Start of %%(suitename)s %%(testid)s.</p>' % __font
__htmlend = '<p style="%s">End of %%(suitename)s %%(testid)s.</p>' % __font
| mpl-2.0 | -4,598,831,042,036,456,000 | 33.927536 | 131 | 0.634578 | false |
ehabkost/avocado-vt | shared/deps/serial/serial_host_send_receive.py | 8 | 6458 | #!/usr/bin/python
import os
import socket
import struct
import optparse
try:
import hashlib
except ImportError:
import md5
class Md5MissMatch(Exception):
def __init__(self, md5_pre, md5_post):
Exception.__init__(self, md5_pre, md5_post)
self.md5_pre = md5_pre
self.md5_post = md5_post
def __str__(self):
return ("Md5 miss match. Original md5 = %s, current md5 = %s" %
(self.md5_pre, self.md5_post))
class ShakeHandError(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
self.msg = msg
def __str__(self):
return ("Shake hand fail. %s" % self.msg)
def md5_init(data=None):
"""
Returns md5. This function is implemented in order to encapsulate hash
objects in a way that is compatible with python 2.4 and python 2.6
without warnings.
Note that even though python 2.6 hashlib supports hash types other than
md5 and sha1, we are artificially limiting the input values in order to
make the function to behave exactly the same among both python
implementations.
:param data: Optional input string that will be used to update the hash.
"""
try:
md5_value = hashlib.new("md5")
except NameError:
md5_value = md5.new()
if data:
md5_value.update(data)
return md5_value
def get_md5(filename, size=None):
"""
Calculate the hash of filename.
If size is not None, limit to first size bytes.
Throw exception if something is wrong with filename.
Can be also implemented with bash one-liner (assuming size%1024==0):
dd if=filename bs=1024 count=size/1024 | sha1sum -
:param filename: Path of the file that will have its hash calculated.
:param method: Method used to calculate the hash. Supported methods:
* md5
* sha1
:return: Hash of the file, if something goes wrong, return None.
"""
chunksize = 4096
fsize = os.path.getsize(filename)
if not size or size > fsize:
size = fsize
f = open(filename, 'rb')
md5_value = md5_init()
while size > 0:
if chunksize > size:
chunksize = size
data = f.read(chunksize)
if len(data) == 0:
print("Nothing left to read but size=%d" % size)
break
md5_value.update(data)
size -= len(data)
f.close()
return md5_value.hexdigest()
def shake_hand(connect, size=0, action="receive"):
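    """Perform the HI/size/ACK handshake over ``connect``: the "send" side
    announces ``size`` and waits for an ACK, the "receive" side answers
    and returns the announced size."""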
hi_str = struct.pack("2s", "HI")
hi_str_len = len(hi_str)
if action == "send":
connect.send(hi_str)
txt = connect.recv(hi_str_len)
hi_str = struct.unpack("2s", txt)[0]
if hi_str != "HI":
raise ShakeHandError("Fail to get HI from guest.")
size_str = struct.pack("q", size)
connect.send(size_str)
txt = connect.recv(3)
ack_str = struct.unpack("3s", txt)[0]
if ack_str != "ACK":
raise ShakeHandError("Guest did not ACK the file size message.")
return size
elif action == "receive":
txt = connect.recv(hi_str_len)
hi_str = struct.unpack("2s", txt)[0]
if hi_str != "HI":
raise ShakeHandError("Fail to get HI from guest.")
connect.send(hi_str)
size = connect.recv(8)
if size:
size = struct.unpack("q", size)[0]
txt = struct.pack("3s", "ACK")
connect.send(txt)
return size
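# A hypothetical local smoke test of the handshake (Unix-only socketpair):
#   import socket, threading
#   a, b = socket.socketpair()
#   threading.Thread(target=shake_hand, args=(a, 42, "send")).start()
#   assert shake_hand(b, action="receive") == 42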
def receive(connect, filename, p_size=1024):
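    """Receive a file whose size is announced via shake_hand(), write it
    to ``filename`` and return its MD5 hex digest."""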
recv_size = 0
size = shake_hand(connect, action="receive")
if p_size < int(size):
        p_size = int(size)
md5_value = md5_init()
file_no = open(filename, 'wb')
try:
while recv_size < size:
txt = connect.recv(p_size)
file_no.write(txt)
md5_value.update(txt)
recv_size += len(txt)
finally:
file_no.close()
md5_sum = md5_value.hexdigest()
return md5_sum
def send(connect, filename, p_size=1024):
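    """Announce the size of ``filename`` via shake_hand(), stream its
    contents over ``connect`` and return its MD5 hex digest."""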
recv_size = 0
f_size = os.path.getsize(filename)
shake_hand(connect, f_size, action="send")
md5_value = md5_init()
file_no = open(filename, 'rb')
try:
while recv_size < f_size:
txt = file_no.read(p_size)
connect.send(txt)
md5_value.update(txt)
recv_size += len(txt)
finally:
print("received size = %s" % recv_size)
file_no.close()
md5_sum = md5_value.hexdigest()
return md5_sum
def main():
    parser = optparse.OptionParser("Transfer data between guest and host "
                                   "through virtio serial. Please make sure "
                                   "VirtIOChannel.py runs in the guest first.")
parser.add_option("-s", "--socket", dest="socket",
help="unix socket device used in qemu command"
"eg:your CLI:-chardev socket,id=channel2,"
"path=/tmp/helloworld2 ,then input"
"'/tmp/helloworld2' here")
parser.add_option("-f", "--filename", dest="filename",
help="File transfer to guest or save data to.")
parser.add_option("-a", "--action", dest="action", default="send",
help="Send data out or receive data.")
parser.add_option("-p", "--package", dest="package", default=1024,
help="Package size during file transfer.")
options, args = parser.parse_args()
if options.socket:
sock = options.socket
else:
parser.error("Please set -s parameter.")
if options.filename:
filename = options.filename
else:
parser.error("Please set -f parameter.")
action = options.action
p_size = options.package
vport = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
vport.connect(sock)
if action == "receive":
md5_sum = receive(vport, filename, p_size=p_size)
print("md5_sum = %s" % md5_sum)
elif action == "send":
md5_sum = send(vport, filename, p_size=p_size)
print("md5_sum = %s" % md5_sum)
else:
md5_ori = send(vport, filename, p_size=p_size)
print("md5_original = %s" % md5_ori)
md5_post = receive(vport, filename, p_size=p_size)
print("md5_post = %s" % md5_post)
if md5_ori != md5_post:
raise Md5MissMatch(md5_ori, md5_post)
if __name__ == "__main__":
main()
| gpl-2.0 | 5,238,338,230,037,466,000 | 29.752381 | 76 | 0.574946 | false |
rodrigolucianocosta/ProjectParking | ProjectParking/Parking/django-localflavor-1.1/build/lib.linux-x86_64-2.7/localflavor/fi/fi_municipalities.py | 17 | 10416 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
#: An alphabetical list of Finnish municipalities for use as `choices` in a formfield.
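#: A minimal usage sketch (illustrative; assumes Django's forms module)::
#:
#:     from django import forms
#:     municipality = forms.ChoiceField(choices=MUNICIPALITY_CHOICES)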
MUNICIPALITY_CHOICES = (
('akaa', "Akaa"),
('alajarvi', "Alajärvi"),
('alavieska', "Alavieska"),
('alavus', "Alavus"),
('artjarvi', "Artjärvi"),
('asikkala', "Asikkala"),
('askola', "Askola"),
('aura', "Aura"),
('brando', "Brändö"),
('eckero', "Eckerö"),
('enonkoski', "Enonkoski"),
('enontekio', "Enontekiö"),
('espoo', "Espoo"),
('eura', "Eura"),
('eurajoki', "Eurajoki"),
('evijarvi', "Evijärvi"),
('finstrom', "Finström"),
('forssa', "Forssa"),
('foglo', "Föglö"),
('geta', "Geta"),
('haapajarvi', "Haapajärvi"),
('haapavesi', "Haapavesi"),
('hailuoto', "Hailuoto"),
('halsua', "Halsua"),
('hamina', "Hamina"),
('hammarland', "Hammarland"),
('hankasalmi', "Hankasalmi"),
('hanko', "Hanko"),
('harjavalta', "Harjavalta"),
('hartola', "Hartola"),
('hattula', "Hattula"),
('haukipudas', "Haukipudas"),
('hausjarvi', "Hausjärvi"),
('heinola', "Heinola"),
('heinavesi', "Heinävesi"),
('helsinki', "Helsinki"),
('hirvensalmi', "Hirvensalmi"),
('hollola', "Hollola"),
('honkajoki', "Honkajoki"),
('huittinen', "Huittinen"),
('humppila', "Humppila"),
('hyrynsalmi', "Hyrynsalmi"),
('hyvinkaa', "Hyvinkää"),
('hameenkoski', "Hämeenkoski"),
('hameenkyro', "Hämeenkyrö"),
('hameenlinna', "Hämeenlinna"),
('ii', "Ii"),
('iisalmi', "Iisalmi"),
('iitti', "Iitti"),
('ikaalinen', "Ikaalinen"),
('ilmajoki', "Ilmajoki"),
('ilomantsi', "Ilomantsi"),
('imatra', "Imatra"),
('inari', "Inari"),
('inkoo', "Inkoo"),
('isojoki', "Isojoki"),
('isokyro', "Isokyrö"),
('jalasjarvi', "Jalasjärvi"),
('janakkala', "Janakkala"),
('joensuu', "Joensuu"),
('jokioinen', "Jokioinen"),
('jomala', "Jomala"),
('joroinen', "Joroinen"),
('joutsa', "Joutsa"),
('juankoski', "Juankoski"),
('juuka', "Juuka"),
('juupajoki', "Juupajoki"),
('juva', "Juva"),
('jyvaskyla', "Jyväskylä"),
('jamijarvi', "Jämijärvi"),
('jamsa', "Jämsä"),
('jarvenpaa', "Järvenpää"),
('kaarina', "Kaarina"),
('kaavi', "Kaavi"),
('kajaani', "Kajaani"),
('kalajoki', "Kalajoki"),
('kangasala', "Kangasala"),
('kangasniemi', "Kangasniemi"),
('kankaanpaa', "Kankaanpää"),
('kannonkoski', "Kannonkoski"),
('kannus', "Kannus"),
('karijoki', "Karijoki"),
('karjalohja', "Karjalohja"),
('karkkila', "Karkkila"),
('karstula', "Karstula"),
('karttula', "Karttula"),
('karvia', "Karvia"),
('kaskinen', "Kaskinen"),
('kauhajoki', "Kauhajoki"),
('kauhava', "Kauhava"),
('kauniainen', "Kauniainen"),
('kaustinen', "Kaustinen"),
('keitele', "Keitele"),
('kemi', "Kemi"),
('kemijarvi', "Kemijärvi"),
('keminmaa', "Keminmaa"),
('kemionsaari', "Kemiönsaari"),
('kempele', "Kempele"),
('kerava', "Kerava"),
('kerimaki', "Kerimäki"),
('kesalahti', "Kesälahti"),
('keuruu', "Keuruu"),
('kihnio', "Kihniö"),
('kiikoinen', "Kiikoinen"),
('kiiminki', "Kiiminki"),
('kinnula', "Kinnula"),
('kirkkonummi', "Kirkkonummi"),
('kitee', "Kitee"),
('kittila', "Kittilä"),
('kiuruvesi', "Kiuruvesi"),
('kivijarvi', "Kivijärvi"),
('kokemaki', "Kokemäki"),
('kokkola', "Kokkola"),
('kolari', "Kolari"),
('konnevesi', "Konnevesi"),
('kontiolahti', "Kontiolahti"),
('korsnas', "Korsnäs"),
('koskitl', "Koski Tl"),
('kotka', "Kotka"),
('kouvola', "Kouvola"),
('kristiinankaupunki', "Kristiinankaupunki"),
('kruunupyy', "Kruunupyy"),
('kuhmalahti', "Kuhmalahti"),
('kuhmo', "Kuhmo"),
('kuhmoinen', "Kuhmoinen"),
('kumlinge', "Kumlinge"),
('kuopio', "Kuopio"),
('kuortane', "Kuortane"),
('kurikka', "Kurikka"),
('kustavi', "Kustavi"),
('kuusamo', "Kuusamo"),
('kylmakoski', "Kylmäkoski"),
('kyyjarvi', "Kyyjärvi"),
('karkola', "Kärkölä"),
('karsamaki', "Kärsämäki"),
('kokar', "Kökar"),
('koylio', "Köyliö"),
('lahti', "Lahti"),
('laihia', "Laihia"),
('laitila', "Laitila"),
('lapinjarvi', "Lapinjärvi"),
('lapinlahti', "Lapinlahti"),
('lappajarvi', "Lappajärvi"),
('lappeenranta', "Lappeenranta"),
('lapua', "Lapua"),
('laukaa', "Laukaa"),
('lavia', "Lavia"),
('lemi', "Lemi"),
('lemland', "Lemland"),
('lempaala', "Lempäälä"),
('leppavirta', "Leppävirta"),
('lestijarvi', "Lestijärvi"),
('lieksa', "Lieksa"),
('lieto', "Lieto"),
('liminka', "Liminka"),
('liperi', "Liperi"),
('lohja', "Lohja"),
('loimaa', "Loimaa"),
('loppi', "Loppi"),
('loviisa', "Loviisa"),
('luhanka', "Luhanka"),
('lumijoki', "Lumijoki"),
('lumparland', "Lumparland"),
('luoto', "Luoto"),
('luumaki', "Luumäki"),
('luvia', "Luvia"),
('lansi-turunmaa', "Länsi-Turunmaa"),
('maalahti', "Maalahti"),
('maaninka', "Maaninka"),
('maarianhamina', "Maarianhamina"),
('marttila', "Marttila"),
('masku', "Masku"),
('merijarvi', "Merijärvi"),
('merikarvia', "Merikarvia"),
('miehikkala', "Miehikkälä"),
('mikkeli', "Mikkeli"),
('muhos', "Muhos"),
('multia', "Multia"),
('muonio', "Muonio"),
('mustasaari', "Mustasaari"),
('muurame', "Muurame"),
('mynamaki', "Mynämäki"),
('myrskyla', "Myrskylä"),
('mantsala', "Mäntsälä"),
('mantta-vilppula', "Mänttä-Vilppula"),
('mantyharju', "Mäntyharju"),
('naantali', "Naantali"),
('nakkila', "Nakkila"),
('nastola', "Nastola"),
('nilsia', "Nilsiä"),
('nivala', "Nivala"),
('nokia', "Nokia"),
('nousiainen', "Nousiainen"),
('nummi-pusula', "Nummi-Pusula"),
('nurmes', "Nurmes"),
('nurmijarvi', "Nurmijärvi"),
('narpio', "Närpiö"),
('oravainen', "Oravainen"),
('orimattila', "Orimattila"),
('oripaa', "Oripää"),
('orivesi', "Orivesi"),
('oulainen', "Oulainen"),
('oulu', "Oulu"),
('oulunsalo', "Oulunsalo"),
('outokumpu', "Outokumpu"),
('padasjoki', "Padasjoki"),
('paimio', "Paimio"),
('paltamo', "Paltamo"),
('parikkala', "Parikkala"),
('parkano', "Parkano"),
('pedersore', "Pedersöre"),
('pelkosenniemi', "Pelkosenniemi"),
('pello', "Pello"),
('perho', "Perho"),
('pertunmaa', "Pertunmaa"),
('petajavesi', "Petäjävesi"),
('pieksamaki', "Pieksämäki"),
('pielavesi', "Pielavesi"),
('pietarsaari', "Pietarsaari"),
('pihtipudas', "Pihtipudas"),
('pirkkala', "Pirkkala"),
('polvijarvi', "Polvijärvi"),
('pomarkku', "Pomarkku"),
('pori', "Pori"),
('pornainen', "Pornainen"),
('porvoo', "Porvoo"),
('posio', "Posio"),
('pudasjarvi', "Pudasjärvi"),
('pukkila', "Pukkila"),
('punkaharju', "Punkaharju"),
('punkalaidun', "Punkalaidun"),
('puolanka', "Puolanka"),
('puumala', "Puumala"),
('pyhtaa', "Pyhtää"),
('pyhajoki', "Pyhäjoki"),
('pyhajarvi', "Pyhäjärvi"),
('pyhanta', "Pyhäntä"),
('pyharanta', "Pyhäranta"),
('palkane', "Pälkäne"),
('poytya', "Pöytyä"),
('raahe', "Raahe"),
('raasepori', "Raasepori"),
('raisio', "Raisio"),
('rantasalmi', "Rantasalmi"),
('ranua', "Ranua"),
('rauma', "Rauma"),
('rautalampi', "Rautalampi"),
('rautavaara', "Rautavaara"),
('rautjarvi', "Rautjärvi"),
('reisjarvi', "Reisjärvi"),
('riihimaki', "Riihimäki"),
('ristiina', "Ristiina"),
('ristijarvi', "Ristijärvi"),
('rovaniemi', "Rovaniemi"),
('ruokolahti', "Ruokolahti"),
('ruovesi', "Ruovesi"),
('rusko', "Rusko"),
('raakkyla', "Rääkkylä"),
('saarijarvi', "Saarijärvi"),
('salla', "Salla"),
('salo', "Salo"),
('saltvik', "Saltvik"),
('sastamala', "Sastamala"),
('sauvo', "Sauvo"),
('savitaipale', "Savitaipale"),
('savonlinna', "Savonlinna"),
('savukoski', "Savukoski"),
('seinajoki', "Seinäjoki"),
('sievi', "Sievi"),
('siikainen', "Siikainen"),
('siikajoki', "Siikajoki"),
('siikalatva', "Siikalatva"),
('siilinjarvi', "Siilinjärvi"),
('simo', "Simo"),
('sipoo', "Sipoo"),
('siuntio', "Siuntio"),
('sodankyla', "Sodankylä"),
('soini', "Soini"),
('somero', "Somero"),
('sonkajarvi', "Sonkajärvi"),
('sotkamo', "Sotkamo"),
('sottunga', "Sottunga"),
('sulkava', "Sulkava"),
('sund', "Sund"),
('suomenniemi', "Suomenniemi"),
('suomussalmi', "Suomussalmi"),
('suonenjoki', "Suonenjoki"),
('sysma', "Sysmä"),
('sakyla', "Säkylä"),
('taipalsaari', "Taipalsaari"),
('taivalkoski', "Taivalkoski"),
('taivassalo', "Taivassalo"),
('tammela', "Tammela"),
('tampere', "Tampere"),
('tarvasjoki', "Tarvasjoki"),
('tervo', "Tervo"),
('tervola', "Tervola"),
('teuva', "Teuva"),
('tohmajarvi', "Tohmajärvi"),
('toholampi', "Toholampi"),
('toivakka', "Toivakka"),
('tornio', "Tornio"),
('turku', "Turku"),
('tuusniemi', "Tuusniemi"),
('tuusula', "Tuusula"),
('tyrnava', "Tyrnävä"),
('toysa', "Töysä"),
('ulvila', "Ulvila"),
('urjala', "Urjala"),
('utajarvi', "Utajärvi"),
('utsjoki', "Utsjoki"),
('uurainen', "Uurainen"),
('uusikaarlepyy', "Uusikaarlepyy"),
('uusikaupunki', "Uusikaupunki"),
('vaala', "Vaala"),
('vaasa', "Vaasa"),
('valkeakoski', "Valkeakoski"),
('valtimo', "Valtimo"),
('vantaa', "Vantaa"),
('varkaus', "Varkaus"),
('varpaisjarvi', "Varpaisjärvi"),
('vehmaa', "Vehmaa"),
('vesanto', "Vesanto"),
('vesilahti', "Vesilahti"),
('veteli', "Veteli"),
('vierema', "Vieremä"),
('vihanti', "Vihanti"),
('vihti', "Vihti"),
('viitasaari', "Viitasaari"),
('vimpeli', "Vimpeli"),
('virolahti', "Virolahti"),
('virrat', "Virrat"),
('vardo', "Vårdö"),
('vahakyro', "Vähäkyrö"),
('voyri-maksamaa', "Vöyri-Maksamaa"),
('yli-ii', "Yli-Ii"),
('ylitornio', "Ylitornio"),
('ylivieska', "Ylivieska"),
('ylojarvi', "Ylöjärvi"),
('ypaja', "Ypäjä"),
('ahtari', "Ähtäri"),
('aanekoski', "Äänekoski")
)
| mpl-2.0 | -5,813,406,947,525,378,000 | 28.543103 | 86 | 0.528353 | false |
sumanau7/Ele_CC_Sumanau | lib/traitlets/config/tests/test_configurable.py | 4 | 14095 | # encoding: utf-8
"""Tests for traitlets.config.configurable"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
from unittest import TestCase
import nose.tools as nt
from nose import SkipTest
from traitlets.config.configurable import (
Configurable,
LoggingConfigurable,
SingletonConfigurable,
)
from traitlets.traitlets import (
Integer, Float, Unicode, List, Dict, Set,
)
from traitlets.config.loader import Config
from ipython_genutils.py3compat import PY3
from ...tests._warnings import expected_warnings
class MyConfigurable(Configurable):
a = Integer(1, help="The integer a.").tag(config=True)
b = Float(1.0, help="The integer b.").tag(config=True)
c = Unicode('no config')
mc_help=u"""MyConfigurable options
----------------------
--MyConfigurable.a=<Integer>
Default: 1
The integer a.
--MyConfigurable.b=<Float>
Default: 1.0
The integer b."""
mc_help_inst=u"""MyConfigurable options
----------------------
--MyConfigurable.a=<Integer>
Current: 5
The integer a.
--MyConfigurable.b=<Float>
Current: 4.0
The integer b."""
# On Python 3, the Integer trait is a synonym for Int
if PY3:
mc_help = mc_help.replace(u"<Integer>", u"<Int>")
mc_help_inst = mc_help_inst.replace(u"<Integer>", u"<Int>")
class Foo(Configurable):
a = Integer(0, help="The integer a.").tag(config=True)
b = Unicode('nope').tag(config=True)
class Bar(Foo):
b = Unicode('gotit', help="The string b.").tag(config=False)
c = Float(help="The string c.").tag(config=True)
class TestConfigurable(TestCase):
def test_default(self):
c1 = Configurable()
c2 = Configurable(config=c1.config)
c3 = Configurable(config=c2.config)
self.assertEqual(c1.config, c2.config)
self.assertEqual(c2.config, c3.config)
def test_custom(self):
config = Config()
config.foo = 'foo'
config.bar = 'bar'
c1 = Configurable(config=config)
c2 = Configurable(config=c1.config)
c3 = Configurable(config=c2.config)
self.assertEqual(c1.config, config)
self.assertEqual(c2.config, config)
self.assertEqual(c3.config, config)
# Test that copies are not made
self.assertTrue(c1.config is config)
self.assertTrue(c2.config is config)
self.assertTrue(c3.config is config)
self.assertTrue(c1.config is c2.config)
self.assertTrue(c2.config is c3.config)
def test_inheritance(self):
config = Config()
config.MyConfigurable.a = 2
config.MyConfigurable.b = 2.0
c1 = MyConfigurable(config=config)
c2 = MyConfigurable(config=c1.config)
self.assertEqual(c1.a, config.MyConfigurable.a)
self.assertEqual(c1.b, config.MyConfigurable.b)
self.assertEqual(c2.a, config.MyConfigurable.a)
self.assertEqual(c2.b, config.MyConfigurable.b)
def test_parent(self):
config = Config()
config.Foo.a = 10
config.Foo.b = "wow"
config.Bar.b = 'later'
config.Bar.c = 100.0
f = Foo(config=config)
with expected_warnings(['`b` not recognized']):
b = Bar(config=f.config)
self.assertEqual(f.a, 10)
self.assertEqual(f.b, 'wow')
self.assertEqual(b.b, 'gotit')
self.assertEqual(b.c, 100.0)
def test_override1(self):
config = Config()
config.MyConfigurable.a = 2
config.MyConfigurable.b = 2.0
c = MyConfigurable(a=3, config=config)
self.assertEqual(c.a, 3)
self.assertEqual(c.b, config.MyConfigurable.b)
self.assertEqual(c.c, 'no config')
def test_override2(self):
config = Config()
config.Foo.a = 1
config.Bar.b = 'or' # Up above b is config=False, so this won't do it.
config.Bar.c = 10.0
with expected_warnings(['`b` not recognized']):
c = Bar(config=config)
self.assertEqual(c.a, config.Foo.a)
self.assertEqual(c.b, 'gotit')
self.assertEqual(c.c, config.Bar.c)
with expected_warnings(['`b` not recognized']):
c = Bar(a=2, b='and', c=20.0, config=config)
self.assertEqual(c.a, 2)
self.assertEqual(c.b, 'and')
self.assertEqual(c.c, 20.0)
def test_help(self):
self.assertEqual(MyConfigurable.class_get_help(), mc_help)
def test_help_inst(self):
inst = MyConfigurable(a=5, b=4)
self.assertEqual(MyConfigurable.class_get_help(inst), mc_help_inst)
class TestSingletonConfigurable(TestCase):
def test_instance(self):
class Foo(SingletonConfigurable): pass
self.assertEqual(Foo.initialized(), False)
foo = Foo.instance()
self.assertEqual(Foo.initialized(), True)
self.assertEqual(foo, Foo.instance())
self.assertEqual(SingletonConfigurable._instance, None)
def test_inheritance(self):
class Bar(SingletonConfigurable): pass
class Bam(Bar): pass
self.assertEqual(Bar.initialized(), False)
self.assertEqual(Bam.initialized(), False)
bam = Bam.instance()
        self.assertEqual(bam, Bar.instance())
self.assertEqual(Bar.initialized(), True)
self.assertEqual(Bam.initialized(), True)
self.assertEqual(bam, Bam._instance)
self.assertEqual(bam, Bar._instance)
self.assertEqual(SingletonConfigurable._instance, None)
class MyParent(Configurable):
pass
class MyParent2(MyParent):
pass
class TestParentConfigurable(TestCase):
def test_parent_config(self):
cfg = Config({
'MyParent' : {
'MyConfigurable' : {
'b' : 2.0,
}
}
})
parent = MyParent(config=cfg)
myc = MyConfigurable(parent=parent)
self.assertEqual(myc.b, parent.config.MyParent.MyConfigurable.b)
def test_parent_inheritance(self):
cfg = Config({
'MyParent' : {
'MyConfigurable' : {
'b' : 2.0,
}
}
})
parent = MyParent2(config=cfg)
myc = MyConfigurable(parent=parent)
self.assertEqual(myc.b, parent.config.MyParent.MyConfigurable.b)
def test_multi_parent(self):
cfg = Config({
'MyParent2' : {
'MyParent' : {
'MyConfigurable' : {
'b' : 2.0,
}
},
# this one shouldn't count
'MyConfigurable' : {
'b' : 3.0,
},
}
})
parent2 = MyParent2(config=cfg)
parent = MyParent(parent=parent2)
myc = MyConfigurable(parent=parent)
self.assertEqual(myc.b, parent.config.MyParent2.MyParent.MyConfigurable.b)
def test_parent_priority(self):
cfg = Config({
'MyConfigurable' : {
'b' : 2.0,
},
'MyParent' : {
'MyConfigurable' : {
'b' : 3.0,
}
},
'MyParent2' : {
'MyConfigurable' : {
'b' : 4.0,
}
}
})
parent = MyParent2(config=cfg)
myc = MyConfigurable(parent=parent)
self.assertEqual(myc.b, parent.config.MyParent2.MyConfigurable.b)
def test_multi_parent_priority(self):
cfg = Config({
'MyConfigurable' : {
'b' : 2.0,
},
'MyParent' : {
'MyConfigurable' : {
'b' : 3.0,
}
},
            'MyParent2' : {
                'MyConfigurable' : {
                    'b' : 4.0,
                },
                'MyParent' : {
                    'MyConfigurable' : {
                        'b' : 5.0,
                    }
                }
            }
})
parent2 = MyParent2(config=cfg)
        parent = MyParent(parent=parent2)
myc = MyConfigurable(parent=parent)
self.assertEqual(myc.b, parent.config.MyParent2.MyParent.MyConfigurable.b)
class Containers(Configurable):
lis = List().tag(config=True)
def _lis_default(self):
return [-1]
s = Set().tag(config=True)
def _s_default(self):
return {'a'}
d = Dict().tag(config=True)
def _d_default(self):
return {'a' : 'b'}
class TestConfigContainers(TestCase):
def test_extend(self):
c = Config()
c.Containers.lis.extend(list(range(5)))
obj = Containers(config=c)
self.assertEqual(obj.lis, list(range(-1,5)))
def test_insert(self):
c = Config()
c.Containers.lis.insert(0, 'a')
c.Containers.lis.insert(1, 'b')
obj = Containers(config=c)
self.assertEqual(obj.lis, ['a', 'b', -1])
def test_prepend(self):
c = Config()
c.Containers.lis.prepend([1,2])
c.Containers.lis.prepend([2,3])
obj = Containers(config=c)
self.assertEqual(obj.lis, [2,3,1,2,-1])
def test_prepend_extend(self):
c = Config()
c.Containers.lis.prepend([1,2])
c.Containers.lis.extend([2,3])
obj = Containers(config=c)
self.assertEqual(obj.lis, [1,2,-1,2,3])
def test_append_extend(self):
c = Config()
c.Containers.lis.append([1,2])
c.Containers.lis.extend([2,3])
obj = Containers(config=c)
self.assertEqual(obj.lis, [-1,[1,2],2,3])
def test_extend_append(self):
c = Config()
c.Containers.lis.extend([2,3])
c.Containers.lis.append([1,2])
obj = Containers(config=c)
self.assertEqual(obj.lis, [-1,2,3,[1,2]])
def test_insert_extend(self):
c = Config()
c.Containers.lis.insert(0, 1)
c.Containers.lis.extend([2,3])
obj = Containers(config=c)
self.assertEqual(obj.lis, [1,-1,2,3])
def test_set_update(self):
c = Config()
c.Containers.s.update({0,1,2})
c.Containers.s.update({3})
obj = Containers(config=c)
self.assertEqual(obj.s, {'a', 0, 1, 2, 3})
def test_dict_update(self):
c = Config()
c.Containers.d.update({'c' : 'd'})
c.Containers.d.update({'e' : 'f'})
obj = Containers(config=c)
self.assertEqual(obj.d, {'a':'b', 'c':'d', 'e':'f'})
def test_update_twice(self):
c = Config()
c.MyConfigurable.a = 5
m = MyConfigurable(config=c)
self.assertEqual(m.a, 5)
c2 = Config()
c2.MyConfigurable.a = 10
m.update_config(c2)
self.assertEqual(m.a, 10)
c2.MyConfigurable.a = 15
m.update_config(c2)
self.assertEqual(m.a, 15)
def test_update_self(self):
"""update_config with same config object still triggers config_changed"""
c = Config()
c.MyConfigurable.a = 5
m = MyConfigurable(config=c)
self.assertEqual(m.a, 5)
c.MyConfigurable.a = 10
m.update_config(c)
self.assertEqual(m.a, 10)
def test_config_default(self):
class SomeSingleton(SingletonConfigurable):
pass
class DefaultConfigurable(Configurable):
a = Integer().tag(config=True)
def _config_default(self):
if SomeSingleton.initialized():
return SomeSingleton.instance().config
return Config()
c = Config()
c.DefaultConfigurable.a = 5
d1 = DefaultConfigurable()
self.assertEqual(d1.a, 0)
single = SomeSingleton.instance(config=c)
d2 = DefaultConfigurable()
self.assertIs(d2.config, single.config)
self.assertEqual(d2.a, 5)
def test_config_default_deprecated(self):
"""Make sure configurables work even with the deprecations in traitlets"""
class SomeSingleton(SingletonConfigurable):
pass
with expected_warnings(['Metadata should be set using the \.tag\(\) method']):
class DefaultConfigurable(Configurable):
a = Integer(config=True)
def _config_default(self):
if SomeSingleton.initialized():
return SomeSingleton.instance().config
return Config()
c = Config()
c.DefaultConfigurable.a = 5
d1 = DefaultConfigurable()
self.assertEqual(d1.a, 0)
single = SomeSingleton.instance(config=c)
d2 = DefaultConfigurable()
self.assertIs(d2.config, single.config)
self.assertEqual(d2.a, 5)
def test_warn_match():
if not hasattr(nt, 'assert_logs'):
raise SkipTest("Test requires nose.tests.assert_logs")
class A(LoggingConfigurable):
foo = Integer(config=True)
bar = Integer(config=True)
baz = Integer(config=True)
logger = logging.getLogger('test_warn_match')
cfg = Config({'A': {'bat': 5}})
with nt.assert_logs(logger, logging.WARNING) as captured:
a = A(config=cfg, log=logger)
output = '\n'.join(captured.output)
nt.assert_in('Did you mean one of: `bar, baz`?', output)
nt.assert_in('Config option `bat` not recognized by `A`.', output)
cfg = Config({'A': {'fool': 5}})
with nt.assert_logs(logger, logging.WARNING) as captured:
a = A(config=cfg, log=logger)
output = '\n'.join(captured.output)
nt.assert_in('Config option `fool` not recognized by `A`.', output)
nt.assert_in('Did you mean `foo`?', output)
cfg = Config({'A': {'totally_wrong': 5}})
with nt.assert_logs(logger, logging.WARNING) as captured:
a = A(config=cfg, log=logger)
output = '\n'.join(captured.output)
nt.assert_in('Config option `totally_wrong` not recognized by `A`.', output)
nt.assert_not_in('Did you mean', output)
| apache-2.0 | -1,090,027,966,061,592,300 | 29.842451 | 86 | 0.562682 | false |
mjirik/lisa | tests/vein_basin_segmentation_test.py | 1 | 19794 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# import functions from another directory
import sys
from PyQt5.QtWidgets import *
import os.path
# imcut_path = os.path.join(path_to_script, "../../imcut/")
# sys.path.insert(0, imcut_path)
import unittest
import numpy as np
import pytest
from lisa import organ_segmentation
import imcut.dcmreaddata as dcmr
import lisa.dataset
import io3d.datasets
# nosetests tests/organ_segmentation_test.py:OrganSegmentationTest.test_create_iparams # noqa
class CouinaudSegmentationTest(unittest.TestCase):
interactiveTest = False
verbose = False
@unittest.skip("Waiting for implementation")
def test_devel_vein_basin_segmentation(self):
pth = r"E:\data\medical\processed\mik2018 para\P09_paraall_cropped.pklz"
datap = io3d.read(pth)
labels = {
"1": 1,
"2": 2,
"3": 3,
"4": 4,
"5": 5,
"6": 6,
}
        from PyQt5.QtWidgets import QApplication, QPushButton
app = QApplication(sys.argv)
import seededitorqt
se = seededitorqt.QTSeedEditor(datap["data3d"], contours=datap["segmentation"])
def split(obj):
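            """Button callback: split the vessel using the seeds drawn in
            the editor and display the resulting labels."""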
import lisa.virtual_resection as vr
print(np.max(datap["data3d"]))
print(np.unique(se.seeds))
out, sepobj = vr.split_vessel(datap, se.seeds, method="separate labels", input_seeds_label2=2)
print(np.unique(out))
se.seeds = out
se.contours = out
se.setView(se.actual_view)
qpb = QPushButton("Ahoj")
qpb.clicked.connect(split)
        se.my_layout.layout().itemAt(0).itemAt(2).addWidget(qpb)
se.exec_()
# def generate_data(self):
#
# img3d = (np.random.rand(30, 30, 30)*10).astype(np.int16)
# seeds = (np.zeros(img3d.shape)).astype(np.int8)
# segmentation = (np.zeros(img3d.shape)).astype(np.int8)
# segmentation[10:25, 4:24, 2:16] = 1
# img3d = img3d + segmentation*20
# seeds[12:18, 9:16, 3:6] = 1
# seeds[19:22, 21:27, 19:21] = 2
#
# voxelsize_mm = [5, 5, 5]
# metadata = {'voxelsize_mm': voxelsize_mm}
# return img3d, metadata, seeds, segmentation
#
# # @unittest.skipIf(not interactiveTest, "interactive test")
# @pytest.mark.interactive
# def test_viewer_seeds(self):
#
# try:
# from imcut.seed_editor_qt import QTSeedEditor
# except:
# print("Deprecated of pyseg_base as submodule")
# from seed_editor_qt import QTSeedEditor
# from PyQt4.QtGui import QApplication
# import numpy as np
# img3d = (np.random.rand(30, 30, 30)*10).astype(np.int16)
# seeds = (np.zeros(img3d.shape)).astype(np.int8)
# seeds[3:6, 12:18, 9:16] = 1
# seeds[3:6, 19:22, 21:27] = 2
# # , QMainWindow
# app = QApplication(sys.argv)
# pyed = QTSeedEditor(img3d, seeds=seeds)
# pyed.exec_()
#
# # deletemask = pyed.getSeeds()
# # import pdb; pdb.set_trace()
#
# # pyed = QTSeedEditor(deletemask, mode='draw')
# # pyed.exec_()
#
# app.exit()
# # @unittest.skip("demonstrating skipping")
#
# @pytest.mark.interactive
# def test_whole_organ_segmentation_interactive(self):
# """
# Interactive test uses dicom data for segmentation
# """
# dcmdir = io3d.datasets.join_path(
# 'matlab/examples/sample_data/DICOM/digest_article/'
# )
# # path_to_script,
# # './../sample_data/matlab/examples/sample_data/DICOM/digest_article/') # noqa
# oseg = organ_segmentation.OrganSegmentation(
# dcmdir, working_voxelsize_mm=4, manualroi=False)
#
# # manual seeds setting
# print("with left mouse button select some pixels of the brain")
# print("with right mouse button select some pixels of other tissues\
# and background")
#
# oseg.interactivity()
#
# volume = oseg.get_segmented_volume_size_mm3()
# print(volume)
#
# self.assertGreater(volume, 50000)
# self.assertLess(volume, 1200000)
#
#
# # roi_mm = [[3, 3, 3], [150, 150, 50]]
# # oseg.ni_set_roi()
# # coordinates_mm = [[110, 50, 30], [10, 10, 10]]
# # label = [1, 2]
# # radius = [5, 5]
# # oseg.ni_set_seeds(coordinates_mm, label, radius)
# #
# # oseg.make_segmentation()
#
#
# # @unittest.skipIf(not interactiveTest, "interactive test")
# @unittest.skip("interactivity params are obsolete")
# def test_organ_segmentation_with_boundary_penalties(self):
# """
# Interactivity is stored to file
# """
# dcmdir = io3d.datasets.join_path('jatra_5mm')
#
# print("Interactive test: with left mouse button select liver, \
# with right mouse button select other tissues")
# # gcparams = {'pairwiseAlpha':10, 'use_boundary_penalties':True}
# segparams = {'pairwise_alpha_per': 3,
# 'use_boundary_penalties': True,
# 'boundary_penalties_sigma': 200}
# oseg = organ_segmentation.OrganSegmentation(
# dcmdir, working_voxelsize_mm=4, segparams=segparams)
# oseg.add_seeds_mm([120], [120], [400], label=1, radius=30)
# oseg.add_seeds_mm([170, 220, 250], [250, 280, 200], [400], label=2,
# radius=30)
#
# "boundary penalties"
# oseg.interactivity()
# # oseg.ninteractivity()
#
# volume = oseg.get_segmented_volume_size_mm3()
#
# # misc.obj_to_file(oseg.get_iparams(),'iparams.pkl', filetype='pickle')
#
# self.assertGreater(volume, 1000000)
# # @unittest.skipIf(not interactiveTest, "interactive test")
#
# @unittest.skip("interactivity params are obsolete")
# def test_create_iparams(self):
# """
# Interactivity is stored to file
# """
# if self.verbose:
# print("test_create_iparams")
# import misc
# dcmdir = io3d.datasets.join_path('jatra_5mm')
# # path_to_script, './../sample_data/jatra_5mm')
#
# segparams = {'pairwiseAlpha': 20,
# 'use_boundary_penalties': False,
# 'boundary_penalties_sigma': 200}
# # oseg = organ_segmentation.OrganSegmentation(dcmdir, working_voxelsize_mm = 4) # noqa
# oseg = organ_segmentation.OrganSegmentation(
# dcmdir, working_voxelsize_mm=4,
# segparams=segparams, manualroi=False)
# # oseg.add_seeds_mm([120, 160], [150, 120], [70], label=1, radius=20)
# oseg.add_seeds_mm([120, 160], [150, 80], [85], label=1, radius=20)
# oseg.add_seeds_mm([170, 220, 250, 100], [250, 300, 200, 350], [85],
# label=2, radius=20)
# oseg.add_seeds_mm([170], [240], [70], label=2, radius=20)
#
# # print "test_ipars"
# # oseg.interactivity()
# oseg.ninteractivity()
#
# volume = oseg.get_segmented_volume_size_mm3()
# # print 'vol %.3g ' %(volume)
#
# misc.obj_to_file(oseg.get_iparams(), 'iparams.pkl', filetype='pickle')
#
# self.assertGreater(volume, 1000000)
#
# # @unittest.skipIf(not interactiveTest, "interactive test")
# @pytest.mark.interactive
# def test_stored_interactivity(self):
# pass
#
# def test_synth_liver(self):
# params = {}
# self.synthetic_liver_template(params)
#
# def synthetic_liver(self):
# """
# Create synthetic data. There is some liver and porta -like object.
# """
# # data
# slab = {'none': 0, 'liver': 1, 'porta': 2}
# voxelsize_mm = np.array([1.0, 1.0, 1.2])
#
# segm = np.zeros([80, 256, 250], dtype=np.int16)
#
# # liver
# segm[30:60, 70:180, 40:190] = slab['liver']
# # porta
# segm[40:45, 120:130, 70:190] = slab['porta']
# segm[40:45, 80:130, 100:110] = slab['porta']
# segm[40:44, 120:170, 130:135] = slab['porta']
#
# data3d = np.zeros(segm.shape)
# data3d[segm == slab['liver']] = 146
# data3d[segm == slab['porta']] = 206
# noise = (np.random.normal(0, 10, segm.shape)) # .astype(np.int16)
# data3d = (data3d + noise).astype(np.int16)
# return data3d, segm, voxelsize_mm, slab
#
# def synthetic_liver_template(self, params):
# """
# Function uses organ_segmentation for synthetic box object
# segmentation.
# """
# # dcmdir = os.path.join(path_to_script,'./../sample_data/matlab/examples/sample_data/DICOM/digest_article/') # noqa
# # data
#
# data3d, segm, voxelsize_mm, slab = self.synthetic_liver()
#
# # seeds
# seeds = np.zeros(data3d.shape, np.int8)
# seeds[40:55, 90:120, 70:110] = 1
# seeds[30:45, 190:200, 40:90] = 2
# # [mm] 10 x 10 x 10 # voxelsize_mm = [1, 4, 3]
# metadata = {'voxelsize_mm': voxelsize_mm}
#
# oseg = organ_segmentation.OrganSegmentation(
# None,
# data3d=data3d,
# metadata=metadata,
# seeds=seeds,
# working_voxelsize_mm=5,
# manualroi=False,
# autocrop=False,
#
# **params
# )
#
# oseg.ninteractivity()
#
# volume = oseg.get_segmented_volume_size_mm3()
# oseg.portalVeinSegmentation(interactivity=False, threshold=180)
# oseg.saveVesselTree('porta')
#
# # print '> 0 '
# # print np.sum(oseg.segmentation > 0)
# # print np.sum(segm > 0)
# # print np.sum(oseg.segmentation > 0) * np.prod(voxelsize_mm)
# # print np.sum(segm > 0) * np.prod(voxelsize_mm)
# # print 'computed ', volume
# # print voxelsize_mm
# # print oseg.voxelsize_mm
#
# # import pdb; pdb.set_trace()
# # import sed3
# # ed = sed3.sed3(data3d, seeds=seeds,
# # contour=(oseg.segmentation))
# # ed.show()
# # import ipdb; ipdb.set_trace() # noqa BREAKPOINT
#
    # # it should be a liter, i.e. a million mm3
    # # it is strange: for some library versions it is 630, for others 580
# self.assertGreater(volume, 570000)
# self.assertLess(volume, 640000)
#
# def test_roi(self):
# """
# Test setting of ROI. It is in pixels, not in mm
# """
#
# img3d = (np.random.rand(30, 30, 30)*10).astype(np.int16)
# seeds = (np.zeros(img3d.shape)).astype(np.int8)
# segmentation = (np.zeros(img3d.shape)).astype(np.int8)
# segmentation[10:25, 4:24, 2:16] = 1
# img3d = img3d + segmentation*20
# seeds[12:18, 9:16, 3:6] = 1
# seeds[19:22, 21:27, 19:21] = 2
#
# roi = [[7, 27], [2, 29], [0, 26]]
# # seeds = seeds[7:27, 2:29, 0:26]
# voxelsize_mm = [5, 5, 5]
# metadata = {'voxelsize_mm': voxelsize_mm}
#
# oseg = organ_segmentation.OrganSegmentation(
# None,
# data3d=img3d,
# metadata=metadata,
# seeds=seeds,
# roi=roi,
# working_voxelsize_mm=5,
# manualroi=False)
#
# # from PyQt4.QtGui import QApplication
# # app = QApplication(sys.argv)
# # oseg.interactivity(min_val=0, max_val=30)
# oseg.ninteractivity()
# datap = oseg.export()
#
# volume = oseg.get_segmented_volume_size_mm3()
# self.assertGreater(volume, 500000)
# self.assertIn('data3d', datap.keys())
# self.assertIn('voxelsize_mm', datap.keys())
#
# def test_box_segmentation(self):
# params = {'segmentation_smoothing': False}
# self.box_segmentation_template(params)
#
# def test_box_segmentation_with_smoothing(self):
# """
# Function uses organ_segmentation for synthetic box object
# segmentation.
# """
# params = {'segmentation_smoothing': True}
# self.box_segmentation_template(params, noise_sigma=4)
# # dcmdir = os.path.join(path_to_script,'./../sample_data/matlab/examples/sample_data/DICOM/digest_article/') # noqa
#
# def box_segmentation_template(self, params, noise_sigma=3):
# """
# Function uses organ_segmentation for synthetic box object
# segmentation.
# """
# # dcmdir = os.path.join(path_to_script,'./../sample_data/matlab/examples/sample_data/DICOM/digest_article/') # noqa
# # data
# img3d = np.random.rand(32, 64, 64) * noise_sigma
# img3d[4:24, 12:32, 5:25] = img3d[4:24, 12:32, 5:25] + 30
#
# # seeds
# seeds = np.zeros([32, 64, 64], np.int8)
# seeds[9:12, 13:29, 18:25] = 1
# seeds[9:12, 4:9, 3:32] = 2
# # [mm] 10 x 10 x 10 # voxelsize_mm = [1, 4, 3]
# voxelsize_mm = [5, 5, 5]
# metadata = {'voxelsize_mm': voxelsize_mm}
#
# oseg = organ_segmentation.OrganSegmentation(
# None,
# data3d=img3d,
# metadata=metadata,
# seeds=seeds,
# working_voxelsize_mm=10,
# manualroi=False,
# **params
# )
#
# # oseg.seeds = seeds
# # oseg.make_gc()
# # manual seeds setting
#
# from PyQt4.QtGui import QApplication
# app = QApplication(sys.argv)
# # oseg.interactivity()
# oseg.ninteractivity()
#
# volume = oseg.get_segmented_volume_size_mm3()
#
# # import pdb; pdb.set_trace()
#
    # # it should be a liter, i.e. a million mm3
# self.assertGreater(volume, 800000)
# self.assertLess(volume, 1100000)
#
# def test_volume_resize(self):
# # from scipy.sparse.import lil_matrix
#
# pass
#
# # @unittest.skipIf(True, "interactive test")
# # @unittest.skipIf(not interactiveTest, "interactive test")
# @pytest.mark.interactive
# def test_vincentka_06_slice_thickness_interactive(self):
# """
# Interactive test. SliceThickness is not voxel depth. If it is, this
# test will fail.
# """
# # dcmdir = os.path.join(path_to_script, './../sample_data/matlab/examples/sample_data/DICOM/digest_article/') #noqa
# dcmdir = os.path.expanduser(
# '~/data/medical/data_orig/vincentka/13021610/10200000/')
# dcmdir = os.path.expanduser(
# '~/data/medical/data_orig/vincentka/13021610/12460000/')
# oseg = organ_segmentation.OrganSegmentation(dcmdir,
# working_voxelsize_mm=4,
# manualroi=False)
#
# # manual seeds setting
# print(
# "with left mouse button select some pixels of the bottle content")
# print("with right mouse button select some pixels of background")
#
# oseg.interactivity()
#
# volume = oseg.get_segmented_volume_size_mm3()
# # print volume
#
# self.assertGreater(volume, 550000)
# self.assertLess(volume, 850000)
#
# def setUp(self):
    # """ Set up variables shared by the tests """
# self.assertTrue(True)
#
    # # @TODO finish adding seeds specified in mm
# # @unittest.skipIf(not interactiveTest, "interactive test")
# def test_add_seeds_mm(self):
# """
# Function uses organ_segmentation object for segmentation
# """
# dcmdir = io3d.datasets.join_path(
# 'matlab/examples/sample_data/DICOM/digest_article/'
# # path_to_script,
# # './../sample_data/matlab/examples/sample_data/DICOM/digest_article/'
# )
# oseg = organ_segmentation.OrganSegmentation(dcmdir,
# working_voxelsize_mm=4,
# manualroi=False)
#
# oseg.add_seeds_mm([80], [120], [120], 1, 25)
# oseg.add_seeds_mm([80], [40], [130], 2, 25)
#
    # # for verification, the following can be uncommented
# # from PyQt4.QtGui import QApplication
# # app = QApplication(sys.argv)
# # oseg.interactivity()
#
# oseg.ninteractivity()
#
# volume = oseg.get_segmented_volume_size_mm3()
#
# # import pdb; pdb.set_trace()
#
    # # it should be a liter, i.e. a million mm3
# self.assertGreater(volume, 800000)
# self.assertLess(volume, 1200000)
#
# # roi_mm = [[3, 3, 3], [150, 150, 50]]
# # oseg.ni_set_roi()
# # coordinates_mm = [[110, 50, 30], [10, 10, 10]]
# # label = [1, 2]
# # radius = [5, 5]
# # oseg.ni_set_seeds(coordinates_mm, label, radius)
#
# # oseg.make_segmentation()
#
# # oseg.noninteractivity()
# pass
#
# @unittest.skip("demonstrating skipping")
# def test_dicomread_and_graphcut(self):
# """
# Test dicomread module and graphcut module
# """
# from imcut import pycut
#
# path_to_script = os.path.dirname(os.path.abspath(__file__))
# dcmdir = os.path.join(path_to_script, './../sample_data/matlab/examples/sample_data/DICOM/digest_article/') #noqa
# data3d, metadata = dcmr.dcm_read_from_dir(dcmdir)
#
# # print("Data size: " + str(data3d.nbytes) + ', shape: ' + str(data3d.shape) ) #noqa
#
# igc = pycut.ImageGraphCut(data3d, zoom=0.5)
# seeds = igc.seeds
# seeds[0, :, 0] = 1
# seeds[60:66, 60:66, 5:6] = 2
# igc.noninteractivity(seeds)
#
# igc.make_gc()
# segmentation = igc.segmentation
# self.assertTrue(segmentation[14, 4, 1] == 0)
# self.assertTrue(segmentation[127, 120, 10] == 1)
# self.assertTrue(np.sum(segmentation == 1) > 100)
# self.assertTrue(np.sum(segmentation == 0) > 100)
# # igc.show_segmentation()
#
# def test_resize_data_one_value(self):
# datap = io3d.datasets.generate_abdominal()
# target_voxelsize_mm = 3
# oseg = organ_segmentation.OrganSegmentation(
# working_voxelsize_mm=4,
# manualroi=False)
# oseg.import_dataplus(dataplus=datap)
# orig_shape = oseg.data3d.shape
#
# oseg.resize_to_mm(target_voxelsize_mm)
# self.assertEqual(oseg.voxelsize_mm[0], target_voxelsize_mm)
# self.assertEqual(oseg.voxelsize_mm[1], target_voxelsize_mm)
# self.assertEqual(oseg.voxelsize_mm[2], target_voxelsize_mm)
# self.assertNotEqual(orig_shape[0], oseg.data3d.shape[0])
# self.assertNotEqual(orig_shape[1], oseg.data3d.shape[1])
# self.assertNotEqual(orig_shape[2], oseg.data3d.shape[2])
#
# def test_resize_data_three_values(self):
# datap = io3d.datasets.generate_abdominal()
# target_voxelsize_mm = [7.14, 4.1, 3]
# oseg = organ_segmentation.OrganSegmentation(
# working_voxelsize_mm=4,
# manualroi=False)
# oseg.import_dataplus(dataplus=datap)
# orig_shape = oseg.data3d.shape
#
# oseg.resize_to_mm(target_voxelsize_mm)
# self.assertEqual(oseg.voxelsize_mm[0], target_voxelsize_mm[0])
# self.assertEqual(oseg.voxelsize_mm[1], target_voxelsize_mm[1])
# self.assertEqual(oseg.voxelsize_mm[2], target_voxelsize_mm[2])
# self.assertNotEqual(orig_shape[0], oseg.data3d.shape[0])
# self.assertNotEqual(orig_shape[1], oseg.data3d.shape[1])
# self.assertNotEqual(orig_shape[2], oseg.data3d.shape[2])
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | -8,242,498,691,151,463,000 | 34 | 125 | 0.557522 | false |
jsok/unleashed | tests/test_meta_resources.py | 1 | 3102 | from nose.tools import (
assert_true, assert_false,
assert_equals,
assert_is_none,
assert_raises
)
from unittest import TestCase
from unleashed.resources.fields import Field
from unleashed.resources import UnleashedResource
class EndpointTestCase(TestCase):
def test_endpoint_autocreated(self):
class NoEndpoint(UnleashedResource):
pass
res = NoEndpoint()
assert_equals(res.__endpoint__, 'NoEndpoint')
def test_endpoint_is_none(self):
class NoneEndpoint(UnleashedResource):
__endpoint__ = None
res = NoneEndpoint()
assert_equals(res.__endpoint__, 'NoneEndpoint')
def test_endpoint_not_overwritten(self):
class HasEndpoint(UnleashedResource):
__endpoint__ = 'original'
res = HasEndpoint()
assert_equals(res.__endpoint__, 'original')
def test_endpoint_not_inherited(self):
class BaseEndPoint(UnleashedResource):
__endpoint__ = 'base'
class MyEndpoint(BaseEndPoint):
pass
res = MyEndpoint()
assert_equals(res.__endpoint__, 'MyEndpoint')
class DummyEmbeddedResource(UnleashedResource):
Qux = Field()
Fred = Field()
class DummyResource(UnleashedResource):
__endpoint__ = "MyEndPoint"
Foo = Field()
Bar = Field()
Baz = DummyEmbeddedResource()
class ResourceFieldsTestCase(TestCase):
def test_fields_accessible_as_attributes(self):
r = DummyResource()
assert_true(hasattr(r, 'Foo'))
assert_true(hasattr(r, 'Bar'))
assert_true(hasattr(r, 'Baz'))
assert_true(hasattr(r.Baz, 'Qux'))
assert_true(hasattr(r.Baz, 'Fred'))
def test_fields_settable_as_attributes(self):
r = DummyResource()
r.Foo = 42
r.Bar = True
assert_equals(r.Foo, 42)
assert_equals(r.Bar, True)
def test_from_dict(self):
r = DummyResource()
dict_value = {
'Foo': 1,
'Bar': 2,
'Baz': {
'Qux': 3,
'Fred': 4
}
}
r.from_dict(dict_value)
assert_equals(r.Foo, 1)
assert_equals(r.Bar, 2)
def test_to_dict(self):
r = DummyResource()
r.Foo = 1
r.Bar = 2
r.Baz.Qux = 3
r.Baz.Fred = 4
dict_val = r.to_dict()
assert_equals(
dict_val,
{
'Foo': 1,
'Bar': 2,
'Baz': {
'Qux': 3,
'Fred': 4
}
}
)
def test_to_dict_overwrite_embedded(self):
r = DummyResource()
r.Foo = 1
r.Bar = 2
baz = DummyEmbeddedResource()
baz.Qux = 3
baz.Fred = 4
r.Baz = baz
dict_val = r.to_dict()
assert_equals(
dict_val,
{
'Foo': 1,
'Bar': 2,
'Baz': {
'Qux': 3,
'Fred': 4
}
}
) | mit | 6,770,713,216,156,039,000 | 21.485507 | 55 | 0.505158 | false |
cockpit-project/system-api-roles | roles/selinux/library/selogin.py | 5 | 9073 | #!/usr/bin/python
# (c) 2017, Petr Lautrbach <[email protected]>
# Based on seport.py module (c) 2014, Dan Keder <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: selogin
short_description: Manages linux user to SELinux user mapping
description:
- Manages linux user to SELinux user mapping
version_added: "1.0"
options:
login:
description:
- a Linux user
required: true
default: __default__
seuser:
description:
- SELinux user name
required: true
default: null
serange:
description:
        - MLS/MCS Security Range (MLS/MCS systems only). SELinux range for the SELinux login mapping; defaults to the SELinux user record range.
required: false
default: s0
state:
description:
- Desired mapping value.
required: true
default: present
choices: [ 'present', 'absent' ]
reload:
description:
- Reload SELinux policy after commit.
required: false
default: yes
notes:
- The changes are persistent across reboots
- Not tested on any debian based system
requirements: [ 'libselinux-python', 'policycoreutils-python' ]
author:
    - Dan Keder
    - Petr Lautrbach
'''
EXAMPLES = '''
# Modify the default user on the system to the guest_u user
- selogin:
login: __default__
seuser: guest_u
state: present
# Assign gijoe user on an MLS machine a range and to the staff_u user
- selogin:
login: gijoe
seuser: staff_u
    serange: SystemLow-Secret
state: present
# Assign all users in the engineering group to the staff_u user
- selogin:
    login: '%engineering'
seuser: staff_u
state: present
'''
try:
import selinux
HAVE_SELINUX=True
except ImportError:
HAVE_SELINUX=False
try:
import seobject
HAVE_SEOBJECT=True
except ImportError:
HAVE_SEOBJECT=False
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
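
# NOTE: the two semanage_port_* helpers below are carried over from the
# seport.py module this file is based on (see the header); nothing in the
# selogin code paths calls them.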
def semanage_port_get_ports(seport, setype, proto):
""" Get the list of ports that have the specified type definition.
:param seport: Instance of seobject.portRecords
:type setype: str
:param setype: SELinux type.
:type proto: str
:param proto: Protocol ('tcp' or 'udp')
:rtype: list
:return: List of ports that have the specified SELinux type.
"""
records = seport.get_all_by_type()
if (setype, proto) in records:
return records[(setype, proto)]
else:
return []
def semanage_port_get_type(seport, port, proto):
""" Get the SELinux type of the specified port.
:param seport: Instance of seobject.portRecords
:type port: str
:param port: Port or port range (example: "8080", "8080-9090")
:type proto: str
:param proto: Protocol ('tcp' or 'udp')
:rtype: tuple
:return: Tuple containing the SELinux type and MLS/MCS level, or None if not found.
"""
ports = port.split('-', 1)
if len(ports) == 1:
ports.extend(ports)
key = (int(ports[0]), int(ports[1]), proto)
records = seport.get_all()
if key in records:
return records[key]
else:
return None
def semanage_login_add(module, login, seuser, do_reload, serange='s0', sestore=''):
""" Add linux user to SELinux user mapping
:type module: AnsibleModule
:param module: Ansible module
:type login: str
:param login: a Linux User or a Linux group if it begins with %
:type seuser: str
    :param seuser: an SELinux user ('__default__', 'unconfined_u', 'staff_u', ...), see 'semanage login -l'
:type serange: str
:param serange: SELinux MLS/MCS range (defaults to 's0')
:type do_reload: bool
:param do_reload: Whether to reload SELinux policy after commit
:type sestore: str
:param sestore: SELinux store
:rtype: bool
:return: True if the policy was changed, otherwise False
"""
try:
selogin = seobject.loginRecords(sestore)
selogin.set_reload(do_reload)
change = False
all_logins = selogin.get_all()
# module.fail_json(msg="%s: %s %s" % (all_logins, login, sestore))
# for local_login in all_logins:
if login not in all_logins.keys():
selogin.add(login, seuser, serange)
change = True
else:
selogin.modify(login, seuser, serange)
except ValueError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
except IOError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
except KeyError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
except OSError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
except RuntimeError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
return change
def semanage_login_del(module, login, seuser, do_reload, sestore=''):
""" Delete linux user to SELinux user mapping
:type module: AnsibleModule
:param module: Ansible module
:type login: str
:param login: a Linux User or a Linux group if it begins with %
:type seuser: str
    :param seuser: an SELinux user ('__default__', 'unconfined_u', 'staff_u', ...), see 'semanage login -l'
:type do_reload: bool
:param do_reload: Whether to reload SELinux policy after commit
:type sestore: str
:param sestore: SELinux store
:rtype: bool
:return: True if the policy was changed, otherwise False
"""
try:
selogin = seobject.loginRecords(sestore)
selogin.set_reload(do_reload)
change = False
all_logins = selogin.get_all()
# module.fail_json(msg="%s: %s %s" % (all_logins, login, sestore))
if login in all_logins.keys():
selogin.delete(login)
change = True
except ValueError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
except IOError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
except KeyError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
except OSError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
except RuntimeError:
e = get_exception()
module.fail_json(msg="%s: %s\n" % (e.__class__.__name__, str(e)))
return change
def main():
module = AnsibleModule(
argument_spec={
'login': {
'required': True,
# 'default': '__default__',
},
'seuser': {
'required': True,
},
'serange': {
'required': False
},
'state': {
'choices': ['present', 'absent'],
'default': 'present'
},
'reload': {
'required': False,
'type': 'bool',
'default': 'yes',
},
},
supports_check_mode=True
)
if not HAVE_SELINUX:
module.fail_json(msg="This module requires libselinux-python")
if not HAVE_SEOBJECT:
module.fail_json(msg="This module requires policycoreutils-python")
if not selinux.is_selinux_enabled():
module.fail_json(msg="SELinux is disabled on this host.")
login = module.params['login']
seuser = module.params['seuser']
serange = module.params['serange']
state = module.params['state']
do_reload = module.params['reload']
result = {
'login': login,
'seuser': seuser,
'serange': serange,
'state': state,
}
if state == 'present':
result['changed'] = semanage_login_add(module, login, seuser, do_reload, serange)
elif state == 'absent':
result['changed'] = semanage_login_del(module, login, seuser, do_reload)
else:
module.fail_json(msg='Invalid value of argument "state": {0}'.format(state))
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -8,821,499,433,613,315,000 | 27.98722 | 137 | 0.600463 | false |
lavvy/osmc | package/mediacenter-addon-osmc/src/script.module.osmcsetting.logging/resources/osmc/OSMCSetting.py | 9 | 8961 | '''
The settings for OSMC are handled by the OSMC Settings Addon (OSA).
In order to more easily accommodate future changes and enhancements, each OSMC settings bundle (module) is a separate addon.
The module can take the form of an xbmc service, an xbmc script, or an xbmc module, but it must be installed into the users'
/usr/share/kodi/addons folder.
The OSA collects the modules it can find, loads their icons, and launches them individually when the user clicks on an icon.
The modules can either have their own GUI, or they can leverage the settings interface provided by XBMC. If the OSG uses the XBMC
settings interface, then all of their settings must be stored in the addons settings.xml. This is true even if the source of record
is a separate config file.
An example of this type is the Pi settings module; the actual settings are read from the config.txt, then written to the
settings.xml for display in kodi, then finally all changes are written back to the config.txt. The Pi module detects user
changes to the settings by identifying the differences between a newly read settings.xml and the values from a previously
read settings.xml.
The values of the settings displayed by this module are only ever populated by the items in the settings.xml. [Note: meaning that
if the settings data is retrieved from a different source, it will need to be populated in the module before it is displayed
to the user.]
Each module must have in its folder, a sub-folder called 'resources/osmc'. Within that folder must reside this script (OSMCSetting.py),
and the icons to be used in the OSG to represent the module (FX_Icon.png and FO_Icon.png for unfocused and focused images
respectively).
When the OSA creates the OSMC Settings GUI (OSG), these modules are identified and the OSMCSetting.py script in each of them
is imported. This script provides the mechanism for the OSG to apply the changes required from a change in a setting.
The OSMCSetting.py file must have a class called OSMCSettingClass as shown below.
The key variables in this class are:
addonid : The id for the addon. This must be the id declared in the addons addon.xml.
description : The description for the module, shown in the OSA
reboot_required : A boolean to declare if the OS needs to be rebooted. If a change in a specific setting
requires an OS reboot to take affect, this is flag that will let the OSG know.
setting_data_method : This dictionary contains:
- the name of all settings in the module
- the current value of those settings
- [optional] apply - a method to call for each setting when the value changes
- [optional] translate - a method to call to translate the data before adding it to the
setting_data_method dict. The translate method must have a 'reverse' argument which
when set to True, reverses the transformation.
The key methods of this class are:
open_settings_window : This is called by the OSG when the icon is clicked. This will open the settings window.
Usually this would be __addon__.OpenSettings(), but it could be any other script.
This allows the creation of action buttons in the GUI, as well as allowing developers
to script and skin their own user interfaces.
[optional] first_method : called before any individual settings changes are applied.
[optional] final_method : called after all the individual settings changes are done.
[optional] boot_method : called when the OSA is first started.
apply_settings : This is called by the OSG to apply the changes to any settings that have changed.
It calls the first setting method, if it exists.
Then it calls the method listed in setting_data_method for each setting. Then it
calls the final method, again, if it exists.
populate_setting_data_method : This method is used to populate the setting_data_method with the current settings data.
Usually this will be from the addons setting data stored in settings.xml and retrieved
using the settings_retriever_xml method.
Sometimes the user is able to edit external setting files (such as the Pi's config.txt).
If the developer wants to use this source in place of the data stored in the
settings.xml, then they should edit this method to include a mechanism to retrieve and
parse that external data. As the window shown in the OSG populates only with data from
the settings.xml, the developer should ensure that the external data is loaded into that
xml before the settings window is opened.
settings_retriever_xml : This method is used to retrieve all the data for the settings listed in the
setting_data_method from the addons settings.xml.
The developer is free to create any methods they see fit, but the ones listed above are specifically used by the OSA.
Specifically, the apply_settings method is called when the OSA closes.
Settings changes are applied when the OSG is called to close. But this behaviour can be changed to occur when the addon
settings window closes by editing the open_settings_window. The method apply_settings will still be called by OSA, so
keep that in mind.
'''
# XBMC Modules
import xbmc
import xbmcaddon
import subprocess
import sys
import os
import threading
addonid = "script.module.osmcsetting.logging"
__addon__ = xbmcaddon.Addon(addonid)
# Custom modules
sys.path.append(xbmc.translatePath(os.path.join(xbmcaddon.Addon(addonid).getAddonInfo('path'), 'resources','lib')))
# OSMC SETTING Modules
from CompLogger import comprehensive_logger as clog
def log(message):
xbmc.log('OSMC LOGGING ' + str(message), level=xbmc.LOGDEBUG)
class OSMCSettingClass(threading.Thread):
'''
    An OSMCSettingClass is a way to instantiate the settings of an OSMC settings module and make them available to the
OSMC Settings Addon (OSA).
'''
def __init__(self):
'''
The logger simply runs specific logs. All this module does is open the Settings window.
'''
super(OSMCSettingClass, self).__init__()
self.addonid = addonid
self.me = xbmcaddon.Addon(self.addonid)
# this is what is displayed in the main settings gui
self.shortname = 'Log Uploader'
self.description = """This module helps with debugging and troubleshooting by retrieving logs, various xml, and config information from your system and uploading them in a single file.[CR]
Once uploading is complete, you are provided with a URL which you can share on the OSMC forums.[CR]
The information stored in the URL will help others diagnose your issue and decrease the amount of time it takes to find a resolution. """
@clog(log, nowait=True)
def run(self):
'''
The method determines what happens when the item is clicked in the settings GUI.
Usually this would be __addon__.OpenSettings(), but it could be any other script.
This allows the creation of action buttons in the GUI, as well as allowing developers to script and skin their
own user interfaces.
'''
self.me.openSettings()
@clog(log)
def apply_settings(self):
'''
This method will apply all of the settings. It calls the first_method, if it exists.
Then it calls the method listed in pi_settings_dict for each setting. Then it calls the
final_method, again, if it exists.
'''
pass
##############################################################################################################################
# #
def first_method(self):
'''
The method to call before all the other setting methods are called.
For example, this could be a call to stop a service. The final method could then restart the service again.
This can be used to apply the setting changes.
'''
@clog(log)
def final_method(self):
'''
The method to call after all the other setting methods have been called.
For example, in the case of the Raspberry Pi's settings module, the final writing to the config.txt can be delayed
until all the settings have been updated in the pi_settings_dict.
'''
''' This method will write the changed settings to the config.txt file. '''
pass
# #
##############################################################################################################################
##############################################################################################################################
# #
'''
Methods beyond this point are for specific settings.
'''
# #
##############################################################################################################################
if __name__ == "__main__":
pass
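
# Illustrative lifecycle (comments only; the flow below is an assumption --
# the class is normally driven by the OSMC Settings Addon, not run directly):
#
#     setting_module = OSMCSettingClass()
#     setting_module.start()            # Thread.start() invokes run(), which
#                                       # opens this addon's settings window
#     setting_module.apply_settings()   # called by the OSA when the GUI closes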
| gpl-2.0 | -8,756,576,332,101,238,000 | 41.070423 | 191 | 0.677045 | false |
ThreatCentral/blackberries | src/ThreatCentral/transforms/common/entities.py | 1 | 10597 | #!/usr/bin/env python
# (c) Copyright [2016] Hewlett Packard Enterprise Development LP Licensed under
# the Apache License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of the License
# at Unless required by applicable
# law or agreed to in writing, software distributed under the License is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
from canari.maltego.message import Entity, EntityField, EntityFieldType, MatchingRule
__author__ = 'Bart Otten'
__copyright__ = '(c) Copyright [2016] Hewlett Packard Enterprise Development LP'
__credits__ = []
__license__ = 'Apache 2.0'
__version__ = '1'
__maintainer__ = 'Bart Otten'
__email__ = '[email protected]'
__status__ = 'Development'
__all__ = [
'ThreatcentralEntity',
'MyThreatcentralEntity'
]
"""
DO NOT EDIT:
The following entity is the base entity type from which all your entities will inherit from. This provides you with the
default namespace that all your entities will use for their unique entity type in Maltego. For example, MyThreatcentralEntity will
have an entity type name of ThreatCentral.MyThreatcentralEntity. When adding a new entity in Maltego, you will have to specify this
name (ThreatCentral.MyThreatcentralEntity) in the 'Unique entity type' field.
"""
class ThreatcentralEntity(Entity):
_namespace_ = 'ThreatCentral'
"""
You can specify as many entity fields as you want by just adding an extra @EntityField() decorator to your entities. The
@EntityField() decorator takes the following parameters:
- name: the name of the field without spaces or special characters except for dots ('.') (required)
- propname: the name of the object's property used to get and set the value of the field (required, if name contains dots)
- displayname: the name of the entity as it appears in Maltego (optional)
- type: the data type of the field (optional, default: EntityFieldType.String)
- required: whether or not the field's value must be set before sending back the message (optional, default: False)
- choices: a list of acceptable field values for this field (optional)
- matchingrule: whether or not the field should be loosely or strictly matched (optional, default: MatchingRule.Strict)
- decorator: a function that is invoked each and everytime the field's value is set or changed.
- is_value: a boolean value that determines whether the field is also the default value of the entity object.
TODO: define as many custom fields and entity types as you wish:)
"""
@EntityField(name='ThreatCentral.fieldN', propname='fieldN', displayname='Field N', matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.field1', propname='field1', displayname='Field 1', type=EntityFieldType.Integer)
class MyThreatcentralEntity(ThreatcentralEntity):
"""
Uncomment the line below and comment out the pass if you wish to define a ridiculous entity type name like
'my.fancy.EntityType'
"""
# _name_ = 'my.fancy.EntityType'
pass
@EntityField(name='ThreatCentral.actor', propname='actor', displayname='Actor',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.name', propname='name', displayname='Name',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.organization', propname='organization', displayname='Organization',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.aliases', propname='aliases', displayname='Aliases',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.country', propname='country', displayname='Country',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.score', propname='score', displayname='Score',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.title', propname='title', displayname='Title',
matchingrule=MatchingRule.Loose)
class Actor(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.TTP', propname='TTP', displayname='Title',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
class TTP(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.title', propname='title', displayname='Title',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.severity', propname='severity', displayname='Severity',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.confidence', propname='confidence', displayname='Confidence',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.indicatorType', propname='indicatorType', displayname='Indicator Type',
matchingrule=MatchingRule.Loose)
class Indicator(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.title', propname='title', displayname='Title',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.importanceScore', propname='importanceScore', displayname='ImportanceScore',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.importanceLevel', propname='importanceLevel', displayname='importanceLevel',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.indicatorType', propname='indicatorType', displayname='Indicator Type',
matchingrule=MatchingRule.Loose)
class Case(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.title', propname='title', displayname='Title',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.url', propname='url', displayname='URL',
matchingrule=MatchingRule.Loose)
class Hyperlinks(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.title', propname='title', displayname='Title',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
class CoursesOfAction(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.name', propname='name', displayname='Name',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.atype', propname='atype', displayname='Type',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.size', propname='size', displayname='Size',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.checksum', propname='checksum', displayname='Checksum',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.createDate', propname='createDate', displayname='Create Date',
matchingrule=MatchingRule.Loose)
class Attachments(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.title', propname='title', displayname='Title',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.reportedOn', propname='reportedOn', displayname='Reported On',
matchingrule=MatchingRule.Loose)
class Incident(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.postDate', propname='postDate', displayname='Post Data',
matchingrule=MatchingRule.Loose)
class Comment(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.name', propname='name', displayname='Name',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.htype', propname='htype', displayname='Type',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.value', propname='value', displayname='Value',
matchingrule=MatchingRule.Loose)
class FileHash(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.mutex', propname='mutex', displayname='Mutex',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.action', propname='action', displayname='Action',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.name', propname='name', displayname='Name',
matchingrule=MatchingRule.Loose)
class Mutex(ThreatcentralEntity):
pass
@EntityField(name='ThreatCentral.resourceId', propname='resourceId', displayname='Resource ID',
matchingrule=MatchingRule.Strict)
@EntityField(name='ThreatCentral.action', propname='action', displayname='Action',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.hive', propname='hive', displayname='Hive',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.key', propname='key', displayname='Key',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.name', propname='name', displayname='Name',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.data', propname='data', displayname='Data',
matchingrule=MatchingRule.Loose)
@EntityField(name='ThreatCentral.rtype', propname='rtype', displayname='Type',
matchingrule=MatchingRule.Loose)
class RegistryKey(ThreatcentralEntity):
pass
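
# Minimal usage sketch (added for illustration; the field values and the
# transform pattern below are assumptions, not part of this module):
#
#     actor = Actor('Example Actor')      # entity value
#     actor.resourceId = 'abc-123'        # hypothetical resource id
#     actor.country = 'Unknown'
#     response += actor                   # typical canari dotransform pattern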
| apache-2.0 | -8,450,055,789,026,379,000 | 48.288372 | 131 | 0.741059 | false |
saschpe/creatrepo | createrepo/merge.py | 4 | 5665 | #!/usr/bin/python -tt
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Copyright 2008 Red Hat, Inc - written by seth vidal skvidal at fedoraproject.org
# merge repos from arbitrary repo urls
import os
import shutil
import yum
import yum.Errors
from yum.misc import unique, getCacheDir
import yum.update_md
import rpmUtils.arch
import operator
from utils import MDError
import createrepo
import tempfile
# take repo paths from the cli
# produce new repo metadata by merging them together.
#TODO:
# excludes?
class RepoMergeBase:
def __init__(self, repolist=[], yumbase=None, mdconf=None, mdbase_class=None ):
self.repolist = repolist
self.outputdir = '%s/merged_repo' % os.getcwd()
self.exclude_tuples = []
self.sort_func = self._sort_func # callback function to magically sort pkgs
if not mdconf:
self.mdconf = createrepo.MetaDataConfig()
else:
self.mdconf = mdconf
if not mdbase_class:
self.mdbase_class = createrepo.MetaDataGenerator
else:
self.mdbase_class = mdbase_class
if not yumbase:
self.yumbase = yum.YumBase()
else:
self.yumbase = yumbase
self.yumbase.conf.cachedir = getCacheDir()
self.yumbase.conf.cache = 0
# default to all arches
self.archlist = unique(rpmUtils.arch.arches.keys() + rpmUtils.arch.arches.values())
self.groups = True
self.updateinfo = True
def _sort_func(self, repos):
"""Default sort func for repomerge. Takes a list of repository objects
any package which is not to be included in the merged repo should be
delPackage()'d"""
# sort the repos by _merge_rank
# - lowest number is the highest rank (1st place, 2ndplace, etc)
repos.sort(key=operator.attrgetter('_merge_rank'))
for repo in repos:
for pkg in repo.sack:
others = self.yumbase.pkgSack.searchNevra(name=pkg.name, arch=pkg.arch)
# NOTE the above is definitely going to catch other versions which may
# be an invalid comparison
if len(others) > 1:
for thatpkg in others:
if pkg.repoid == thatpkg.repoid: continue
if pkg.repo._merge_rank < thatpkg.repo._merge_rank:
thatpkg.repo.sack.delPackage(thatpkg)
def merge_repos(self):
self.yumbase.repos.disableRepo('*')
# add our repos and give them a merge rank in the order they appear in
# in the repolist
count = 0
for r in self.repolist:
if r[0] == '/':
r = 'file://' + r # just fix the file repos, this is silly.
count +=1
rid = 'repo%s' % count
n = self.yumbase.add_enable_repo(rid, baseurls=[r],
metadata_expire=0,
timestamp_check=False)
n._merge_rank = count
#setup our sacks
try:
self.yumbase._getSacks(archlist=self.archlist)
except yum.Errors.RepoError, e:
raise MDError, "Could not setup merge repo pkgsack: %s" % e
myrepos = self.yumbase.repos.listEnabled()
self.sort_func(myrepos)
def write_metadata(self, outputdir=None):
mytempdir = tempfile.mkdtemp()
if self.groups:
try:
comps_fn = mytempdir + '/groups.xml'
compsfile = open(comps_fn, 'w')
compsfile.write(self.yumbase.comps.xml())
compsfile.close()
except yum.Errors.GroupsError, e:
# groups not being available shouldn't be a fatal error
pass
else:
self.mdconf.groupfile=comps_fn
if self.updateinfo:
ui_fn = mytempdir + '/updateinfo.xml'
uifile = open(ui_fn, 'w')
umd = yum.update_md.UpdateMetadata()
for repo in self.yumbase.repos.listEnabled():
try: # attempt to grab the updateinfo.xml.gz from the repodata
umd.add(repo)
except yum.Errors.RepoMDError:
continue
umd.xml(fileobj=uifile)
uifile.close()
self.mdconf.additional_metadata['updateinfo'] = ui_fn
self.mdconf.pkglist = self.yumbase.pkgSack
self.mdconf.directory = self.outputdir
if outputdir:
self.mdconf.directory = outputdir
# clean out what was there
if os.path.exists(self.mdconf.directory + '/repodata'):
shutil.rmtree(self.mdconf.directory + '/repodata')
if not os.path.exists(self.mdconf.directory):
os.makedirs(self.mdconf.directory)
mdgen = self.mdbase_class(config_obj=self.mdconf)
mdgen.doPkgMetadata()
mdgen.doRepoMetadata()
mdgen.doFinalMove()
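
if __name__ == '__main__':
    # Minimal usage sketch (added; the repo URLs/paths come from the command
    # line and are placeholders -- mergerepo.py in createrepo drives this
    # class in essentially the same way).
    import sys
    merger = RepoMergeBase(repolist=sys.argv[1:])
    merger.merge_repos()
    merger.write_metadata()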
| gpl-2.0 | 8,877,100,491,299,574,000 | 36.766667 | 91 | 0.602648 | false |
WindfallLabs/dslw | tests/test_system.py | 1 | 1781 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_system.py: dslw Testing Suite
Copyright (c) 2017 Garin Wally
MIT License; see LICENSE
"""
import os
import unittest
from ctypes import WinDLL
from _ctypes import FreeLibrary
from glob import glob
import apsw
import dslw
ALL_DLLS = [
"libfreexl-1.dll",
"libgcc_s_dw2-1.dll",
"libgeos-3-5-0.dll",
"libgeos_c-1.dll",
"libiconv-2.dll",
"liblzma-5.dll",
"libproj-9.dll",
"libsqlite3-0.dll",
"libstdc++-6.dll",
"libxml2-2.dll",
"mod_spatialite.dll",
#"sqlite3.exe",
"zlib1.dll"
]
# =============================================================================
# TESTS
class DLLTests(unittest.TestCase):
def load_dll(self, dll):
"""DLL-load function."""
try:
print("Testing " + dll)
open_dll = WinDLL(dll)
            # Collect the on-disk locations of the DLL (informational only;
            # the resulting set is not used further).
            glob_paths = set()
for p in os.environ["PATH"].split(";"):
glob_path = glob(os.path.join(p, open_dll._name))
if glob_path:
glob_paths.add(glob_path[0])
FreeLibrary(open_dll._handle)
del open_dll
return True
except WindowsError:
print("....... NOT FOUND")
return False
def test_load_dlls(self):
"""CRITICAL: DLLs found on PATH"""
for dll in ALL_DLLS:
self.assertTrue(self.load_dll(dll))
class ImportArcpy(unittest.TestCase):
def tearDown(self):
if hasattr(dslw, "arcpy"):
del dslw.arcpy
def test_arcpy_first(self):
# Simulate import
dslw.arcpy = None
with self.assertRaises(apsw.ExtensionLoadingError) as context:
reload(dslw)
assert "arcpy" in str(context.exception)
| mit | 4,667,068,809,762,401,000 | 23.067568 | 79 | 0.540146 | false |
sajeeshcs/nested_quota_final | nova/network/driver.py | 2 | 1369 | # Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from oslo_config import cfg
from oslo_utils import importutils
from nova.i18n import _LE, _LI
from nova.openstack.common import log as logging
driver_opts = [
cfg.StrOpt('network_driver',
default='nova.network.linux_net',
help='Driver to use for network creation'),
]
CONF = cfg.CONF
CONF.register_opts(driver_opts)
LOG = logging.getLogger(__name__)
def load_network_driver(network_driver=None):
if not network_driver:
network_driver = CONF.network_driver
if not network_driver:
LOG.error(_LE("Network driver option required, but not specified"))
sys.exit(1)
LOG.info(_LI("Loading network driver '%s'"), network_driver)
return importutils.import_module(network_driver)
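
# Illustrative call (comments only): a typical caller does
#
#     driver = load_network_driver()   # honours CONF.network_driver
#
# and then uses the module-level functions exposed by the loaded driver.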
| apache-2.0 | -3,912,460,570,263,867,400 | 30.113636 | 78 | 0.702703 | false |
pvillela/ServerSim | microservices_example.py | 1 | 7179 | """
Comparison of microservice deployment alternatives.
"""
from __future__ import print_function
import random
import sys
import simpy
from serversim import *
# fi = open("simout.txt", "w")
fi = sys.stdout
usersCurve = [(0, 900), (50, 650), (100, 900), (150, 650)]
def microservices_example(num_users, weight1, weight2, server_range1,
server_range2):
def cug(mid, delta):
"""Computation units geneartor"""
def f():
return random.uniform(mid - delta, mid + delta)
return f
def ld_bal(svc_name):
"""Application server load-balancer."""
if svc_name == "svc_1":
s = random.choice(servers1)
elif svc_name == "svc_2":
s = random.choice(servers2)
else:
assert False, "Invalid service type."
return s
try:
random.seed(12345)
# num_users = 700
simtime = 200
hw_threads = 10
sw_threads = 20
speed = 20
svc_1_comp_units = 2.0
svc_2_comp_units = 1.0
env = simpy.Environment()
n_servers = max(server_range1[-1] + 1, server_range2[-1] + 1)
servers = [Server(env, hw_threads, sw_threads, speed, "AppServer_%s" % i)
for i in range(n_servers)]
servers1 = [servers[i] for i in server_range1]
servers2 = [servers[i] for i in server_range2]
svc_1 = CoreSvcRequester(env, "svc_1", cug(svc_1_comp_units,
svc_1_comp_units*.9), ld_bal)
svc_2 = CoreSvcRequester(env, "svc_2", cug(svc_2_comp_units,
svc_2_comp_units*.9), ld_bal)
weighted_txns = [(svc_1, weight1),
(svc_2, weight2)
]
min_think_time = 2.0 # .5 # 4
max_think_time = 10.0 # 1.5 # 20
grp = UserGroup(env, num_users, "UserTypeX", weighted_txns,
min_think_time, max_think_time)
grp.activate_users()
print("\n\n***** Start Simulation --", num_users, ",", weight1, ",",
weight2, ", [", server_range1[0], ",", server_range1[-1] + 1,
") , [", server_range2[0], ",", server_range2[-1] + 1, ") *****",
file=fi)
print("Simulation: num_users =", num_users, "; simTime =", simtime,
file=fi)
env.run(until=simtime)
print("<< ServerExample >>\n", file=fi)
indent = " " * 4
# print parameters
print("\n" + "simtime =", simtime, file=fi)
print("\n" + "Servers:", file=fi)
for svr in servers:
print(indent*1 + "Server:", svr.name, file=fi)
print(indent * 2 + "max_concurrency =", svr.max_concurrency, file=fi)
print(indent * 2 + "num_threads =", svr.num_threads, file=fi)
print(indent*2 + "speed =", svr.speed, file=fi)
print(indent * 2 + "avg_process_time =", svr.avg_process_time, file=fi)
print(indent * 2 + "avg_hw_queue_time =", svr.avg_hw_queue_time, file=fi)
print(indent * 2 + "avg_thread_queue_time =", svr.avg_thread_queue_time, file=fi)
print(indent * 2 + "avg_service_time =", svr.avg_service_time, file=fi)
print(indent * 2 + "avg_hw_queue_length =", svr.avg_hw_queue_length, file=fi)
print(indent * 2 + "avg_thread_queue_length =", svr.avg_thread_queue_length, file=fi)
print(indent * 2 + "hw_queue_length =", svr.hw_queue_length, file=fi)
print(indent * 2 + "hw_in_process_count =", svr.hw_in_process_count, file=fi)
print(indent * 2 + "thread_queue_length =", svr.thread_queue_length, file=fi)
print(indent * 2 + "thread_in_use_count =", svr.thread_in_use_count, file=fi)
print(indent*2 + "utilization =", svr.utilization, file=fi)
print(indent*2 + "throughput =", svr.throughput, file=fi)
print(indent*1 + "Group:", grp.name, file=fi)
print(indent * 2 + "num_users =", grp.num_users, file=fi)
print(indent * 2 + "min_think_time =", grp.min_think_time, file=fi)
print(indent * 2 + "max_think_time =", grp.max_think_time, file=fi)
print(indent * 2 + "responded_request_count =", grp.responded_request_count(None), file=fi)
print(indent * 2 + "unresponded_request_count =", grp.unresponded_request_count(None), file=fi)
print(indent * 2 + "avg_response_time =", grp.avg_response_time(None), file=fi)
print(indent * 2 + "std_dev_response_time =", grp.std_dev_response_time(None), file=fi)
print(indent*2 + "throughput =", grp.throughput(None), file=fi)
for txn in grp.svcs:
print(indent*2 + txn.svc_name + ":", file=fi)
print(indent * 3 + "responded_request_count =", grp.responded_request_count(txn), file=fi)
print(indent * 3 + "unresponded_request_count =", grp.unresponded_request_count(txn), file=fi)
print(indent * 3 + "avg_response_time =", grp.avg_response_time(txn), file=fi)
print(indent * 3 + "std_dev_response_time =", grp.std_dev_response_time(txn), file=fi)
print(indent*3 + "throughput =", grp.throughput(txn), file=fi)
finally:
if not fi == sys.stdout:
fi.close()
print("\n*** Done ***", file=fi)
if __name__ == "__main__":
print("\n\n\n@@@@@@@@@ Start comparative simulations @@@@@@@@@@")
microservices_example(num_users=700, weight1=2, weight2=1, server_range1=range(0, 10), server_range2=range(0, 10))
microservices_example(num_users=700, weight1=2, weight2=1, server_range1=range(0, 8), server_range2=range(8, 10))
#
# microservices_example(numUsers=700, weight1=5, weight2=1, serverRange1=range(0, 10), serverRange2=range(0, 10))
# microservices_example(numUsers=700, weight1=5, weight2=1, serverRange1=range(0, 8), serverRange2=range(8, 10))
#
# microservices_example(numUsers=700, weight1=1, weight2=1, serverRange1=range(0, 10), serverRange2=range(0, 10))
# microservices_example(numUsers=700, weight1=1, weight2=1, serverRange1=range(0, 8), serverRange2=range(8, 10))
#
# microservices_example(numUsers=700, weight1=1, weight2=1, serverRange1=range(0, 9), serverRange2=range(0, 9))
# microservices_example(numUsers=700, weight1=1, weight2=1, serverRange1=range(0, 7), serverRange2=range(7, 9))
# microservices_example(numUsers=700, weight1=1, weight2=1, serverRange1=range(0, 6), serverRange2=range(6, 9))
#
# microservices_example(numUsers=usersCurve, weight1=2, weight2=1, serverRange1=range(0, 10), serverRange2=range(0, 10))
# microservices_example(numUsers=usersCurve, weight1=2, weight2=1, serverRange1=range(0, 8), serverRange2=range(8, 10))
#
# microservices_example(numUsers=usersCurve, weight1=1, weight2=1, serverRange1=range(0, 9), serverRange2=range(0, 9))
# microservices_example(numUsers=usersCurve, weight1=1, weight2=1, serverRange1=range(0, 7), serverRange2=range(7, 9))
# microservices_example(numUsers=usersCurve, weight1=1, weight2=1, serverRange1=range(0, 6), serverRange2=range(6, 9))
| mit | -6,845,698,734,378,844,000 | 45.316129 | 124 | 0.587268 | false |
andreadean5/python-hpOneView | hpOneView/resources/storage/storage_volume_attachments.py | 1 | 6229 | # -*- coding: utf-8 -*-
###
# (C) Copyright (2012-2016) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future import standard_library
standard_library.install_aliases()
__title__ = 'storage-volume-attachments'
__version__ = '0.0.1'
__copyright__ = '(C) Copyright (2012-2016) Hewlett Packard Enterprise ' \
' Development LP'
__license__ = 'MIT'
__status__ = 'Development'
from hpOneView.resources.resource import ResourceClient
class StorageVolumeAttachments(object):
URI = '/rest/storage-volume-attachments'
def __init__(self, con):
self._connection = con
self._client = ResourceClient(con, self.URI)
def get_all(self, start=0, count=-1, filter='', sort=''):
"""
Gets a list of volume attachment resources
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all the items.
The actual number of items in the response may differ from the requested
count if the sum of start and count exceed the total number of items.
filter:
A general filter/query string to narrow the list of items returned. The
default is no filter - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time, with the oldest entry first.
Returns:
list: Volume attachment resources.
"""
return self._client.get_all(start, count, filter=filter, sort=sort)
def get_extra_unmanaged_storage_volumes(self, start=0, count=-1, filter='', sort=''):
"""
        Gets the list of extra unmanaged storage volumes.
        Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all the items.
The actual number of items in the response may differ from the requested
count if the sum of start and count exceed the total number of items.
filter:
A general filter/query string to narrow the list of items returned. The
default is no filter - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time, with the oldest entry first.
Returns:
list: extra unmanaged storage volumes
"""
uri = self.URI + "/repair?alertFixType=ExtraUnmanagedStorageVolumes"
return self._client.get(uri)
def remove_extra_presentations(self, resource, timeout=-1):
"""
Removes extra presentations from a specified server profile.
Args:
resource (dict): Object to create
timeout:
Timeout in seconds. Wait task completion by default. The timeout does not abort the operation
in OneView, just stops waiting for its completion.
Returns:
dict: associated storage attachment resource
"""
uri = self.URI + "/repair"
custom_headers = {'Accept-Language': 'en_US'}
return self._client.create(resource, uri=uri, timeout=timeout, custom_headers=custom_headers)
def get_paths(self, id_or_uri, path_id_or_uri=''):
"""
Gets all paths or a specific attachment path for the specified volume attachment
Args:
id_or_uri: Could be either the volume attachment id or the volume attachment uri
            path_id_or_uri: Could be either the path id or the path uri
Returns:
dict: path(s)
"""
if path_id_or_uri:
uri = self._client.build_uri(path_id_or_uri)
if "/paths" not in uri:
uri = self._client.build_uri(
id_or_uri) + "/paths" + "/" + path_id_or_uri
else:
uri = self._client.build_uri(id_or_uri) + "/paths"
return self._client.get(uri)
def get(self, id_or_uri):
"""
Gets a volume attachment by id or uri
Args:
id_or_uri: Could be either the volume attachment id or the volume attachment uri
Returns:
dict: volume attachment
"""
return self._client.get(id_or_uri)
def get_by(self, field, value):
"""
        Gets all volume attachments that match the filter.
        The search is case-insensitive.
Args:
field: field name to filter
value: value to filter
Returns:
list: List of volume attachments.
"""
return self._client.get_by(field, value)
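# Illustrative usage (sketch, not from the library docs; assumes `con` is an
# authenticated hpOneView connection object):
#   volume_attachments = StorageVolumeAttachments(con)
#   for attachment in volume_attachments.get_all(count=10, sort='name:ascending'):
#       print(attachment['uri'])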
| mit | -4,063,065,250,446,386,700 | 40.251656 | 109 | 0.62803 | false |
bvisness/the-blue-alliance | controllers/admin/admin_user_controller.py | 3 | 1795 | from datetime import datetime
import logging
import os
from google.appengine.api import users
from google.appengine.ext import ndb
from google.appengine.ext.webapp import template
from controllers.base_controller import LoggedInHandler
from models.account import Account
class AdminUserList(LoggedInHandler):
"""
List all Users.
"""
def get(self):
self._require_admin()
users = Account.query().order(Account.created).fetch(10000)
self.template_values.update({
"users": users,
})
path = os.path.join(os.path.dirname(__file__), '../../templates/admin/user_list.html')
self.response.out.write(template.render(path, self.template_values))
class AdminUserDetail(LoggedInHandler):
"""
Show a User.
"""
def get(self, user_id):
self._require_admin()
user = Account.get_by_id(user_id)
self.template_values.update({
"user": user
})
path = os.path.join(os.path.dirname(__file__), '../../templates/admin/user_details.html')
self.response.out.write(template.render(path, self.template_values))
class AdminUserEdit(LoggedInHandler):
"""
Edit a User.
"""
def get(self, user_id):
self._require_admin()
user = Account.get_by_id(user_id)
self.template_values.update({
"user": user
})
path = os.path.join(os.path.dirname(__file__), '../../templates/admin/user_edit.html')
self.response.out.write(template.render(path, self.template_values))
def post(self, user_id):
self._require_admin()
user = Account.get_by_id(user_id)
user.display_name = self.request.get("display_name")
user.put()
self.redirect("/admin/user/" + user_id)
| mit | 7,112,287,158,958,968,000 | 26.19697 | 97 | 0.623398 | false |
wschoenell/chimera | src/chimera/core/event.py | 5 | 1076 | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# chimera - observatory automation system
# Copyright (C) 2006-2007 P. Henrique Silva <[email protected]>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
from chimera.core.constants import EVENT_ATTRIBUTE_NAME
__all__ = ['event']
def event(method):
"""
Event annotation.
"""
setattr(method, EVENT_ATTRIBUTE_NAME, True)
return method
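# Illustrative usage (sketch; the Telescope class and slewComplete event are
# hypothetical examples). The decorator only tags the method, so Chimera's
# proxy machinery can later discover it via EVENT_ATTRIBUTE_NAME:
#   class Telescope(object):
#       @event
#       def slewComplete(self, position):
#           pass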
| gpl-2.0 | 6,942,478,345,447,145,000 | 30.647059 | 69 | 0.733271 | false |
weiyuanke/mykeystone | tests/test_policy.py | 3 | 6593 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
import tempfile
import urllib2
from keystone.common import policy as common_policy
from keystone import config
from keystone import exception
from keystone.policy.backends import rules
from keystone import test
CONF = config.CONF
class PolicyFileTestCase(test.TestCase):
def setUp(self):
super(PolicyFileTestCase, self).setUp()
rules.reset()
_unused, self.tmpfilename = tempfile.mkstemp()
self.opt(policy_file=self.tmpfilename)
self.target = {}
def tearDown(self):
super(PolicyFileTestCase, self).tearDown()
rules.reset()
def test_modified_policy_reloads(self):
action = "example:test"
empty_credentials = {}
with open(self.tmpfilename, "w") as policyfile:
policyfile.write("""{"example:test": []}""")
rules.enforce(empty_credentials, action, self.target)
with open(self.tmpfilename, "w") as policyfile:
policyfile.write("""{"example:test": ["false:false"]}""")
# NOTE(vish): reset stored policy cache so we don't have to sleep(1)
rules._POLICY_CACHE = {}
self.assertRaises(exception.Forbidden, rules.enforce,
empty_credentials, action, self.target)
class PolicyTestCase(test.TestCase):
def setUp(self):
super(PolicyTestCase, self).setUp()
rules.reset()
# NOTE(vish): preload rules to circumvent reloading from file
rules.init()
brain = {
"true": [],
"example:allowed": [],
"example:denied": [["false:false"]],
"example:get_http": [["http:http://www.example.com"]],
"example:my_file": [["role:compute_admin"],
["project_id:%(project_id)s"]],
"example:early_and_fail": [["false:false", "rule:true"]],
"example:early_or_success": [["rule:true"], ["false:false"]],
"example:lowercase_admin": [["role:admin"], ["role:sysadmin"]],
"example:uppercase_admin": [["role:ADMIN"], ["role:sysadmin"]],
}
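        # Rule semantics in common_policy: the outer list is OR'ed and each
        # inner list is AND'ed. For example, "example:early_and_fail"
        # requires both checks in its single inner list to pass, while
        # "example:early_or_success" passes if either inner group passes.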
# NOTE(vish): then overload underlying brain
common_policy.set_brain(common_policy.HttpBrain(brain))
self.credentials = {}
self.target = {}
def tearDown(self):
rules.reset()
super(PolicyTestCase, self).tearDown()
def test_enforce_nonexistent_action_throws(self):
action = "example:noexist"
self.assertRaises(exception.Forbidden, rules.enforce,
self.credentials, action, self.target)
def test_enforce_bad_action_throws(self):
action = "example:denied"
self.assertRaises(exception.Forbidden, rules.enforce,
self.credentials, action, self.target)
def test_enforce_good_action(self):
action = "example:allowed"
rules.enforce(self.credentials, action, self.target)
def test_enforce_http_true(self):
def fakeurlopen(url, post_data):
return StringIO.StringIO("True")
self.stubs.Set(urllib2, 'urlopen', fakeurlopen)
action = "example:get_http"
target = {}
result = rules.enforce(self.credentials, action, target)
self.assertEqual(result, None)
def test_enforce_http_false(self):
def fakeurlopen(url, post_data):
return StringIO.StringIO("False")
self.stubs.Set(urllib2, 'urlopen', fakeurlopen)
action = "example:get_http"
target = {}
self.assertRaises(exception.Forbidden, rules.enforce,
self.credentials, action, target)
def test_templatized_enforcement(self):
target_mine = {'project_id': 'fake'}
target_not_mine = {'project_id': 'another'}
credentials = {'project_id': 'fake', 'roles': []}
action = "example:my_file"
rules.enforce(credentials, action, target_mine)
self.assertRaises(exception.Forbidden, rules.enforce,
credentials, action, target_not_mine)
def test_early_AND_enforcement(self):
action = "example:early_and_fail"
self.assertRaises(exception.Forbidden, rules.enforce,
self.credentials, action, self.target)
def test_early_OR_enforcement(self):
action = "example:early_or_success"
rules.enforce(self.credentials, action, self.target)
def test_ignore_case_role_check(self):
lowercase_action = "example:lowercase_admin"
uppercase_action = "example:uppercase_admin"
# NOTE(dprince) we mix case in the Admin role here to ensure
# case is ignored
admin_credentials = {'roles': ['AdMiN']}
rules.enforce(admin_credentials, lowercase_action, self.target)
rules.enforce(admin_credentials, uppercase_action, self.target)
class DefaultPolicyTestCase(test.TestCase):
def setUp(self):
super(DefaultPolicyTestCase, self).setUp()
rules.reset()
rules.init()
self.brain = {
"default": [],
"example:exist": [["false:false"]]
}
self._set_brain('default')
self.credentials = {}
def _set_brain(self, default_rule):
brain = common_policy.HttpBrain(self.brain, default_rule)
common_policy.set_brain(brain)
    def tearDown(self):
        super(DefaultPolicyTestCase, self).tearDown()
        rules.reset()
def test_policy_called(self):
self.assertRaises(exception.Forbidden, rules.enforce,
self.credentials, "example:exist", {})
def test_not_found_policy_calls_default(self):
rules.enforce(self.credentials, "example:noexist", {})
def test_default_not_found(self):
self._set_brain("default_noexist")
self.assertRaises(exception.Forbidden, rules.enforce,
self.credentials, "example:noexist", {})
| apache-2.0 | -3,997,542,202,065,827,000 | 35.627778 | 78 | 0.62263 | false |
sgraham/nope | build/android/pylib/results/flakiness_dashboard/json_results_generator.py | 51 | 23492 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Most of this file was ported over from Blink's
# Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py
# Tools/Scripts/webkitpy/common/net/file_uploader.py
#
import json
import logging
import mimetypes
import os
import time
import urllib2
_log = logging.getLogger(__name__)
_JSON_PREFIX = 'ADD_RESULTS('
_JSON_SUFFIX = ');'
def HasJSONWrapper(string):
return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
def StripJSONWrapper(json_content):
# FIXME: Kill this code once the server returns json instead of jsonp.
if HasJSONWrapper(json_content):
return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
return json_content
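# e.g. StripJSONWrapper('ADD_RESULTS({"version":4});') == '{"version":4}'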
def WriteJSON(json_object, file_path, callback=None):
# Specify separators in order to get compact encoding.
json_string = json.dumps(json_object, separators=(',', ':'))
if callback:
json_string = callback + '(' + json_string + ');'
with open(file_path, 'w') as fp:
fp.write(json_string)
def ConvertTrieToFlatPaths(trie, prefix=None):
"""Flattens the trie of paths, prepending a prefix to each."""
result = {}
for name, data in trie.iteritems():
if prefix:
name = prefix + '/' + name
if len(data) and not 'results' in data:
result.update(ConvertTrieToFlatPaths(data, name))
else:
result[name] = data
return result
def AddPathToTrie(path, value, trie):
"""Inserts a single path and value into a directory trie structure."""
if not '/' in path:
trie[path] = value
return
directory, _slash, rest = path.partition('/')
if not directory in trie:
trie[directory] = {}
AddPathToTrie(rest, value, trie[directory])
def TestTimingsTrie(individual_test_timings):
"""Breaks a test name into dicts by directory
foo/bar/baz.html: 1ms
foo/bar/baz1.html: 3ms
becomes
foo: {
bar: {
baz.html: 1,
baz1.html: 3
}
}
"""
trie = {}
for test_result in individual_test_timings:
test = test_result.test_name
AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
return trie
class TestResult(object):
"""A simple class that represents a single test result."""
# Test modifier constants.
(NONE, FAILS, FLAKY, DISABLED) = range(4)
def __init__(self, test, failed=False, elapsed_time=0):
self.test_name = test
self.failed = failed
self.test_run_time = elapsed_time
test_name = test
try:
test_name = test.split('.')[1]
except IndexError:
_log.warn('Invalid test name: %s.', test)
if test_name.startswith('FAILS_'):
self.modifier = self.FAILS
elif test_name.startswith('FLAKY_'):
self.modifier = self.FLAKY
elif test_name.startswith('DISABLED_'):
self.modifier = self.DISABLED
else:
self.modifier = self.NONE
def Fixable(self):
return self.failed or self.modifier == self.DISABLED
class JSONResultsGeneratorBase(object):
"""A JSON results generator for generic tests."""
MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
# Min time (seconds) that will be added to the JSON.
MIN_TIME = 1
# Note that in non-chromium tests those chars are used to indicate
# test modifiers (FAILS, FLAKY, etc) but not actual test results.
PASS_RESULT = 'P'
SKIP_RESULT = 'X'
FAIL_RESULT = 'F'
FLAKY_RESULT = 'L'
NO_DATA_RESULT = 'N'
MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
TestResult.DISABLED: SKIP_RESULT,
TestResult.FAILS: FAIL_RESULT,
TestResult.FLAKY: FLAKY_RESULT}
VERSION = 4
VERSION_KEY = 'version'
RESULTS = 'results'
TIMES = 'times'
BUILD_NUMBERS = 'buildNumbers'
TIME = 'secondsSinceEpoch'
TESTS = 'tests'
FIXABLE_COUNT = 'fixableCount'
FIXABLE = 'fixableCounts'
ALL_FIXABLE_COUNT = 'allFixableCount'
RESULTS_FILENAME = 'results.json'
TIMES_MS_FILENAME = 'times_ms.json'
INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
# line too long pylint: disable=line-too-long
URL_FOR_TEST_LIST_JSON = (
'http://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&master=%s')
# pylint: enable=line-too-long
def __init__(self, builder_name, build_name, build_number,
results_file_base_path, builder_base_url,
test_results_map, svn_repositories=None,
test_results_server=None,
test_type='',
master_name=''):
"""Modifies the results.json file. Grabs it off the archive directory
if it is not found locally.
Args
builder_name: the builder name (e.g. Webkit).
build_name: the build name (e.g. webkit-rel).
build_number: the build number.
results_file_base_path: Absolute path to the directory containing the
results json file.
builder_base_url: the URL where we have the archived test results.
If this is None no archived results will be retrieved.
test_results_map: A dictionary that maps test_name to TestResult.
svn_repositories: A (json_field_name, svn_path) pair for SVN
repositories that tests rely on. The SVN revision will be
included in the JSON with the given json_field_name.
test_results_server: server that hosts test results json.
test_type: test type string (e.g. 'layout-tests').
master_name: the name of the buildbot master.
"""
self._builder_name = builder_name
self._build_name = build_name
self._build_number = build_number
self._builder_base_url = builder_base_url
self._results_directory = results_file_base_path
self._test_results_map = test_results_map
self._test_results = test_results_map.values()
self._svn_repositories = svn_repositories
if not self._svn_repositories:
self._svn_repositories = {}
self._test_results_server = test_results_server
self._test_type = test_type
self._master_name = master_name
self._archived_results = None
def GenerateJSONOutput(self):
json_object = self.GetJSON()
if json_object:
file_path = (
os.path.join(
self._results_directory,
self.INCREMENTAL_RESULTS_FILENAME))
WriteJSON(json_object, file_path)
def GenerateTimesMSFile(self):
times = TestTimingsTrie(self._test_results_map.values())
file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
WriteJSON(times, file_path)
def GetJSON(self):
"""Gets the results for the results.json file."""
    results_json, error = self._GetArchivedJSONResults()
    if error:
      # If there was an error don't write a results.json
      # file at all as it would lose all the information on the
      # bot.
      _log.error('Archive directory is inaccessible. Not '
                 'modifying or clobbering the results.json '
                 'file: ' + str(error))
      return None
builder_name = self._builder_name
if results_json and builder_name not in results_json:
_log.debug('Builder name (%s) is not in the results.json file.'
% builder_name)
self._ConvertJSONToCurrentVersion(results_json)
if builder_name not in results_json:
results_json[builder_name] = (
self._CreateResultsForBuilderJSON())
results_for_builder = results_json[builder_name]
if builder_name:
self._InsertGenericMetaData(results_for_builder)
self._InsertFailureSummaries(results_for_builder)
# Update the all failing tests with result type and time.
tests = results_for_builder[self.TESTS]
all_failing_tests = self._GetFailedTestNames()
all_failing_tests.update(ConvertTrieToFlatPaths(tests))
for test in all_failing_tests:
self._InsertTestTimeAndResult(test, tests)
return results_json
def SetArchivedResults(self, archived_results):
self._archived_results = archived_results
def UploadJSONFiles(self, json_files):
"""Uploads the given json_files to the test_results_server (if the
test_results_server is given)."""
if not self._test_results_server:
return
if not self._master_name:
_log.error(
'--test-results-server was set, but --master-name was not. Not '
'uploading JSON files.')
return
_log.info('Uploading JSON files for builder: %s', self._builder_name)
attrs = [('builder', self._builder_name),
('testtype', self._test_type),
('master', self._master_name)]
files = [(json_file, os.path.join(self._results_directory, json_file))
for json_file in json_files]
url = 'http://%s/testfile/upload' % self._test_results_server
# Set uploading timeout in case appengine server is having problems.
# 120 seconds are more than enough to upload test results.
uploader = _FileUploader(url, 120)
try:
response = uploader.UploadAsMultipartFormData(files, attrs)
if response:
if response.code == 200:
_log.info('JSON uploaded.')
else:
_log.debug(
"JSON upload failed, %d: '%s'" %
(response.code, response.read()))
else:
_log.error('JSON upload failed; no response returned')
except Exception, err:
_log.error('Upload failed: %s' % err)
return
def _GetTestTiming(self, test_name):
"""Returns test timing data (elapsed time) in second
for the given test_name."""
if test_name in self._test_results_map:
# Floor for now to get time in seconds.
return int(self._test_results_map[test_name].test_run_time)
return 0
def _GetFailedTestNames(self):
"""Returns a set of failed test names."""
return set([r.test_name for r in self._test_results if r.failed])
def _GetModifierChar(self, test_name):
"""Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
for the given test_name.
"""
if test_name not in self._test_results_map:
return self.__class__.NO_DATA_RESULT
test_result = self._test_results_map[test_name]
if test_result.modifier in self.MODIFIER_TO_CHAR.keys():
return self.MODIFIER_TO_CHAR[test_result.modifier]
return self.__class__.PASS_RESULT
  def _GetResultChar(self, test_name):
"""Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
for the given test_name.
"""
if test_name not in self._test_results_map:
return self.__class__.NO_DATA_RESULT
test_result = self._test_results_map[test_name]
if test_result.modifier == TestResult.DISABLED:
return self.__class__.SKIP_RESULT
if test_result.failed:
return self.__class__.FAIL_RESULT
return self.__class__.PASS_RESULT
def _GetSVNRevision(self, in_directory):
"""Returns the svn revision for the given directory.
Args:
in_directory: The directory where svn is to be run.
"""
# This is overridden in flakiness_dashboard_results_uploader.py.
raise NotImplementedError()
def _GetArchivedJSONResults(self):
"""Download JSON file that only contains test
name list from test-results server. This is for generating incremental
JSON so the file generated has info for tests that failed before but
pass or are skipped from current run.
Returns (archived_results, error) tuple where error is None if results
were successfully read.
"""
results_json = {}
old_results = None
error = None
if not self._test_results_server:
return {}, None
results_file_url = (self.URL_FOR_TEST_LIST_JSON %
(urllib2.quote(self._test_results_server),
urllib2.quote(self._builder_name),
self.RESULTS_FILENAME,
urllib2.quote(self._test_type),
urllib2.quote(self._master_name)))
try:
# FIXME: We should talk to the network via a Host object.
results_file = urllib2.urlopen(results_file_url)
old_results = results_file.read()
except urllib2.HTTPError, http_error:
# A non-4xx status code means the bot is hosed for some reason
# and we can't grab the results.json file off of it.
      if http_error.code < 400 or http_error.code >= 500:
error = http_error
except urllib2.URLError, url_error:
error = url_error
if old_results:
# Strip the prefix and suffix so we can get the actual JSON object.
old_results = StripJSONWrapper(old_results)
try:
results_json = json.loads(old_results)
except Exception:
_log.debug('results.json was not valid JSON. Clobbering.')
# The JSON file is not valid JSON. Just clobber the results.
results_json = {}
else:
_log.debug('Old JSON results do not exist. Starting fresh.')
results_json = {}
return results_json, error
def _InsertFailureSummaries(self, results_for_builder):
"""Inserts aggregate pass/failure statistics into the JSON.
This method reads self._test_results and generates
FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
Args:
results_for_builder: Dictionary containing the test results for a
single builder.
"""
# Insert the number of tests that failed or skipped.
fixable_count = len([r for r in self._test_results if r.Fixable()])
self._InsertItemIntoRawList(results_for_builder,
fixable_count, self.FIXABLE_COUNT)
# Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
entry = {}
for test_name in self._test_results_map.iterkeys():
result_char = self._GetModifierChar(test_name)
entry[result_char] = entry.get(result_char, 0) + 1
# Insert the pass/skip/failure summary dictionary.
self._InsertItemIntoRawList(results_for_builder, entry,
self.FIXABLE)
# Insert the number of all the tests that are supposed to pass.
all_test_count = len(self._test_results)
self._InsertItemIntoRawList(results_for_builder,
all_test_count, self.ALL_FIXABLE_COUNT)
def _InsertItemIntoRawList(self, results_for_builder, item, key):
"""Inserts the item into the list with the given key in the results for
this builder. Creates the list if no such list exists.
Args:
results_for_builder: Dictionary containing the test results for a
single builder.
item: Number or string to insert into the list.
key: Key in results_for_builder for the list to insert into.
"""
if key in results_for_builder:
raw_list = results_for_builder[key]
else:
raw_list = []
raw_list.insert(0, item)
raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
results_for_builder[key] = raw_list
def _InsertItemRunLengthEncoded(self, item, encoded_results):
"""Inserts the item into the run-length encoded results.
Args:
item: String or number to insert.
encoded_results: run-length encoded results. An array of arrays, e.g.
[[3,'A'],[1,'Q']] encodes AAAQ.
"""
if len(encoded_results) and item == encoded_results[0][1]:
num_results = encoded_results[0][0]
if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
encoded_results[0][0] = num_results + 1
else:
# Use a list instead of a class for the run-length encoding since
# we want the serialized form to be concise.
encoded_results.insert(0, [1, item])
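    # e.g. inserting 'A' into [[3,'A'],[1,'Q']] yields [[4,'A'],[1,'Q']],
    # while inserting 'B' yields [[1,'B'],[3,'A'],[1,'Q']].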
def _InsertGenericMetaData(self, results_for_builder):
""" Inserts generic metadata (such as version number, current time etc)
into the JSON.
Args:
results_for_builder: Dictionary containing the test results for
a single builder.
"""
self._InsertItemIntoRawList(results_for_builder,
self._build_number, self.BUILD_NUMBERS)
# Include SVN revisions for the given repositories.
for (name, path) in self._svn_repositories:
# Note: for JSON file's backward-compatibility we use 'chrome' rather
# than 'chromium' here.
lowercase_name = name.lower()
if lowercase_name == 'chromium':
lowercase_name = 'chrome'
self._InsertItemIntoRawList(results_for_builder,
self._GetSVNRevision(path),
lowercase_name + 'Revision')
self._InsertItemIntoRawList(results_for_builder,
int(time.time()),
self.TIME)
def _InsertTestTimeAndResult(self, test_name, tests):
""" Insert a test item with its results to the given tests dictionary.
Args:
tests: Dictionary containing test result entries.
"""
    result = self._GetResultChar(test_name)
test_time = self._GetTestTiming(test_name)
this_test = tests
for segment in test_name.split('/'):
if segment not in this_test:
this_test[segment] = {}
this_test = this_test[segment]
if not len(this_test):
self._PopulateResultsAndTimesJSON(this_test)
if self.RESULTS in this_test:
self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
else:
this_test[self.RESULTS] = [[1, result]]
if self.TIMES in this_test:
self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
else:
this_test[self.TIMES] = [[1, test_time]]
def _ConvertJSONToCurrentVersion(self, results_json):
"""If the JSON does not match the current version, converts it to the
current version and adds in the new version number.
"""
if self.VERSION_KEY in results_json:
archive_version = results_json[self.VERSION_KEY]
if archive_version == self.VERSION:
return
else:
archive_version = 3
# version 3->4
if archive_version == 3:
for results in results_json.values():
self._ConvertTestsToTrie(results)
results_json[self.VERSION_KEY] = self.VERSION
def _ConvertTestsToTrie(self, results):
if not self.TESTS in results:
return
test_results = results[self.TESTS]
test_results_trie = {}
for test in test_results.iterkeys():
single_test_result = test_results[test]
AddPathToTrie(test, single_test_result, test_results_trie)
results[self.TESTS] = test_results_trie
def _PopulateResultsAndTimesJSON(self, results_and_times):
results_and_times[self.RESULTS] = []
results_and_times[self.TIMES] = []
return results_and_times
def _CreateResultsForBuilderJSON(self):
results_for_builder = {}
results_for_builder[self.TESTS] = {}
return results_for_builder
def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
"""Removes items from the run-length encoded list after the final
item that exceeds the max number of builds to track.
Args:
encoded_results: run-length encoded results. An array of arrays, e.g.
[[3,'A'],[1,'Q']] encodes AAAQ.
"""
num_builds = 0
index = 0
for result in encoded_list:
num_builds = num_builds + result[0]
index = index + 1
if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
return encoded_list[:index]
return encoded_list
def _NormalizeResultsJSON(self, test, test_name, tests):
""" Prune tests where all runs pass or tests that no longer exist and
truncate all results to maxNumberOfBuilds.
Args:
test: ResultsAndTimes object for this test.
test_name: Name of the test.
tests: The JSON object with all the test results for this builder.
"""
test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
test[self.RESULTS])
test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
test[self.TIMES])
is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
self.PASS_RESULT)
is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
self.NO_DATA_RESULT)
max_time = max([test_time[1] for test_time in test[self.TIMES]])
# Remove all passes/no-data from the results to reduce noise and
# filesize. If a test passes every run, but takes > MIN_TIME to run,
# don't throw away the data.
if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
del tests[test_name]
# method could be a function pylint: disable=R0201
def _IsResultsAllOfType(self, results, result_type):
"""Returns whether all the results are of the given type
(e.g. all passes)."""
return len(results) == 1 and results[0][1] == result_type
class _FileUploader(object):
def __init__(self, url, timeout_seconds):
self._url = url
self._timeout_seconds = timeout_seconds
def UploadAsMultipartFormData(self, files, attrs):
file_objs = []
for filename, path in files:
with file(path, 'rb') as fp:
file_objs.append(('file', filename, fp.read()))
# FIXME: We should use the same variable names for the formal and actual
# parameters.
content_type, data = _EncodeMultipartFormData(attrs, file_objs)
return self._UploadData(content_type, data)
def _UploadData(self, content_type, data):
start = time.time()
end = start + self._timeout_seconds
while time.time() < end:
try:
request = urllib2.Request(self._url, data,
{'Content-Type': content_type})
return urllib2.urlopen(request)
except urllib2.HTTPError as e:
_log.warn("Received HTTP status %s loading \"%s\". "
'Retrying in 10 seconds...' % (e.code, e.filename))
time.sleep(10)
def _GetMIMEType(filename):
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
# FIXME: Rather than taking tuples, this function should take more
# structured data.
def _EncodeMultipartFormData(fields, files):
"""Encode form fields for multipart/form-data.
Args:
fields: A sequence of (name, value) elements for regular form fields.
files: A sequence of (name, filename, value) elements for data to be
uploaded as files.
Returns:
(content_type, body) ready for httplib.HTTP instance.
Source:
http://code.google.com/p/rietveld/source/browse/trunk/upload.py
"""
BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
CRLF = '\r\n'
lines = []
for key, value in fields:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"' % key)
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
for key, filename, value in files:
lines.append('--' + BOUNDARY)
lines.append('Content-Disposition: form-data; name="%s"; '
'filename="%s"' % (key, filename))
lines.append('Content-Type: %s' % _GetMIMEType(filename))
lines.append('')
if isinstance(value, unicode):
value = value.encode('utf-8')
lines.append(value)
lines.append('--' + BOUNDARY + '--')
lines.append('')
body = CRLF.join(lines)
content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
return content_type, body
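# The returned body is a CRLF-joined sequence: one '--' + BOUNDARY block per
# field/file with its Content-Disposition header (plus Content-Type for
# files), closed by a final '--' + BOUNDARY + '--' line.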
| bsd-3-clause | -5,855,518,757,506,057,000 | 32.704448 | 83 | 0.650477 | false |
RobberPhex/mysql-connector-python | lib/mysqlx/protocol.py | 2 | 16120 | # MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Implementation of the X protocol for MySQL servers."""
import struct
from .protobuf import mysqlx_pb2 as MySQLx
from .protobuf import mysqlx_session_pb2 as MySQLxSession
from .protobuf import mysqlx_sql_pb2 as MySQLxSQL
from .protobuf import mysqlx_notice_pb2 as MySQLxNotice
from .protobuf import mysqlx_datatypes_pb2 as MySQLxDatatypes
from .protobuf import mysqlx_resultset_pb2 as MySQLxResultset
from .protobuf import mysqlx_crud_pb2 as MySQLxCrud
from .protobuf import mysqlx_expr_pb2 as MySQLxExpr
from .protobuf import mysqlx_connection_pb2 as MySQLxConnection
from .result import ColumnMetaData
from .compat import STRING_TYPES, INT_TYPES
from .dbdoc import DbDoc
from .errors import InterfaceError, OperationalError, ProgrammingError
from .expr import (ExprParser, build_null_scalar, build_string_scalar,
build_bool_scalar, build_double_scalar, build_int_scalar)
_SERVER_MESSAGES = [
(MySQLx.ServerMessages.SESS_AUTHENTICATE_CONTINUE,
MySQLxSession.AuthenticateContinue),
(MySQLx.ServerMessages.SESS_AUTHENTICATE_OK,
MySQLxSession.AuthenticateOk),
(MySQLx.ServerMessages.SQL_STMT_EXECUTE_OK, MySQLxSQL.StmtExecuteOk),
(MySQLx.ServerMessages.ERROR, MySQLx.Error),
(MySQLx.ServerMessages.NOTICE, MySQLxNotice.Frame),
(MySQLx.ServerMessages.RESULTSET_COLUMN_META_DATA,
MySQLxResultset.ColumnMetaData),
(MySQLx.ServerMessages.RESULTSET_ROW, MySQLxResultset.Row),
(MySQLx.ServerMessages.RESULTSET_FETCH_DONE, MySQLxResultset.FetchDone),
(MySQLx.ServerMessages.RESULTSET_FETCH_DONE_MORE_RESULTSETS,
MySQLxResultset.FetchDoneMoreResultsets),
(MySQLx.ServerMessages.OK, MySQLx.Ok),
(MySQLx.ServerMessages.CONN_CAPABILITIES, MySQLxConnection.Capabilities),
]
def encode_to_bytes(value, encoding="utf-8"):
return value if isinstance(value, bytes) else value.encode(encoding)
class MessageReaderWriter(object):
def __init__(self, socket_stream):
self._stream = socket_stream
self._msg = None
def push_message(self, msg):
if self._msg is not None:
raise OperationalError("Message push slot is full")
self._msg = msg
def read_message(self):
if self._msg is not None:
m = self._msg
self._msg = None
return m
return self._read_message()
def _read_message(self):
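        # X Protocol frame layout: little-endian uint32 length (counting the
        # 1-byte message type plus payload) followed by the type byte.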
hdr = self._stream.read(5)
msg_len, msg_type = struct.unpack("<LB", hdr)
payload = self._stream.read(msg_len - 1)
for msg_tuple in _SERVER_MESSAGES:
if msg_tuple[0] == msg_type:
msg = msg_tuple[1]()
msg.ParseFromString(payload)
return msg
raise ValueError("Unknown msg_type: {0}".format(msg_type))
def write_message(self, msg_id, msg):
msg_str = msg.SerializeToString()
header = struct.pack("<LB", len(msg_str) + 1, msg_id)
self._stream.sendall(b"".join([header, msg_str]))
class Protocol(object):
def __init__(self, reader_writer):
self._reader = reader_writer
self._writer = reader_writer
self._message = None
def get_capabilites(self):
msg = MySQLxConnection.CapabilitiesGet()
self._writer.write_message(
MySQLx.ClientMessages.CON_CAPABILITIES_GET, msg)
return self._reader.read_message()
def set_capabilities(self, **kwargs):
msg = MySQLxConnection.CapabilitiesSet()
for key, value in kwargs.items():
value = self._create_any(value)
capability = MySQLxConnection.Capability(name=key, value=value)
msg.capabilities.capabilities.extend([capability])
self._writer.write_message(
MySQLx.ClientMessages.CON_CAPABILITIES_SET, msg)
return self.read_ok()
def send_auth_start(self, method):
msg = MySQLxSession.AuthenticateStart(mech_name=method)
self._writer.write_message(
MySQLx.ClientMessages.SESS_AUTHENTICATE_START, msg)
def read_auth_continue(self):
msg = self._reader.read_message()
if not isinstance(msg, MySQLxSession.AuthenticateContinue):
raise InterfaceError("Unexpected message encountered during "
"authentication handshake")
return msg.auth_data
def send_auth_continue(self, data):
msg = MySQLxSession.AuthenticateContinue(
auth_data=encode_to_bytes(data))
self._writer.write_message(
MySQLx.ClientMessages.SESS_AUTHENTICATE_CONTINUE, msg)
def read_auth_ok(self):
while True:
msg = self._reader.read_message()
if isinstance(msg, MySQLxSession.AuthenticateOk):
break
if isinstance(msg, MySQLx.Error):
raise InterfaceError(msg.msg)
def get_binding_scalars(self, statement):
count = len(statement._binding_map)
scalars = count * [None]
for binding in statement._bindings:
name = binding["name"]
if name not in statement._binding_map:
raise ProgrammingError("Unable to find placeholder for "
"parameter: {0}".format(name))
pos = statement._binding_map[name]
scalars[pos] = self.arg_object_to_scalar(binding["value"],
not statement._doc_based)
return scalars
def _apply_filter(self, message, statement):
if statement._has_where:
message.criteria.CopyFrom(statement._where_expr)
if statement._has_bindings:
message.args.extend(self.get_binding_scalars(statement))
if statement._has_limit:
message.limit.row_count = statement._limit_row_count
message.limit.offset = statement._limit_offset
if statement._has_sort:
message.order.extend(statement._sort_expr)
if statement._has_group_by:
message.grouping.extend(statement._grouping)
if statement._has_having:
message.grouping_criteria.CopyFrom(statement._having)
def send_find(self, stmt):
find = MySQLxCrud.Find(
data_model=(MySQLxCrud.DOCUMENT
if stmt._doc_based else MySQLxCrud.TABLE),
collection=MySQLxCrud.Collection(name=stmt.target.name,
schema=stmt.schema.name))
if stmt._has_projection:
find.projection.extend(stmt._projection_expr)
self._apply_filter(find, stmt)
self._writer.write_message(MySQLx.ClientMessages.CRUD_FIND, find)
def send_update(self, statement):
update = MySQLxCrud.Update(
data_model=(MySQLxCrud.DOCUMENT
if statement._doc_based else MySQLxCrud.TABLE),
collection=MySQLxCrud.Collection(name=statement.target.name,
schema=statement.schema.name))
self._apply_filter(update, statement)
for update_op in statement._update_ops:
opexpr = MySQLxCrud.UpdateOperation(
operation=update_op.update_type, source=update_op.source)
if update_op.value is not None:
opexpr.value.CopyFrom(
self.arg_object_to_expr(
update_op.value, not statement._doc_based))
update.operation.extend([opexpr])
self._writer.write_message(MySQLx.ClientMessages.CRUD_UPDATE, update)
def send_delete(self, stmt):
delete = MySQLxCrud.Delete(
data_model=(MySQLxCrud.DOCUMENT
if stmt._doc_based else MySQLxCrud.TABLE),
collection=MySQLxCrud.Collection(name=stmt.target.name,
schema=stmt.schema.name))
self._apply_filter(delete, stmt)
self._writer.write_message(MySQLx.ClientMessages.CRUD_DELETE, delete)
def send_execute_statement(self, namespace, stmt, args):
stmt = MySQLxSQL.StmtExecute(namespace=namespace,
stmt=encode_to_bytes(stmt),
compact_metadata=False)
for arg in args:
value = self._create_any(arg)
stmt.args.extend([value])
self._writer.write_message(MySQLx.ClientMessages.SQL_STMT_EXECUTE,
stmt)
def send_insert(self, statement):
insert = MySQLxCrud.Insert(
data_model=(MySQLxCrud.DOCUMENT
if statement._doc_based else MySQLxCrud.TABLE),
collection=MySQLxCrud.Collection(name=statement.target.name,
schema=statement.schema.name))
if hasattr(statement, "_fields"):
for field in statement._fields:
insert.projection.extend([
ExprParser(field, not statement._doc_based)
.parse_table_insert_field()])
for value in statement._values:
row = MySQLxCrud.Insert.TypedRow()
if isinstance(value, list):
for val in value:
obj = self.arg_object_to_expr(
val, not statement._doc_based)
row.field.extend([obj])
else:
obj = self.arg_object_to_expr(value, not statement._doc_based)
row.field.extend([obj])
insert.row.extend([row])
self._writer.write_message(MySQLx.ClientMessages.CRUD_INSERT, insert)
def _create_any(self, arg):
if isinstance(arg, STRING_TYPES):
val = MySQLxDatatypes.Scalar.String(value=encode_to_bytes(arg))
scalar = MySQLxDatatypes.Scalar(type=8, v_string=val)
return MySQLxDatatypes.Any(type=1, scalar=scalar)
elif isinstance(arg, bool):
return MySQLxDatatypes.Any(type=1, scalar=build_bool_scalar(arg))
elif isinstance(arg, INT_TYPES):
return MySQLxDatatypes.Any(type=1, scalar=build_int_scalar(arg))
return None
def close_result(self, rs):
msg = self._read_message(rs)
if msg is not None:
raise OperationalError("Expected to close the result")
def read_row(self, rs):
msg = self._read_message(rs)
if msg is None:
return None
if isinstance(msg, MySQLxResultset.Row):
return msg
self._reader.push_message(msg)
return None
def _process_frame(self, msg, rs):
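        # Notice frame payload types: 1 = Warning, 2 = SessionVariableChanged,
        # 3 = SessionStateChanged.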
if msg.type == 1:
warningMsg = MySQLxNotice.Warning()
warningMsg.ParseFromString(msg.payload)
rs._warnings.append(Warning(warningMsg.level, warningMsg.code,
warningMsg.msg))
elif msg.type == 2:
sessVarMsg = MySQLxNotice.SessionVariableChanged()
sessVarMsg.ParseFromString(msg.payload)
elif msg.type == 3:
sessStateMsg = MySQLxNotice.SessionStateChanged()
sessStateMsg.ParseFromString(msg.payload)
if sessStateMsg.param == \
MySQLxNotice.SessionStateChanged.ROWS_AFFECTED:
rs._rows_affected = sessStateMsg.value.v_unsigned_int
elif sessStateMsg.param == \
MySQLxNotice.SessionStateChanged.GENERATED_INSERT_ID:
rs._generated_id = sessStateMsg.value.v_unsigned_int
def _read_message(self, rs):
while True:
msg = self._reader.read_message()
if isinstance(msg, MySQLx.Error):
raise OperationalError(msg.msg)
elif isinstance(msg, MySQLxNotice.Frame):
self._process_frame(msg, rs)
elif isinstance(msg, MySQLxSQL.StmtExecuteOk):
return None
elif isinstance(msg, MySQLxResultset.FetchDone):
rs._closed = True
elif isinstance(msg, MySQLxResultset.FetchDoneMoreResultsets):
rs._has_more_results = True
else:
break
return msg
def get_column_metadata(self, rs):
columns = []
while True:
msg = self._read_message(rs)
if msg is None:
break
if isinstance(msg, MySQLxResultset.Row):
self._reader.push_message(msg)
break
if not isinstance(msg, MySQLxResultset.ColumnMetaData):
raise InterfaceError("Unexpected msg type")
col = ColumnMetaData(msg.type, msg.catalog, msg.schema, msg.table,
msg.original_table, msg.name,
msg.original_name, msg.length, msg.collation,
msg.fractional_digits, msg.flags,
msg.content_type)
columns.append(col)
return columns
def arg_object_to_expr(self, value, allow_relational):
if value is None:
return MySQLxExpr.Expr(type=MySQLxExpr.Expr.LITERAL,
literal=build_null_scalar())
if isinstance(value, bool):
return MySQLxExpr.Expr(type=MySQLxExpr.Expr.LITERAL,
literal=build_bool_scalar(value))
elif isinstance(value, INT_TYPES):
return MySQLxExpr.Expr(type=MySQLxExpr.Expr.LITERAL,
literal=build_int_scalar(value))
elif isinstance(value, (float)):
return MySQLxExpr.Expr(type=MySQLxExpr.Expr.LITERAL,
literal=build_double_scalar(value))
elif isinstance(value, STRING_TYPES):
try:
expression = ExprParser(value, allow_relational).expr()
if expression.has_identifier():
return MySQLxExpr.Expr(type=MySQLxExpr.Expr.LITERAL,
literal=build_string_scalar(value))
return expression
except:
return MySQLxExpr.Expr(type=MySQLxExpr.Expr.LITERAL,
literal=build_string_scalar(value))
elif isinstance(value, DbDoc):
return MySQLxExpr.Expr(type=MySQLxExpr.Expr.LITERAL,
literal=build_string_scalar(str(value)))
raise InterfaceError("Unsupported type: {0}".format(type(value)))
def arg_object_to_scalar(self, value, allow_relational):
return self.arg_object_to_expr(value, allow_relational).literal
def read_ok(self):
msg = self._reader.read_message()
if isinstance(msg, MySQLx.Error):
raise InterfaceError(msg.msg)
if not isinstance(msg, MySQLx.Ok):
raise InterfaceError("Unexpected message encountered")
def send_close(self):
msg = MySQLxSession.Close()
self._writer.write_message(MySQLx.ClientMessages.SESS_CLOSE, msg)
| gpl-2.0 | -6,779,041,607,374,676,000 | 41.986667 | 78 | 0.611414 | false |
rishig/zulip | zerver/lib/queue.py | 1 | 14151 |
from collections import defaultdict
import logging
import random
import threading
import time
from typing import Any, Callable, Dict, List, Mapping, Optional, Set, Union
from django.conf import settings
import pika
import pika.adapters.tornado_connection
from pika.adapters.blocking_connection import BlockingChannel
from pika.spec import Basic
from tornado import ioloop
import ujson
from zerver.lib.utils import statsd
MAX_REQUEST_RETRIES = 3
Consumer = Callable[[BlockingChannel, Basic.Deliver, pika.BasicProperties, str], None]
# This simple queuing library doesn't expose much of the power of
# rabbitmq/pika's queuing system; its purpose is to just provide an
# interface for external files to put things into queues and take them
# out from bots without having to import pika code all over our codebase.
class SimpleQueueClient:
def __init__(self,
# Disable RabbitMQ heartbeats by default because BlockingConnection can't process them
rabbitmq_heartbeat: Optional[int] = 0,
) -> None:
self.log = logging.getLogger('zulip.queue')
self.queues = set() # type: Set[str]
self.channel = None # type: Optional[BlockingChannel]
self.consumers = defaultdict(set) # type: Dict[str, Set[Consumer]]
self.rabbitmq_heartbeat = rabbitmq_heartbeat
self._connect()
def _connect(self) -> None:
start = time.time()
self.connection = pika.BlockingConnection(self._get_parameters())
self.channel = self.connection.channel()
self.log.info('SimpleQueueClient connected (connecting took %.3fs)' % (time.time() - start,))
def _reconnect(self) -> None:
self.connection = None
self.channel = None
self.queues = set()
self._connect()
def _get_parameters(self) -> pika.ConnectionParameters:
# We explicitly disable the RabbitMQ heartbeat feature, since
# it doesn't make sense with BlockingConnection
credentials = pika.PlainCredentials(settings.RABBITMQ_USERNAME,
settings.RABBITMQ_PASSWORD)
return pika.ConnectionParameters(settings.RABBITMQ_HOST,
heartbeat_interval=self.rabbitmq_heartbeat,
credentials=credentials)
def _generate_ctag(self, queue_name: str) -> str:
return "%s_%s" % (queue_name, str(random.getrandbits(16)))
def _reconnect_consumer_callback(self, queue: str, consumer: Consumer) -> None:
self.log.info("Queue reconnecting saved consumer %s to queue %s" % (consumer, queue))
self.ensure_queue(queue, lambda: self.channel.basic_consume(consumer,
queue=queue,
consumer_tag=self._generate_ctag(queue)))
def _reconnect_consumer_callbacks(self) -> None:
for queue, consumers in self.consumers.items():
for consumer in consumers:
self._reconnect_consumer_callback(queue, consumer)
def close(self) -> None:
if self.connection:
self.connection.close()
def ready(self) -> bool:
return self.channel is not None
def ensure_queue(self, queue_name: str, callback: Callable[[], None]) -> None:
'''Ensure that a given queue has been declared, and then call
the callback with no arguments.'''
if self.connection is None or not self.connection.is_open:
self._connect()
if queue_name not in self.queues:
self.channel.queue_declare(queue=queue_name, durable=True)
self.queues.add(queue_name)
callback()
def publish(self, queue_name: str, body: str) -> None:
def do_publish() -> None:
self.channel.basic_publish(
exchange='',
routing_key=queue_name,
properties=pika.BasicProperties(delivery_mode=2),
body=body)
statsd.incr("rabbitmq.publish.%s" % (queue_name,))
self.ensure_queue(queue_name, do_publish)
def json_publish(self, queue_name: str, body: Union[Mapping[str, Any], str]) -> None:
        # Union type because zerver.middleware.write_log_line passes a str
try:
self.publish(queue_name, ujson.dumps(body))
return
except pika.exceptions.AMQPConnectionError:
self.log.warning("Failed to send to rabbitmq, trying to reconnect and send again")
self._reconnect()
self.publish(queue_name, ujson.dumps(body))
def register_consumer(self, queue_name: str, consumer: Consumer) -> None:
def wrapped_consumer(ch: BlockingChannel,
method: Basic.Deliver,
properties: pika.BasicProperties,
body: str) -> None:
try:
consumer(ch, method, properties, body)
ch.basic_ack(delivery_tag=method.delivery_tag)
except Exception as e:
ch.basic_nack(delivery_tag=method.delivery_tag)
raise e
self.consumers[queue_name].add(wrapped_consumer)
self.ensure_queue(queue_name,
lambda: self.channel.basic_consume(wrapped_consumer, queue=queue_name,
consumer_tag=self._generate_ctag(queue_name)))
def register_json_consumer(self, queue_name: str,
callback: Callable[[Dict[str, Any]], None]) -> None:
def wrapped_callback(ch: BlockingChannel,
method: Basic.Deliver,
properties: pika.BasicProperties,
body: str) -> None:
callback(ujson.loads(body))
self.register_consumer(queue_name, wrapped_callback)
def drain_queue(self, queue_name: str, json: bool=False) -> List[Dict[str, Any]]:
"Returns all messages in the desired queue"
messages = []
def opened() -> None:
while True:
(meta, _, message) = self.channel.basic_get(queue_name)
if not message:
break
self.channel.basic_ack(meta.delivery_tag)
if json:
message = ujson.loads(message)
messages.append(message)
self.ensure_queue(queue_name, opened)
return messages
def start_consuming(self) -> None:
self.channel.start_consuming()
def stop_consuming(self) -> None:
self.channel.stop_consuming()
# Patch pika.adapters.tornado_connection.TornadoConnection so that a socket error doesn't
# throw an exception and disconnect the tornado process from the rabbitmq
# queue. Instead, just re-connect as usual
class ExceptionFreeTornadoConnection(pika.adapters.tornado_connection.TornadoConnection):
def _adapter_disconnect(self) -> None:
try:
super()._adapter_disconnect()
except (pika.exceptions.ProbableAuthenticationError,
pika.exceptions.ProbableAccessDeniedError,
pika.exceptions.IncompatibleProtocolError) as e:
logging.warning("Caught exception '%r' in ExceptionFreeTornadoConnection when \
calling _adapter_disconnect, ignoring" % (e,))
class TornadoQueueClient(SimpleQueueClient):
# Based on:
# https://pika.readthedocs.io/en/0.9.8/examples/asynchronous_consumer_example.html
def __init__(self) -> None:
super().__init__(
# TornadoConnection can process heartbeats, so enable them.
rabbitmq_heartbeat=None)
self._on_open_cbs = [] # type: List[Callable[[], None]]
self._connection_failure_count = 0
def _connect(self) -> None:
self.log.info("Beginning TornadoQueueClient connection")
self.connection = ExceptionFreeTornadoConnection(
self._get_parameters(),
on_open_callback = self._on_open,
on_open_error_callback = self._on_connection_open_error,
on_close_callback = self._on_connection_closed,
)
def _reconnect(self) -> None:
self.connection = None
self.channel = None
self.queues = set()
self.log.warning("TornadoQueueClient attempting to reconnect to RabbitMQ")
self._connect()
CONNECTION_RETRY_SECS = 2
# When the RabbitMQ server is restarted, it's normal for it to
# take a few seconds to come back; we'll retry a few times and all
# will be well. So for the first few failures, we report only at
# "warning" level, avoiding an email to the server admin.
#
# A loss of an existing connection starts a retry loop just like a
# failed connection attempt, so it counts as the first failure.
#
# On an unloaded test system, a RabbitMQ restart takes about 6s,
# potentially causing 4 failures. We add some headroom above that.
CONNECTION_FAILURES_BEFORE_NOTIFY = 10
def _on_connection_open_error(self, connection: pika.connection.Connection,
message: Optional[str]=None) -> None:
self._connection_failure_count += 1
retry_secs = self.CONNECTION_RETRY_SECS
message = ("TornadoQueueClient couldn't connect to RabbitMQ, retrying in %d secs..."
% (retry_secs,))
if self._connection_failure_count > self.CONNECTION_FAILURES_BEFORE_NOTIFY:
self.log.critical(message)
else:
self.log.warning(message)
ioloop.IOLoop.instance().call_later(retry_secs, self._reconnect)
def _on_connection_closed(self, connection: pika.connection.Connection,
reply_code: int, reply_text: str) -> None:
self._connection_failure_count = 1
retry_secs = self.CONNECTION_RETRY_SECS
self.log.warning("TornadoQueueClient lost connection to RabbitMQ, reconnecting in %d secs..."
% (retry_secs,))
ioloop.IOLoop.instance().call_later(retry_secs, self._reconnect)
def _on_open(self, connection: pika.connection.Connection) -> None:
self._connection_failure_count = 0
try:
self.connection.channel(
on_open_callback = self._on_channel_open)
except pika.exceptions.ConnectionClosed:
# The connection didn't stay open long enough for this code to get to it.
# Let _on_connection_closed deal with trying again.
self.log.warning("TornadoQueueClient couldn't open channel: connection already closed")
def _on_channel_open(self, channel: BlockingChannel) -> None:
self.channel = channel
for callback in self._on_open_cbs:
callback()
self._reconnect_consumer_callbacks()
self.log.info('TornadoQueueClient connected')
def ensure_queue(self, queue_name: str, callback: Callable[[], None]) -> None:
def finish(frame: Any) -> None:
self.queues.add(queue_name)
callback()
if queue_name not in self.queues:
# If we're not connected yet, send this message
# once we have created the channel
if not self.ready():
self._on_open_cbs.append(lambda: self.ensure_queue(queue_name, callback))
return
self.channel.queue_declare(queue=queue_name, durable=True, callback=finish)
else:
callback()
def register_consumer(self, queue_name: str, consumer: Consumer) -> None:
def wrapped_consumer(ch: BlockingChannel,
method: Basic.Deliver,
properties: pika.BasicProperties,
body: str) -> None:
consumer(ch, method, properties, body)
ch.basic_ack(delivery_tag=method.delivery_tag)
if not self.ready():
self.consumers[queue_name].add(wrapped_consumer)
return
self.consumers[queue_name].add(wrapped_consumer)
self.ensure_queue(queue_name,
lambda: self.channel.basic_consume(wrapped_consumer, queue=queue_name,
consumer_tag=self._generate_ctag(queue_name)))
queue_client = None # type: Optional[SimpleQueueClient]
def get_queue_client() -> SimpleQueueClient:
global queue_client
if queue_client is None:
if settings.RUNNING_INSIDE_TORNADO and settings.USING_RABBITMQ:
queue_client = TornadoQueueClient()
elif settings.USING_RABBITMQ:
queue_client = SimpleQueueClient()
return queue_client
# We using a simple lock to prevent multiple RabbitMQ messages being
# sent to the SimpleQueueClient at the same time; this is a workaround
# for an issue with the pika BlockingConnection where using
# BlockingConnection for multiple queues causes the channel to
# randomly close.
queue_lock = threading.RLock()
def queue_json_publish(queue_name: str,
event: Union[Dict[str, Any], str],
processor: Callable[[Any], None]=None) -> None:
# most events are dicts, but zerver.middleware.write_log_line uses a str
with queue_lock:
if settings.USING_RABBITMQ:
get_queue_client().json_publish(queue_name, event)
elif processor:
processor(event)
else:
# Must be imported here: A top section import leads to obscure not-defined-ish errors.
from zerver.worker.queue_processors import get_worker
get_worker(queue_name).consume_wrapper(event)
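# Illustrative call (sketch; the queue name and payload are hypothetical):
#   queue_json_publish('test_queue', {'type': 'ping', 'id': 42})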
def retry_event(queue_name: str,
event: Dict[str, Any],
failure_processor: Callable[[Dict[str, Any]], None]) -> None:
if 'failed_tries' not in event:
event['failed_tries'] = 0
event['failed_tries'] += 1
if event['failed_tries'] > MAX_REQUEST_RETRIES:
failure_processor(event)
else:
queue_json_publish(queue_name, event, lambda x: None)
| apache-2.0 | 1,195,079,782,446,228,200 | 41.881818 | 109 | 0.615504 | false |
paplorinc/intellij-community | python/helpers/py3only/docutils/transforms/parts.py | 44 | 6948 | # $Id: parts.py 6073 2009-08-06 12:21:10Z milde $
# Authors: David Goodger <[email protected]>; Ueli Schlaepfer; Dmitry Jemerov
# Copyright: This module has been placed in the public domain.
"""
Transforms related to document parts.
"""
__docformat__ = 'reStructuredText'
import sys
from docutils import nodes
from docutils.transforms import Transform
class SectNum(Transform):
"""
Automatically assigns numbers to the titles of document sections.
It is possible to limit the maximum section level for which the numbers
are added. For those sections that are auto-numbered, the "autonum"
attribute is set, informing the contents table generator that a different
form of the TOC should be used.
"""
default_priority = 710
"""Should be applied before `Contents`."""
def apply(self):
self.maxdepth = self.startnode.details.get('depth', None)
self.startvalue = self.startnode.details.get('start', 1)
self.prefix = self.startnode.details.get('prefix', '')
self.suffix = self.startnode.details.get('suffix', '')
self.startnode.parent.remove(self.startnode)
if self.document.settings.sectnum_xform:
if self.maxdepth is None:
self.maxdepth = sys.maxsize
self.update_section_numbers(self.document)
else: # store details for eventual section numbering by the writer
self.document.settings.sectnum_depth = self.maxdepth
self.document.settings.sectnum_start = self.startvalue
self.document.settings.sectnum_prefix = self.prefix
self.document.settings.sectnum_suffix = self.suffix
def update_section_numbers(self, node, prefix=(), depth=0):
depth += 1
if prefix:
sectnum = 1
else:
sectnum = self.startvalue
for child in node:
if isinstance(child, nodes.section):
numbers = prefix + (str(sectnum),)
title = child[0]
                # Use no-break spaces ('\u00a0') for spacing:
generated = nodes.generated(
'', (self.prefix + '.'.join(numbers) + self.suffix
+ '\u00a0' * 3),
classes=['sectnum'])
title.insert(0, generated)
title['auto'] = 1
if depth < self.maxdepth:
self.update_section_numbers(child, numbers, depth)
sectnum += 1
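# A tiny standalone sketch of the numbering scheme implemented above, using
# plain nested lists instead of docutils nodes (the section titles are
# invented): each section receives prefix + its own ordinal, and children
# recurse with the extended prefix, yielding "1", "1.1", "1.2", "2", ...
def _numbering_sketch(sections, prefix=()):
    numbered = []
    for ordinal, (title, children) in enumerate(sections, start=1):
        numbers = prefix + (str(ordinal),)
        numbered.append('.'.join(numbers) + ' ' + title)
        numbered.extend(_numbering_sketch(children, numbers))
    return numbered

# _numbering_sketch([('Intro', []), ('Body', [('Details', [])])])
# returns ['1 Intro', '2 Body', '2.1 Details']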
class Contents(Transform):
"""
This transform generates a table of contents from the entire document tree
or from a single branch. It locates "section" elements and builds them
into a nested bullet list, which is placed within a "topic" created by the
contents directive. A title is either explicitly specified, taken from
the appropriate language module, or omitted (local table of contents).
The depth may be specified. Two-way references between the table of
contents and section titles are generated (requires Writer support).
This transform requires a startnode, which contains generation
options and provides the location for the generated table of contents (the
startnode is replaced by the table of contents "topic").
"""
default_priority = 720
def apply(self):
try: # let the writer (or output software) build the contents list?
toc_by_writer = self.document.settings.use_latex_toc
except AttributeError:
toc_by_writer = False
details = self.startnode.details
if 'local' in details:
startnode = self.startnode.parent.parent
while not (isinstance(startnode, nodes.section)
or isinstance(startnode, nodes.document)):
# find the ToC root: a direct ancestor of startnode
startnode = startnode.parent
else:
startnode = self.document
self.toc_id = self.startnode.parent['ids'][0]
if 'backlinks' in details:
self.backlinks = details['backlinks']
else:
self.backlinks = self.document.settings.toc_backlinks
if toc_by_writer:
# move customization settings to the parent node
self.startnode.parent.attributes.update(details)
self.startnode.parent.remove(self.startnode)
else:
contents = self.build_contents(startnode)
if len(contents):
self.startnode.replace_self(contents)
else:
self.startnode.parent.parent.remove(self.startnode.parent)
def build_contents(self, node, level=0):
level += 1
sections = [sect for sect in node if isinstance(sect, nodes.section)]
entries = []
autonum = 0
depth = self.startnode.details.get('depth', sys.maxsize)
for section in sections:
title = section[0]
auto = title.get('auto') # May be set by SectNum.
entrytext = self.copy_and_filter(title)
reference = nodes.reference('', '', refid=section['ids'][0],
*entrytext)
ref_id = self.document.set_id(reference)
entry = nodes.paragraph('', '', reference)
item = nodes.list_item('', entry)
if ( self.backlinks in ('entry', 'top')
and title.next_node(nodes.reference) is None):
if self.backlinks == 'entry':
title['refid'] = ref_id
elif self.backlinks == 'top':
title['refid'] = self.toc_id
if level < depth:
subsects = self.build_contents(section, level)
item += subsects
entries.append(item)
if entries:
contents = nodes.bullet_list('', *entries)
if auto:
contents['classes'].append('auto-toc')
return contents
else:
return []
def copy_and_filter(self, node):
"""Return a copy of a title, with references, images, etc. removed."""
visitor = ContentsFilter(self.document)
node.walkabout(visitor)
return visitor.get_entry_text()
class ContentsFilter(nodes.TreeCopyVisitor):
def get_entry_text(self):
return self.get_tree_copy().children
def visit_citation_reference(self, node):
raise nodes.SkipNode
def visit_footnote_reference(self, node):
raise nodes.SkipNode
def visit_image(self, node):
if node.hasattr('alt'):
self.parent.append(nodes.Text(node['alt']))
raise nodes.SkipNode
def ignore_node_but_process_children(self, node):
raise nodes.SkipDeparture
visit_interpreted = ignore_node_but_process_children
visit_problematic = ignore_node_but_process_children
visit_reference = ignore_node_but_process_children
visit_target = ignore_node_but_process_children
| apache-2.0 | -7,092,598,360,172,141,000 | 37.815642 | 78 | 0.609096 | false |
Tivix/django-common | django_common/decorators.py | 2 | 1852 | from __future__ import print_function, unicode_literals, with_statement, division
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps
import inspect
from django.conf import settings
from django.http import HttpResponseRedirect
def ssl_required(allow_non_ssl=False):
"""
Views decorated with this will always get redirected to https
except when allow_non_ssl is set to true.
"""
def wrapper(view_func):
def _checkssl(request, *args, **kwargs):
# allow_non_ssl=True lets non-https requests to come
# through to this view (and hence not redirect)
if hasattr(settings, 'SSL_ENABLED') and settings.SSL_ENABLED \
and not request.is_secure() and not allow_non_ssl:
return HttpResponseRedirect(
request.build_absolute_uri().replace('http://', 'https://'))
return view_func(request, *args, **kwargs)
return _checkssl
return wrapper
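# A hedged usage sketch for ssl_required(); the view below is invented. With
# settings.SSL_ENABLED = True, a plain-http request to the decorated view is
# redirected to its https equivalent. Nothing in this module calls this.
def _ssl_required_usage_sketch():
    from django.http import HttpResponse

    @ssl_required(allow_non_ssl=False)
    def checkout(request):
        return HttpResponse("secure checkout")

    return checkout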
def disable_for_loaddata(signal_handler):
"""
See: https://code.djangoproject.com/ticket/8399
Disables signal from firing if its caused because of loaddata
"""
@wraps(signal_handler)
def wrapper(*args, **kwargs):
for fr in inspect.stack():
if inspect.getmodulename(fr[1]) == 'loaddata':
return
signal_handler(*args, **kwargs)
return wrapper
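# A hedged sketch of the usual wiring for disable_for_loaddata(): a post_save
# handler that must not fire while fixtures load. The handler body is a
# placeholder, and a real caller would also pass sender=<ModelClass>.
def _disable_for_loaddata_sketch():
    from django.db.models.signals import post_save

    @disable_for_loaddata
    def update_search_index(sender, instance, **kwargs):
        pass  # would reindex `instance` here; skipped during loaddata

    post_save.connect(update_search_index, weak=False)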
def anonymous_required(view, redirect_to=None):
"""
Only allow if user is NOT authenticated.
"""
if redirect_to is None:
redirect_to = settings.LOGIN_REDIRECT_URL
@wraps(view)
def wrapper(request, *a, **k):
if request.user and request.user.is_authenticated():
return HttpResponseRedirect(redirect_to)
return view(request, *a, **k)
return wrapper
| mit | 9,117,531,020,581,332,000 | 30.389831 | 81 | 0.642009 | false |
javaos74/neutron | neutron/plugins/ml2/rpc.py | 4 | 13745 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
import oslo_messaging
from sqlalchemy.orm import exc
from neutron.api.rpc.handlers import dvr_rpc
from neutron.api.rpc.handlers import securitygroups_rpc as sg_rpc
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import constants as n_const
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.extensions import portbindings
from neutron.extensions import portsecurity as psec
from neutron.i18n import _LE, _LW
from neutron import manager
from neutron.plugins.ml2 import driver_api as api
from neutron.plugins.ml2.drivers import type_tunnel
from neutron.services.qos import qos_consts
# REVISIT(kmestery): Allow the type and mechanism drivers to supply the
# mixins and eventually remove the direct dependencies on type_tunnel.
LOG = log.getLogger(__name__)
class RpcCallbacks(type_tunnel.TunnelRpcCallbackMixin):
# history
# 1.0 Initial version (from openvswitch/linuxbridge)
# 1.1 Support Security Group RPC
# 1.2 Support get_devices_details_list
# 1.3 get_device_details rpc signature upgrade to obtain 'host' and
# return value to include fixed_ips and device_owner for
# the device port
# 1.4 tunnel_sync rpc signature upgrade to obtain 'host'
# 1.5 Support update_device_list and
# get_devices_details_list_and_failed_devices
target = oslo_messaging.Target(version='1.5')
def __init__(self, notifier, type_manager):
self.setup_tunnel_callback_mixin(notifier, type_manager)
super(RpcCallbacks, self).__init__()
def get_device_details(self, rpc_context, **kwargs):
"""Agent requests device details."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
host = kwargs.get('host')
# cached networks used for reducing number of network db calls
# for server internal usage only
cached_networks = kwargs.get('cached_networks')
LOG.debug("Device %(device)s details requested by agent "
"%(agent_id)s with host %(host)s",
{'device': device, 'agent_id': agent_id, 'host': host})
plugin = manager.NeutronManager.get_plugin()
port_id = plugin._device_to_port_id(rpc_context, device)
port_context = plugin.get_bound_port_context(rpc_context,
port_id,
host,
cached_networks)
if not port_context:
LOG.warning(_LW("Device %(device)s requested by agent "
"%(agent_id)s not found in database"),
{'device': device, 'agent_id': agent_id})
return {'device': device}
segment = port_context.bottom_bound_segment
port = port_context.current
# caching information about networks for future use
if cached_networks is not None:
if port['network_id'] not in cached_networks:
cached_networks[port['network_id']] = (
port_context.network.current)
if not segment:
LOG.warning(_LW("Device %(device)s requested by agent "
"%(agent_id)s on network %(network_id)s not "
"bound, vif_type: %(vif_type)s"),
{'device': device,
'agent_id': agent_id,
'network_id': port['network_id'],
'vif_type': port_context.vif_type})
return {'device': device}
if (not host or host == port_context.host):
new_status = (n_const.PORT_STATUS_BUILD if port['admin_state_up']
else n_const.PORT_STATUS_DOWN)
if port['status'] != new_status:
plugin.update_port_status(rpc_context,
port_id,
new_status,
host,
port_context.network.current)
qos_policy_id = (port.get(qos_consts.QOS_POLICY_ID) or
port_context.network._network.get(
qos_consts.QOS_POLICY_ID))
entry = {'device': device,
'network_id': port['network_id'],
'port_id': port['id'],
'mac_address': port['mac_address'],
'admin_state_up': port['admin_state_up'],
'network_type': segment[api.NETWORK_TYPE],
'segmentation_id': segment[api.SEGMENTATION_ID],
'physical_network': segment[api.PHYSICAL_NETWORK],
'fixed_ips': port['fixed_ips'],
'device_owner': port['device_owner'],
'allowed_address_pairs': port['allowed_address_pairs'],
'port_security_enabled': port.get(psec.PORTSECURITY, True),
'qos_policy_id': qos_policy_id,
'profile': port[portbindings.PROFILE]}
if 'security_groups' in port:
entry['security_groups'] = port['security_groups']
LOG.debug("Returning: %s", entry)
return entry
def get_devices_details_list(self, rpc_context, **kwargs):
# cached networks used for reducing number of network db calls
cached_networks = {}
return [
self.get_device_details(
rpc_context,
device=device,
cached_networks=cached_networks,
**kwargs
)
for device in kwargs.pop('devices', [])
]
def get_devices_details_list_and_failed_devices(self,
rpc_context,
**kwargs):
devices = []
failed_devices = []
cached_networks = {}
for device in kwargs.pop('devices', []):
try:
devices.append(self.get_device_details(
rpc_context,
device=device,
cached_networks=cached_networks,
**kwargs))
except Exception:
LOG.error(_LE("Failed to get details for device %s"),
device)
failed_devices.append(device)
return {'devices': devices,
'failed_devices': failed_devices}
def update_device_down(self, rpc_context, **kwargs):
"""Device no longer exists on agent."""
# TODO(garyk) - live migration and port status
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
host = kwargs.get('host')
LOG.debug("Device %(device)s no longer exists at agent "
"%(agent_id)s",
{'device': device, 'agent_id': agent_id})
plugin = manager.NeutronManager.get_plugin()
port_id = plugin._device_to_port_id(rpc_context, device)
port_exists = True
if (host and not plugin.port_bound_to_host(rpc_context,
port_id, host)):
LOG.debug("Device %(device)s not bound to the"
" agent host %(host)s",
{'device': device, 'host': host})
return {'device': device,
'exists': port_exists}
try:
port_exists = bool(plugin.update_port_status(
rpc_context, port_id, n_const.PORT_STATUS_DOWN, host))
except exc.StaleDataError:
port_exists = False
LOG.debug("delete_port and update_device_down are being executed "
"concurrently. Ignoring StaleDataError.")
return {'device': device,
'exists': port_exists}
def update_device_up(self, rpc_context, **kwargs):
"""Device is up on agent."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
host = kwargs.get('host')
LOG.debug("Device %(device)s up at agent %(agent_id)s",
{'device': device, 'agent_id': agent_id})
plugin = manager.NeutronManager.get_plugin()
port_id = plugin._device_to_port_id(rpc_context, device)
if (host and not plugin.port_bound_to_host(rpc_context,
port_id, host)):
LOG.debug("Device %(device)s not bound to the"
" agent host %(host)s",
{'device': device, 'host': host})
return
port_id = plugin.update_port_status(rpc_context, port_id,
n_const.PORT_STATUS_ACTIVE,
host)
try:
# NOTE(armax): it's best to remove all objects from the
# session, before we try to retrieve the new port object
rpc_context.session.expunge_all()
port = plugin._get_port(rpc_context, port_id)
except exceptions.PortNotFound:
LOG.debug('Port %s not found during update', port_id)
else:
kwargs = {
'context': rpc_context,
'port': port,
'update_device_up': True
}
registry.notify(
resources.PORT, events.AFTER_UPDATE, plugin, **kwargs)
def update_device_list(self, rpc_context, **kwargs):
devices_up = []
failed_devices_up = []
devices_down = []
failed_devices_down = []
devices = kwargs.get('devices_up')
if devices:
for device in devices:
try:
self.update_device_up(
rpc_context,
device=device,
**kwargs)
except Exception:
failed_devices_up.append(device)
LOG.error(_LE("Failed to update device %s up"), device)
else:
devices_up.append(device)
devices = kwargs.get('devices_down')
if devices:
for device in devices:
try:
dev = self.update_device_down(
rpc_context,
device=device,
**kwargs)
except Exception:
failed_devices_down.append(device)
LOG.error(_LE("Failed to update device %s down"), device)
else:
devices_down.append(dev)
return {'devices_up': devices_up,
'failed_devices_up': failed_devices_up,
'devices_down': devices_down,
'failed_devices_down': failed_devices_down}
class AgentNotifierApi(dvr_rpc.DVRAgentRpcApiMixin,
sg_rpc.SecurityGroupAgentRpcApiMixin,
type_tunnel.TunnelAgentRpcApiMixin):
"""Agent side of the openvswitch rpc API.
API version history:
1.0 - Initial version.
1.1 - Added get_active_networks_info, create_dhcp_port,
update_dhcp_port, and removed get_dhcp_port methods.
"""
def __init__(self, topic):
self.topic = topic
self.topic_network_delete = topics.get_topic_name(topic,
topics.NETWORK,
topics.DELETE)
self.topic_port_update = topics.get_topic_name(topic,
topics.PORT,
topics.UPDATE)
self.topic_port_delete = topics.get_topic_name(topic,
topics.PORT,
topics.DELETE)
target = oslo_messaging.Target(topic=topic, version='1.0')
self.client = n_rpc.get_client(target)
def network_delete(self, context, network_id):
cctxt = self.client.prepare(topic=self.topic_network_delete,
fanout=True)
cctxt.cast(context, 'network_delete', network_id=network_id)
def port_update(self, context, port, network_type, segmentation_id,
physical_network):
cctxt = self.client.prepare(topic=self.topic_port_update,
fanout=True)
cctxt.cast(context, 'port_update', port=port,
network_type=network_type, segmentation_id=segmentation_id,
physical_network=physical_network)
def port_delete(self, context, port_id):
cctxt = self.client.prepare(topic=self.topic_port_delete,
fanout=True)
cctxt.cast(context, 'port_delete', port_id=port_id)
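# A hedged sketch of driving the notifier above from plugin code; the agent
# topic constant follows this codebase's convention, and the context and ids
# are placeholders. Both calls fan out to every agent on the derived topic.
def _agent_notifier_sketch(context):
    notifier = AgentNotifierApi(topics.AGENT)
    notifier.network_delete(context, 'net-0000')
    notifier.port_delete(context, 'port-0000')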
| apache-2.0 | -5,634,697,078,448,194,000 | 42.496835 | 78 | 0.531466 | false |
olinlibrary/ABE | abe/resource_models/label_resources.py | 1 | 4413 | """Label Resource models for flask"""
import logging
from flask import request
from flask_restplus import Namespace, Resource, fields
from abe import database as db
from abe.auth import require_scope
from abe.helper_functions.converting_helpers import request_to_dict
from abe.helper_functions.mongodb_helpers import mongo_resource_errors
from abe.helper_functions.query_helpers import multi_search
api = Namespace('labels', description='Event labels')
# This should be kept in sync with the document model, which drives the format
model = api.model("Label", {
"id": fields.String(readonly=True),
"name": fields.String(example="library"),
"description": fields.String(
description="Description of the label",
example="Any event that has to do with the library",
),
"url": fields.String(description="Not currently used."),
# "parent_labels": fields.List(fields.String),
"color": fields.String(
description="Color for calendar display.",
example="#aaccff",
),
"default": fields.Boolean(
default=False,
description="If true, appears in the default calendar view.",
),
"protected": fields.Boolean(
default=False,
description="If true, requires the admin role to create and edit labeled events."
),
"visibility": fields.String(
enum=['public', 'olin', 'students'],
description="Who can see events with this label.",
),
})
class LabelApi(Resource):
"""API for interacting with all labels (searching, creating)"""
@mongo_resource_errors
@api.doc(security=[])
@api.marshal_with(model)
def get(self, id=None):
"""Retrieve a list of labels"""
if id: # use label name/object id if present
logging.debug('Label requested: %s', id)
search_fields = ['name', 'id']
result = multi_search(db.Label, id, search_fields)
if not result:
return "Label not found with identifier '{}'".format(id), 404
return result
return list(db.Label.objects())
@require_scope('create:labels')
@mongo_resource_errors
@api.expect(model)
@api.marshal_with(model)
def post(self):
"""Create a new label"""
received_data = request_to_dict(request)
logging.debug("Received POST data: %s", received_data)
new_label = db.Label(**received_data)
new_label.save()
return new_label, 201
@require_scope('edit:labels')
@mongo_resource_errors
@api.expect(model)
@api.marshal_with(model)
def put(self, id):
"""Modify a label's properties"""
received_data = request_to_dict(request)
logging.debug("Received PUT data: %s", received_data)
search_fields = ['name', 'id']
result = multi_search(db.Label, id, search_fields)
if not result:
return "Label not found with identifier '{}'".format(id), 404
previous_name = result['name']
new_name = received_data.get('name', previous_name)
result.update(**received_data)
        # TODO: do this inside the same transaction as the update above, on update to mongo 4.0
if previous_name != new_name:
db.Event.objects(labels=previous_name).update(labels__S=new_name)
db.ICS.objects(labels=previous_name).update(labels__S=new_name)
db.Subscription.objects(labels=previous_name).update(labels__S=new_name)
return result
@require_scope('delete:labels')
@mongo_resource_errors
@api.marshal_with(model)
def delete(self, id):
"""Delete a label"""
logging.debug('Label requested: %s', id)
search_fields = ['name', 'id']
result = multi_search(db.Label, id, search_fields)
if not result:
return "Label not found with identifier '{}'".format(id), 404
received_data = request_to_dict(request)
logging.debug("Received DELETE data: %s", received_data)
result.delete()
# TODO: this should also remove the label from tagged events
# TODO: should this operation fail if it would leave events untagged?
return result
api.add_resource(LabelApi, '/', methods=['GET', 'POST'], endpoint='label')
api.add_resource(LabelApi, '/<string:id>',
methods=['GET', 'PUT', 'PATCH', 'DELETE'],
endpoint='label_id')
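# A hedged sketch of exercising these endpoints over HTTP; the base URL is an
# assumption about where the namespace is mounted, and `requests` is imported
# only for the sketch (it is not a dependency declared by this module).
def _labels_api_sketch():
    import requests
    base = 'http://localhost:3000/labels'
    requests.post(base + '/', json={'name': 'library', 'color': '#aaccff'})
    print(requests.get(base + '/library').json())  # fetch by name or id
    requests.delete(base + '/library')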
| agpl-3.0 | -7,671,649,440,249,133,000 | 34.878049 | 95 | 0.63177 | false |
suto/infernal-twin | build/pillow/Tests/test_file_bmp.py | 11 | 1784 | from helper import unittest, PillowTestCase, hopper
from PIL import Image
import io
class TestFileBmp(PillowTestCase):
def roundtrip(self, im):
outfile = self.tempfile("temp.bmp")
im.save(outfile, 'BMP')
reloaded = Image.open(outfile)
reloaded.load()
self.assertEqual(im.mode, reloaded.mode)
self.assertEqual(im.size, reloaded.size)
self.assertEqual(reloaded.format, "BMP")
def test_sanity(self):
self.roundtrip(hopper())
self.roundtrip(hopper("1"))
self.roundtrip(hopper("L"))
self.roundtrip(hopper("P"))
self.roundtrip(hopper("RGB"))
def test_save_to_bytes(self):
output = io.BytesIO()
im = hopper()
im.save(output, "BMP")
output.seek(0)
reloaded = Image.open(output)
self.assertEqual(im.mode, reloaded.mode)
self.assertEqual(im.size, reloaded.size)
self.assertEqual(reloaded.format, "BMP")
def test_dpi(self):
dpi = (72, 72)
output = io.BytesIO()
im = hopper()
im.save(output, "BMP", dpi=dpi)
output.seek(0)
reloaded = Image.open(output)
self.assertEqual(reloaded.info["dpi"], dpi)
def test_save_bmp_with_dpi(self):
# Test for #1301
# Arrange
outfile = self.tempfile("temp.jpg")
im = Image.open("Tests/images/hopper.bmp")
# Act
im.save(outfile, 'JPEG', dpi=im.info['dpi'])
# Assert
reloaded = Image.open(outfile)
reloaded.load()
self.assertEqual(im.info['dpi'], reloaded.info['dpi'])
self.assertEqual(im.size, reloaded.size)
self.assertEqual(reloaded.format, "JPEG")
if __name__ == '__main__':
unittest.main()
# End of file
| gpl-3.0 | -6,580,235,527,331,182,000 | 23.777778 | 62 | 0.586323 | false |
mancoast/CPythonPyc_test | cpython/330_test_curses.py | 5 | 10336 | #
# Test script for the curses module
#
# This script doesn't actually display anything very coherent. but it
# does call every method and function.
#
# Functions not tested: {def,reset}_{shell,prog}_mode, getch(), getstr(),
# init_color()
# Only called, not tested: getmouse(), ungetmouse()
#
import sys, tempfile, os
# Optionally test curses module. This currently requires that the
# 'curses' resource be given on the regrtest command line using the -u
# option. If not available, nothing after this line will be executed.
import unittest
from test.support import requires, import_module
requires('curses')
# If either of these don't exist, skip the tests.
curses = import_module('curses')
curses.panel = import_module('curses.panel')
# XXX: if newterm was supported we could use it instead of initscr and not exit
term = os.environ.get('TERM')
if not term or term == 'unknown':
raise unittest.SkipTest("$TERM=%r, calling initscr() may cause exit" % term)
if sys.platform == "cygwin":
raise unittest.SkipTest("cygwin's curses mostly just hangs")
def window_funcs(stdscr):
"Test the methods of windows"
win = curses.newwin(10,10)
win = curses.newwin(5,5, 5,5)
win2 = curses.newwin(15,15, 5,5)
for meth in [stdscr.addch, stdscr.addstr]:
for args in [('a'), ('a', curses.A_BOLD),
(4,4, 'a'), (5,5, 'a', curses.A_BOLD)]:
meth(*args)
for meth in [stdscr.box, stdscr.clear, stdscr.clrtobot,
stdscr.clrtoeol, stdscr.cursyncup, stdscr.delch,
stdscr.deleteln, stdscr.erase, stdscr.getbegyx,
stdscr.getbkgd, stdscr.getkey, stdscr.getmaxyx,
stdscr.getparyx, stdscr.getyx, stdscr.inch,
stdscr.insertln, stdscr.instr, stdscr.is_wintouched,
win.noutrefresh, stdscr.redrawwin, stdscr.refresh,
stdscr.standout, stdscr.standend, stdscr.syncdown,
stdscr.syncup, stdscr.touchwin, stdscr.untouchwin]:
meth()
stdscr.addnstr('1234', 3)
stdscr.addnstr('1234', 3, curses.A_BOLD)
stdscr.addnstr(4,4, '1234', 3)
stdscr.addnstr(5,5, '1234', 3, curses.A_BOLD)
stdscr.attron(curses.A_BOLD)
stdscr.attroff(curses.A_BOLD)
stdscr.attrset(curses.A_BOLD)
stdscr.bkgd(' ')
stdscr.bkgd(' ', curses.A_REVERSE)
stdscr.bkgdset(' ')
stdscr.bkgdset(' ', curses.A_REVERSE)
win.border(65, 66, 67, 68,
69, 70, 71, 72)
win.border('|', '!', '-', '_',
'+', '\\', '#', '/')
try:
win.border(65, 66, 67, 68,
69, [], 71, 72)
except TypeError:
pass
else:
raise RuntimeError("Expected win.border() to raise TypeError")
stdscr.clearok(1)
win4 = stdscr.derwin(2,2)
win4 = stdscr.derwin(1,1, 5,5)
win4.mvderwin(9,9)
stdscr.echochar('a')
stdscr.echochar('a', curses.A_BOLD)
stdscr.hline('-', 5)
stdscr.hline('-', 5, curses.A_BOLD)
stdscr.hline(1,1,'-', 5)
stdscr.hline(1,1,'-', 5, curses.A_BOLD)
stdscr.idcok(1)
stdscr.idlok(1)
stdscr.immedok(1)
stdscr.insch('c')
stdscr.insdelln(1)
stdscr.insnstr('abc', 3)
stdscr.insnstr('abc', 3, curses.A_BOLD)
stdscr.insnstr(5, 5, 'abc', 3)
stdscr.insnstr(5, 5, 'abc', 3, curses.A_BOLD)
stdscr.insstr('def')
stdscr.insstr('def', curses.A_BOLD)
stdscr.insstr(5, 5, 'def')
stdscr.insstr(5, 5, 'def', curses.A_BOLD)
stdscr.is_linetouched(0)
stdscr.keypad(1)
stdscr.leaveok(1)
stdscr.move(3,3)
win.mvwin(2,2)
stdscr.nodelay(1)
stdscr.notimeout(1)
win2.overlay(win)
win2.overwrite(win)
win2.overlay(win, 1, 2, 3, 3, 2, 1)
win2.overwrite(win, 1, 2, 3, 3, 2, 1)
stdscr.redrawln(1,2)
stdscr.scrollok(1)
stdscr.scroll()
stdscr.scroll(2)
stdscr.scroll(-3)
stdscr.move(12, 2)
stdscr.setscrreg(10,15)
win3 = stdscr.subwin(10,10)
win3 = stdscr.subwin(10,10, 5,5)
stdscr.syncok(1)
stdscr.timeout(5)
stdscr.touchline(5,5)
stdscr.touchline(5,5,0)
stdscr.vline('a', 3)
stdscr.vline('a', 3, curses.A_STANDOUT)
stdscr.chgat(5, 2, 3, curses.A_BLINK)
stdscr.chgat(3, curses.A_BOLD)
stdscr.chgat(5, 8, curses.A_UNDERLINE)
stdscr.chgat(curses.A_BLINK)
stdscr.refresh()
stdscr.vline(1,1, 'a', 3)
stdscr.vline(1,1, 'a', 3, curses.A_STANDOUT)
if hasattr(curses, 'resize'):
stdscr.resize()
if hasattr(curses, 'enclose'):
stdscr.enclose()
def module_funcs(stdscr):
"Test module-level functions"
for func in [curses.baudrate, curses.beep, curses.can_change_color,
curses.cbreak, curses.def_prog_mode, curses.doupdate,
curses.filter, curses.flash, curses.flushinp,
curses.has_colors, curses.has_ic, curses.has_il,
curses.isendwin, curses.killchar, curses.longname,
curses.nocbreak, curses.noecho, curses.nonl,
curses.noqiflush, curses.noraw,
curses.reset_prog_mode, curses.termattrs,
curses.termname, curses.erasechar, curses.getsyx]:
func()
# Functions that actually need arguments
if curses.tigetstr("cnorm"):
curses.curs_set(1)
curses.delay_output(1)
curses.echo() ; curses.echo(1)
f = tempfile.TemporaryFile()
stdscr.putwin(f)
f.seek(0)
curses.getwin(f)
f.close()
curses.halfdelay(1)
curses.intrflush(1)
curses.meta(1)
curses.napms(100)
curses.newpad(50,50)
win = curses.newwin(5,5)
win = curses.newwin(5,5, 1,1)
curses.nl() ; curses.nl(1)
curses.putp(b'abc')
curses.qiflush()
curses.raw() ; curses.raw(1)
curses.setsyx(5,5)
curses.tigetflag('hc')
curses.tigetnum('co')
curses.tigetstr('cr')
curses.tparm(b'cr')
curses.typeahead(sys.__stdin__.fileno())
curses.unctrl('a')
curses.ungetch('a')
curses.use_env(1)
# Functions only available on a few platforms
if curses.has_colors():
curses.start_color()
curses.init_pair(2, 1,1)
curses.color_content(1)
curses.color_pair(2)
curses.pair_content(curses.COLOR_PAIRS - 1)
curses.pair_number(0)
if hasattr(curses, 'use_default_colors'):
curses.use_default_colors()
if hasattr(curses, 'keyname'):
curses.keyname(13)
if hasattr(curses, 'has_key'):
curses.has_key(13)
if hasattr(curses, 'getmouse'):
(availmask, oldmask) = curses.mousemask(curses.BUTTON1_PRESSED)
# availmask indicates that mouse stuff not available.
if availmask != 0:
curses.mouseinterval(10)
# just verify these don't cause errors
curses.ungetmouse(0, 0, 0, 0, curses.BUTTON1_PRESSED)
m = curses.getmouse()
if hasattr(curses, 'is_term_resized'):
curses.is_term_resized(*stdscr.getmaxyx())
if hasattr(curses, 'resizeterm'):
curses.resizeterm(*stdscr.getmaxyx())
if hasattr(curses, 'resize_term'):
curses.resize_term(*stdscr.getmaxyx())
def unit_tests():
from curses import ascii
for ch, expected in [('a', 'a'), ('A', 'A'),
(';', ';'), (' ', ' '),
('\x7f', '^?'), ('\n', '^J'), ('\0', '^@'),
# Meta-bit characters
('\x8a', '!^J'), ('\xc1', '!A'),
]:
if ascii.unctrl(ch) != expected:
print('curses.unctrl fails on character', repr(ch))
def test_userptr_without_set(stdscr):
w = curses.newwin(10, 10)
p = curses.panel.new_panel(w)
# try to access userptr() before calling set_userptr() -- segfaults
try:
p.userptr()
raise RuntimeError('userptr should fail since not set')
except curses.panel.error:
pass
def test_resize_term(stdscr):
if hasattr(curses, 'resizeterm'):
lines, cols = curses.LINES, curses.COLS
curses.resizeterm(lines - 1, cols + 1)
if curses.LINES != lines - 1 or curses.COLS != cols + 1:
raise RuntimeError("Expected resizeterm to update LINES and COLS")
def test_issue6243(stdscr):
curses.ungetch(1025)
stdscr.getkey()
def test_unget_wch(stdscr):
if not hasattr(curses, 'unget_wch'):
return
encoding = stdscr.encoding
for ch in ('a', '\xe9', '\u20ac', '\U0010FFFF'):
try:
ch.encode(encoding)
except UnicodeEncodeError:
continue
try:
curses.unget_wch(ch)
except Exception as err:
raise Exception("unget_wch(%a) failed with encoding %s: %s"
% (ch, stdscr.encoding, err))
read = stdscr.get_wch()
if read != ch:
raise AssertionError("%r != %r" % (read, ch))
code = ord(ch)
curses.unget_wch(code)
read = stdscr.get_wch()
if read != ch:
raise AssertionError("%r != %r" % (read, ch))
def test_issue10570():
b = curses.tparm(curses.tigetstr("cup"), 5, 3)
assert type(b) is bytes
curses.putp(b)
def test_encoding(stdscr):
import codecs
encoding = stdscr.encoding
codecs.lookup(encoding)
try:
stdscr.encoding = 10
except TypeError:
pass
else:
raise AssertionError("TypeError not raised")
stdscr.encoding = encoding
try:
del stdscr.encoding
except TypeError:
pass
else:
raise AssertionError("TypeError not raised")
def main(stdscr):
curses.savetty()
try:
module_funcs(stdscr)
window_funcs(stdscr)
test_userptr_without_set(stdscr)
test_resize_term(stdscr)
test_issue6243(stdscr)
test_unget_wch(stdscr)
test_issue10570()
test_encoding(stdscr)
finally:
curses.resetty()
def test_main():
if not sys.__stdout__.isatty():
raise unittest.SkipTest("sys.__stdout__ is not a tty")
# testing setupterm() inside initscr/endwin
# causes terminal breakage
curses.setupterm(fd=sys.__stdout__.fileno())
try:
stdscr = curses.initscr()
main(stdscr)
finally:
curses.endwin()
unit_tests()
if __name__ == '__main__':
curses.wrapper(main)
unit_tests()
| gpl-3.0 | -5,171,228,960,011,170,000 | 29.134111 | 80 | 0.592783 | false |
wdv4758h/ZipPy | lib-python/3/turtle.py | 1 | 143024 | #
# turtle.py: a Tkinter based turtle graphics module for Python
# Version 1.1b - 4. 5. 2009
#
# Copyright (C) 2006 - 2010 Gregor Lingl
# email: [email protected]
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
"""
Turtle graphics is a popular way for introducing programming to
kids. It was part of the original Logo programming language developed
by Wally Feurzig and Seymour Papert in 1966.
Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it
the command turtle.forward(15), and it moves (on-screen!) 15 pixels in
the direction it is facing, drawing a line as it moves. Give it the
command turtle.right(25), and it rotates in-place 25 degrees clockwise.
By combining together these and similar commands, intricate shapes and
pictures can easily be drawn.
----- turtle.py
This module is an extended reimplementation of turtle.py from the
Python standard distribution up to Python 2.5. (See: http://www.python.org)
It tries to keep the merits of turtle.py and to be (nearly) 100%
compatible with it. This means in the first place to enable the
learning programmer to use all the commands, classes and methods
interactively when using the module from within IDLE run with
the -n switch.
Roughly it has the following features added:
- Better animation of the turtle movements, especially of turning the
turtle. So the turtles can more easily be used as a visual feedback
instrument by the (beginning) programmer.
- Different turtle shapes, gif-images as turtle shapes, user defined
and user controllable turtle shapes, among them compound
(multicolored) shapes. Turtle shapes can be stretched and tilted, which
makes turtles very versatile geometrical objects.
- Fine control over turtle movement and screen updates via delay(),
and enhanced tracer() and speed() methods.
- Aliases for the most commonly used commands, like fd for forward etc.,
following the early Logo traditions. This reduces the boring work of
typing long sequences of commands, which often occur in a natural way
when kids try to program fancy pictures on their first encounter with
turtle graphics.
- Turtles now have an undo()-method with configurable undo-buffer.
- Some simple commands/methods for creating event driven programs
(mouse-, key-, timer-events). Especially useful for programming games.
- A scrollable Canvas class. The default scrollable Canvas can be
extended interactively as needed while playing around with the turtle(s).
- A TurtleScreen class with methods controlling background color or
background image, window and canvas size and other properties of the
TurtleScreen.
- There is a method, setworldcoordinates(), to install a user defined
coordinate-system for the TurtleScreen.
- The implementation uses a 2-vector class named Vec2D, derived from tuple.
This class is public, so it can be imported by the application programmer,
which makes certain types of computations very natural and compact.
- Appearance of the TurtleScreen and the Turtles at startup/import can be
configured by means of a turtle.cfg configuration file.
The default configuration mimics the appearance of the old turtle module.
- If configured appropriately the module reads in docstrings from a docstring
dictionary in some different language, supplied separately and replaces
the English ones by those read in. There is a utility function
write_docstringdict() to write a dictionary with the original (English)
docstrings to disc, so it can serve as a template for translations.
Behind the scenes there are some features included with possible
extensions in mind. These will be commented and documented elsewhere.
"""
_ver = "turtle 1.1b- - for Python 3.1 - 4. 5. 2009"
# print(_ver)
import tkinter as TK
import types
import math
import time
import os
import inspect
from os.path import isfile, split, join
from copy import deepcopy
from tkinter import simpledialog
_tg_classes = ['ScrolledCanvas', 'TurtleScreen', 'Screen',
'RawTurtle', 'Turtle', 'RawPen', 'Pen', 'Shape', 'Vec2D']
_tg_screen_functions = ['addshape', 'bgcolor', 'bgpic', 'bye',
'clearscreen', 'colormode', 'delay', 'exitonclick', 'getcanvas',
'getshapes', 'listen', 'mainloop', 'mode', 'numinput',
'onkey', 'onkeypress', 'onkeyrelease', 'onscreenclick', 'ontimer',
'register_shape', 'resetscreen', 'screensize', 'setup',
'setworldcoordinates', 'textinput', 'title', 'tracer', 'turtles', 'update',
'window_height', 'window_width']
_tg_turtle_functions = ['back', 'backward', 'begin_fill', 'begin_poly', 'bk',
'circle', 'clear', 'clearstamp', 'clearstamps', 'clone', 'color',
'degrees', 'distance', 'dot', 'down', 'end_fill', 'end_poly', 'fd',
'fillcolor', 'filling', 'forward', 'get_poly', 'getpen', 'getscreen', 'get_shapepoly',
'getturtle', 'goto', 'heading', 'hideturtle', 'home', 'ht', 'isdown',
'isvisible', 'left', 'lt', 'onclick', 'ondrag', 'onrelease', 'pd',
'pen', 'pencolor', 'pendown', 'pensize', 'penup', 'pos', 'position',
'pu', 'radians', 'right', 'reset', 'resizemode', 'rt',
'seth', 'setheading', 'setpos', 'setposition', 'settiltangle',
'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'shapetransform', 'shearfactor', 'showturtle',
'speed', 'st', 'stamp', 'tilt', 'tiltangle', 'towards',
'turtlesize', 'undo', 'undobufferentries', 'up', 'width',
'write', 'xcor', 'ycor']
_tg_utilities = ['write_docstringdict', 'done']
__all__ = (_tg_classes + _tg_screen_functions + _tg_turtle_functions +
_tg_utilities) # + _math_functions)
_alias_list = ['addshape', 'backward', 'bk', 'fd', 'ht', 'lt', 'pd', 'pos',
'pu', 'rt', 'seth', 'setpos', 'setposition', 'st',
'turtlesize', 'up', 'width']
_CFG = {"width" : 0.5, # Screen
"height" : 0.75,
"canvwidth" : 400,
"canvheight": 300,
"leftright": None,
"topbottom": None,
"mode": "standard", # TurtleScreen
"colormode": 1.0,
"delay": 10,
"undobuffersize": 1000, # RawTurtle
"shape": "classic",
"pencolor" : "black",
"fillcolor" : "black",
"resizemode" : "noresize",
"visible" : True,
"language": "english", # docstrings
"exampleturtle": "turtle",
"examplescreen": "screen",
"title": "Python Turtle Graphics",
"using_IDLE": False
}
def config_dict(filename):
"""Convert content of config-file into dictionary."""
with open(filename, "r") as f:
cfglines = f.readlines()
cfgdict = {}
for line in cfglines:
line = line.strip()
if not line or line.startswith("#"):
continue
try:
key, value = line.split("=")
except:
print("Bad line in config-file %s:\n%s" % (filename,line))
continue
key = key.strip()
value = value.strip()
if value in ["True", "False", "None", "''", '""']:
value = eval(value)
else:
try:
if "." in value:
value = float(value)
else:
value = int(value)
except:
pass # value need not be converted
cfgdict[key] = value
return cfgdict
def readconfig(cfgdict):
"""Read config-files, change configuration-dict accordingly.
If there is a turtle.cfg file in the current working directory,
read it from there. If this contains an importconfig-value,
    say 'myway', construct filename turtle_myway.cfg, else use
turtle.cfg and read it from the import-directory, where
turtle.py is located.
Update configuration dictionary first according to config-file,
in the import directory, then according to config-file in the
current working directory.
If no config-file is found, the default configuration is used.
"""
default_cfg = "turtle.cfg"
cfgdict1 = {}
cfgdict2 = {}
if isfile(default_cfg):
cfgdict1 = config_dict(default_cfg)
if "importconfig" in cfgdict1:
default_cfg = "turtle_%s.cfg" % cfgdict1["importconfig"]
try:
head, tail = split(__file__)
cfg_file2 = join(head, default_cfg)
except:
cfg_file2 = ""
if isfile(cfg_file2):
cfgdict2 = config_dict(cfg_file2)
_CFG.update(cfgdict2)
_CFG.update(cfgdict1)
try:
readconfig(_CFG)
except:
print ("No configfile read, reason unknown")
class Vec2D(tuple):
"""A 2 dimensional vector class, used as a helper class
for implementing turtle graphics.
May be useful for turtle graphics programs also.
Derived from tuple, so a vector is a tuple!
Provides (for a, b vectors, k number):
a+b vector addition
a-b vector subtraction
a*b inner product
k*a and a*k multiplication with scalar
|a| absolute value of a
a.rotate(angle) rotation
"""
def __new__(cls, x, y):
return tuple.__new__(cls, (x, y))
def __add__(self, other):
return Vec2D(self[0]+other[0], self[1]+other[1])
def __mul__(self, other):
if isinstance(other, Vec2D):
return self[0]*other[0]+self[1]*other[1]
return Vec2D(self[0]*other, self[1]*other)
def __rmul__(self, other):
if isinstance(other, int) or isinstance(other, float):
return Vec2D(self[0]*other, self[1]*other)
def __sub__(self, other):
return Vec2D(self[0]-other[0], self[1]-other[1])
def __neg__(self):
return Vec2D(-self[0], -self[1])
def __abs__(self):
return (self[0]**2 + self[1]**2)**0.5
def rotate(self, angle):
"""rotate self counterclockwise by angle
"""
perp = Vec2D(-self[1], self[0])
angle = angle * math.pi / 180.0
c, s = math.cos(angle), math.sin(angle)
return Vec2D(self[0]*c+perp[0]*s, self[1]*c+perp[1]*s)
def __getnewargs__(self):
return (self[0], self[1])
def __repr__(self):
return "(%.2f,%.2f)" % self
##############################################################################
### From here up to line : Tkinter - Interface for turtle.py ###
### May be replaced by an interface to some different graphics toolkit ###
##############################################################################
## helper functions for Scrolled Canvas, to forward Canvas-methods
## to ScrolledCanvas class
def __methodDict(cls, _dict):
"""helper function for Scrolled Canvas"""
baseList = list(cls.__bases__)
baseList.reverse()
for _super in baseList:
__methodDict(_super, _dict)
for key, value in cls.__dict__.items():
if type(value) == types.FunctionType:
_dict[key] = value
def __methods(cls):
"""helper function for Scrolled Canvas"""
_dict = {}
__methodDict(cls, _dict)
return _dict.keys()
__stringBody = (
'def %(method)s(self, *args, **kw): return ' +
'self.%(attribute)s.%(method)s(*args, **kw)')
def __forwardmethods(fromClass, toClass, toPart, exclude = ()):
### MANY CHANGES ###
_dict_1 = {}
__methodDict(toClass, _dict_1)
_dict = {}
mfc = __methods(fromClass)
for ex in _dict_1.keys():
if ex[:1] == '_' or ex[-1:] == '_' or ex in exclude or ex in mfc:
pass
else:
_dict[ex] = _dict_1[ex]
for method, func in _dict.items():
d = {'method': method, 'func': func}
if isinstance(toPart, str):
execString = \
__stringBody % {'method' : method, 'attribute' : toPart}
exec(execString, d)
setattr(fromClass, method, d[method]) ### NEWU!
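# A minimal standalone sketch of the exec-based forwarding trick used by
# __forwardmethods() above: a wrapper is generated from __stringBody-style
# source text and grafted onto the outer class. The toy classes are invented.
def _forwarding_sketch():
    class Inner:
        def ping(self):
            return "pong"

    class Outer:
        def __init__(self):
            self._inner = Inner()

    src = ('def %(method)s(self, *args, **kw): return '
           'self.%(attribute)s.%(method)s(*args, **kw)'
           % {'method': 'ping', 'attribute': '_inner'})
    d = {}
    exec(src, d)                          # compile the generated wrapper
    setattr(Outer, 'ping', d['ping'])     # graft it onto the outer class
    assert Outer().ping() == "pong"       # the call is forwarded to Inner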
class ScrolledCanvas(TK.Frame):
"""Modeled after the scrolled canvas class from Grayons's Tkinter book.
Used as the default canvas, which pops up automatically when
using turtle graphics functions or the Turtle class.
"""
def __init__(self, master, width=500, height=350,
canvwidth=600, canvheight=500):
TK.Frame.__init__(self, master, width=width, height=height)
self._rootwindow = self.winfo_toplevel()
self.width, self.height = width, height
self.canvwidth, self.canvheight = canvwidth, canvheight
self.bg = "white"
self._canvas = TK.Canvas(master, width=width, height=height,
bg=self.bg, relief=TK.SUNKEN, borderwidth=2)
self.hscroll = TK.Scrollbar(master, command=self._canvas.xview,
orient=TK.HORIZONTAL)
self.vscroll = TK.Scrollbar(master, command=self._canvas.yview)
self._canvas.configure(xscrollcommand=self.hscroll.set,
yscrollcommand=self.vscroll.set)
self.rowconfigure(0, weight=1, minsize=0)
self.columnconfigure(0, weight=1, minsize=0)
self._canvas.grid(padx=1, in_ = self, pady=1, row=0,
column=0, rowspan=1, columnspan=1, sticky='news')
self.vscroll.grid(padx=1, in_ = self, pady=1, row=0,
column=1, rowspan=1, columnspan=1, sticky='news')
self.hscroll.grid(padx=1, in_ = self, pady=1, row=1,
column=0, rowspan=1, columnspan=1, sticky='news')
self.reset()
self._rootwindow.bind('<Configure>', self.onResize)
def reset(self, canvwidth=None, canvheight=None, bg = None):
"""Adjust canvas and scrollbars according to given canvas size."""
if canvwidth:
self.canvwidth = canvwidth
if canvheight:
self.canvheight = canvheight
if bg:
self.bg = bg
self._canvas.config(bg=bg,
scrollregion=(-self.canvwidth//2, -self.canvheight//2,
self.canvwidth//2, self.canvheight//2))
self._canvas.xview_moveto(0.5*(self.canvwidth - self.width + 30) /
self.canvwidth)
self._canvas.yview_moveto(0.5*(self.canvheight- self.height + 30) /
self.canvheight)
self.adjustScrolls()
def adjustScrolls(self):
""" Adjust scrollbars according to window- and canvas-size.
"""
cwidth = self._canvas.winfo_width()
cheight = self._canvas.winfo_height()
self._canvas.xview_moveto(0.5*(self.canvwidth-cwidth)/self.canvwidth)
self._canvas.yview_moveto(0.5*(self.canvheight-cheight)/self.canvheight)
if cwidth < self.canvwidth or cheight < self.canvheight:
self.hscroll.grid(padx=1, in_ = self, pady=1, row=1,
column=0, rowspan=1, columnspan=1, sticky='news')
self.vscroll.grid(padx=1, in_ = self, pady=1, row=0,
column=1, rowspan=1, columnspan=1, sticky='news')
else:
self.hscroll.grid_forget()
self.vscroll.grid_forget()
def onResize(self, event):
"""self-explanatory"""
self.adjustScrolls()
def bbox(self, *args):
""" 'forward' method, which canvas itself has inherited...
"""
return self._canvas.bbox(*args)
def cget(self, *args, **kwargs):
""" 'forward' method, which canvas itself has inherited...
"""
return self._canvas.cget(*args, **kwargs)
def config(self, *args, **kwargs):
""" 'forward' method, which canvas itself has inherited...
"""
self._canvas.config(*args, **kwargs)
def bind(self, *args, **kwargs):
""" 'forward' method, which canvas itself has inherited...
"""
self._canvas.bind(*args, **kwargs)
def unbind(self, *args, **kwargs):
""" 'forward' method, which canvas itself has inherited...
"""
self._canvas.unbind(*args, **kwargs)
def focus_force(self):
""" 'forward' method, which canvas itself has inherited...
"""
self._canvas.focus_force()
__forwardmethods(ScrolledCanvas, TK.Canvas, '_canvas')
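# A hedged usage sketch for ScrolledCanvas on a plain Tk root; the window and
# canvas sizes are arbitrary, and the demo lives in a function so importing
# this module does not open a window.
def _scrolledcanvas_demo():
    root = TK.Tk()
    sc = ScrolledCanvas(root, width=400, height=300,
                        canvwidth=800, canvheight=600)
    sc.pack(expand=1, fill="both")
    # Enlarge the scrollable drawing area; adjustScrolls() shows scrollbars
    # automatically whenever the canvas exceeds the visible window.
    sc.reset(canvwidth=1000, canvheight=800, bg="ivory")
    root.mainloop()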
class _Root(TK.Tk):
"""Root class for Screen based on Tkinter."""
def __init__(self):
TK.Tk.__init__(self)
def setupcanvas(self, width, height, cwidth, cheight):
self._canvas = ScrolledCanvas(self, width, height, cwidth, cheight)
self._canvas.pack(expand=1, fill="both")
def _getcanvas(self):
return self._canvas
def set_geometry(self, width, height, startx, starty):
self.geometry("%dx%d%+d%+d"%(width, height, startx, starty))
def ondestroy(self, destroy):
self.wm_protocol("WM_DELETE_WINDOW", destroy)
def win_width(self):
return self.winfo_screenwidth()
def win_height(self):
return self.winfo_screenheight()
Canvas = TK.Canvas
class TurtleScreenBase(object):
"""Provide the basic graphics functionality.
Interface between Tkinter and turtle.py.
To port turtle.py to some different graphics toolkit
a corresponding TurtleScreenBase class has to be implemented.
"""
@staticmethod
def _blankimage():
"""return a blank image object
"""
img = TK.PhotoImage(width=1, height=1)
img.blank()
return img
@staticmethod
def _image(filename):
"""return an image object containing the
imagedata from a gif-file named filename.
"""
return TK.PhotoImage(file=filename)
def __init__(self, cv):
self.cv = cv
if isinstance(cv, ScrolledCanvas):
w = self.cv.canvwidth
h = self.cv.canvheight
else: # expected: ordinary TK.Canvas
w = int(self.cv.cget("width"))
h = int(self.cv.cget("height"))
self.cv.config(scrollregion = (-w//2, -h//2, w//2, h//2 ))
self.canvwidth = w
self.canvheight = h
self.xscale = self.yscale = 1.0
def _createpoly(self):
"""Create an invisible polygon item on canvas self.cv)
"""
return self.cv.create_polygon((0, 0, 0, 0, 0, 0), fill="", outline="")
def _drawpoly(self, polyitem, coordlist, fill=None,
outline=None, width=None, top=False):
"""Configure polygonitem polyitem according to provided
arguments:
coordlist is sequence of coordinates
fill is filling color
outline is outline color
top is a boolean value, which specifies if polyitem
will be put on top of the canvas' displaylist so it
will not be covered by other items.
"""
cl = []
for x, y in coordlist:
cl.append(x * self.xscale)
cl.append(-y * self.yscale)
self.cv.coords(polyitem, *cl)
if fill is not None:
self.cv.itemconfigure(polyitem, fill=fill)
if outline is not None:
self.cv.itemconfigure(polyitem, outline=outline)
if width is not None:
self.cv.itemconfigure(polyitem, width=width)
if top:
self.cv.tag_raise(polyitem)
def _createline(self):
"""Create an invisible line item on canvas self.cv)
"""
return self.cv.create_line(0, 0, 0, 0, fill="", width=2,
capstyle = TK.ROUND)
def _drawline(self, lineitem, coordlist=None,
fill=None, width=None, top=False):
"""Configure lineitem according to provided arguments:
coordlist is sequence of coordinates
fill is drawing color
width is width of drawn line.
top is a boolean value, which specifies if polyitem
will be put on top of the canvas' displaylist so it
will not be covered by other items.
"""
if coordlist is not None:
cl = []
for x, y in coordlist:
cl.append(x * self.xscale)
cl.append(-y * self.yscale)
self.cv.coords(lineitem, *cl)
if fill is not None:
self.cv.itemconfigure(lineitem, fill=fill)
if width is not None:
self.cv.itemconfigure(lineitem, width=width)
if top:
self.cv.tag_raise(lineitem)
def _delete(self, item):
"""Delete graphics item from canvas.
If item is"all" delete all graphics items.
"""
self.cv.delete(item)
def _update(self):
"""Redraw graphics items on canvas
"""
self.cv.update()
def _delay(self, delay):
"""Delay subsequent canvas actions for delay ms."""
self.cv.after(delay)
def _iscolorstring(self, color):
"""Check if the string color is a legal Tkinter color string.
"""
try:
rgb = self.cv.winfo_rgb(color)
ok = True
except TK.TclError:
ok = False
return ok
def _bgcolor(self, color=None):
"""Set canvas' backgroundcolor if color is not None,
else return backgroundcolor."""
if color is not None:
self.cv.config(bg = color)
self._update()
else:
return self.cv.cget("bg")
def _write(self, pos, txt, align, font, pencolor):
"""Write txt at pos in canvas with specified font
and color.
Return text item and x-coord of right bottom corner
of text's bounding box."""
x, y = pos
x = x * self.xscale
y = y * self.yscale
anchor = {"left":"sw", "center":"s", "right":"se" }
item = self.cv.create_text(x-1, -y, text = txt, anchor = anchor[align],
fill = pencolor, font = font)
x0, y0, x1, y1 = self.cv.bbox(item)
self.cv.update()
return item, x1-1
## def _dot(self, pos, size, color):
## """may be implemented for some other graphics toolkit"""
def _onclick(self, item, fun, num=1, add=None):
"""Bind fun to mouse-click event on turtle.
fun must be a function with two arguments, the coordinates
of the clicked point on the canvas.
num, the number of the mouse-button defaults to 1
"""
if fun is None:
self.cv.tag_unbind(item, "<Button-%s>" % num)
else:
def eventfun(event):
x, y = (self.cv.canvasx(event.x)/self.xscale,
-self.cv.canvasy(event.y)/self.yscale)
fun(x, y)
self.cv.tag_bind(item, "<Button-%s>" % num, eventfun, add)
def _onrelease(self, item, fun, num=1, add=None):
"""Bind fun to mouse-button-release event on turtle.
fun must be a function with two arguments, the coordinates
of the point on the canvas where mouse button is released.
num, the number of the mouse-button defaults to 1
If a turtle is clicked, first _onclick-event will be performed,
        then _onscreenclick-event.
"""
if fun is None:
self.cv.tag_unbind(item, "<Button%s-ButtonRelease>" % num)
else:
def eventfun(event):
x, y = (self.cv.canvasx(event.x)/self.xscale,
-self.cv.canvasy(event.y)/self.yscale)
fun(x, y)
self.cv.tag_bind(item, "<Button%s-ButtonRelease>" % num,
eventfun, add)
def _ondrag(self, item, fun, num=1, add=None):
"""Bind fun to mouse-move-event (with pressed mouse button) on turtle.
fun must be a function with two arguments, the coordinates of the
actual mouse position on the canvas.
num, the number of the mouse-button defaults to 1
Every sequence of mouse-move-events on a turtle is preceded by a
mouse-click event on that turtle.
"""
if fun is None:
self.cv.tag_unbind(item, "<Button%s-Motion>" % num)
else:
def eventfun(event):
try:
x, y = (self.cv.canvasx(event.x)/self.xscale,
-self.cv.canvasy(event.y)/self.yscale)
fun(x, y)
except:
pass
self.cv.tag_bind(item, "<Button%s-Motion>" % num, eventfun, add)
def _onscreenclick(self, fun, num=1, add=None):
"""Bind fun to mouse-click event on canvas.
fun must be a function with two arguments, the coordinates
of the clicked point on the canvas.
num, the number of the mouse-button defaults to 1
If a turtle is clicked, first _onclick-event will be performed,
        then _onscreenclick-event.
"""
if fun is None:
self.cv.unbind("<Button-%s>" % num)
else:
def eventfun(event):
x, y = (self.cv.canvasx(event.x)/self.xscale,
-self.cv.canvasy(event.y)/self.yscale)
fun(x, y)
self.cv.bind("<Button-%s>" % num, eventfun, add)
def _onkeyrelease(self, fun, key):
"""Bind fun to key-release event of key.
Canvas must have focus. See method listen
"""
if fun is None:
self.cv.unbind("<KeyRelease-%s>" % key, None)
else:
def eventfun(event):
fun()
self.cv.bind("<KeyRelease-%s>" % key, eventfun)
def _onkeypress(self, fun, key=None):
"""If key is given, bind fun to key-press event of key.
Otherwise bind fun to any key-press.
Canvas must have focus. See method listen.
"""
if fun is None:
if key is None:
self.cv.unbind("<KeyPress>", None)
else:
self.cv.unbind("<KeyPress-%s>" % key, None)
else:
def eventfun(event):
fun()
if key is None:
self.cv.bind("<KeyPress>", eventfun)
else:
self.cv.bind("<KeyPress-%s>" % key, eventfun)
def _listen(self):
"""Set focus on canvas (in order to collect key-events)
"""
self.cv.focus_force()
def _ontimer(self, fun, t):
"""Install a timer, which calls fun after t milliseconds.
"""
if t == 0:
self.cv.after_idle(fun)
else:
self.cv.after(t, fun)
def _createimage(self, image):
"""Create and return image item on canvas.
"""
return self.cv.create_image(0, 0, image=image)
def _drawimage(self, item, pos, image):
"""Configure image item as to draw image object
        at position (x,y) on canvas.
"""
x, y = pos
self.cv.coords(item, (x * self.xscale, -y * self.yscale))
self.cv.itemconfig(item, image=image)
def _setbgpic(self, item, image):
"""Configure image item as to draw image object
at center of canvas. Set item to the first item
in the displaylist, so it will be drawn below
        any other item."""
self.cv.itemconfig(item, image=image)
self.cv.tag_lower(item)
def _type(self, item):
"""Return 'line' or 'polygon' or 'image' depending on
type of item.
"""
return self.cv.type(item)
def _pointlist(self, item):
"""returns list of coordinate-pairs of points of item
Example (for insiders):
>>> from turtle import *
>>> getscreen()._pointlist(getturtle().turtle._item)
[(0.0, 9.9999999999999982), (0.0, -9.9999999999999982),
(9.9999999999999982, 0.0)]
>>> """
cl = self.cv.coords(item)
pl = [(cl[i], -cl[i+1]) for i in range(0, len(cl), 2)]
return pl
def _setscrollregion(self, srx1, sry1, srx2, sry2):
self.cv.config(scrollregion=(srx1, sry1, srx2, sry2))
def _rescale(self, xscalefactor, yscalefactor):
items = self.cv.find_all()
for item in items:
coordinates = list(self.cv.coords(item))
newcoordlist = []
while coordinates:
x, y = coordinates[:2]
newcoordlist.append(x * xscalefactor)
newcoordlist.append(y * yscalefactor)
coordinates = coordinates[2:]
self.cv.coords(item, *newcoordlist)
def _resize(self, canvwidth=None, canvheight=None, bg=None):
"""Resize the canvas the turtles are drawing on. Does
not alter the drawing window.
"""
# needs amendment
if not isinstance(self.cv, ScrolledCanvas):
return self.canvwidth, self.canvheight
if canvwidth is canvheight is bg is None:
return self.cv.canvwidth, self.cv.canvheight
if canvwidth is not None:
self.canvwidth = canvwidth
if canvheight is not None:
self.canvheight = canvheight
self.cv.reset(canvwidth, canvheight, bg)
def _window_size(self):
""" Return the width and height of the turtle window.
"""
width = self.cv.winfo_width()
if width <= 1: # the window isn't managed by a geometry manager
width = self.cv['width']
height = self.cv.winfo_height()
if height <= 1: # the window isn't managed by a geometry manager
height = self.cv['height']
return width, height
def mainloop(self):
"""Starts event loop - calling Tkinter's mainloop function.
No argument.
Must be last statement in a turtle graphics program.
Must NOT be used if a script is run from within IDLE in -n mode
(No subprocess) - for interactive use of turtle graphics.
Example (for a TurtleScreen instance named screen):
>>> screen.mainloop()
"""
TK.mainloop()
def textinput(self, title, prompt):
"""Pop up a dialog window for input of a string.
Arguments: title is the title of the dialog window,
prompt is a text mostly describing what information to input.
Return the string input
If the dialog is canceled, return None.
Example (for a TurtleScreen instance named screen):
>>> screen.textinput("NIM", "Name of first player:")
"""
return simpledialog.askstring(title, prompt)
def numinput(self, title, prompt, default=None, minval=None, maxval=None):
"""Pop up a dialog window for input of a number.
Arguments: title is the title of the dialog window,
prompt is a text mostly describing what numerical information to input.
default: default value
        minval: minimum value for input
maxval: maximum value for input
The number input must be in the range minval .. maxval if these are
given. If not, a hint is issued and the dialog remains open for
correction. Return the number input.
If the dialog is canceled, return None.
Example (for a TurtleScreen instance named screen):
>>> screen.numinput("Poker", "Your stakes:", 1000, minval=10, maxval=10000)
"""
return simpledialog.askfloat(title, prompt, initialvalue=default,
minvalue=minval, maxvalue=maxval)
##############################################################################
### End of Tkinter - interface ###
##############################################################################
class Terminator (Exception):
"""Will be raised in TurtleScreen.update, if _RUNNING becomes False.
    Thus stops execution of turtle graphics script. Main purpose: use
    in the Demo-Viewer turtle.Demo.py.
"""
pass
class TurtleGraphicsError(Exception):
"""Some TurtleGraphics Error
"""
class Shape(object):
"""Data structure modeling shapes.
attribute _type is one of "polygon", "image", "compound"
    attribute _data is - depending on _type - a polygon-tuple,
an image or a list constructed using the addcomponent method.
"""
def __init__(self, type_, data=None):
self._type = type_
if type_ == "polygon":
if isinstance(data, list):
data = tuple(data)
elif type_ == "image":
if isinstance(data, str):
if data.lower().endswith(".gif") and isfile(data):
data = TurtleScreen._image(data)
# else data assumed to be Photoimage
elif type_ == "compound":
data = []
else:
raise TurtleGraphicsError("There is no shape type %s" % type_)
self._data = data
def addcomponent(self, poly, fill, outline=None):
"""Add component to a shape of type compound.
Arguments: poly is a polygon, i. e. a tuple of number pairs.
fill is the fillcolor of the component,
outline is the outline color of the component.
        call (for a Shape object named s):
-- s.addcomponent(((0,0), (10,10), (-10,10)), "red", "blue")
Example:
>>> poly = ((0,0),(10,-5),(0,10),(-10,-5))
>>> s = Shape("compound")
>>> s.addcomponent(poly, "red", "blue")
>>> # .. add more components and then use register_shape()
"""
if self._type != "compound":
raise TurtleGraphicsError("Cannot add component to %s Shape"
% self._type)
if outline is None:
outline = fill
self._data.append([poly, fill, outline])
class Tbuffer(object):
"""Ring buffer used as undobuffer for RawTurtle objects."""
def __init__(self, bufsize=10):
self.bufsize = bufsize
self.buffer = [[None]] * bufsize
self.ptr = -1
self.cumulate = False
def reset(self, bufsize=None):
if bufsize is None:
for i in range(self.bufsize):
self.buffer[i] = [None]
else:
self.bufsize = bufsize
self.buffer = [[None]] * bufsize
self.ptr = -1
def push(self, item):
if self.bufsize > 0:
if not self.cumulate:
self.ptr = (self.ptr + 1) % self.bufsize
self.buffer[self.ptr] = item
else:
self.buffer[self.ptr].append(item)
def pop(self):
if self.bufsize > 0:
item = self.buffer[self.ptr]
if item is None:
return None
else:
self.buffer[self.ptr] = [None]
self.ptr = (self.ptr - 1) % self.bufsize
return (item)
def nr_of_items(self):
return self.bufsize - self.buffer.count([None])
def __repr__(self):
return str(self.buffer) + " " + str(self.ptr)
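# A minimal usage sketch of the ring buffer above (illustrative only, not
# part of the module's public API). With bufsize=3, a fourth push wraps
# around and overwrites the oldest slot, and pop() empties slots in LIFO
# order:
#
#     buf = Tbuffer(3)
#     for entry in "abcd":
#         buf.push(entry)
#     buf.pop()           # returns 'd'
#     buf.pop()           # returns 'c'
#     buf.nr_of_items()   # 1 -- only 'b' is left; 'a' was overwritten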
class TurtleScreen(TurtleScreenBase):
"""Provides screen oriented methods like setbg etc.
Only relies upon the methods of TurtleScreenBase and NOT
upon components of the underlying graphics toolkit -
which is Tkinter in this case.
"""
_RUNNING = True
def __init__(self, cv, mode=_CFG["mode"],
colormode=_CFG["colormode"], delay=_CFG["delay"]):
self._shapes = {
"arrow" : Shape("polygon", ((-10,0), (10,0), (0,10))),
"turtle" : Shape("polygon", ((0,16), (-2,14), (-1,10), (-4,7),
(-7,9), (-9,8), (-6,5), (-7,1), (-5,-3), (-8,-6),
(-6,-8), (-4,-5), (0,-7), (4,-5), (6,-8), (8,-6),
(5,-3), (7,1), (6,5), (9,8), (7,9), (4,7), (1,10),
(2,14))),
"circle" : Shape("polygon", ((10,0), (9.51,3.09), (8.09,5.88),
(5.88,8.09), (3.09,9.51), (0,10), (-3.09,9.51),
(-5.88,8.09), (-8.09,5.88), (-9.51,3.09), (-10,0),
(-9.51,-3.09), (-8.09,-5.88), (-5.88,-8.09),
(-3.09,-9.51), (-0.00,-10.00), (3.09,-9.51),
(5.88,-8.09), (8.09,-5.88), (9.51,-3.09))),
"square" : Shape("polygon", ((10,-10), (10,10), (-10,10),
(-10,-10))),
"triangle" : Shape("polygon", ((10,-5.77), (0,11.55),
(-10,-5.77))),
"classic": Shape("polygon", ((0,0),(-5,-9),(0,-7),(5,-9))),
"blank" : Shape("image", self._blankimage())
}
self._bgpics = {"nopic" : ""}
TurtleScreenBase.__init__(self, cv)
self._mode = mode
self._delayvalue = delay
self._colormode = _CFG["colormode"]
self._keys = []
self.clear()
def clear(self):
"""Delete all drawings and all turtles from the TurtleScreen.
No argument.
Reset empty TurtleScreen to its initial state: white background,
no backgroundimage, no eventbindings and tracing on.
Example (for a TurtleScreen instance named screen):
>>> screen.clear()
Note: this method is not available as function.
"""
self._delayvalue = _CFG["delay"]
self._colormode = _CFG["colormode"]
self._delete("all")
self._bgpic = self._createimage("")
self._bgpicname = "nopic"
self._tracing = 1
self._updatecounter = 0
self._turtles = []
self.bgcolor("white")
for btn in 1, 2, 3:
self.onclick(None, btn)
self.onkeypress(None)
for key in self._keys[:]:
self.onkey(None, key)
self.onkeypress(None, key)
Turtle._pen = None
def mode(self, mode=None):
"""Set turtle-mode ('standard', 'logo' or 'world') and perform reset.
Optional argument:
        mode -- one of the strings 'standard', 'logo' or 'world'
Mode 'standard' is compatible with turtle.py.
Mode 'logo' is compatible with most Logo-Turtle-Graphics.
Mode 'world' uses userdefined 'worldcoordinates'. *Attention*: in
this mode angles appear distorted if x/y unit-ratio doesn't equal 1.
If mode is not given, return the current mode.
             Mode      Initial turtle heading     positive angles
         ------------|-------------------------|-------------------
          'standard'    to the right (east)       counterclockwise
            'logo'       upward (north)           clockwise
Examples:
>>> mode('logo') # resets turtle heading to north
>>> mode()
'logo'
"""
if mode is None:
return self._mode
mode = mode.lower()
if mode not in ["standard", "logo", "world"]:
raise TurtleGraphicsError("No turtle-graphics-mode %s" % mode)
self._mode = mode
if mode in ["standard", "logo"]:
self._setscrollregion(-self.canvwidth//2, -self.canvheight//2,
self.canvwidth//2, self.canvheight//2)
self.xscale = self.yscale = 1.0
self.reset()
def setworldcoordinates(self, llx, lly, urx, ury):
"""Set up a user defined coordinate-system.
Arguments:
llx -- a number, x-coordinate of lower left corner of canvas
lly -- a number, y-coordinate of lower left corner of canvas
urx -- a number, x-coordinate of upper right corner of canvas
ury -- a number, y-coordinate of upper right corner of canvas
        Set up a user-defined coordinate system and switch to mode 'world'
        if necessary.
This performs a screen.reset. If mode 'world' is already active,
all drawings are redrawn according to the new coordinates.
        But ATTENTION: in user-defined coordinate systems angles may appear
distorted. (see Screen.mode())
Example (for a TurtleScreen instance named screen):
>>> screen.setworldcoordinates(-10,-0.5,50,1.5)
>>> for _ in range(36):
... left(10)
... forward(0.5)
"""
if self.mode() != "world":
self.mode("world")
xspan = float(urx - llx)
yspan = float(ury - lly)
wx, wy = self._window_size()
self.screensize(wx-20, wy-20)
oldxscale, oldyscale = self.xscale, self.yscale
self.xscale = self.canvwidth / xspan
self.yscale = self.canvheight / yspan
srx1 = llx * self.xscale
sry1 = -ury * self.yscale
srx2 = self.canvwidth + srx1
sry2 = self.canvheight + sry1
self._setscrollregion(srx1, sry1, srx2, sry2)
self._rescale(self.xscale/oldxscale, self.yscale/oldyscale)
self.update()
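    # Worked scaling example (illustrative sketch; the window size is an
    # assumption): if the usable canvas ends up 400x300 pixels and the call
    # is setworldcoordinates(-10, -0.5, 50, 1.5), then xspan = 60 and
    # yspan = 2, so
    #
    #     xscale = 400 / 60  ~ 6.67 pixels per user x-unit
    #     yscale = 300 / 2   = 150  pixels per user y-unit
    #
    # and, since xscale != yscale, angles will appear distorted in this
    # world coordinate system.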
def register_shape(self, name, shape=None):
"""Adds a turtle shape to TurtleScreen's shapelist.
Arguments:
(1) name is the name of a gif-file and shape is None.
Installs the corresponding image shape.
!! Image-shapes DO NOT rotate when turning the turtle,
!! so they do not display the heading of the turtle!
(2) name is an arbitrary string and shape is a tuple
of pairs of coordinates. Installs the corresponding
polygon shape
(3) name is an arbitrary string and shape is a
(compound) Shape object. Installs the corresponding
compound shape.
To use a shape, you have to issue the command shape(shapename).
call: register_shape("turtle.gif")
--or: register_shape("tri", ((0,0), (10,10), (-10,10)))
Example (for a TurtleScreen instance named screen):
>>> screen.register_shape("triangle", ((5,-3),(0,5),(-5,-3)))
"""
if shape is None:
# image
if name.lower().endswith(".gif"):
shape = Shape("image", self._image(name))
else:
raise TurtleGraphicsError("Bad arguments for register_shape.\n"
+ "Use help(register_shape)" )
elif isinstance(shape, tuple):
shape = Shape("polygon", shape)
## else shape assumed to be Shape-instance
self._shapes[name] = shape
def _colorstr(self, color):
"""Return color string corresponding to args.
Argument may be a string or a tuple of three
numbers corresponding to actual colormode,
i.e. in the range 0<=n<=colormode.
If the argument doesn't represent a color,
an error is raised.
"""
if len(color) == 1:
color = color[0]
if isinstance(color, str):
if self._iscolorstring(color) or color == "":
return color
else:
raise TurtleGraphicsError("bad color string: %s" % str(color))
try:
r, g, b = color
except:
raise TurtleGraphicsError("bad color arguments: %s" % str(color))
if self._colormode == 1.0:
r, g, b = [round(255.0*x) for x in (r, g, b)]
if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)):
raise TurtleGraphicsError("bad color sequence: %s" % str(color))
return "#%02x%02x%02x" % (r, g, b)
def _color(self, cstr):
if not cstr.startswith("#"):
return cstr
if len(cstr) == 7:
cl = [int(cstr[i:i+2], 16) for i in (1, 3, 5)]
elif len(cstr) == 4:
cl = [16*int(cstr[h], 16) for h in cstr[1:]]
else:
raise TurtleGraphicsError("bad colorstring: %s" % cstr)
return tuple([c * self._colormode/255 for c in cl])
def colormode(self, cmode=None):
"""Return the colormode or set it to 1.0 or 255.
Optional argument:
cmode -- one of the values 1.0 or 255
r, g, b values of colortriples have to be in range 0..cmode.
Example (for a TurtleScreen instance named screen):
>>> screen.colormode()
1.0
>>> screen.colormode(255)
>>> pencolor(240,160,80)
"""
if cmode is None:
return self._colormode
if cmode == 1.0:
self._colormode = float(cmode)
elif cmode == 255:
self._colormode = int(cmode)
def reset(self):
"""Reset all Turtles on the Screen to their initial state.
No argument.
Example (for a TurtleScreen instance named screen):
>>> screen.reset()
"""
for turtle in self._turtles:
turtle._setmode(self._mode)
turtle.reset()
def turtles(self):
"""Return the list of turtles on the screen.
Example (for a TurtleScreen instance named screen):
>>> screen.turtles()
[<turtle.Turtle object at 0x00E11FB0>]
"""
return self._turtles
def bgcolor(self, *args):
"""Set or return backgroundcolor of the TurtleScreen.
Arguments (if given): a color string or three numbers
in the range 0..colormode or a 3-tuple of such numbers.
Example (for a TurtleScreen instance named screen):
>>> screen.bgcolor("orange")
>>> screen.bgcolor()
'orange'
>>> screen.bgcolor(0.5,0,0.5)
>>> screen.bgcolor()
'#800080'
"""
if args:
color = self._colorstr(args)
else:
color = None
color = self._bgcolor(color)
if color is not None:
color = self._color(color)
return color
def tracer(self, n=None, delay=None):
"""Turns turtle animation on/off and set delay for update drawings.
Optional arguments:
n -- nonnegative integer
delay -- nonnegative integer
If n is given, only each n-th regular screen update is really performed.
(Can be used to accelerate the drawing of complex graphics.)
        Second argument sets delay value (see RawTurtle.delay())
Example (for a TurtleScreen instance named screen):
>>> screen.tracer(8, 25)
>>> dist = 2
>>> for i in range(200):
... fd(dist)
... rt(90)
... dist += 2
"""
if n is None:
return self._tracing
self._tracing = int(n)
self._updatecounter = 0
if delay is not None:
self._delayvalue = int(delay)
if self._tracing:
self.update()
def delay(self, delay=None):
""" Return or set the drawing delay in milliseconds.
Optional argument:
delay -- positive integer
Example (for a TurtleScreen instance named screen):
>>> screen.delay(15)
>>> screen.delay()
15
"""
if delay is None:
return self._delayvalue
self._delayvalue = int(delay)
def _incrementudc(self):
"""Increment upadate counter."""
if not TurtleScreen._RUNNING:
TurtleScreen._RUNNNING = True
raise Terminator
if self._tracing > 0:
self._updatecounter += 1
self._updatecounter %= self._tracing
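    # Counter semantics sketch (illustrative): with tracer(4), the counter
    # cycles 1, 2, 3, 0, 1, ... on successive calls, and callers such as
    # _update_data() only really redraw when it has wrapped to 0, i.e. on
    # every 4th regular update:
    #
    #     screen.tracer(4)
    #     # _updatecounter after successive _incrementudc() calls:
    #     # 1, 2, 3, 0, 1, 2, 3, 0, ...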
def update(self):
"""Perform a TurtleScreen update.
"""
tracing = self._tracing
self._tracing = True
for t in self.turtles():
t._update_data()
t._drawturtle()
self._tracing = tracing
self._update()
def window_width(self):
""" Return the width of the turtle window.
Example (for a TurtleScreen instance named screen):
>>> screen.window_width()
640
"""
return self._window_size()[0]
def window_height(self):
""" Return the height of the turtle window.
Example (for a TurtleScreen instance named screen):
>>> screen.window_height()
480
"""
return self._window_size()[1]
def getcanvas(self):
"""Return the Canvas of this TurtleScreen.
No argument.
Example (for a Screen instance named screen):
>>> cv = screen.getcanvas()
>>> cv
<turtle.ScrolledCanvas instance at 0x010742D8>
"""
return self.cv
def getshapes(self):
"""Return a list of names of all currently available turtle shapes.
No argument.
Example (for a TurtleScreen instance named screen):
>>> screen.getshapes()
['arrow', 'blank', 'circle', ... , 'turtle']
"""
return sorted(self._shapes.keys())
def onclick(self, fun, btn=1, add=None):
"""Bind fun to mouse-click event on canvas.
Arguments:
fun -- a function with two arguments, the coordinates of the
clicked point on the canvas.
        btn -- the number of the mouse-button, defaults to 1
Example (for a TurtleScreen instance named screen)
>>> screen.onclick(goto)
>>> # Subsequently clicking into the TurtleScreen will
>>> # make the turtle move to the clicked point.
>>> screen.onclick(None)
"""
self._onscreenclick(fun, btn, add)
def onkey(self, fun, key):
"""Bind fun to key-release event of key.
Arguments:
fun -- a function with no arguments
key -- a string: key (e.g. "a") or key-symbol (e.g. "space")
In order to be able to register key-events, TurtleScreen
must have focus. (See method listen.)
Example (for a TurtleScreen instance named screen):
>>> def f():
... fd(50)
... lt(60)
...
>>> screen.onkey(f, "Up")
>>> screen.listen()
Subsequently the turtle can be moved by repeatedly pressing
        the up-arrow key, consequently drawing a hexagon.
"""
if fun is None:
if key in self._keys:
self._keys.remove(key)
elif key not in self._keys:
self._keys.append(key)
self._onkeyrelease(fun, key)
def onkeypress(self, fun, key=None):
"""Bind fun to key-press event of key if key is given,
or to any key-press-event if no key is given.
Arguments:
fun -- a function with no arguments
key -- a string: key (e.g. "a") or key-symbol (e.g. "space")
In order to be able to register key-events, TurtleScreen
must have focus. (See method listen.)
Example (for a TurtleScreen instance named screen
and a Turtle instance named turtle):
>>> def f():
... fd(50)
... lt(60)
...
>>> screen.onkeypress(f, "Up")
>>> screen.listen()
Subsequently the turtle can be moved by repeatedly pressing
        the up-arrow key, or by keeping the up-arrow key pressed,
        consequently drawing a hexagon.
"""
if fun is None:
if key in self._keys:
self._keys.remove(key)
elif key is not None and key not in self._keys:
self._keys.append(key)
self._onkeypress(fun, key)
def listen(self, xdummy=None, ydummy=None):
"""Set focus on TurtleScreen (in order to collect key-events)
No arguments.
Dummy arguments are provided in order
to be able to pass listen to the onclick method.
Example (for a TurtleScreen instance named screen):
>>> screen.listen()
"""
self._listen()
def ontimer(self, fun, t=0):
"""Install a timer, which calls fun after t milliseconds.
Arguments:
fun -- a function with no arguments.
t -- a number >= 0
Example (for a TurtleScreen instance named screen):
>>> running = True
>>> def f():
... if running:
... fd(50)
... lt(60)
... screen.ontimer(f, 250)
...
        >>> f()   # makes the turtle march around
>>> running = False
"""
self._ontimer(fun, t)
def bgpic(self, picname=None):
"""Set background image or return name of current backgroundimage.
Optional argument:
picname -- a string, name of a gif-file or "nopic".
If picname is a filename, set the corresponding image as background.
If picname is "nopic", delete backgroundimage, if present.
If picname is None, return the filename of the current backgroundimage.
Example (for a TurtleScreen instance named screen):
>>> screen.bgpic()
'nopic'
>>> screen.bgpic("landscape.gif")
>>> screen.bgpic()
'landscape.gif'
"""
if picname is None:
return self._bgpicname
if picname not in self._bgpics:
self._bgpics[picname] = self._image(picname)
self._setbgpic(self._bgpic, self._bgpics[picname])
self._bgpicname = picname
def screensize(self, canvwidth=None, canvheight=None, bg=None):
"""Resize the canvas the turtles are drawing on.
Optional arguments:
canvwidth -- positive integer, new width of canvas in pixels
canvheight -- positive integer, new height of canvas in pixels
bg -- colorstring or color-tuple, new backgroundcolor
If no arguments are given, return current (canvaswidth, canvasheight)
Do not alter the drawing window. To observe hidden parts of
the canvas use the scrollbars. (Can make visible those parts
of a drawing, which were outside the canvas before!)
Example (for a Turtle instance named turtle):
>>> turtle.screensize(2000,1500)
>>> # e.g. to search for an erroneously escaped turtle ;-)
"""
return self._resize(canvwidth, canvheight, bg)
onscreenclick = onclick
resetscreen = reset
clearscreen = clear
addshape = register_shape
onkeyrelease = onkey
class TNavigator(object):
"""Navigation part of the RawTurtle.
Implements methods for turtle movement.
"""
START_ORIENTATION = {
"standard": Vec2D(1.0, 0.0),
"world" : Vec2D(1.0, 0.0),
"logo" : Vec2D(0.0, 1.0) }
DEFAULT_MODE = "standard"
DEFAULT_ANGLEOFFSET = 0
DEFAULT_ANGLEORIENT = 1
def __init__(self, mode=DEFAULT_MODE):
self._angleOffset = self.DEFAULT_ANGLEOFFSET
self._angleOrient = self.DEFAULT_ANGLEORIENT
self._mode = mode
self.undobuffer = None
self.degrees()
self._mode = None
self._setmode(mode)
TNavigator.reset(self)
def reset(self):
"""reset turtle to its initial values
Will be overwritten by parent class
"""
self._position = Vec2D(0.0, 0.0)
self._orient = TNavigator.START_ORIENTATION[self._mode]
def _setmode(self, mode=None):
"""Set turtle-mode to 'standard', 'world' or 'logo'.
"""
if mode is None:
return self._mode
if mode not in ["standard", "logo", "world"]:
return
self._mode = mode
if mode in ["standard", "world"]:
self._angleOffset = 0
self._angleOrient = 1
else: # mode == "logo":
self._angleOffset = self._fullcircle/4.
self._angleOrient = -1
def _setDegreesPerAU(self, fullcircle):
"""Helper function for degrees() and radians()"""
self._fullcircle = fullcircle
self._degreesPerAU = 360/fullcircle
if self._mode == "standard":
self._angleOffset = 0
else:
self._angleOffset = fullcircle/4.
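    # Unit sketch (illustrative): angles are stored internally in "angle
    # units" (AU), and _degreesPerAU converts them to real degrees for the
    # rotation math. E.g. after degrees(400.0) -- gradians --
    #
    #     _degreesPerAU = 360 / 400 = 0.9
    #
    # so left(100) rotates the turtle by 100 * 0.9 = 90 real degrees,
    # i.e. a quarter turn, as expected for 100 gon.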
def degrees(self, fullcircle=360.0):
""" Set angle measurement units to degrees.
Optional argument:
fullcircle - a number
Set angle measurement units, i. e. set number
        of 'degrees' for a full circle. Default value is
360 degrees.
Example (for a Turtle instance named turtle):
>>> turtle.left(90)
>>> turtle.heading()
90
Change angle measurement unit to grad (also known as gon,
grade, or gradian and equals 1/100-th of the right angle.)
>>> turtle.degrees(400.0)
>>> turtle.heading()
100
"""
self._setDegreesPerAU(fullcircle)
def radians(self):
""" Set the angle measurement units to radians.
No arguments.
Example (for a Turtle instance named turtle):
>>> turtle.heading()
90
>>> turtle.radians()
>>> turtle.heading()
1.5707963267948966
"""
self._setDegreesPerAU(2*math.pi)
def _go(self, distance):
"""move turtle forward by specified distance"""
ende = self._position + self._orient * distance
self._goto(ende)
def _rotate(self, angle):
"""Turn turtle counterclockwise by specified angle if angle > 0."""
angle *= self._degreesPerAU
self._orient = self._orient.rotate(angle)
def _goto(self, end):
"""move turtle to position end."""
self._position = end
def forward(self, distance):
"""Move the turtle forward by the specified distance.
Aliases: forward | fd
Argument:
distance -- a number (integer or float)
Move the turtle forward by the specified distance, in the direction
the turtle is headed.
Example (for a Turtle instance named turtle):
>>> turtle.position()
(0.00, 0.00)
>>> turtle.forward(25)
>>> turtle.position()
(25.00,0.00)
>>> turtle.forward(-75)
>>> turtle.position()
(-50.00,0.00)
"""
self._go(distance)
def back(self, distance):
"""Move the turtle backward by distance.
Aliases: back | backward | bk
Argument:
distance -- a number
        Move the turtle backward by distance, opposite to the direction the
turtle is headed. Do not change the turtle's heading.
Example (for a Turtle instance named turtle):
>>> turtle.position()
(0.00, 0.00)
>>> turtle.backward(30)
>>> turtle.position()
(-30.00, 0.00)
"""
self._go(-distance)
def right(self, angle):
"""Turn turtle right by angle units.
Aliases: right | rt
Argument:
angle -- a number (integer or float)
Turn turtle right by angle units. (Units are by default degrees,
but can be set via the degrees() and radians() functions.)
        Angle orientation depends on mode. (See mode().)
Example (for a Turtle instance named turtle):
>>> turtle.heading()
22.0
>>> turtle.right(45)
>>> turtle.heading()
337.0
"""
self._rotate(-angle)
def left(self, angle):
"""Turn turtle left by angle units.
Aliases: left | lt
Argument:
angle -- a number (integer or float)
Turn turtle left by angle units. (Units are by default degrees,
but can be set via the degrees() and radians() functions.)
        Angle orientation depends on mode. (See mode().)
Example (for a Turtle instance named turtle):
>>> turtle.heading()
22.0
>>> turtle.left(45)
>>> turtle.heading()
67.0
"""
self._rotate(angle)
def pos(self):
"""Return the turtle's current location (x,y), as a Vec2D-vector.
Aliases: pos | position
No arguments.
Example (for a Turtle instance named turtle):
>>> turtle.pos()
(0.00, 240.00)
"""
return self._position
def xcor(self):
""" Return the turtle's x coordinate.
No arguments.
Example (for a Turtle instance named turtle):
>>> reset()
>>> turtle.left(60)
>>> turtle.forward(100)
        >>> print(turtle.xcor())
50.0
"""
return self._position[0]
def ycor(self):
""" Return the turtle's y coordinate
---
No arguments.
Example (for a Turtle instance named turtle):
>>> reset()
>>> turtle.left(60)
>>> turtle.forward(100)
        >>> print(turtle.ycor())
86.6025403784
"""
return self._position[1]
def goto(self, x, y=None):
"""Move turtle to an absolute position.
        Aliases: setpos | setposition | goto
Arguments:
x -- a number or a pair/vector of numbers
        y -- a number or None
call: goto(x, y) # two coordinates
--or: goto((x, y)) # a pair (tuple) of coordinates
--or: goto(vec) # e.g. as returned by pos()
Move turtle to an absolute position. If the pen is down,
a line will be drawn. The turtle's orientation does not change.
Example (for a Turtle instance named turtle):
>>> tp = turtle.pos()
>>> tp
(0.00, 0.00)
>>> turtle.setpos(60,30)
>>> turtle.pos()
(60.00,30.00)
>>> turtle.setpos((20,80))
>>> turtle.pos()
(20.00,80.00)
>>> turtle.setpos(tp)
>>> turtle.pos()
(0.00,0.00)
"""
if y is None:
self._goto(Vec2D(*x))
else:
self._goto(Vec2D(x, y))
def home(self):
"""Move turtle to the origin - coordinates (0,0).
No arguments.
Move turtle to the origin - coordinates (0,0) and set its
heading to its start-orientation (which depends on mode).
Example (for a Turtle instance named turtle):
>>> turtle.home()
"""
self.goto(0, 0)
self.setheading(0)
def setx(self, x):
"""Set the turtle's first coordinate to x
Argument:
x -- a number (integer or float)
Set the turtle's first coordinate to x, leave second coordinate
unchanged.
Example (for a Turtle instance named turtle):
>>> turtle.position()
(0.00, 240.00)
>>> turtle.setx(10)
>>> turtle.position()
(10.00, 240.00)
"""
self._goto(Vec2D(x, self._position[1]))
def sety(self, y):
"""Set the turtle's second coordinate to y
Argument:
y -- a number (integer or float)
        Set the turtle's second coordinate to y, first coordinate remains
unchanged.
Example (for a Turtle instance named turtle):
>>> turtle.position()
(0.00, 40.00)
>>> turtle.sety(-10)
>>> turtle.position()
(0.00, -10.00)
"""
self._goto(Vec2D(self._position[0], y))
def distance(self, x, y=None):
"""Return the distance from the turtle to (x,y) in turtle step units.
Arguments:
x -- a number or a pair/vector of numbers or a turtle instance
        y -- a number or None
call: distance(x, y) # two coordinates
--or: distance((x, y)) # a pair (tuple) of coordinates
--or: distance(vec) # e.g. as returned by pos()
--or: distance(mypen) # where mypen is another turtle
Example (for a Turtle instance named turtle):
>>> turtle.pos()
(0.00, 0.00)
>>> turtle.distance(30,40)
50.0
>>> pen = Turtle()
>>> pen.forward(77)
>>> turtle.distance(pen)
77.0
"""
if y is not None:
pos = Vec2D(x, y)
if isinstance(x, Vec2D):
pos = x
elif isinstance(x, tuple):
pos = Vec2D(*x)
elif isinstance(x, TNavigator):
pos = x._position
return abs(pos - self._position)
def towards(self, x, y=None):
"""Return the angle of the line from the turtle's position to (x, y).
Arguments:
x -- a number or a pair/vector of numbers or a turtle instance
        y -- a number or None
        call: towards(x, y)         # two coordinates
        --or: towards((x, y))       # a pair (tuple) of coordinates
        --or: towards(vec)          # e.g. as returned by pos()
        --or: towards(mypen)        # where mypen is another turtle
        Return the angle between the line from turtle-position to position
specified by x, y and the turtle's start orientation. (Depends on
modes - "standard" or "logo")
Example (for a Turtle instance named turtle):
>>> turtle.pos()
(10.00, 10.00)
>>> turtle.towards(0,0)
225.0
"""
if y is not None:
pos = Vec2D(x, y)
if isinstance(x, Vec2D):
pos = x
elif isinstance(x, tuple):
pos = Vec2D(*x)
elif isinstance(x, TNavigator):
pos = x._position
x, y = pos - self._position
result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0
result /= self._degreesPerAU
return (self._angleOffset + self._angleOrient*result) % self._fullcircle
def heading(self):
""" Return the turtle's current heading.
No arguments.
Example (for a Turtle instance named turtle):
>>> turtle.left(67)
>>> turtle.heading()
67.0
"""
x, y = self._orient
result = round(math.atan2(y, x)*180.0/math.pi, 10) % 360.0
result /= self._degreesPerAU
return (self._angleOffset + self._angleOrient*result) % self._fullcircle
def setheading(self, to_angle):
"""Set the orientation of the turtle to to_angle.
Aliases: setheading | seth
Argument:
to_angle -- a number (integer or float)
Set the orientation of the turtle to to_angle.
Here are some common directions in degrees:
         standard - mode:          logo-mode:
        -------------------|--------------------
           0 - east                0 - north
          90 - north              90 - east
         180 - west              180 - south
         270 - south             270 - west
Example (for a Turtle instance named turtle):
>>> turtle.setheading(90)
>>> turtle.heading()
90
"""
angle = (to_angle - self.heading())*self._angleOrient
full = self._fullcircle
angle = (angle+full/2.)%full - full/2.
self._rotate(angle)
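    # Normalization sketch (illustrative): the expression
    # (angle + full/2.) % full - full/2. folds the requested turn into the
    # range [-full/2, full/2), so the turtle always takes the shorter
    # rotation. E.g. in degrees, going from heading 10 to heading 350:
    #
    #     angle = 350 - 10 = 340
    #     (340 + 180) % 360 - 180  ->  -20
    #
    # i.e. a 20-degree turn to the right instead of a 340-degree turn to
    # the left.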
def circle(self, radius, extent = None, steps = None):
""" Draw a circle with given radius.
Arguments:
radius -- a number
extent (optional) -- a number
steps (optional) -- an integer
Draw a circle with given radius. The center is radius units left
of the turtle; extent - an angle - determines which part of the
circle is drawn. If extent is not given, draw the entire circle.
If extent is not a full circle, one endpoint of the arc is the
current pen position. Draw the arc in counterclockwise direction
if radius is positive, otherwise in clockwise direction. Finally
the direction of the turtle is changed by the amount of extent.
As the circle is approximated by an inscribed regular polygon,
steps determines the number of steps to use. If not given,
        it will be calculated automatically. May be used to draw regular
polygons.
call: circle(radius) # full circle
--or: circle(radius, extent) # arc
--or: circle(radius, extent, steps)
--or: circle(radius, steps=6) # 6-sided polygon
Example (for a Turtle instance named turtle):
>>> turtle.circle(50)
>>> turtle.circle(120, 180) # semicircle
"""
if self.undobuffer:
self.undobuffer.push(["seq"])
self.undobuffer.cumulate = True
speed = self.speed()
if extent is None:
extent = self._fullcircle
if steps is None:
frac = abs(extent)/self._fullcircle
steps = 1+int(min(11+abs(radius)/6.0, 59.0)*frac)
w = 1.0 * extent / steps
w2 = 0.5 * w
l = 2.0 * radius * math.sin(w2*math.pi/180.0*self._degreesPerAU)
if radius < 0:
l, w, w2 = -l, -w, -w2
tr = self._tracer()
dl = self._delay()
if speed == 0:
self._tracer(0, 0)
else:
self.speed(0)
self._rotate(w2)
for i in range(steps):
self.speed(speed)
self._go(l)
self.speed(0)
self._rotate(w)
self._rotate(-w2)
if speed == 0:
self._tracer(tr, dl)
self.speed(speed)
if self.undobuffer:
self.undobuffer.cumulate = False
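    # Geometry sketch (illustrative): the arc is approximated by an
    # inscribed polygon: each of the `steps` segments turns by w degrees
    # and advances along a chord of length l = 2*r*sin(w/2). For example,
    # circle(100, steps=6) gives
    #
    #     w = 360 / 6 = 60
    #     l = 2 * 100 * sin(30 degrees) = 100.0
    #
    # i.e. a regular hexagon whose side length equals the radius.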
## three dummy methods to be implemented by child class:
def speed(self, s=0):
"""dummy method - to be overwritten by child class"""
def _tracer(self, a=None, b=None):
"""dummy method - to be overwritten by child class"""
def _delay(self, n=None):
"""dummy method - to be overwritten by child class"""
fd = forward
bk = back
backward = back
rt = right
lt = left
position = pos
setpos = goto
setposition = goto
seth = setheading
class TPen(object):
"""Drawing part of the RawTurtle.
Implements drawing properties.
"""
def __init__(self, resizemode=_CFG["resizemode"]):
self._resizemode = resizemode # or "user" or "noresize"
self.undobuffer = None
TPen._reset(self)
def _reset(self, pencolor=_CFG["pencolor"],
fillcolor=_CFG["fillcolor"]):
self._pensize = 1
self._shown = True
self._pencolor = pencolor
self._fillcolor = fillcolor
self._drawing = True
self._speed = 3
self._stretchfactor = (1., 1.)
self._shearfactor = 0.
self._tilt = 0.
self._shapetrafo = (1., 0., 0., 1.)
self._outlinewidth = 1
def resizemode(self, rmode=None):
"""Set resizemode to one of the values: "auto", "user", "noresize".
(Optional) Argument:
rmode -- one of the strings "auto", "user", "noresize"
Different resizemodes have the following effects:
- "auto" adapts the appearance of the turtle
corresponding to the value of pensize.
- "user" adapts the appearance of the turtle according to the
values of stretchfactor and outlinewidth (outline),
which are set by shapesize()
- "noresize" no adaption of the turtle's appearance takes place.
If no argument is given, return current resizemode.
resizemode("user") is called by a call of shapesize with arguments.
Examples (for a Turtle instance named turtle):
>>> turtle.resizemode("noresize")
>>> turtle.resizemode()
'noresize'
"""
if rmode is None:
return self._resizemode
rmode = rmode.lower()
if rmode in ["auto", "user", "noresize"]:
self.pen(resizemode=rmode)
def pensize(self, width=None):
"""Set or return the line thickness.
Aliases: pensize | width
Argument:
width -- positive number
Set the line thickness to width or return it. If resizemode is set
to "auto" and turtleshape is a polygon, that polygon is drawn with
the same line thickness. If no argument is given, current pensize
is returned.
Example (for a Turtle instance named turtle):
>>> turtle.pensize()
1
>>> turtle.pensize(10) # from here on lines of width 10 are drawn
"""
if width is None:
return self._pensize
self.pen(pensize=width)
def penup(self):
"""Pull the pen up -- no drawing when moving.
Aliases: penup | pu | up
No argument
Example (for a Turtle instance named turtle):
>>> turtle.penup()
"""
if not self._drawing:
return
self.pen(pendown=False)
def pendown(self):
"""Pull the pen down -- drawing when moving.
Aliases: pendown | pd | down
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.pendown()
"""
if self._drawing:
return
self.pen(pendown=True)
def isdown(self):
"""Return True if pen is down, False if it's up.
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.penup()
>>> turtle.isdown()
False
>>> turtle.pendown()
>>> turtle.isdown()
True
"""
return self._drawing
def speed(self, speed=None):
""" Return or set the turtle's speed.
Optional argument:
speed -- an integer in the range 0..10 or a speedstring (see below)
Set the turtle's speed to an integer value in the range 0 .. 10.
If no argument is given: return current speed.
If input is a number greater than 10 or smaller than 0.5,
speed is set to 0.
Speedstrings are mapped to speedvalues in the following way:
'fastest' : 0
'fast' : 10
'normal' : 6
'slow' : 3
'slowest' : 1
speeds from 1 to 10 enforce increasingly faster animation of
line drawing and turtle turning.
Attention:
        speed = 0 : *no* animation takes place. forward/back make the turtle
        jump and likewise left/right make the turtle turn instantly.
Example (for a Turtle instance named turtle):
>>> turtle.speed(3)
"""
speeds = {'fastest':0, 'fast':10, 'normal':6, 'slow':3, 'slowest':1 }
if speed is None:
return self._speed
if speed in speeds:
speed = speeds[speed]
elif 0.5 < speed < 10.5:
speed = int(round(speed))
else:
speed = 0
self.pen(speed=speed)
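    # Mapping sketch (illustrative): numeric inputs are snapped into the
    # range 0..10, and anything outside (0.5, 10.5) means "no animation":
    #
    #     turtle.speed('normal')   # stored as 6
    #     turtle.speed(10.4)       # rounded to 10
    #     turtle.speed(11)         # out of range -> 0 (fastest)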
def color(self, *args):
"""Return or set the pencolor and fillcolor.
Arguments:
Several input formats are allowed.
They use 0, 1, 2, or 3 arguments as follows:
color()
Return the current pencolor and the current fillcolor
as a pair of color specification strings as are returned
by pencolor and fillcolor.
color(colorstring), color((r,g,b)), color(r,g,b)
            inputs as in pencolor, set both fillcolor and pencolor
to the given value.
color(colorstring1, colorstring2),
color((r1,g1,b1), (r2,g2,b2))
equivalent to pencolor(colorstring1) and fillcolor(colorstring2)
and analogously, if the other input format is used.
        If turtleshape is a polygon, outline and interior of that polygon
        are drawn with the newly set colors.
        For more info see: pencolor, fillcolor
Example (for a Turtle instance named turtle):
>>> turtle.color('red', 'green')
>>> turtle.color()
('red', 'green')
>>> colormode(255)
>>> color((40, 80, 120), (160, 200, 240))
>>> color()
('#285078', '#a0c8f0')
"""
if args:
l = len(args)
if l == 1:
pcolor = fcolor = args[0]
elif l == 2:
pcolor, fcolor = args
elif l == 3:
pcolor = fcolor = args
pcolor = self._colorstr(pcolor)
fcolor = self._colorstr(fcolor)
self.pen(pencolor=pcolor, fillcolor=fcolor)
else:
return self._color(self._pencolor), self._color(self._fillcolor)
def pencolor(self, *args):
""" Return or set the pencolor.
Arguments:
Four input formats are allowed:
- pencolor()
Return the current pencolor as color specification string,
possibly in hex-number format (see example).
May be used as input to another color/pencolor/fillcolor call.
- pencolor(colorstring)
          colorstring is a Tk color specification string, such as "red"
          or "yellow"
- pencolor((r, g, b))
          *a tuple* of r, g, and b, which represents an RGB color,
and each of r, g, and b are in the range 0..colormode,
where colormode is either 1.0 or 255
- pencolor(r, g, b)
r, g, and b represent an RGB color, and each of r, g, and b
are in the range 0..colormode
If turtleshape is a polygon, the outline of that polygon is drawn
with the newly set pencolor.
Example (for a Turtle instance named turtle):
>>> turtle.pencolor('brown')
>>> tup = (0.2, 0.8, 0.55)
>>> turtle.pencolor(tup)
>>> turtle.pencolor()
'#33cc8c'
"""
if args:
color = self._colorstr(args)
if color == self._pencolor:
return
self.pen(pencolor=color)
else:
return self._color(self._pencolor)
def fillcolor(self, *args):
""" Return or set the fillcolor.
Arguments:
Four input formats are allowed:
- fillcolor()
Return the current fillcolor as color specification string,
possibly in hex-number format (see example).
May be used as input to another color/pencolor/fillcolor call.
- fillcolor(colorstring)
          colorstring is a Tk color specification string, such as "red"
          or "yellow"
- fillcolor((r, g, b))
          *a tuple* of r, g, and b, which represents an RGB color,
and each of r, g, and b are in the range 0..colormode,
where colormode is either 1.0 or 255
- fillcolor(r, g, b)
r, g, and b represent an RGB color, and each of r, g, and b
are in the range 0..colormode
If turtleshape is a polygon, the interior of that polygon is drawn
with the newly set fillcolor.
Example (for a Turtle instance named turtle):
>>> turtle.fillcolor('violet')
>>> col = turtle.pencolor()
>>> turtle.fillcolor(col)
>>> turtle.fillcolor(0, .5, 0)
"""
if args:
color = self._colorstr(args)
if color == self._fillcolor:
return
self.pen(fillcolor=color)
else:
return self._color(self._fillcolor)
def showturtle(self):
"""Makes the turtle visible.
Aliases: showturtle | st
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.hideturtle()
>>> turtle.showturtle()
"""
self.pen(shown=True)
def hideturtle(self):
"""Makes the turtle invisible.
Aliases: hideturtle | ht
No argument.
It's a good idea to do this while you're in the
middle of a complicated drawing, because hiding
the turtle speeds up the drawing observably.
Example (for a Turtle instance named turtle):
>>> turtle.hideturtle()
"""
self.pen(shown=False)
def isvisible(self):
"""Return True if the Turtle is shown, False if it's hidden.
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.hideturtle()
        >>> print(turtle.isvisible())
False
"""
return self._shown
def pen(self, pen=None, **pendict):
"""Return or set the pen's attributes.
Arguments:
pen -- a dictionary with some or all of the below listed keys.
**pendict -- one or more keyword-arguments with the below
listed keys as keywords.
Return or set the pen's attributes in a 'pen-dictionary'
with the following key/value pairs:
"shown" : True/False
"pendown" : True/False
"pencolor" : color-string or color-tuple
"fillcolor" : color-string or color-tuple
"pensize" : positive number
"speed" : number in range 0..10
"resizemode" : "auto" or "user" or "noresize"
"stretchfactor": (positive number, positive number)
"shearfactor": number
"outline" : positive number
"tilt" : number
This dictionary can be used as argument for a subsequent
pen()-call to restore the former pen-state. Moreover one
or more of these attributes can be provided as keyword-arguments.
This can be used to set several pen attributes in one statement.
Examples (for a Turtle instance named turtle):
>>> turtle.pen(fillcolor="black", pencolor="red", pensize=10)
>>> turtle.pen()
{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
'pencolor': 'red', 'pendown': True, 'fillcolor': 'black',
'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
>>> penstate=turtle.pen()
>>> turtle.color("yellow","")
>>> turtle.penup()
>>> turtle.pen()
{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
'pencolor': 'yellow', 'pendown': False, 'fillcolor': '',
'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
>>> p.pen(penstate, fillcolor="green")
>>> p.pen()
{'pensize': 10, 'shown': True, 'resizemode': 'auto', 'outline': 1,
'pencolor': 'red', 'pendown': True, 'fillcolor': 'green',
'stretchfactor': (1,1), 'speed': 3, 'shearfactor': 0.0}
"""
_pd = {"shown" : self._shown,
"pendown" : self._drawing,
"pencolor" : self._pencolor,
"fillcolor" : self._fillcolor,
"pensize" : self._pensize,
"speed" : self._speed,
"resizemode" : self._resizemode,
"stretchfactor" : self._stretchfactor,
"shearfactor" : self._shearfactor,
"outline" : self._outlinewidth,
"tilt" : self._tilt
}
if not (pen or pendict):
return _pd
if isinstance(pen, dict):
p = pen
else:
p = {}
p.update(pendict)
_p_buf = {}
for key in p:
_p_buf[key] = _pd[key]
if self.undobuffer:
self.undobuffer.push(("pen", _p_buf))
newLine = False
if "pendown" in p:
if self._drawing != p["pendown"]:
newLine = True
if "pencolor" in p:
if isinstance(p["pencolor"], tuple):
p["pencolor"] = self._colorstr((p["pencolor"],))
if self._pencolor != p["pencolor"]:
newLine = True
if "pensize" in p:
if self._pensize != p["pensize"]:
newLine = True
if newLine:
self._newLine()
if "pendown" in p:
self._drawing = p["pendown"]
if "pencolor" in p:
self._pencolor = p["pencolor"]
if "pensize" in p:
self._pensize = p["pensize"]
if "fillcolor" in p:
if isinstance(p["fillcolor"], tuple):
p["fillcolor"] = self._colorstr((p["fillcolor"],))
self._fillcolor = p["fillcolor"]
if "speed" in p:
self._speed = p["speed"]
if "resizemode" in p:
self._resizemode = p["resizemode"]
if "stretchfactor" in p:
sf = p["stretchfactor"]
if isinstance(sf, (int, float)):
sf = (sf, sf)
self._stretchfactor = sf
if "shearfactor" in p:
self._shearfactor = p["shearfactor"]
if "outline" in p:
self._outlinewidth = p["outline"]
if "shown" in p:
self._shown = p["shown"]
if "tilt" in p:
self._tilt = p["tilt"]
if "stretchfactor" in p or "tilt" in p or "shearfactor" in p:
scx, scy = self._stretchfactor
shf = self._shearfactor
sa, ca = math.sin(self._tilt), math.cos(self._tilt)
self._shapetrafo = ( scx*ca, scy*(shf*ca + sa),
-scx*sa, scy*(ca - shf*sa))
self._update()
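    # Matrix sketch (illustrative): the _shapetrafo tuple computed above is
    # the row-wise 2x2 product  Rotate(-tilt) @ Shear(shf) @ Scale(scx, scy):
    #
    #     [ ca  sa ] [ 1 shf ] [ scx  0  ]   [  scx*ca  scy*(shf*ca + sa) ]
    #     [-sa  ca ] [ 0  1  ] [ 0   scy ] = [ -scx*sa  scy*(ca - shf*sa) ]
    #
    # i.e. the shape is scaled first, then sheared, then rotated by the
    # tilt angle (with this module's sign convention for tilt).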
## three dummy methods to be implemented by child class:
def _newLine(self, usePos = True):
"""dummy method - to be overwritten by child class"""
def _update(self, count=True, forced=False):
"""dummy method - to be overwritten by child class"""
def _color(self, args):
"""dummy method - to be overwritten by child class"""
def _colorstr(self, args):
"""dummy method - to be overwritten by child class"""
width = pensize
up = penup
pu = penup
pd = pendown
down = pendown
st = showturtle
ht = hideturtle
class _TurtleImage(object):
"""Helper class: Datatype to store Turtle attributes
"""
def __init__(self, screen, shapeIndex):
self.screen = screen
self._type = None
self._setshape(shapeIndex)
def _setshape(self, shapeIndex):
screen = self.screen
self.shapeIndex = shapeIndex
if self._type == "polygon" == screen._shapes[shapeIndex]._type:
return
if self._type == "image" == screen._shapes[shapeIndex]._type:
return
if self._type in ["image", "polygon"]:
screen._delete(self._item)
elif self._type == "compound":
for item in self._item:
screen._delete(item)
self._type = screen._shapes[shapeIndex]._type
if self._type == "polygon":
self._item = screen._createpoly()
elif self._type == "image":
self._item = screen._createimage(screen._shapes["blank"]._data)
elif self._type == "compound":
self._item = [screen._createpoly() for item in
screen._shapes[shapeIndex]._data]
class RawTurtle(TPen, TNavigator):
"""Animation part of the RawTurtle.
Puts RawTurtle upon a TurtleScreen and provides tools for
its animation.
"""
screens = []
def __init__(self, canvas=None,
shape=_CFG["shape"],
undobuffersize=_CFG["undobuffersize"],
visible=_CFG["visible"]):
if isinstance(canvas, _Screen):
self.screen = canvas
elif isinstance(canvas, TurtleScreen):
if canvas not in RawTurtle.screens:
RawTurtle.screens.append(canvas)
self.screen = canvas
elif isinstance(canvas, (ScrolledCanvas, Canvas)):
for screen in RawTurtle.screens:
if screen.cv == canvas:
self.screen = screen
break
else:
self.screen = TurtleScreen(canvas)
RawTurtle.screens.append(self.screen)
else:
raise TurtleGraphicsError("bad cavas argument %s" % canvas)
screen = self.screen
TNavigator.__init__(self, screen.mode())
TPen.__init__(self)
screen._turtles.append(self)
self.drawingLineItem = screen._createline()
self.turtle = _TurtleImage(screen, shape)
self._poly = None
self._creatingPoly = False
self._fillitem = self._fillpath = None
self._shown = visible
self._hidden_from_screen = False
self.currentLineItem = screen._createline()
self.currentLine = [self._position]
self.items = [self.currentLineItem]
self.stampItems = []
self._undobuffersize = undobuffersize
self.undobuffer = Tbuffer(undobuffersize)
self._update()
def reset(self):
"""Delete the turtle's drawings and restore its default values.
No argument.
Delete the turtle's drawings from the screen, re-center the turtle
and set variables to the default values.
Example (for a Turtle instance named turtle):
>>> turtle.position()
(0.00,-22.00)
>>> turtle.heading()
100.0
>>> turtle.reset()
>>> turtle.position()
(0.00,0.00)
>>> turtle.heading()
0.0
"""
TNavigator.reset(self)
TPen._reset(self)
self._clear()
self._drawturtle()
self._update()
def setundobuffer(self, size):
"""Set or disable undobuffer.
Argument:
size -- an integer or None
If size is an integer an empty undobuffer of given size is installed.
Size gives the maximum number of turtle-actions that can be undone
by the undo() function.
If size is None, no undobuffer is present.
Example (for a Turtle instance named turtle):
>>> turtle.setundobuffer(42)
"""
if size is None:
self.undobuffer = None
else:
self.undobuffer = Tbuffer(size)
def undobufferentries(self):
"""Return count of entries in the undobuffer.
No argument.
Example (for a Turtle instance named turtle):
>>> while undobufferentries():
... undo()
"""
if self.undobuffer is None:
return 0
return self.undobuffer.nr_of_items()
def _clear(self):
"""Delete all of pen's drawings"""
self._fillitem = self._fillpath = None
for item in self.items:
self.screen._delete(item)
self.currentLineItem = self.screen._createline()
self.currentLine = []
if self._drawing:
self.currentLine.append(self._position)
self.items = [self.currentLineItem]
self.clearstamps()
self.setundobuffer(self._undobuffersize)
def clear(self):
"""Delete the turtle's drawings from the screen. Do not move turtle.
No arguments.
Delete the turtle's drawings from the screen. Do not move turtle.
State and position of the turtle as well as drawings of other
turtles are not affected.
Examples (for a Turtle instance named turtle):
>>> turtle.clear()
"""
self._clear()
self._update()
def _update_data(self):
self.screen._incrementudc()
if self.screen._updatecounter != 0:
return
if len(self.currentLine)>1:
self.screen._drawline(self.currentLineItem, self.currentLine,
self._pencolor, self._pensize)
def _update(self):
"""Perform a Turtle-data update.
"""
screen = self.screen
if screen._tracing == 0:
return
elif screen._tracing == 1:
self._update_data()
self._drawturtle()
screen._update() # TurtleScreenBase
screen._delay(screen._delayvalue) # TurtleScreenBase
else:
self._update_data()
if screen._updatecounter == 0:
for t in screen.turtles():
t._drawturtle()
screen._update()
def _tracer(self, flag=None, delay=None):
"""Turns turtle animation on/off and set delay for update drawings.
Optional arguments:
n -- nonnegative integer
delay -- nonnegative integer
If n is given, only each n-th regular screen update is really performed.
(Can be used to accelerate the drawing of complex graphics.)
        Second argument sets delay value (see RawTurtle.delay())
Example (for a Turtle instance named turtle):
>>> turtle.tracer(8, 25)
>>> dist = 2
>>> for i in range(200):
... turtle.fd(dist)
... turtle.rt(90)
... dist += 2
"""
return self.screen.tracer(flag, delay)
def _color(self, args):
return self.screen._color(args)
def _colorstr(self, args):
return self.screen._colorstr(args)
def _cc(self, args):
"""Convert colortriples to hexstrings.
"""
if isinstance(args, str):
return args
try:
r, g, b = args
except:
raise TurtleGraphicsError("bad color arguments: %s" % str(args))
if self.screen._colormode == 1.0:
r, g, b = [round(255.0*x) for x in (r, g, b)]
if not ((0 <= r <= 255) and (0 <= g <= 255) and (0 <= b <= 255)):
raise TurtleGraphicsError("bad color sequence: %s" % str(args))
return "#%02x%02x%02x" % (r, g, b)
def clone(self):
"""Create and return a clone of the turtle.
No argument.
Create and return a clone of the turtle with same position, heading
and turtle properties.
Example (for a Turtle instance named mick):
mick = Turtle()
joe = mick.clone()
"""
screen = self.screen
self._newLine(self._drawing)
turtle = self.turtle
self.screen = None
        self.turtle = None  # to make self deepcopy-able
q = deepcopy(self)
self.screen = screen
self.turtle = turtle
q.screen = screen
q.turtle = _TurtleImage(screen, self.turtle.shapeIndex)
screen._turtles.append(q)
ttype = screen._shapes[self.turtle.shapeIndex]._type
if ttype == "polygon":
q.turtle._item = screen._createpoly()
elif ttype == "image":
q.turtle._item = screen._createimage(screen._shapes["blank"]._data)
elif ttype == "compound":
q.turtle._item = [screen._createpoly() for item in
screen._shapes[self.turtle.shapeIndex]._data]
q.currentLineItem = screen._createline()
q._update()
return q
def shape(self, name=None):
"""Set turtle shape to shape with given name / return current shapename.
Optional argument:
name -- a string, which is a valid shapename
Set turtle shape to shape with given name or, if name is not given,
return name of current shape.
Shape with name must exist in the TurtleScreen's shape dictionary.
Initially there are the following polygon shapes:
'arrow', 'turtle', 'circle', 'square', 'triangle', 'classic'.
To learn about how to deal with shapes see Screen-method register_shape.
Example (for a Turtle instance named turtle):
>>> turtle.shape()
'arrow'
>>> turtle.shape("turtle")
>>> turtle.shape()
'turtle'
"""
if name is None:
return self.turtle.shapeIndex
        if name not in self.screen.getshapes():
raise TurtleGraphicsError("There is no shape named %s" % name)
self.turtle._setshape(name)
self._update()
def shapesize(self, stretch_wid=None, stretch_len=None, outline=None):
"""Set/return turtle's stretchfactors/outline. Set resizemode to "user".
        Optional arguments:
stretch_wid : positive number
stretch_len : positive number
outline : positive number
Return or set the pen's attributes x/y-stretchfactors and/or outline.
Set resizemode to "user".
If and only if resizemode is set to "user", the turtle will be displayed
stretched according to its stretchfactors:
        stretch_wid is stretchfactor perpendicular to its orientation,
        stretch_len is stretchfactor in direction of the turtle's orientation,
        outline determines the width of the shape's outline.
Examples (for a Turtle instance named turtle):
>>> turtle.resizemode("user")
>>> turtle.shapesize(5, 5, 12)
>>> turtle.shapesize(outline=8)
"""
if stretch_wid is stretch_len is outline is None:
stretch_wid, stretch_len = self._stretchfactor
return stretch_wid, stretch_len, self._outlinewidth
if stretch_wid == 0 or stretch_len == 0:
raise TurtleGraphicsError("stretch_wid/stretch_len must not be zero")
if stretch_wid is not None:
if stretch_len is None:
stretchfactor = stretch_wid, stretch_wid
else:
stretchfactor = stretch_wid, stretch_len
elif stretch_len is not None:
stretchfactor = self._stretchfactor[0], stretch_len
else:
stretchfactor = self._stretchfactor
if outline is None:
outline = self._outlinewidth
self.pen(resizemode="user",
stretchfactor=stretchfactor, outline=outline)
def shearfactor(self, shear=None):
"""Set or return the current shearfactor.
Optional argument: shear -- number, tangent of the shear angle
Shear the turtleshape according to the given shearfactor shear,
which is the tangent of the shear angle. DO NOT change the
turtle's heading (direction of movement).
If shear is not given: return the current shearfactor, i. e. the
tangent of the shear angle, by which lines parallel to the
heading of the turtle are sheared.
Examples (for a Turtle instance named turtle):
>>> turtle.shape("circle")
>>> turtle.shapesize(5,2)
>>> turtle.shearfactor(0.5)
>>> turtle.shearfactor()
>>> 0.5
"""
if shear is None:
return self._shearfactor
self.pen(resizemode="user", shearfactor=shear)
def settiltangle(self, angle):
"""Rotate the turtleshape to point in the specified direction
Argument: angle -- number
Rotate the turtleshape to point in the direction specified by angle,
regardless of its current tilt-angle. DO NOT change the turtle's
heading (direction of movement).
Examples (for a Turtle instance named turtle):
>>> turtle.shape("circle")
>>> turtle.shapesize(5,2)
>>> turtle.settiltangle(45)
>>> stamp()
>>> turtle.fd(50)
>>> turtle.settiltangle(-45)
>>> stamp()
>>> turtle.fd(50)
"""
tilt = -angle * self._degreesPerAU * self._angleOrient
tilt = (tilt * math.pi / 180.0) % (2*math.pi)
self.pen(resizemode="user", tilt=tilt)
def tiltangle(self, angle=None):
"""Set or return the current tilt-angle.
Optional argument: angle -- number
Rotate the turtleshape to point in the direction specified by angle,
regardless of its current tilt-angle. DO NOT change the turtle's
heading (direction of movement).
If angle is not given: return the current tilt-angle, i. e. the angle
between the orientation of the turtleshape and the heading of the
turtle (its direction of movement).
Deprecated since Python 3.1
Examples (for a Turtle instance named turtle):
>>> turtle.shape("circle")
>>> turtle.shapesize(5,2)
>>> turtle.tilt(45)
>>> turtle.tiltangle()
"""
if angle is None:
tilt = -self._tilt * (180.0/math.pi) * self._angleOrient
return (tilt / self._degreesPerAU) % self._fullcircle
else:
self.settiltangle(angle)
def tilt(self, angle):
"""Rotate the turtleshape by angle.
Argument:
angle - a number
Rotate the turtleshape by angle from its current tilt-angle,
but do NOT change the turtle's heading (direction of movement).
Examples (for a Turtle instance named turtle):
>>> turtle.shape("circle")
>>> turtle.shapesize(5,2)
>>> turtle.tilt(30)
>>> turtle.fd(50)
>>> turtle.tilt(30)
>>> turtle.fd(50)
"""
self.settiltangle(angle + self.tiltangle())
def shapetransform(self, t11=None, t12=None, t21=None, t22=None):
"""Set or return the current transformation matrix of the turtle shape.
Optional arguments: t11, t12, t21, t22 -- numbers.
If none of the matrix elements are given, return the transformation
matrix.
Otherwise set the given elements and transform the turtleshape
according to the matrix consisting of first row t11, t12 and
        second row t21, t22.
Modify stretchfactor, shearfactor and tiltangle according to the
given matrix.
Examples (for a Turtle instance named turtle):
>>> turtle.shape("square")
>>> turtle.shapesize(4,2)
>>> turtle.shearfactor(-0.5)
>>> turtle.shapetransform()
(4.0, -1.0, -0.0, 2.0)
"""
if t11 is t12 is t21 is t22 is None:
return self._shapetrafo
m11, m12, m21, m22 = self._shapetrafo
if t11 is not None: m11 = t11
if t12 is not None: m12 = t12
if t21 is not None: m21 = t21
if t22 is not None: m22 = t22
        if m11 * m22 - m12 * m21 == 0:
raise TurtleGraphicsError("Bad shape transform matrix: must not be singular")
self._shapetrafo = (m11, m12, m21, m22)
alfa = math.atan2(-m21, m11) % (2 * math.pi)
sa, ca = math.sin(alfa), math.cos(alfa)
a11, a12, a21, a22 = (ca*m11 - sa*m21, ca*m12 - sa*m22,
sa*m11 + ca*m21, sa*m12 + ca*m22)
self._stretchfactor = a11, a22
self._shearfactor = a12/a22
self._tilt = alfa
self._update()
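    # Decomposition sketch (illustrative): the code above recovers tilt,
    # stretch and shear back from the full matrix M = (m11, m12, m21, m22)
    # by rotating the tilt out again:
    #
    #     alfa = atan2(-m21, m11)              # tilt angle
    #     A    = Rotate(alfa) @ M              # de-tilted matrix
    #     stretchfactor = (A[0][0], A[1][1])
    #     shearfactor   = A[0][1] / A[1][1]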
def _polytrafo(self, poly):
"""Computes transformed polygon shapes from a shape
according to current position and heading.
"""
screen = self.screen
p0, p1 = self._position
e0, e1 = self._orient
e = Vec2D(e0, e1 * screen.yscale / screen.xscale)
e0, e1 = (1.0 / abs(e)) * e
return [(p0+(e1*x+e0*y)/screen.xscale, p1+(-e0*x+e1*y)/screen.yscale)
for (x, y) in poly]
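    # Transform sketch (illustrative, ignoring the x/y screen scaling for
    # clarity): each shape point (x, y) is rotated into the turtle's frame
    # and translated to its position (p0, p1); with unit orientation
    # vector (e0, e1):
    #
    #     screen_x = p0 + e1*x + e0*y
    #     screen_y = p1 - e0*x + e1*y
    #
    # so the shape's y-axis points along the heading and its x-axis to the
    # turtle's right.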
def get_shapepoly(self):
"""Return the current shape polygon as tuple of coordinate pairs.
No argument.
Examples (for a Turtle instance named turtle):
>>> turtle.shape("square")
>>> turtle.shapetransform(4, -1, 0, 2)
>>> turtle.get_shapepoly()
((50, -20), (30, 20), (-50, 20), (-30, -20))
"""
shape = self.screen._shapes[self.turtle.shapeIndex]
if shape._type == "polygon":
return self._getshapepoly(shape._data, shape._type == "compound")
# else return None
def _getshapepoly(self, polygon, compound=False):
"""Calculate transformed shape polygon according to resizemode
and shapetransform.
"""
if self._resizemode == "user" or compound:
t11, t12, t21, t22 = self._shapetrafo
elif self._resizemode == "auto":
l = max(1, self._pensize/5.0)
t11, t12, t21, t22 = l, 0, 0, l
elif self._resizemode == "noresize":
return polygon
return tuple([(t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon])
def _drawturtle(self):
"""Manages the correct rendering of the turtle with respect to
its shape, resizemode, stretch and tilt etc."""
screen = self.screen
shape = screen._shapes[self.turtle.shapeIndex]
ttype = shape._type
titem = self.turtle._item
if self._shown and screen._updatecounter == 0 and screen._tracing > 0:
self._hidden_from_screen = False
tshape = shape._data
if ttype == "polygon":
if self._resizemode == "noresize": w = 1
elif self._resizemode == "auto": w = self._pensize
            else: w = self._outlinewidth
shape = self._polytrafo(self._getshapepoly(tshape))
fc, oc = self._fillcolor, self._pencolor
screen._drawpoly(titem, shape, fill=fc, outline=oc,
width=w, top=True)
elif ttype == "image":
screen._drawimage(titem, self._position, tshape)
elif ttype == "compound":
for item, (poly, fc, oc) in zip(titem, tshape):
poly = self._polytrafo(self._getshapepoly(poly, True))
screen._drawpoly(item, poly, fill=self._cc(fc),
outline=self._cc(oc), width=self._outlinewidth, top=True)
else:
if self._hidden_from_screen:
return
if ttype == "polygon":
screen._drawpoly(titem, ((0, 0), (0, 0), (0, 0)), "", "")
elif ttype == "image":
screen._drawimage(titem, self._position,
screen._shapes["blank"]._data)
elif ttype == "compound":
for item in titem:
screen._drawpoly(item, ((0, 0), (0, 0), (0, 0)), "", "")
self._hidden_from_screen = True
############################## stamp stuff ###############################
def stamp(self):
"""Stamp a copy of the turtleshape onto the canvas and return its id.
No argument.
Stamp a copy of the turtle shape onto the canvas at the current
turtle position. Return a stamp_id for that stamp, which can be
used to delete it by calling clearstamp(stamp_id).
Example (for a Turtle instance named turtle):
>>> turtle.color("blue")
>>> turtle.stamp()
13
>>> turtle.fd(50)
"""
screen = self.screen
shape = screen._shapes[self.turtle.shapeIndex]
ttype = shape._type
tshape = shape._data
if ttype == "polygon":
stitem = screen._createpoly()
if self._resizemode == "noresize": w = 1
elif self._resizemode == "auto": w = self._pensize
            else: w = self._outlinewidth
shape = self._polytrafo(self._getshapepoly(tshape))
fc, oc = self._fillcolor, self._pencolor
screen._drawpoly(stitem, shape, fill=fc, outline=oc,
width=w, top=True)
elif ttype == "image":
stitem = screen._createimage("")
screen._drawimage(stitem, self._position, tshape)
elif ttype == "compound":
stitem = []
for element in tshape:
item = screen._createpoly()
stitem.append(item)
stitem = tuple(stitem)
for item, (poly, fc, oc) in zip(stitem, tshape):
poly = self._polytrafo(self._getshapepoly(poly, True))
screen._drawpoly(item, poly, fill=self._cc(fc),
outline=self._cc(oc), width=self._outlinewidth, top=True)
self.stampItems.append(stitem)
self.undobuffer.push(("stamp", stitem))
return stitem
def _clearstamp(self, stampid):
"""does the work for clearstamp() and clearstamps()
"""
if stampid in self.stampItems:
if isinstance(stampid, tuple):
for subitem in stampid:
self.screen._delete(subitem)
else:
self.screen._delete(stampid)
self.stampItems.remove(stampid)
# Delete stampitem from undobuffer if necessary
# if clearstamp is called directly.
item = ("stamp", stampid)
buf = self.undobuffer
if item not in buf.buffer:
return
index = buf.buffer.index(item)
buf.buffer.remove(item)
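            # undobuffer is a ring buffer: rewind the pointer one slot if
            # the removed entry sat at or before it, then pad with a dummy
            # entry so the remaining undo history stays aligned.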
if index <= buf.ptr:
buf.ptr = (buf.ptr - 1) % buf.bufsize
buf.buffer.insert((buf.ptr+1)%buf.bufsize, [None])
def clearstamp(self, stampid):
"""Delete stamp with given stampid
Argument:
stampid - an integer, must be return value of previous stamp() call.
Example (for a Turtle instance named turtle):
>>> turtle.color("blue")
>>> astamp = turtle.stamp()
>>> turtle.fd(50)
>>> turtle.clearstamp(astamp)
"""
self._clearstamp(stampid)
self._update()
def clearstamps(self, n=None):
"""Delete all or first/last n of turtle's stamps.
Optional argument:
n -- an integer
If n is None, delete all of pen's stamps,
else if n > 0 delete first n stamps
else if n < 0 delete last n stamps.
Example (for a Turtle instance named turtle):
>>> for i in range(8):
... turtle.stamp(); turtle.fd(30)
...
>>> turtle.clearstamps(2)
>>> turtle.clearstamps(-2)
>>> turtle.clearstamps()
"""
if n is None:
toDelete = self.stampItems[:]
elif n >= 0:
toDelete = self.stampItems[:n]
else:
toDelete = self.stampItems[n:]
for item in toDelete:
self._clearstamp(item)
self._update()
def _goto(self, end):
"""Move the pen to the point end, thereby drawing a line
        if pen is down. All other methods for turtle movement depend
on this one.
"""
## Version with undo-stuff
go_modes = ( self._drawing,
self._pencolor,
self._pensize,
isinstance(self._fillpath, list))
screen = self.screen
undo_entry = ("go", self._position, end, go_modes,
(self.currentLineItem,
self.currentLine[:],
screen._pointlist(self.currentLineItem),
self.items[:])
)
if self.undobuffer:
self.undobuffer.push(undo_entry)
start = self._position
if self._speed and screen._tracing == 1:
diff = (end-start)
diffsq = (diff[0]*screen.xscale)**2 + (diff[1]*screen.yscale)**2
nhops = 1+int((diffsq**0.5)/(3*(1.1**self._speed)*self._speed))
delta = diff * (1.0/nhops)
for n in range(1, nhops):
if n == 1:
top = True
else:
top = False
self._position = start + delta * n
if self._drawing:
screen._drawline(self.drawingLineItem,
(start, self._position),
self._pencolor, self._pensize, top)
self._update()
if self._drawing:
screen._drawline(self.drawingLineItem, ((0, 0), (0, 0)),
fill="", width=self._pensize)
# Turtle now at end,
if self._drawing: # now update currentLine
self.currentLine.append(end)
if isinstance(self._fillpath, list):
self._fillpath.append(end)
            ###### inheritance!!!!!!!!!!!!!!!!!!!!!!
self._position = end
if self._creatingPoly:
self._poly.append(end)
if len(self.currentLine) > 42: # 42! answer to the ultimate question
# of life, the universe and everything
self._newLine()
self._update() #count=True)
def _undogoto(self, entry):
"""Reverse a _goto. Used for undo()
"""
old, new, go_modes, coodata = entry
drawing, pc, ps, filling = go_modes
cLI, cL, pl, items = coodata
screen = self.screen
if abs(self._position - new) > 0.5:
print ("undogoto: HALLO-DA-STIMMT-WAS-NICHT!")
# restore former situation
self.currentLineItem = cLI
self.currentLine = cL
if pl == [(0, 0), (0, 0)]:
usepc = ""
else:
usepc = pc
screen._drawline(cLI, pl, fill=usepc, width=ps)
todelete = [i for i in self.items if (i not in items) and
(screen._type(i) == "line")]
for i in todelete:
screen._delete(i)
self.items.remove(i)
start = old
if self._speed and screen._tracing == 1:
diff = old - new
diffsq = (diff[0]*screen.xscale)**2 + (diff[1]*screen.yscale)**2
nhops = 1+int((diffsq**0.5)/(3*(1.1**self._speed)*self._speed))
delta = diff * (1.0/nhops)
for n in range(1, nhops):
if n == 1:
top = True
else:
top = False
self._position = new + delta * n
if drawing:
screen._drawline(self.drawingLineItem,
(start, self._position),
pc, ps, top)
self._update()
if drawing:
screen._drawline(self.drawingLineItem, ((0, 0), (0, 0)),
fill="", width=ps)
# Turtle now at position old,
self._position = old
## if undo is done during creating a polygon, the last vertex
## will be deleted. if the polygon is entirely deleted,
## creatingPoly will be set to False.
## Polygons created before the last one will not be affected by undo()
if self._creatingPoly:
if len(self._poly) > 0:
self._poly.pop()
if self._poly == []:
self._creatingPoly = False
self._poly = None
if filling:
if self._fillpath == []:
self._fillpath = None
print("Unwahrscheinlich in _undogoto!")
elif self._fillpath is not None:
self._fillpath.pop()
self._update() #count=True)
def _rotate(self, angle):
"""Turns pen clockwise by angle.
"""
if self.undobuffer:
self.undobuffer.push(("rot", angle, self._degreesPerAU))
angle *= self._degreesPerAU
neworient = self._orient.rotate(angle)
tracing = self.screen._tracing
if tracing == 1 and self._speed > 0:
anglevel = 3.0 * self._speed
steps = 1 + int(abs(angle)/anglevel)
delta = 1.0*angle/steps
for _ in range(steps):
self._orient = self._orient.rotate(delta)
self._update()
self._orient = neworient
self._update()
def _newLine(self, usePos=True):
"""Closes current line item and starts a new one.
Remark: if current line became too long, animation
performance (via _drawline) slowed down considerably.
"""
if len(self.currentLine) > 1:
self.screen._drawline(self.currentLineItem, self.currentLine,
self._pencolor, self._pensize)
self.currentLineItem = self.screen._createline()
self.items.append(self.currentLineItem)
else:
self.screen._drawline(self.currentLineItem, top=True)
self.currentLine = []
if usePos:
self.currentLine = [self._position]
def filling(self):
"""Return fillstate (True if filling, False else).
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.begin_fill()
>>> if turtle.filling():
... turtle.pensize(5)
... else:
... turtle.pensize(3)
"""
return isinstance(self._fillpath, list)
def begin_fill(self):
"""Called just before drawing a shape to be filled.
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.color("black", "red")
>>> turtle.begin_fill()
>>> turtle.circle(60)
>>> turtle.end_fill()
"""
if not self.filling():
self._fillitem = self.screen._createpoly()
self.items.append(self._fillitem)
self._fillpath = [self._position]
self._newLine()
if self.undobuffer:
self.undobuffer.push(("beginfill", self._fillitem))
self._update()
def end_fill(self):
"""Fill the shape drawn after the call begin_fill().
No argument.
Example (for a Turtle instance named turtle):
>>> turtle.color("black", "red")
>>> turtle.begin_fill()
>>> turtle.circle(60)
>>> turtle.end_fill()
"""
if self.filling():
if len(self._fillpath) > 2:
self.screen._drawpoly(self._fillitem, self._fillpath,
fill=self._fillcolor)
if self.undobuffer:
self.undobuffer.push(("dofill", self._fillitem))
self._fillitem = self._fillpath = None
self._update()
def dot(self, size=None, *color):
"""Draw a dot with diameter size, using color.
Optional arguments:
size -- an integer >= 1 (if given)
color -- a colorstring or a numeric color tuple
Draw a circular dot with diameter size, using color.
If size is not given, the maximum of pensize+4 and 2*pensize is used.
Example (for a Turtle instance named turtle):
>>> turtle.dot()
>>> turtle.fd(50); turtle.dot(20, "blue"); turtle.fd(50)
"""
if not color:
if isinstance(size, (str, tuple)):
color = self._colorstr(size)
size = self._pensize + max(self._pensize, 4)
else:
color = self._pencolor
if not size:
size = self._pensize + max(self._pensize, 4)
else:
if size is None:
size = self._pensize + max(self._pensize, 4)
color = self._colorstr(color)
if hasattr(self.screen, "_dot"):
item = self.screen._dot(self._position, size, color)
self.items.append(item)
if self.undobuffer:
self.undobuffer.push(("dot", item))
else:
pen = self.pen()
if self.undobuffer:
self.undobuffer.push(["seq"])
self.undobuffer.cumulate = True
try:
if self.resizemode() == 'auto':
self.ht()
self.pendown()
self.pensize(size)
self.pencolor(color)
self.forward(0)
finally:
self.pen(pen)
if self.undobuffer:
self.undobuffer.cumulate = False
def _write(self, txt, align, font):
"""Performs the writing for write()
"""
item, end = self.screen._write(self._position, txt, align, font,
self._pencolor)
self.items.append(item)
if self.undobuffer:
self.undobuffer.push(("wri", item))
return end
def write(self, arg, move=False, align="left", font=("Arial", 8, "normal")):
"""Write text at the current turtle position.
Arguments:
arg -- info, which is to be written to the TurtleScreen
move (optional) -- True/False
        align (optional) -- one of the strings "left", "center" or "right"
font (optional) -- a triple (fontname, fontsize, fonttype)
Write text - the string representation of arg - at the current
        turtle position according to align ("left", "center" or "right")
and with the given font.
If move is True, the pen is moved to the bottom-right corner
of the text. By default, move is False.
Example (for a Turtle instance named turtle):
>>> turtle.write('Home = ', True, align="center")
>>> turtle.write((0,0), True)
"""
if self.undobuffer:
self.undobuffer.push(["seq"])
self.undobuffer.cumulate = True
end = self._write(str(arg), align.lower(), font)
if move:
x, y = self.pos()
self.setpos(end, y)
if self.undobuffer:
self.undobuffer.cumulate = False
def begin_poly(self):
"""Start recording the vertices of a polygon.
No argument.
Start recording the vertices of a polygon. Current turtle position
is first point of polygon.
Example (for a Turtle instance named turtle):
>>> turtle.begin_poly()
"""
self._poly = [self._position]
self._creatingPoly = True
def end_poly(self):
"""Stop recording the vertices of a polygon.
No argument.
Stop recording the vertices of a polygon. Current turtle position is
last point of polygon. This will be connected with the first point.
Example (for a Turtle instance named turtle):
>>> turtle.end_poly()
"""
self._creatingPoly = False
def get_poly(self):
"""Return the lastly recorded polygon.
No argument.
Example (for a Turtle instance named turtle):
>>> p = turtle.get_poly()
>>> turtle.register_shape("myFavouriteShape", p)
"""
## check if there is any poly?
if self._poly is not None:
return tuple(self._poly)
def getscreen(self):
"""Return the TurtleScreen object, the turtle is drawing on.
No argument.
Return the TurtleScreen object, the turtle is drawing on.
So TurtleScreen-methods can be called for that object.
Example (for a Turtle instance named turtle):
>>> ts = turtle.getscreen()
>>> ts
<turtle.TurtleScreen object at 0x0106B770>
>>> ts.bgcolor("pink")
"""
return self.screen
def getturtle(self):
"""Return the Turtleobject itself.
No argument.
Only reasonable use: as a function to return the 'anonymous turtle':
Example:
>>> pet = getturtle()
>>> pet.fd(50)
>>> pet
<turtle.Turtle object at 0x0187D810>
>>> turtles()
[<turtle.Turtle object at 0x0187D810>]
"""
return self
getpen = getturtle
################################################################
### screen oriented methods recurring to methods of TurtleScreen
################################################################
def _delay(self, delay=None):
"""Set delay value which determines speed of turtle animation.
"""
return self.screen.delay(delay)
def onclick(self, fun, btn=1, add=None):
"""Bind fun to mouse-click event on this turtle on canvas.
Arguments:
fun -- a function with two arguments, to which will be assigned
the coordinates of the clicked point on the canvas.
num -- number of the mouse-button defaults to 1 (left mouse button).
add -- True or False. If True, new binding will be added, otherwise
it will replace a former binding.
Example for the anonymous turtle, i. e. the procedural way:
>>> def turn(x, y):
... left(360)
...
>>> onclick(turn) # Now clicking into the turtle will turn it.
>>> onclick(None) # event-binding will be removed
"""
self.screen._onclick(self.turtle._item, fun, btn, add)
self._update()
def onrelease(self, fun, btn=1, add=None):
"""Bind fun to mouse-button-release event on this turtle on canvas.
Arguments:
fun -- a function with two arguments, to which will be assigned
the coordinates of the clicked point on the canvas.
num -- number of the mouse-button defaults to 1 (left mouse button).
Example (for a MyTurtle instance named joe):
>>> class MyTurtle(Turtle):
... def glow(self,x,y):
... self.fillcolor("red")
... def unglow(self,x,y):
... self.fillcolor("")
...
>>> joe = MyTurtle()
>>> joe.onclick(joe.glow)
>>> joe.onrelease(joe.unglow)
Clicking on joe turns fillcolor red, unclicking turns it to
transparent.
"""
self.screen._onrelease(self.turtle._item, fun, btn, add)
self._update()
def ondrag(self, fun, btn=1, add=None):
"""Bind fun to mouse-move event on this turtle on canvas.
Arguments:
fun -- a function with two arguments, to which will be assigned
the coordinates of the clicked point on the canvas.
num -- number of the mouse-button defaults to 1 (left mouse button).
Every sequence of mouse-move-events on a turtle is preceded by a
mouse-click event on that turtle.
Example (for a Turtle instance named turtle):
>>> turtle.ondrag(turtle.goto)
Subsequently clicking and dragging a Turtle will move it
across the screen thereby producing handdrawings (if pen is
down).
"""
self.screen._ondrag(self.turtle._item, fun, btn, add)
def _undo(self, action, data):
"""Does the main part of the work for undo()
"""
if self.undobuffer is None:
return
if action == "rot":
angle, degPAU = data
self._rotate(-angle*degPAU/self._degreesPerAU)
dummy = self.undobuffer.pop()
elif action == "stamp":
stitem = data[0]
self.clearstamp(stitem)
elif action == "go":
self._undogoto(data)
elif action in ["wri", "dot"]:
item = data[0]
self.screen._delete(item)
self.items.remove(item)
elif action == "dofill":
item = data[0]
self.screen._drawpoly(item, ((0, 0),(0, 0),(0, 0)),
fill="", outline="")
elif action == "beginfill":
item = data[0]
self._fillitem = self._fillpath = None
if item in self.items:
self.screen._delete(item)
self.items.remove(item)
elif action == "pen":
TPen.pen(self, data[0])
self.undobuffer.pop()
def undo(self):
"""undo (repeatedly) the last turtle action.
No argument.
undo (repeatedly) the last turtle action.
Number of available undo actions is determined by the size of
the undobuffer.
Example (for a Turtle instance named turtle):
>>> for i in range(4):
... turtle.fd(50); turtle.lt(80)
...
>>> for i in range(8):
... turtle.undo()
...
"""
if self.undobuffer is None:
return
item = self.undobuffer.pop()
action = item[0]
data = item[1:]
if action == "seq":
while data:
item = data.pop()
self._undo(item[0], item[1:])
else:
self._undo(action, data)
turtlesize = shapesize
RawPen = RawTurtle
### Screen - Singleton ########################
def Screen():
"""Return the singleton screen object.
If none exists at the moment, create a new one and return it,
else return the existing one."""
if Turtle._screen is None:
Turtle._screen = _Screen()
return Turtle._screen
class _Screen(TurtleScreen):
_root = None
_canvas = None
_title = _CFG["title"]
def __init__(self):
# XXX there is no need for this code to be conditional,
# as there will be only a single _Screen instance, anyway
# XXX actually, the turtle demo is injecting root window,
# so perhaps the conditional creation of a root should be
# preserved (perhaps by passing it as an optional parameter)
if _Screen._root is None:
_Screen._root = self._root = _Root()
self._root.title(_Screen._title)
self._root.ondestroy(self._destroy)
if _Screen._canvas is None:
width = _CFG["width"]
height = _CFG["height"]
canvwidth = _CFG["canvwidth"]
canvheight = _CFG["canvheight"]
leftright = _CFG["leftright"]
topbottom = _CFG["topbottom"]
self._root.setupcanvas(width, height, canvwidth, canvheight)
_Screen._canvas = self._root._getcanvas()
TurtleScreen.__init__(self, _Screen._canvas)
self.setup(width, height, leftright, topbottom)
def setup(self, width=_CFG["width"], height=_CFG["height"],
startx=_CFG["leftright"], starty=_CFG["topbottom"]):
""" Set the size and position of the main window.
Arguments:
width: as integer a size in pixels, as float a fraction of the screen.
Default is 50% of screen.
height: as integer the height in pixels, as float a fraction of the
screen. Default is 75% of screen.
startx: if positive, starting position in pixels from the left
edge of the screen, if negative from the right edge
Default, startx=None is to center window horizontally.
starty: if positive, starting position in pixels from the top
edge of the screen, if negative from the bottom edge
Default, starty=None is to center window vertically.
Examples (for a Screen instance named screen):
>>> screen.setup (width=200, height=200, startx=0, starty=0)
sets window to 200x200 pixels, in upper left of screen
>>> screen.setup(width=.75, height=0.5, startx=None, starty=None)
sets window to 75% of screen by 50% of screen and centers
"""
if not hasattr(self._root, "set_geometry"):
return
sw = self._root.win_width()
sh = self._root.win_height()
if isinstance(width, float) and 0 <= width <= 1:
width = sw*width
if startx is None:
startx = (sw - width) / 2
if isinstance(height, float) and 0 <= height <= 1:
height = sh*height
if starty is None:
starty = (sh - height) / 2
self._root.set_geometry(width, height, startx, starty)
self.update()
def title(self, titlestring):
"""Set title of turtle-window
Argument:
titlestring -- a string, to appear in the titlebar of the
turtle graphics window.
This is a method of Screen-class. Not available for TurtleScreen-
objects.
Example (for a Screen instance named screen):
>>> screen.title("Welcome to the turtle-zoo!")
"""
if _Screen._root is not None:
_Screen._root.title(titlestring)
_Screen._title = titlestring
def _destroy(self):
root = self._root
if root is _Screen._root:
Turtle._pen = None
Turtle._screen = None
_Screen._root = None
_Screen._canvas = None
TurtleScreen._RUNNING = True
root.destroy()
def bye(self):
"""Shut the turtlegraphics window.
Example (for a TurtleScreen instance named screen):
>>> screen.bye()
"""
self._destroy()
def exitonclick(self):
"""Go into mainloop until the mouse is clicked.
No arguments.
Bind bye() method to mouseclick on TurtleScreen.
If "using_IDLE" - value in configuration dictionary is False
(default value), enter mainloop.
If IDLE with -n switch (no subprocess) is used, this value should be
set to True in turtle.cfg. In this case IDLE's mainloop
is active also for the client script.
This is a method of the Screen-class and not available for
TurtleScreen instances.
Example (for a Screen instance named screen):
>>> screen.exitonclick()
"""
def exitGracefully(x, y):
"""Screen.bye() with two dummy-parameters"""
self.bye()
self.onclick(exitGracefully)
if _CFG["using_IDLE"]:
return
try:
mainloop()
except AttributeError:
exit(0)
class Turtle(RawTurtle):
"""RawTurtle auto-creating (scrolled) canvas.
When a Turtle object is created or a function derived from some
Turtle method is called a TurtleScreen object is automatically created.
"""
_pen = None
_screen = None
def __init__(self,
shape=_CFG["shape"],
undobuffersize=_CFG["undobuffersize"],
visible=_CFG["visible"]):
if Turtle._screen is None:
Turtle._screen = Screen()
RawTurtle.__init__(self, Turtle._screen,
shape=shape,
undobuffersize=undobuffersize,
visible=visible)
Pen = Turtle
def _getpen():
"""Create the 'anonymous' turtle if not already present."""
if Turtle._pen is None:
Turtle._pen = Turtle()
return Turtle._pen
def _getscreen():
"""Create a TurtleScreen if not already present."""
if Turtle._screen is None:
Turtle._screen = Screen()
return Turtle._screen
def write_docstringdict(filename="turtle_docstringdict"):
"""Create and write docstring-dictionary to file.
Optional argument:
filename -- a string, used as filename
default value is turtle_docstringdict
Has to be called explicitly, (not used by the turtle-graphics classes)
    The docstring dictionary will be written to the Python script <filename>.py
It is intended to serve as a template for translation of the docstrings
into different languages.
"""
docsdict = {}
for methodname in _tg_screen_functions:
key = "_Screen."+methodname
docsdict[key] = eval(key).__doc__
for methodname in _tg_turtle_functions:
key = "Turtle."+methodname
docsdict[key] = eval(key).__doc__
f = open("%s.py" % filename,"w")
keys = sorted([x for x in docsdict.keys()
if x.split('.')[1] not in _alias_list])
f.write('docsdict = {\n\n')
for key in keys[:-1]:
f.write('%s :\n' % repr(key))
f.write(' """%s\n""",\n\n' % docsdict[key])
key = keys[-1]
f.write('%s :\n' % repr(key))
f.write(' """%s\n"""\n\n' % docsdict[key])
f.write("}\n")
f.close()
def read_docstrings(lang):
"""Read in docstrings from lang-specific docstring dictionary.
Transfer docstrings, translated to lang, from a dictionary-file
to the methods of classes Screen and Turtle and - in revised form -
to the corresponding functions.
"""
modname = "turtle_docstringdict_%(language)s" % {'language':lang.lower()}
module = __import__(modname)
docsdict = module.docsdict
for key in docsdict:
try:
# eval(key).im_func.__doc__ = docsdict[key]
eval(key).__doc__ = docsdict[key]
except:
print("Bad docstring-entry: %s" % key)
_LANGUAGE = _CFG["language"]
try:
if _LANGUAGE != "english":
read_docstrings(_LANGUAGE)
except ImportError:
print("Cannot find docsdict for", _LANGUAGE)
except:
print ("Unknown Error when trying to import %s-docstring-dictionary" %
_LANGUAGE)
def getmethparlist(ob):
"""Get strings describing the arguments for the given object
Returns a pair of strings representing function parameter lists
including parenthesis. The first string is suitable for use in
function definition and the second is suitable for use in function
call. The "self" parameter is not included.
"""
defText = callText = ""
# bit of a hack for methods - turn it into a function
# but we drop the "self" param.
# Try and build one for Python defined functions
args, varargs, varkw = inspect.getargs(ob.__code__)
items2 = args[1:]
realArgs = args[1:]
defaults = ob.__defaults__ or []
defaults = ["=%r" % (value,) for value in defaults]
defaults = [""] * (len(realArgs)-len(defaults)) + defaults
items1 = [arg + dflt for arg, dflt in zip(realArgs, defaults)]
if varargs is not None:
items1.append("*" + varargs)
items2.append("*" + varargs)
if varkw is not None:
items1.append("**" + varkw)
items2.append("**" + varkw)
defText = ", ".join(items1)
defText = "(%s)" % defText
callText = ", ".join(items2)
callText = "(%s)" % callText
return defText, callText
def _turtle_docrevise(docstr):
"""To reduce docstrings from RawTurtle class for functions
"""
import re
if docstr is None:
return None
turtlename = _CFG["exampleturtle"]
newdocstr = docstr.replace("%s." % turtlename,"")
parexp = re.compile(r' \(.+ %s\):' % turtlename)
newdocstr = parexp.sub(":", newdocstr)
return newdocstr
def _screen_docrevise(docstr):
"""To reduce docstrings from TurtleScreen class for functions
"""
import re
if docstr is None:
return None
screenname = _CFG["examplescreen"]
newdocstr = docstr.replace("%s." % screenname,"")
parexp = re.compile(r' \(.+ %s\):' % screenname)
newdocstr = parexp.sub(":", newdocstr)
return newdocstr
## The following mechanism makes all methods of RawTurtle and Turtle available
## as functions. So we can enhance, change, add, delete methods to these
## classes and do not need to change anything here.
for methodname in _tg_screen_functions:
pl1, pl2 = getmethparlist(eval('_Screen.' + methodname))
if pl1 == "":
print(">>>>>>", pl1, pl2)
continue
defstr = ("def %(key)s%(pl1)s: return _getscreen().%(key)s%(pl2)s" %
{'key':methodname, 'pl1':pl1, 'pl2':pl2})
exec(defstr)
eval(methodname).__doc__ = _screen_docrevise(eval('_Screen.'+methodname).__doc__)
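# Each generated wrapper looks like, e.g. for "bgcolor":
#   def bgcolor(*args): return _getscreen().bgcolor(*args)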
for methodname in _tg_turtle_functions:
pl1, pl2 = getmethparlist(eval('Turtle.' + methodname))
if pl1 == "":
print(">>>>>>", pl1, pl2)
continue
defstr = ("def %(key)s%(pl1)s: return _getpen().%(key)s%(pl2)s" %
{'key':methodname, 'pl1':pl1, 'pl2':pl2})
exec(defstr)
eval(methodname).__doc__ = _turtle_docrevise(eval('Turtle.'+methodname).__doc__)
done = mainloop
if __name__ == "__main__":
def switchpen():
if isdown():
pu()
else:
pd()
def demo1():
"""Demo of old turtle.py - module"""
reset()
tracer(True)
up()
backward(100)
down()
# draw 3 squares; the last filled
width(3)
for i in range(3):
if i == 2:
begin_fill()
for _ in range(4):
forward(20)
left(90)
if i == 2:
color("maroon")
end_fill()
up()
forward(30)
down()
width(1)
color("black")
# move out of the way
tracer(False)
up()
right(90)
forward(100)
right(90)
forward(100)
right(180)
down()
# some text
write("startstart", 1)
write("start", 1)
color("red")
# staircase
for i in range(5):
forward(20)
left(90)
forward(20)
right(90)
# filled staircase
tracer(True)
begin_fill()
for i in range(5):
forward(20)
left(90)
forward(20)
right(90)
end_fill()
# more text
def demo2():
"""Demo of some new features."""
speed(1)
st()
pensize(3)
setheading(towards(0, 0))
radius = distance(0, 0)/2.0
rt(90)
for _ in range(18):
switchpen()
circle(radius, 10)
write("wait a moment...")
while undobufferentries():
undo()
reset()
lt(90)
colormode(255)
laenge = 10
pencolor("green")
pensize(3)
lt(180)
for i in range(-2, 16):
if i > 0:
begin_fill()
fillcolor(255-15*i, 0, 15*i)
for _ in range(3):
fd(laenge)
lt(120)
end_fill()
laenge += 10
lt(15)
speed((speed()+1)%12)
#end_fill()
lt(120)
pu()
fd(70)
rt(30)
pd()
color("red","yellow")
speed(0)
begin_fill()
for _ in range(4):
circle(50, 90)
rt(90)
fd(30)
rt(90)
end_fill()
lt(90)
pu()
fd(30)
pd()
shape("turtle")
tri = getturtle()
tri.resizemode("auto")
turtle = Turtle()
turtle.resizemode("auto")
turtle.shape("turtle")
turtle.reset()
turtle.left(90)
turtle.speed(0)
turtle.up()
turtle.goto(280, 40)
turtle.lt(30)
turtle.down()
turtle.speed(6)
turtle.color("blue","orange")
turtle.pensize(2)
tri.speed(6)
setheading(towards(turtle))
count = 1
while tri.distance(turtle) > 4:
turtle.fd(3.5)
turtle.lt(0.6)
tri.setheading(tri.towards(turtle))
tri.fd(4)
if count % 20 == 0:
turtle.stamp()
tri.stamp()
switchpen()
count += 1
tri.write("CAUGHT! ", font=("Arial", 16, "bold"), align="right")
tri.pencolor("black")
tri.pencolor("red")
def baba(xdummy, ydummy):
clearscreen()
bye()
time.sleep(2)
while undobufferentries():
tri.undo()
turtle.undo()
tri.fd(50)
tri.write(" Click me!", font = ("Courier", 12, "bold") )
tri.onclick(baba, 1)
demo1()
demo2()
exitonclick()
| bsd-3-clause | 8,964,513,729,629,554,000 | 33.605371 | 109 | 0.552012 | false |
farhaanbukhsh/tunir | tunirlib/__init__.py | 1 | 10840 | import os
import sys
import json
import time
import redis
import signal
import argparse
import tempfile
import shutil
import paramiko
from pprint import pprint
from testvm import build_and_run
from tunirvagrant import vagrant_and_run
from tunirdocker import Docker, Result
from collections import OrderedDict
STR = OrderedDict()
def run(host='127.0.0.1', port=22, user='root',
password='passw0rd', command='/bin/true', bufsize=-1, key_filename=''):
"""
    Executes a command using paramiko and returns the result.
:param host: Host to connect
:param port: The port number
:param user: The username of the system
:param password: User password
:param command: The command to run
:param key_filename: SSH private key file.
:return:
"""
port = int(port)
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
if not key_filename:
client.connect(hostname=host, port=port,
username=user, password=password, banner_timeout=10)
else:
print host, port, user, key_filename
client.connect(hostname=host, port=port,
username=user, key_filename=key_filename, banner_timeout=10)
chan = client.get_transport().open_session()
chan.settimeout(None)
chan.set_combine_stderr(True)
chan.exec_command(command)
stdout = chan.makefile('r', bufsize)
stderr = chan.makefile_stderr('r', bufsize)
stdout_text = stdout.read()
stderr_text = stderr.read()
out = Result(stdout_text)
status = int(chan.recv_exit_status())
client.close()
out.return_code = status
return out
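# Minimal usage sketch for run() with hypothetical host and key (not part
# of any shipped job):
#   result = run(host='192.168.122.100', user='fedora',
#                key_filename='/home/user/.ssh/id_rsa', command='uname -r')
#   print result.return_code, str(result)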
def read_job_configuration(jobname='', config_dir='./'):
"""
:param jobname: Name of the job
:param config_dir: Directory for configuration.
:return: Configuration dict
"""
data = None
name = jobname + '.json'
name = os.path.join(config_dir, name)
if not os.path.exists(name):
print "Job configuration is missing."
return None
with open(name) as fobj:
data = json.load(fobj)
return data
def try_again(func):
"We will try again for ssh errors."
def wrapper(*args, **kargs):
try:
result = func(*args, **kargs)
except paramiko.ssh_exception.SSHException:
print "Getting ssh exception, sleeping for 30 seconds and then trying again."
time.sleep(30)
result = func(*args, **kargs)
return result
return wrapper
@try_again
def execute(config, command, container=None):
"""
Executes a given command based on the system.
:param config: Configuration dictionary.
:param command: The command to execute
:return: (Output text, boolean)
"""
result = ''
negative = False
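    # Commands prefixed with '@@' are negative tests: they are expected to
    # fail on the remote host, and update_result() treats a zero exit
    # status for them as a test failure.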
if command.startswith('@@'):
        command = command[2:].strip()
        result = run(config['host_string'], config.get('port', '22'), config['user'],
            config.get('password', None), command, key_filename=config.get('key', None))
        negative = True  # '@@' commands are supposed to fail
else:
result = run(config['host_string'], config.get('port', '22'), config['user'],
config.get('password', None), command, key_filename=config.get('key', None))
return result, negative
def update_result(result, command, negative):
"""
    Updates the result based on input.
    :param result: Output from the command
    :param command: Text command.
    :param negative: If it is a negative command, which is supposed to fail.
    :return: Boolean, False if the job as a whole failed.
"""
if negative:
status = True
if result.return_code == 0:
status = False
d = {'command': command, 'result': unicode(result, encoding='utf-8', errors='replace'),
'ret': result.return_code, 'status': status}
STR[command] = d
else:
status = True
if result.return_code != 0:
status = False
d = {'command': command, 'result': unicode(result, encoding='utf-8', errors='replace'),
'ret': result.return_code, 'status': status}
STR[command] = d
if result.return_code != 0 and not negative:
# Save the error message and status as fail.
return False
return True
def run_job(args, jobpath, job_name='', config=None, container=None, port=None):
"""
    Runs the given job's commands over ssh using paramiko.
:param args: Command line arguments.
:param jobpath: Path to the job file.
:param job_name: string job name.
:param config: Configuration of the given job
:param container: Docker object for a Docker job.
:param port: The port number to connect in case of a vm.
:return: Status of the job in boolean
"""
if not os.path.exists(jobpath):
print "Missing job file {0}".format(jobpath)
return False
# Now read the commands inside the job file
# and execute them one by one, we need to save
# the result too.
commands = []
status = True
with open(jobpath) as fobj:
commands = fobj.readlines()
try:
job = None
print "Starting a stateless job."
if not 'host_string' in config: # For VM based tests.
config['host_string'] = '127.0.0.1'
if config['type'] == 'vm':
config['port'] = port
elif config['type'] == 'bare':
config['host_string'] = config['image']
elif config['type'] == 'docker':
# Now we will convert this job as a bare metal :)
config['type'] = 'bare'
config['host_string'] = container.ip
time.sleep(10)
for command in commands:
negative = False
result = ''
command = command.strip('\n')
if command.startswith('SLEEP'): # We will have to sleep
word = command.split(' ')[1]
print "Sleeping for %s." % word
time.sleep(int(word))
continue
print "Executing command: %s" % command
try:
result, negative = execute(config, command)
status = update_result(result, command, negative)
if not status:
break
except: #execute failed second time
status = False
break
# If we are here, that means all commands ran successfully.
finally:
# Now for stateless jobs
print "\n\nJob status: %s\n\n" % status
for key, value in STR.iteritems():
print "command: %s" % value['command']
print "status: %s\n" % value['status']
print value['result']
print "\n"
return status
def get_port():
"Gets the latest port from redis queue."
r = redis.Redis()
port = r.rpop('tunirports')
print "Got port: %s" % port
return port
def return_port(port):
"""
Returns the port to the queue.
:param port: The port number
:return: None
"""
r = redis.Redis()
port = r.lpush('tunirports', port)
return port
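# A deployment is expected to pre-seed the 'tunirports' queue before tunir
# runs; a minimal sketch with hypothetical port numbers:
#   r = redis.Redis()
#   for p in range(2222, 2227):
#       r.lpush('tunirports', str(p))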
def main(args):
"Starting point of the code"
job_name = ''
vm = None
port = None
temp_d = None
container = None
atomic = False
image_dir = ''
vagrant = None
return_code = -100
run_job_flag = True
if args.atomic:
atomic = True
if args.job:
job_name = args.job
else:
sys.exit(-2)
# First let us read the vm configuration.
config = read_job_configuration(job_name, args.config_dir)
if not config: # Bad config name
sys.exit(-1)
if config['type'] in ['vm',]:
# If there is an image_dir then use that, else we need to
# create a temp directory to store the image in
if args.image_dir:
image_dir = args.image_dir
else:
temp_d = tempfile.mkdtemp()
image_dir = temp_d
# If the image_dir is not yet created lets create it
if not os.path.exists(image_dir):
os.mkdir(image_dir)
# Create the supporting meta directory if it doesn't exist
if not os.path.exists(os.path.join(image_dir, 'meta')):
os.mkdir(os.path.join(image_dir, 'meta'))
# Update perms on directory
os.system('chmod 0777 %s' % image_dir)
if config['type'] == 'vm':
# First get us the free port number from redis queue.
port = get_port()
if not port:
print "No port found in the redis queue."
return
vm = build_and_run(config['image'], config['ram'],
graphics=True, vnc=False, atomic=atomic,
port=port, image_dir=image_dir)
job_pid = vm.pid # The pid to kill at the end
# We should wait for a minute here
time.sleep(60)
if config['type'] == 'docker':
container = Docker(config['image'])
jobpath = os.path.join(args.config_dir, job_name + '.txt')
if config['type'] == 'vagrant':
os.system('mkdir -p /var/run/tunir')
vagrant, config = vagrant_and_run(config)
if vagrant.failed:
run_job_flag = False
try:
if run_job_flag:
status = run_job(args, jobpath, job_name, config, container, port)
if status:
return_code = 0
finally:
# Now let us kill the kvm process
if vm:
os.kill(job_pid, signal.SIGKILL)
if temp_d:
shutil.rmtree(temp_d)
return_port(port)
if container:
container.rm()
if vagrant:
print "Removing the box."
vagrant.destroy()
else:
# FIXME!!!
# Somehow the terminal is not echoing unless we do the line below.
os.system('stty sane')
sys.exit(return_code)
def startpoint():
parser = argparse.ArgumentParser()
parser.add_argument("--job", help="The job configuration name to run")
parser.add_argument("--stateless", help="Do not store the result, just print it in the STDOUT.", action='store_true')
parser.add_argument("--config-dir", help="Path to the directory where the job config and commands can be found.",
default='./')
parser.add_argument("--image-dir", help="Path to the directory where vm images will be held")
parser.add_argument("--atomic", help="We are using an Atomic image.", action='store_true')
args = parser.parse_args()
main(args)
if __name__ == '__main__':
startpoint()
| gpl-2.0 | 5,878,103,509,677,801,000 | 31.166172 | 121 | 0.587085 | false |
serge-sans-paille/pythran | pythran/tests/test_complex.py | 1 | 8598 | import numpy as np
from pythran.config import cfg
from pythran.tests import TestEnv
from pythran.typing import NDArray
import unittest
try:
np.float128
has_float128 = True
except AttributeError:
has_float128 = False
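# np.float128 and np.complex256 are only present where NumPy exposes an
# extended-precision long double (e.g. not on standard Windows builds);
# the complex256 tests below are skipped in that case.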
class TestComplex(TestEnv):
""" Check complex support in Pythran. """
def test_complex_limited_range(self):
""" Check complex computation is the same as numpy for corner case. """
# see -fcx-limited-range
if cfg.getboolean('pythran', 'complex_hook'):
self.run_test("""
def test_complex_limited_range(a, b):
return a * b""",
complex(-4, np.nan), complex(4, -np.inf),
test_complex_limited_range=[complex, complex])
def test_complex128_to_complex64(self):
self.run_test("""
import numpy as np
def complex128_to_complex64(a):
return np.complex64(a)""",
complex(-4.4, 4.4),
complex128_to_complex64=[complex])
def test_conjugate(self):
"""
Check complex conjugate.
Checked for:
* Method and numpy function call
* conj and conjugate for each of them
* complex and array (1 and 2 D)
"""
self.run_test("""
def test_conjugate(c, a, a2d):
import numpy as np
return (np.conj(c), np.conj(a), a2d.conj(),
np.conjugate(c), np.conjugate(a), a2d.conjugate())
""",
3 + 2j, np.array([3 + 2j]), np.array([[3 + 2j]]),
test_conjugate=[np.complex128,
NDArray[np.complex128, :],
NDArray[complex, :, :]])
def test_complex_array_abs(self):
self.run_test('def test_complex_array_abs(a): import numpy as np ; return np.abs(a)',
np.array([[3 + 2j]]),
test_complex_array_abs=[NDArray[complex, :, :]])
def test_complex_floordiv(self):
self.run_test('def complex_floordiv(x): import numpy as np; return np.floor_divide(x, 2 + 2j)',
3.5 - 3.5j,
complex_floordiv=[complex])
def test_complex_array_sqr(self):
self.run_test('def test_complex_array_sqr(a): return a ** 2',
np.array([[3 + 2j]]),
test_complex_array_sqr=[NDArray[complex, :, :]])
def test_complex_array_mul_i(self):
self.run_test('def test_complex_array_mul_i(e): return e + 1j * e',
np.array([[3.,2.,4.]]),
test_complex_array_mul_i=[NDArray[float, :, :]])
def test_non_complex_array_real_imag(self):
self.run_test('def test_non_complex_array_real_imag(e): return e.real + e.imag',
np.array([[3.,2.,4.]]),
test_non_complex_array_real_imag=[NDArray[float, :, :]])
def test_complex_array_real_imag(self):
self.run_test('def test_complex_array_real_imag(e): return e.real + e.imag',
np.array([[3.,2.,4.]], dtype=complex),
test_complex_array_real_imag=[NDArray[complex, :, :]])
def test_complex_sum_different_types(self):
self.run_test('def test_complex_different_types(a,b): return a + b',
np.array([[3 + 2j]],dtype=np.complex64),np.array([[8 + 1j]],dtype=np.complex128),
test_complex_different_types=[NDArray[np.complex64, :, :],NDArray[np.complex128, :, :]])
def test_complex_sum_same_types(self):
self.run_test('def test_complex_same_types(a): return a + a',
np.array([[3 + 2j]],dtype=np.complex64),
test_complex_same_types=[NDArray[np.complex64, :, :]])
def test_complex_array_real_assign(self):
self.run_test('def test_complex_array_real_assign(a): a.real = 1; return a',
np.array([[3 + 2j, 2, 1, 0]] * 3,dtype=np.complex64),
test_complex_array_real_assign=[NDArray[np.complex64, :, :]])
def test_complex_array_gexpr_real_assign(self):
self.run_test('def test_complex_array_gexpr_real_assign(a): a.real[1:] = 1; return a',
np.array([[3 + 2j, 2, 1, 0]] * 3,dtype=np.complex64),
test_complex_array_gexpr_real_assign=[NDArray[np.complex64, :, :]])
def test_complex_array_iexpr_real_assign(self):
self.run_test('def test_complex_array_iexpr_real_assign(a): a.real[1] = 1; return a',
np.array([[3 + 2j, 2, 1, 0]] * 3,dtype=np.complex64),
test_complex_array_iexpr_real_assign=[NDArray[np.complex64, :, :]])
def test_complex_broadcast_scalar0(self):
self.run_test('def complex_broadcast_scalar0(x): return x + 1.5, 1.3 +x, 3.1 - x, x - 3.7, x * 5.4, 7.6 * x',
5.1 + 3j,
complex_broadcast_scalar0=[complex])
def test_complex_broadcast_scalar1(self):
self.run_test('def complex_broadcast_scalar1(x): return x + 1.5, 1.3 +x, 3.1 - x, x - 3.7, x * 5.4, 7.6 * x',
np.complex64(5.1 + 3j),
complex_broadcast_scalar1=[np.complex64])
def test_complex_array_imag_assign(self):
self.run_test('def test_complex_array_imag_assign(a): a.imag = 1; return a',
np.array([[3 + 2j, 2, 1, 0]] * 3,dtype=np.complex64),
test_complex_array_imag_assign=[NDArray[np.complex64, :, :]])
def test_complex_array_gexpr_imag_assign(self):
self.run_test('def test_complex_array_gexpr_imag_assign(a): a.imag[1:] = 1; return a',
np.array([[3 + 2j, 2, 1, 0]] * 3,dtype=np.complex64),
test_complex_array_gexpr_imag_assign=[NDArray[np.complex64, :, :]])
def test_complex_array_iexpr_imag_assign(self):
self.run_test('def test_complex_array_iexpr_imag_assign(a): a.imag[1] = 1; return a',
np.array([[3 + 2j, 2, 1, 0]] * 3,dtype=np.complex64),
test_complex_array_iexpr_imag_assign=[NDArray[np.complex64, :, :]])
def test_complex_array_expr_imag(self):
self.run_test('def test_complex_array_expr_imag(a): return (2.j*a).imag',
np.array([[3 + 2j, 2, 1, 0]] * 3,dtype=np.complex64),
test_complex_array_expr_imag=[NDArray[np.complex64, :, :]])
def test_complex_array_expr_real(self):
self.run_test('def test_complex_array_expr_real(a): return (2+a).real',
np.array([[3 + 2j, 2, 1, 0]] * 3,dtype=np.complex64),
test_complex_array_expr_real=[NDArray[np.complex64, :, :]])
@unittest.skipIf(not has_float128, "not float128")
def test_complex256_array0(self):
self.run_test('def complex256_array0(x): import numpy as np; return np.cos(x * 2j)',
np.array([1.2,3.1], dtype=np.complex256) ** 6,
complex256_array0=[NDArray[np.complex256, :]])
@unittest.skipIf(not has_float128, "not float128")
def test_complex256_array1(self):
self.run_test('def complex256_array1(x): import numpy as np; return (x * 2j)**2',
np.array([1.2,3.1], dtype=np.complex256) ** 6,
complex256_array1=[NDArray[np.complex256, :]])
@unittest.skipIf(not has_float128, "not float128")
def test_complex256_array2(self):
self.run_test('def complex256_array2(x): import numpy as np; return np.ones(x, dtype=np.complex256)',
10,
complex256_array2=[int])
@unittest.skipIf(not has_float128, "not float128")
def test_complex256_array3(self):
self.run_test('def complex256_array3(x): return x.real, x.imag',
np.array([2j, 2], dtype=np.complex256)** 5,
complex256_array3=[NDArray[np.complex256, :]])
@unittest.skipIf(not has_float128, "not float128")
def test_complex256_array4(self):
self.run_test('def complex256_array4(x): return x.conj(), x.sum()',
np.array([2j, 2], dtype=np.complex256)** 7,
complex256_array4=[NDArray[np.complex256, :]])
@unittest.skipIf(not has_float128, "not float128")
def test_complex256_array5(self):
self.run_test('def complex256_array5(x): return x',
np.complex256(1 + 1j),
complex256_array5=[np.complex256])
| bsd-3-clause | -5,162,376,291,478,705,000 | 46.241758 | 117 | 0.544313 | false |
datalogics-robb/scons | src/engine/SCons/Optik/option.py | 2 | 14507 | """optik.option
Defines the Option class and some standard value-checking functions.
"""
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
# Original Optik revision this is based on:
__Optik_revision__ = "option.py,v 1.19.2.1 2002/07/23 01:51:14 gward Exp"
# Copyright (c) 2001 Gregory P. Ward. All rights reserved.
# See the README.txt distributed with Optik for licensing terms.
# created 2001/10/17, GPW (from optik.py)
import sys
import string
from types import TupleType, ListType, DictType
from SCons.Optik.errors import OptionError, OptionValueError
_builtin_cvt = { "int" : (int, "integer"),
"long" : (long, "long integer"),
"float" : (float, "floating-point"),
"complex" : (complex, "complex") }
def check_builtin (option, opt, value):
(cvt, what) = _builtin_cvt[option.type]
try:
return cvt(value)
except ValueError:
raise OptionValueError(
#"%s: invalid %s argument %s" % (opt, what, repr(value)))
"option %s: invalid %s value: %s" % (opt, what, repr(value)))
def check_choice(option, opt, value):
if value in option.choices:
return value
else:
choices = string.join(map(repr, option.choices),", ")
raise OptionValueError(
"option %s: invalid choice: %s (choose from %s)"
% (opt, repr(value), choices))
# Not supplying a default is different from a default of None,
# so we need an explicit "not supplied" value.
NO_DEFAULT = "NO"+"DEFAULT"
class Option:
"""
Instance attributes:
_short_opts : [string]
_long_opts : [string]
action : string
type : string
dest : string
default : any
nargs : int
const : any
choices : [string]
callback : function
callback_args : (any*)
callback_kwargs : { string : any }
help : string
metavar : string
"""
# The list of instance attributes that may be set through
# keyword args to the constructor.
ATTRS = ['action',
'type',
'dest',
'default',
'nargs',
'const',
'choices',
'callback',
'callback_args',
'callback_kwargs',
'help',
'metavar']
# The set of actions allowed by option parsers. Explicitly listed
# here so the constructor can validate its arguments.
ACTIONS = ("store",
"store_const",
"store_true",
"store_false",
"append",
"count",
"callback",
"help",
"version")
# The set of actions that involve storing a value somewhere;
# also listed just for constructor argument validation. (If
# the action is one of these, there must be a destination.)
STORE_ACTIONS = ("store",
"store_const",
"store_true",
"store_false",
"append",
"count")
# The set of actions for which it makes sense to supply a value
# type, ie. where we expect an argument to this option.
TYPED_ACTIONS = ("store",
"append",
"callback")
# The set of known types for option parsers. Again, listed here for
# constructor argument validation.
TYPES = ("string", "int", "long", "float", "complex", "choice")
# Dictionary of argument checking functions, which convert and
# validate option arguments according to the option type.
#
# Signature of checking functions is:
# check(option : Option, opt : string, value : string) -> any
# where
# option is the Option instance calling the checker
# opt is the actual option seen on the command-line
# (eg. "-a", "--file")
# value is the option argument seen on the command-line
#
# The return value should be in the appropriate Python type
# for option.type -- eg. an integer if option.type == "int".
#
# If no checker is defined for a type, arguments will be
# unchecked and remain strings.
TYPE_CHECKER = { "int" : check_builtin,
"long" : check_builtin,
"float" : check_builtin,
"complex" : check_builtin,
"choice" : check_choice,
}
# CHECK_METHODS is a list of unbound method objects; they are called
# by the constructor, in order, after all attributes are
# initialized. The list is created and filled in later, after all
# the methods are actually defined. (I just put it here because I
# like to define and document all class attributes in the same
# place.) Subclasses that add another _check_*() method should
# define their own CHECK_METHODS list that adds their check method
# to those from this class.
CHECK_METHODS = None
# -- Constructor/initialization methods ----------------------------
def __init__ (self, *opts, **attrs):
# Set _short_opts, _long_opts attrs from 'opts' tuple
opts = self._check_opt_strings(opts)
self._set_opt_strings(opts)
# Set all other attrs (action, type, etc.) from 'attrs' dict
self._set_attrs(attrs)
# Check all the attributes we just set. There are lots of
# complicated interdependencies, but luckily they can be farmed
# out to the _check_*() methods listed in CHECK_METHODS -- which
# could be handy for subclasses! The one thing these all share
# is that they raise OptionError if they discover a problem.
for checker in self.CHECK_METHODS:
checker(self)
def _check_opt_strings (self, opts):
# Filter out None because early versions of Optik had exactly
# one short option and one long option, either of which
# could be None.
opts = filter(None, opts)
if not opts:
raise OptionError("at least one option string must be supplied",
self)
return opts
def _set_opt_strings (self, opts):
self._short_opts = []
self._long_opts = []
for opt in opts:
if len(opt) < 2:
raise OptionError(
"invalid option string %s: "
"must be at least two characters long" % (`opt`,), self)
elif len(opt) == 2:
if not (opt[0] == "-" and opt[1] != "-"):
raise OptionError(
"invalid short option string %s: "
"must be of the form -x, (x any non-dash char)" % (`opt`,),
self)
self._short_opts.append(opt)
else:
if not (opt[0:2] == "--" and opt[2] != "-"):
raise OptionError(
"invalid long option string %s: "
"must start with --, followed by non-dash" % (`opt`,),
self)
self._long_opts.append(opt)
def _set_attrs (self, attrs):
for attr in self.ATTRS:
if attrs.has_key(attr):
setattr(self, attr, attrs[attr])
del attrs[attr]
else:
if attr == 'default':
setattr(self, attr, NO_DEFAULT)
else:
setattr(self, attr, None)
if attrs:
raise OptionError(
"invalid keyword arguments: %s" % string.join(attrs.keys(),", "),
self)
# -- Constructor validation methods --------------------------------
def _check_action (self):
if self.action is None:
self.action = "store"
elif self.action not in self.ACTIONS:
raise OptionError("invalid action: %s" % (`self.action`,), self)
def _check_type (self):
if self.type is None:
# XXX should factor out another class attr here: list of
# actions that *require* a type
if self.action in ("store", "append"):
if self.choices is not None:
# The "choices" attribute implies "choice" type.
self.type = "choice"
else:
# No type given? "string" is the most sensible default.
self.type = "string"
else:
if self.type not in self.TYPES:
raise OptionError("invalid option type: %s" % (`self.type`,), self)
if self.action not in self.TYPED_ACTIONS:
raise OptionError(
"must not supply a type for action %s" % (`self.action`,), self)
def _check_choice(self):
if self.type == "choice":
if self.choices is None:
raise OptionError(
"must supply a list of choices for type 'choice'", self)
elif type(self.choices) not in (TupleType, ListType):
raise OptionError(
"choices must be a list of strings ('%s' supplied)"
% string.split(str(type(self.choices)),"'")[1], self)
elif self.choices is not None:
raise OptionError(
"must not supply choices for type %s" % (repr(self.type),), self)
def _check_dest (self):
if self.action in self.STORE_ACTIONS and self.dest is None:
# No destination given, and we need one for this action.
# Glean a destination from the first long option string,
# or from the first short option string if no long options.
if self._long_opts:
# eg. "--foo-bar" -> "foo_bar"
self.dest = string.replace(self._long_opts[0][2:],'-', '_')
else:
self.dest = self._short_opts[0][1]
def _check_const (self):
if self.action != "store_const" and self.const is not None:
raise OptionError(
"'const' must not be supplied for action %s" % (repr(self.action),),
self)
def _check_nargs (self):
if self.action in self.TYPED_ACTIONS:
if self.nargs is None:
self.nargs = 1
elif self.nargs is not None:
raise OptionError(
"'nargs' must not be supplied for action %s" % (repr(self.action),),
self)
def _check_callback (self):
if self.action == "callback":
if not callable(self.callback):
raise OptionError(
"callback not callable: %s" % (repr(self.callback),), self)
if (self.callback_args is not None and
type(self.callback_args) is not TupleType):
raise OptionError(
"callback_args, if supplied, must be a tuple: not %s"
% (repr(self.callback_args),), self)
if (self.callback_kwargs is not None and
type(self.callback_kwargs) is not DictType):
raise OptionError(
"callback_kwargs, if supplied, must be a dict: not %s"
% (repr(self.callback_kwargs),), self)
else:
if self.callback is not None:
raise OptionError(
"callback supplied (%s) for non-callback option"
% (repr(self.callback),), self)
if self.callback_args is not None:
raise OptionError(
"callback_args supplied for non-callback option", self)
if self.callback_kwargs is not None:
raise OptionError(
"callback_kwargs supplied for non-callback option", self)
CHECK_METHODS = [_check_action,
_check_type,
_check_choice,
_check_dest,
_check_const,
_check_nargs,
_check_callback]
# -- Miscellaneous methods -----------------------------------------
def __str__ (self):
if self._short_opts or self._long_opts:
return string.join(self._short_opts + self._long_opts,"/")
else:
raise RuntimeError, "short_opts and long_opts both empty!"
def takes_value (self):
return self.type is not None
# -- Processing methods --------------------------------------------
def check_value (self, opt, value):
checker = self.TYPE_CHECKER.get(self.type)
if checker is None:
return value
else:
return checker(self, opt, value)
def process (self, opt, value, values, parser):
# First, convert the value(s) to the right type. Howl if any
# value(s) are bogus.
if value is not None:
if self.nargs == 1:
value = self.check_value(opt, value)
else:
def cv(v,check=self.check_value,o=opt):
return check(o,v)
value = tuple(map(cv,value))
# And then take whatever action is expected of us.
# This is a separate method to make life easier for
# subclasses to add new actions.
return self.take_action(
self.action, self.dest, opt, value, values, parser)
def take_action (self, action, dest, opt, value, values, parser):
if action == "store":
setattr(values, dest, value)
elif action == "store_const":
setattr(values, dest, self.const)
elif action == "store_true":
setattr(values, dest, 1)
elif action == "store_false":
setattr(values, dest, 0)
elif action == "append":
values.ensure_value(dest, []).append(value)
elif action == "count":
setattr(values, dest, values.ensure_value(dest, 0) + 1)
elif action == "callback":
args = self.callback_args or ()
kwargs = self.callback_kwargs or {}
apply( self.callback, (self, opt, value, parser,)+ args, kwargs)
elif action == "help":
parser.print_help()
sys.exit(0)
elif action == "version":
parser.print_version()
sys.exit(0)
else:
raise RuntimeError, "unknown action %s" % (repr(self.action),)
return 1
# class Option
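# Sketch of the documented Optik extension pattern (the "mytype" checker is
# hypothetical, not used by SCons itself):
#
#   def check_mytype(option, opt, value):
#       try:
#           return my_convert(value)   # my_convert: hypothetical converter
#       except ValueError:
#           raise OptionValueError(
#               "option %s: invalid mytype value: %s" % (opt, repr(value)))
#
#   class MyOption(Option):
#       TYPES = Option.TYPES + ("mytype",)
#       TYPE_CHECKER = Option.TYPE_CHECKER.copy()
#       TYPE_CHECKER["mytype"] = check_mytype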
| mit | 42,371,967,996,143,944 | 36.389175 | 84 | 0.530158 | false |
wagnerand/amo-validator | tests/test_js_jstypes.py | 2 | 3042 | import validator.testcases.javascript.jstypes as jstypes
from js_helper import _do_test_raw
def test_jsarray_output():
"""Test that the output function for JSArray doesn't bork."""
ja = jstypes.JSArray()
ja.elements = [None, None]
ja.output() # Used to throw tracebacks.
ja.get_literal_value() # Also used to throw tracebacks.
def test_jsobject_output():
"""Test that the output function for JSObject doesn't bork."""
jso = jstypes.JSObject()
jso.data = {'first': None}
jso.output() # Used to throw tracebacks
def test_jsobject_recursion():
"""Test that circular references don't cause recursion errors."""
jso = jstypes.JSObject()
jso2 = jstypes.JSObject()
jso.data = {'first': jstypes.JSWrapper(jso2)}
jso2.data = {'second': jstypes.JSWrapper(jso)}
print jso.output()
assert '(recursion)' in jso.output()
def test_jsarray_recursion():
"""Test that circular references don't cause recursion errors."""
ja = jstypes.JSArray()
ja2 = jstypes.JSArray()
ja.elements = [jstypes.JSWrapper(ja2)]
ja2.elements = [jstypes.JSWrapper(ja)]
print ja.output()
assert '(recursion)' in ja.output()
print ja.get_literal_value()
assert '(recursion)' in ja.get_literal_value()
def test_jsliteral_regex():
"""
Test that there aren't tracebacks from JSLiterals that perform raw binary
operations.
"""
assert not _do_test_raw("""
var x = /foo/gi;
var y = x + " ";
var z = /bar/i + 0;
""").failed()
def test_jsarray_constructor():
    """
    Test for tracebacks that were caused by JSArray not calling its
    parent's constructor.
    """
assert not _do_test_raw("""
var x = [];
x.foo = "bar";
x["zap"] = "foo";
baz("zap" in x);
""").failed()
def test_jsobject_computed_properties():
"""
Tests that computed property names work as expected.
"""
ID = ('testcases_javascript_instancetypes', 'set_on_event',
'on*_str_assignment')
err1 = _do_test_raw("""
var foo = {};
foo["onthing"] = "stuff";
""")
err2 = _do_test_raw("""
var foo = {
["onthing"]: "stuff",
};
""")
assert err1.warnings[0]['id'] == ID
assert err2.warnings[0]['id'] == ID
assert not _do_test_raw("""
var foo = {
[Symbol.iterator]: function* () {},
["foo" + bar]: "baz",
[thing]: "quux",
};
""").failed()
def test_jsobject_get_wrap():
"""Test that JSObject always returns a JSWrapper."""
x = jstypes.JSObject()
x.data['foo'] = jstypes.JSLiteral('bar')
out = x.get('foo')
assert isinstance(out, jstypes.JSWrapper)
assert out.get_literal_value() == 'bar'
def test_jsarray_get_wrap():
"""Test that JSArray always returns a JSWrapper."""
x = jstypes.JSArray()
x.elements = [None, jstypes.JSLiteral('bar')]
out = x.get('1')
assert isinstance(out, jstypes.JSWrapper)
assert out.get_literal_value() == 'bar'
| bsd-3-clause | -6,442,464,064,066,883,000 | 23.336 | 77 | 0.594346 | false |
WINOT/cide.py | src/preprocessor/compile.py | 1 | 1347 | import os
import sass
PREPROCESSOR_STYLE_EXT = '.scss'
COMPILED_STYLE_EXT = '.css'
def compile(scss_dir_path, output_dir):
# Detect all SASS files
    scss_files = ["{0}/{1}".format(scss_dir_path, filename)
for filename in os.listdir(scss_dir_path)
if filename.endswith(PREPROCESSOR_STYLE_EXT)]
# Process all detected SASS files
for scss in scss_files:
        # for more options, check the help by running
        # python -c "import sass; help(sass.compile)"
compiled_css = sass.compile(filename=scss)
compiled_css_path = os.path.join(output_dir,
os.path.basename(scss).replace(PREPROCESSOR_STYLE_EXT,
COMPILED_STYLE_EXT))
# Open file in write mode to override any existing content
with open(compiled_css_path, 'w') as compiled_file:
compiled_file.write(compiled_css)
if __name__ == "__main__":
# Calling this script will allow to change the css files
# during execution without having to stop the instance.
# Look for files in the same directory as this script
current_dir_path = os.path.dirname(os.path.abspath(__file__))
output_dir_path = os.path.normpath(os.path.join(current_dir_path, '../static/css'))
# Launch compilation
compile(current_dir_path, output_dir_path)
| gpl-3.0 | -5,051,824,188,549,518,000 | 38.617647 | 92 | 0.639941 | false |
ToFuProject/tofu | examples/tutorials/tuto_plot_solid_angles.py | 2 | 3854 | """
Computing the integrated solid angle of a particle
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This tutorial shows how to create a configuration, compute the integrated
solid angle subtended by a particle's trajectory along phi with a certain
discretization, and plot it:

:math:`\int_\Phi \Omega \, R \, d\Phi`
Useful for reconstructing emissivity.
"""
###############################################################################
# We start by loading ITER's configuration (built into `tofu`)
import matplotlib.pyplot as plt
import numpy as np
import tofu as tf
config = tf.load_config("ITER")
###############################################################################
# We define the particles properties and trajectory.
# Let's suppose we have the data for three points of the trajectory:
# the particle moves along X from point (5, 0, 0) in Cartesian coordinates,
# to (6, 0, 0) and finally to (7, 0, 0). At the end point the particle radius
# seems to be a bit bigger (:math:`2 \mu m` instead of :math:`1 \mu m`).
part_rad = np.r_[.0001, .0001, .0002]*2
part_traj = np.array([[5.0, 0.0, 0.0],
[6.0, 0.0, 0.0],
[7.0, 0.0, 0.0]], order="F").T
###############################################################################
# Let's set some parameters for the discretization used to compute the
# integral: resolutions along the (R, Z, Phi) directions, and the (R, Z)
# bounds so the integral is only computed on the core of the plasma.
r_step = z_step = phi_step = 0.02  # 2 cm resolution in R and Z (phi step in radians)
Rminmax = np.r_[4.0, 8.0]
Zminmax = np.r_[-5., 5.0]
###############################################################################
# Let's compute the integrated solid angle: the function returns the points
# of the discretization in (R, Z), the integrated solid angle at those points,
# the indices to reconstruct the discretization on the whole domain, and the
# volume unit :math:`dR * dZ`.
pts, sa_map, ind, vol = tf.geom._GG.compute_solid_angle_map(part_traj,
part_rad,
r_step,
z_step,
phi_step,
Rminmax,
Zminmax)
###############################################################################
# Now we can plot the results the first point on the trajectory
fig1, ax = plt.subplots()
ax.scatter(pts[0, :], pts[1, :], # R and Z coordinates
marker="s", # each point is a squared pixel
edgecolors=None, # no boundary for smooth plot
s=10, # size of pixel
c=sa_map[:, 0].flatten(), # pixel color is value of int solid angle
)
ax.set_box_aspect(1)
plt.show()
###############################################################################
# or the three points in the trajectory
fig2, list_axes = plt.subplots(ncols=3, sharey=True)
# Now we can plot the results for all points on the trajectory
for (ind, ax) in enumerate(list_axes):
ax.scatter(pts[0, :], pts[1, :],
marker="s",
edgecolors=None,
s=10,
c=sa_map[:, ind].flatten(), # we change particle number
)
ax.set_box_aspect(1)
plt.show()
###############################################################################
# Now let's see the 1D profile of all particles for z = 0.
izero = np.abs(pts[1, :]) < z_step
fig3, list_axes = plt.subplots(ncols=3, sharey=False)
for (ind, ax) in enumerate(list_axes):
ax.plot(pts[0, izero], sa_map[izero, ind])
ax.set_box_aspect(1)
plt.show()
| mit | -8,849,116,922,982,718,000 | 39.145833 | 79 | 0.484172 | false |
georglind/humo | humo/molparser.py | 1 | 2064 | from __future__ import division, print_function
import sys
import numpy as np
import re
# molparser function
# parses mol file string into a molecule
def parse(molfile):
    with open(molfile, "r") as mfile:
        mol = mfile.read().splitlines()
try:
res = read_mol(mol)
except Exception as inst:
sys.exit('Cannot read mol file format: ' + str(inst))
return res
def read_mol(mol):
meta = read_meta(mol)
if not meta:
return False
xyz, atoms = read_atoms(mol, meta[1], meta[2])
links = read_links(mol, meta[1] + meta[2], meta[3])
return (meta[0], xyz, atoms, links)
def read_meta(mol):
    prog = re.compile('^([\(\w\d\)-]+)')
    res = prog.match(mol[0])
    name = False
    if res is not None:
        name = res.group(0)
    # Find the V2000 counts line; 'start' is the index of the line after it.
    start = None
    prog = re.compile('\s+(\d+)\s+(\d+)\s.*?V2000$')
    for i, line in enumerate(mol):
        res = prog.match(line)
        if res is not None:
            n_atoms = int(res.group(1))
            n_links = int(res.group(2))
            start = i + 1
            break
    if start is None:
        return False
    return (name, start, n_atoms, n_links)
def read_atoms(mol, start, length):
atoms = []
xyz = []
for i in np.arange(start, start+length):
atom = read_atom(mol[i])
atoms.append(atom['element'])
xyz.append(atom['xyz'])
return np.array(xyz), atoms
def read_atom(line):
atom = {'element': 'C', 'xyz': [0,0,0]}
floats = re.findall(r"[-+]?\d*\.\d+|\d+", line)
element = re.findall('[A-z]+', line)
if floats is None or element is None:
return False
atom['element'] = element[0]
atom['xyz'] = [float(floats[0]), float(floats[1]), float(floats[2])]
return atom
def read_links(mol, start, length):
links = []
for i in np.arange(start, start+length):
n = read_link(mol[i])
links.append([n[0]-1, n[1]-1, n[2]])
return links
def read_link(line):
return [int(n) for n in re.findall('\d+', line)]
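# Minimal usage sketch -- the file name is hypothetical; the shapes follow the
# code above: 'atoms' is a list of element symbols, 'xyz' an (N, 3) numpy
# array, and 'links' a list of [atom_i, atom_j, bond_type] triples
# (zero-based atom indices).
#
#     name, xyz, atoms, links = parse('benzene.mol')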
| mit | -7,508,596,887,681,300,000 | 20.5 | 72 | 0.556202 | false |
automl/paramsklearn | ParamSklearn/components/classification/qda.py | 1 | 2394 | import sklearn.qda
import sklearn.multiclass
from HPOlibConfigSpace.configuration_space import ConfigurationSpace
from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter
from ParamSklearn.components.base import \
ParamSklearnClassificationAlgorithm
from ParamSklearn.constants import *
from ParamSklearn.implementations.util import softmax
class QDA(ParamSklearnClassificationAlgorithm):
def __init__(self, reg_param, random_state=None):
self.reg_param = float(reg_param)
self.estimator = None
def fit(self, X, Y):
estimator = sklearn.qda.QDA(self.reg_param)
if len(Y.shape) == 2 and Y.shape[1] > 1:
self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1)
else:
self.estimator = estimator
self.estimator.fit(X, Y)
return self
def predict(self, X):
if self.estimator is None:
raise NotImplementedError()
return self.estimator.predict(X)
def predict_proba(self, X):
if self.estimator is None:
raise NotImplementedError()
df = self.estimator.predict_proba(X)
return softmax(df)
@staticmethod
def get_properties(dataset_properties=None):
return {'shortname': 'QDA',
'name': 'Quadratic Discriminant Analysis',
'handles_missing_values': False,
'handles_nominal_values': False,
'handles_numerical_features': True,
'prefers_data_scaled': True,
# Find out if this is good because of sparsity
'prefers_data_normalized': False,
'handles_regression': False,
'handles_classification': True,
'handles_multiclass': True,
'handles_multilabel': True,
'is_deterministic': True,
'handles_sparse': False,
'input': (DENSE, UNSIGNED_DATA),
'output': (PREDICTIONS,),
# TODO find out what is best used here!
'preferred_dtype': None}
@staticmethod
def get_hyperparameter_search_space(dataset_properties=None):
reg_param = UniformFloatHyperparameter('reg_param', 0.0, 10.0,
default=0.5)
cs = ConfigurationSpace()
cs.add_hyperparameter(reg_param)
return cs
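# Usage sketch (hypothetical data; the API follows the methods defined above):
#     clf = QDA(reg_param=0.5)
#     clf.fit(X_train, y_train)
#     probabilities = clf.predict_proba(X_test)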
| bsd-3-clause | -6,723,825,138,510,086,000 | 34.205882 | 88 | 0.60401 | false |
Nu3001/external_chromium_org | chrome/test/chromedriver/archive.py | 23 | 2325 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Downloads items from the Chromium continuous archive."""
import os
import platform
import urllib
import util
CHROME_27_REVISION = '190466'
CHROME_28_REVISION = '198276'
CHROME_29_REVISION = '208261'
_SITE = 'http://commondatastorage.googleapis.com'
class Site(object):
CONTINUOUS = _SITE + '/chromium-browser-continuous'
SNAPSHOT = _SITE + '/chromium-browser-snapshots'
def GetLatestRevision(site=Site.CONTINUOUS):
"""Returns the latest revision (as a string) available for this platform.
Args:
site: the archive site to check against, default to the continuous one.
"""
url = site + '/%s/LAST_CHANGE'
return urllib.urlopen(url % _GetDownloadPlatform()).read()
def DownloadChrome(revision, dest_dir, site=Site.CONTINUOUS):
"""Downloads the packaged Chrome from the archive to the given directory.
Args:
revision: the revision of Chrome to download.
dest_dir: the directory to download Chrome to.
site: the archive site to download from, default to the continuous one.
Returns:
The path to the unzipped Chrome binary.
"""
def GetZipName():
if util.IsWindows():
return 'chrome-win32'
elif util.IsMac():
return 'chrome-mac'
elif util.IsLinux():
return 'chrome-linux'
def GetChromePathFromPackage():
if util.IsWindows():
return 'chrome.exe'
elif util.IsMac():
return 'Chromium.app/Contents/MacOS/Chromium'
elif util.IsLinux():
return 'chrome'
zip_path = os.path.join(dest_dir, 'chrome-%s.zip' % revision)
if not os.path.exists(zip_path):
url = site + '/%s/%s/%s.zip' % (_GetDownloadPlatform(), revision,
GetZipName())
print 'Downloading', url, '...'
urllib.urlretrieve(url, zip_path)
util.Unzip(zip_path, dest_dir)
return os.path.join(dest_dir, GetZipName(), GetChromePathFromPackage())
def _GetDownloadPlatform():
"""Returns the name for this platform on the archive site."""
if util.IsWindows():
return 'Win'
elif util.IsMac():
return 'Mac'
elif util.IsLinux():
if platform.architecture()[0] == '64bit':
return 'Linux_x64'
else:
return 'Linux'
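# Usage sketch (hypothetical destination directory):
#     revision = GetLatestRevision()
#     chrome_path = DownloadChrome(revision, '/tmp/chrome-dl')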
| bsd-3-clause | -1,170,667,158,733,422,800 | 28.0625 | 75 | 0.67914 | false |
openpolis/rst2pdf-patched-docutils-0.8 | rst2pdf/tests/execmgr.py | 1 | 12410 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#$HeadURL$
#$LastChangedDate$
#$LastChangedRevision$
# See LICENSE.txt for licensing terms
'''
Copyright (c) 2009, Patrick Maupin, Austin, Texas
A wrapper around subprocess that performs two functions:
1) Adds non-blocking I/O
2) Adds process killability and timeouts
Currently only works under Linux.
'''
import sys
import subprocess
import select
import os
import time
import textwrap
from signal import SIGTERM, SIGKILL
import traceback
class BaseExec(object):
''' BaseExec is designed to be subclassed.
It wraps subprocess.Popen, and adds the
ability to kill a process and to manage
timeouts. By default, it uses pipes for
the new process, but doesn't do anything
with them.
'''
is_python_proc = False
defaults = dict(
bufsize=0,
executable=None,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
preexec_fn=None, # Callable object in child process
close_fds=False,
shell=False,
cwd=None,
env=None,
universal_newlines=False,
startupinfo=None,
creationflags=0,
timeout=500.0, # Time in seconds before termination
killdelay=20.0, # Time in seconds after termination before kill
python_proc=None,
)
def before_init(self, keywords):
# Replace this in subclass to do more setup
pass
def after_init(self):
# Replace this in subclass to execute code after
# process creation
pass
def wrap_python_exec(self, preexec_fn):
# Don't let anything in our buffer wrap back into new process
# Otherwise, it might (will!) come out twice...
sys.stdout.flush()
sys.stderr.flush()
self.is_python_proc = True
def wrapper():
sys.argv = self.args
try:
preexec_fn()
except Exception:
sys.stdout.flush()
print >> sys.stderr, traceback.format_exc()
sys.stderr.write(chr(1))
except SystemExit, s:
sys.stdout.flush()
code = s.code
try:
code = int(code)
except:
pass
if code:
print >> sys.stderr, code
sys.stderr.write(chr(1))
else:
sys.stdout.flush()
sys.stderr.flush()
return wrapper
def __init__(self, *args, **kw):
# Allow flexible args handling.
if len(args) < 2:
try:
args[0] + ''
except TypeError:
args = args[0]
else:
args = args[0].split()
self.args = args
# Handle defaults
keywords = self.defaults.copy()
keywords.update(kw)
# Get our timeout information, and call
# subclass to get other parameters
self.timeout = keywords.pop('timeout') + time.time()
self.killdelay = keywords.pop('killdelay')
self.before_init(keywords)
# Handle any special Python proc
python_proc = keywords.pop('python_proc')
if python_proc is not None:
assert keywords.pop('preexec_fn') is None
keywords['preexec_fn'] = self.wrap_python_exec(python_proc)
args = ['true']
# Start the process and let subclass execute
proc = subprocess.Popen(args, **keywords)
self.proc = proc
self.after_init()
def kill(self, force=False):
action = force and SIGKILL or SIGTERM
os.kill(self.proc.pid, action)
return action
def checktimeout(self):
# Poll to decide if subprocess needs to be killed
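        # The first expiry sends SIGTERM and re-arms the timer for another
        # 'killdelay' seconds; a second expiry escalates to SIGKILL.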
now = time.time()
if now < self.timeout:
return 0
killdelay, self.killdelay = self.killdelay, 0
self.timeout = now + killdelay
return self.kill(not killdelay)
class PipeReader(object):
''' PipeReader is an iterator class designed to read from
the next ready pipe.
It can handle as many pipes at a time as desired,
and each call to next() will yield one of the following:
pipe, data -- After reading data from pipe
pipe, None -- When pipe is closing
None, None -- On timeout if no data
It raises StopIteration if no pipes are still open.
A logical extension would be to handle output pipes as well,
such as the subprocess's stdin, but the initial version is
input pipes only (the subprocess's stdout and stderr).
'''
TIMEOUT = 1.0 # Poll interval in seconds
BUFSIZE = 100000
def __init__(self, *pipes, **kw):
self.timeout = kw.pop('timeout', self.TIMEOUT)
self.bufsize = kw.pop('bufsize', self.BUFSIZE)
self.by_pipenum = {} # Dictionary of read functions
self.ready = [] # List of ready pipes
assert not kw, kw # Check for mispelings :)
for pipe in pipes:
self.addpipe(pipe)
def addpipe(self, pipe):
pipenum = pipe.fileno()
bufsize = self.bufsize
by_pipenum = self.by_pipenum
def getdata():
chunk = os.read(pipenum, bufsize)
if chunk:
return pipe, chunk
else:
# Here, we're done. Remove ourselves from
# the dictionary and return None as a notification
del by_pipenum[pipenum]
return pipe, None
assert by_pipenum.setdefault(pipenum, getdata) is getdata
def __iter__(self):
return self
def next(self):
ready = self.ready
if not ready:
allpipes = list(self.by_pipenum)
if not allpipes:
raise StopIteration
ready[:] = select.select(allpipes,[],[],self.timeout)[0]
if not ready:
return None, None # Allow code to execute after timeout
return self.by_pipenum[ready.pop()]()
class LineSplitter(object):
''' LineSplitter takes arbitrary string
data and splits it into text lines.
It manages the case where a single
line of data returned from a pipe is
split across multiple reads.
'''
def __init__(self, prefix):
self.prefix = prefix
self.leftovers = ''
self.lines = []
def __call__(self, chunk):
if not chunk:
if self.leftovers:
chunk = '\n'
else:
return self
chunk = chunk.replace('\r\n', '\n').replace('\r', '\n')
chunk = self.leftovers + chunk
newlines = chunk.split('\n')
self.leftovers = newlines.pop()
oldlines = self.lines
oldlines.reverse()
oldlines.extend(newlines)
oldlines.reverse()
return self
def __iter__(self):
return self
def next(self):
try:
return self.prefix, self.lines.pop()
except IndexError:
raise StopIteration
class TextOutExec(BaseExec):
''' TextOutExec is used for when an executed subprocess's
stdout and stderr are line-oriented text output.
This class is its own iterator. Each line from
the subprocess is yielded from here, with a prefix:
' ' -- line written by subprocess to stdout
'* ' -- line written by subprocess to stderr
'** ' -- line represents subprocess exit code
NB: Current implementation is probably not that secure,
in that it assumes that once the pipes are closed,
the process should be terminating itself shortly.
If this proves to be a problem in real life, we
can add timeout checking to the "wait for things
to finish up" logic.
'''
defaults = dict(
pollinterval=1.0,
readbufsize=100000,
)
defaults.update(BaseExec.defaults)
def before_init(self, keywords):
self.pollinterval = keywords.pop('pollinterval')
self.bufsize = keywords.pop('readbufsize')
def after_init(self):
proc = self.proc
self.pipes = PipeReader(proc.stdout, proc.stderr,
timeout=self.pollinterval, bufsize=self.bufsize)
self.pipedir = {proc.stdout : LineSplitter(' '),
proc.stderr : LineSplitter('*')}
self.lines = []
self.finished = False
def __iter__(self):
return self
def next(self):
lines = self.lines
while not lines:
self.checktimeout()
for pipe, data in self.pipes:
if pipe is not None:
lines.extend(self.pipedir[pipe](data))
lines.reverse()
break
else:
if self.finished:
raise StopIteration
else:
self.finished = True
lines.append(('**', str(self.proc.wait())))
return '%s %s' % lines.pop()
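# Usage sketch (hypothetical command line):
#     for line in TextOutExec('ls -l', timeout=5.0):
#         print line
# stdout lines arrive prefixed '  ', stderr lines '* ', exit status '** '.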
def elapsedtime(when=time.time()):
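    # The default for 'when' is evaluated once, at import time -- deliberately,
    # so a zero-argument call reports time elapsed since the module was loaded.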
mins, secs = divmod(round(time.time() - when, 1), 60)
hrs, mins = divmod(mins, 60)
hrs = hrs and ('%02d:' % int(round(hrs))) or ''
mins = mins and ('%02d:' % int(round(mins))) or ''
secs = '%04.1f' % secs
units = hrs and 'hours' or mins and 'minutes' or 'seconds'
return '%s%s%s %s' % (hrs, mins, secs, units)
def default_logger(resultlist, data=None, data2=None):
if data is not None:
resultlist.append(data)
if data2 is None:
data2 = data
print data2
def textexec(*arg, **kw):
''' Exec a subprocess, print lines, and also return
them to caller
'''
logger = kw.pop('logger', default_logger)
formatcmd = textwrap.TextWrapper(initial_indent=' ',
subsequent_indent=' ',
break_long_words=False).fill
subproc = TextOutExec(*arg, **kw)
args = subproc.args
procname = args[0]
starttime = time.time()
result = []
logger(result,
'Process "%s" started on %s\n\n%s\n\n' % (
procname, time.asctime(), formatcmd(' '.join(args))))
errcode = 0
badexit = '* ' + chr(1)
for line in subproc:
if line == badexit and subproc.is_python_proc:
errcode = 1
continue
if not line.startswith('**'):
logger(result, line)
continue
errcode = errcode or int(line.split()[-1])
status = errcode and 'FAIL' or 'PASS'
logger(result,
'\nProgram %s exit code: %s (%d) elapsed time: %s\n' %
(procname, status, errcode, elapsedtime(starttime)))
logger(result, None,
'Cumulative execution time is %s\n' % elapsedtime())
return errcode, result
if __name__ == '__main__':
def goodfunc():
print "Good func", sys.argv
def badfunc():
assert 0, "Boo! %s" % sys.argv
#raise SystemExit('I am bad')
if len(sys.argv) > 1:
print "Starting subprocess"
sys.stdout.flush()
for i in range(10):
time.sleep(0.2)
print "This is line", i
sys.stdout.flush()
print >> sys.stderr, "This is an error message"
print "Ending subprocess"
if sys.argv[1] == 'die':
raise SystemExit('Deliberately croaking')
else:
print 'Calling good python_proc 1'
textexec('goodfunc', '1', python_proc=goodfunc)
print 'Calling bad python_proc 1'
textexec('badfunc', '1', python_proc=badfunc)
print 'Calling good python_proc 2'
textexec('goodfunc', '2', python_proc=goodfunc)
print 'Calling bad python_proc 2'
textexec('badfunc', '2', python_proc=badfunc)
print "Calling myself"
textexec(__file__, 'subprocess')
print "Calling myself with kill time"
textexec(__file__, 'subprocess', timeout=0.8)
print "Calling myself with forced error exit"
textexec(__file__, 'die')
print 'All Done'
| mit | -1,626,864,979,727,015,200 | 30.902314 | 81 | 0.552941 | false |
dbentley/pants | src/python/pants/backend/python/python_setup.py | 4 | 5589 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pex.fetcher import Fetcher, PyPIFetcher
from pex.http import Context
from pkg_resources import Requirement
from pants.subsystem.subsystem import Subsystem
class PythonSetup(Subsystem):
"""A python environment."""
options_scope = 'python-setup'
@classmethod
def register_options(cls, register):
super(PythonSetup, cls).register_options(register)
register('--interpreter-requirement', advanced=True, default='CPython>=2.7,<3',
help='The interpreter requirement string for this python environment.')
register('--setuptools-version', advanced=True, default='5.4.1',
help='The setuptools version for this python environment.')
register('--wheel-version', advanced=True, default='0.24.0',
help='The wheel version for this python environment.')
    register('--platforms', advanced=True, type=list, default=['current'],
             help='A list of platforms to target for this python environment.')
register('--interpreter-cache-dir', advanced=True, default=None, metavar='<dir>',
help='The parent directory for the interpreter cache. '
'If unspecified, a standard path under the workdir is used.')
register('--chroot-cache-dir', advanced=True, default=None, metavar='<dir>',
help='The parent directory for the chroot cache. '
'If unspecified, a standard path under the workdir is used.')
register('--resolver-cache-dir', advanced=True, default=None, metavar='<dir>',
help='The parent directory for the requirement resolver cache. '
'If unspecified, a standard path under the workdir is used.')
register('--resolver-cache-ttl', advanced=True, type=int, metavar='<seconds>',
default=10 * 365 * 86400, # 10 years.
help='The time in seconds before we consider re-resolving an open-ended requirement, '
'e.g. "flask>=0.2" if a matching distribution is available on disk.')
register('--artifact-cache-dir', advanced=True, default=None, metavar='<dir>',
help='The parent directory for the python artifact cache. '
'If unspecified, a standard path under the workdir is used.')
@property
def interpreter_requirement(self):
return self.get_options().interpreter_requirement
@property
def setuptools_version(self):
return self.get_options().setuptools_version
@property
def wheel_version(self):
return self.get_options().wheel_version
@property
def platforms(self):
return self.get_options().platforms
@property
def interpreter_cache_dir(self):
return (self.get_options().interpreter_cache_dir or
os.path.join(self.scratch_dir, 'interpreters'))
@property
def chroot_cache_dir(self):
return (self.get_options().chroot_cache_dir or
os.path.join(self.scratch_dir, 'chroots'))
@property
def resolver_cache_dir(self):
return (self.get_options().resolver_cache_dir or
os.path.join(self.scratch_dir, 'resolved_requirements'))
@property
def resolver_cache_ttl(self):
return self.get_options().resolver_cache_ttl
@property
def artifact_cache_dir(self):
"""Note that this is unrelated to the general pants artifact cache."""
return (self.get_options().artifact_cache_dir or
os.path.join(self.scratch_dir, 'artifacts'))
@property
def scratch_dir(self):
return os.path.join(self.get_options().pants_workdir, *self.options_scope.split('.'))
def setuptools_requirement(self):
return self._failsafe_parse('setuptools=={0}'.format(self.setuptools_version))
def wheel_requirement(self):
return self._failsafe_parse('wheel=={0}'.format(self.wheel_version))
# This is a setuptools <1 and >1 compatible version of Requirement.parse.
# For setuptools <1, if you did Requirement.parse('setuptools'), it would
# return 'distribute' which of course is not desirable for us. So they
# added a replacement=False keyword arg. Sadly, they removed this keyword
# arg in setuptools >= 1 so we have to simply failover using TypeError as a
# catch for 'Invalid Keyword Argument'.
def _failsafe_parse(self, requirement):
try:
return Requirement.parse(requirement, replacement=False)
except TypeError:
return Requirement.parse(requirement)
class PythonRepos(Subsystem):
"""A python code repository."""
options_scope = 'python-repos'
@classmethod
def register_options(cls, register):
super(PythonRepos, cls).register_options(register)
register('--repos', advanced=True, type=list, default=[],
help='URLs of code repositories.')
register('--indexes', advanced=True, type=list,
default=['https://pypi.python.org/simple/'],
help='URLs of code repository indexes.')
@property
def repos(self):
return self.get_options().repos
@property
def indexes(self):
return self.get_options().indexes
def get_fetchers(self):
fetchers = []
fetchers.extend(Fetcher([url]) for url in self.repos)
fetchers.extend(PyPIFetcher(url) for url in self.indexes)
return fetchers
def get_network_context(self):
# TODO(wickman): Add retry, conn_timeout, threads, etc configuration here.
return Context.get()
| apache-2.0 | -4,818,169,105,469,906,000 | 38.359155 | 99 | 0.684559 | false |
FireWRT/OpenWrt-Firefly-Libraries | staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/test/multibytecodec_support.py | 11 | 14523 | #
# multibytecodec_support.py
# Common Unittest Routines for CJK codecs
#
import codecs
import os
import re
import sys
import unittest
from http.client import HTTPException
from test import support
from io import BytesIO
class TestBase:
encoding = '' # codec name
codec = None # codec tuple (with 4 elements)
tstring = None # must set. 2 strings to test StreamReader
codectests = None # must set. codec test tuple
roundtriptest = 1 # set if roundtrip is possible with unicode
has_iso10646 = 0 # set if this encoding contains whole iso10646 map
xmlcharnametest = None # string to test xmlcharrefreplace
unmappedunicode = '\udeee' # a unicode code point that is not mapped.
def setUp(self):
if self.codec is None:
self.codec = codecs.lookup(self.encoding)
self.encode = self.codec.encode
self.decode = self.codec.decode
self.reader = self.codec.streamreader
self.writer = self.codec.streamwriter
self.incrementalencoder = self.codec.incrementalencoder
self.incrementaldecoder = self.codec.incrementaldecoder
def test_chunkcoding(self):
tstring_lines = []
for b in self.tstring:
lines = b.split(b"\n")
last = lines.pop()
assert last == b""
lines = [line + b"\n" for line in lines]
tstring_lines.append(lines)
for native, utf8 in zip(*tstring_lines):
u = self.decode(native)[0]
self.assertEqual(u, utf8.decode('utf-8'))
if self.roundtriptest:
self.assertEqual(native, self.encode(u)[0])
def test_errorhandle(self):
for source, scheme, expected in self.codectests:
if isinstance(source, bytes):
func = self.decode
else:
func = self.encode
if expected:
result = func(source, scheme)[0]
if func is self.decode:
self.assertTrue(type(result) is str, type(result))
self.assertEqual(result, expected,
'%a.decode(%r, %r)=%a != %a'
% (source, self.encoding, scheme, result,
expected))
else:
self.assertTrue(type(result) is bytes, type(result))
self.assertEqual(result, expected,
'%a.encode(%r, %r)=%a != %a'
% (source, self.encoding, scheme, result,
expected))
else:
self.assertRaises(UnicodeError, func, source, scheme)
def test_xmlcharrefreplace(self):
if self.has_iso10646:
self.skipTest('encoding contains full ISO 10646 map')
s = "\u0b13\u0b23\u0b60 nd eggs"
self.assertEqual(
self.encode(s, "xmlcharrefreplace")[0],
b"ଓଣୠ nd eggs"
)
def test_customreplace_encode(self):
if self.has_iso10646:
self.skipTest('encoding contains full ISO 10646 map')
from html.entities import codepoint2name
def xmlcharnamereplace(exc):
if not isinstance(exc, UnicodeEncodeError):
raise TypeError("don't know how to handle %r" % exc)
l = []
for c in exc.object[exc.start:exc.end]:
if ord(c) in codepoint2name:
l.append("&%s;" % codepoint2name[ord(c)])
else:
l.append("&#%d;" % ord(c))
return ("".join(l), exc.end)
codecs.register_error("test.xmlcharnamereplace", xmlcharnamereplace)
if self.xmlcharnametest:
sin, sout = self.xmlcharnametest
else:
sin = "\xab\u211c\xbb = \u2329\u1234\u232a"
sout = b"«ℜ» = ⟨ሴ⟩"
self.assertEqual(self.encode(sin,
"test.xmlcharnamereplace")[0], sout)
def test_callback_returns_bytes(self):
def myreplace(exc):
return (b"1234", exc.end)
codecs.register_error("test.cjktest", myreplace)
enc = self.encode("abc" + self.unmappedunicode + "def", "test.cjktest")[0]
self.assertEqual(enc, b"abc1234def")
def test_callback_wrong_objects(self):
def myreplace(exc):
return (ret, exc.end)
codecs.register_error("test.cjktest", myreplace)
for ret in ([1, 2, 3], [], None, object()):
self.assertRaises(TypeError, self.encode, self.unmappedunicode,
'test.cjktest')
def test_callback_long_index(self):
def myreplace(exc):
return ('x', int(exc.end))
codecs.register_error("test.cjktest", myreplace)
self.assertEqual(self.encode('abcd' + self.unmappedunicode + 'efgh',
'test.cjktest'), (b'abcdxefgh', 9))
def myreplace(exc):
return ('x', sys.maxsize + 1)
codecs.register_error("test.cjktest", myreplace)
self.assertRaises(IndexError, self.encode, self.unmappedunicode,
'test.cjktest')
def test_callback_None_index(self):
def myreplace(exc):
return ('x', None)
codecs.register_error("test.cjktest", myreplace)
self.assertRaises(TypeError, self.encode, self.unmappedunicode,
'test.cjktest')
def test_callback_backward_index(self):
def myreplace(exc):
if myreplace.limit > 0:
myreplace.limit -= 1
return ('REPLACED', 0)
else:
return ('TERMINAL', exc.end)
myreplace.limit = 3
codecs.register_error("test.cjktest", myreplace)
self.assertEqual(self.encode('abcd' + self.unmappedunicode + 'efgh',
'test.cjktest'),
(b'abcdREPLACEDabcdREPLACEDabcdREPLACEDabcdTERMINALefgh', 9))
def test_callback_forward_index(self):
def myreplace(exc):
return ('REPLACED', exc.end + 2)
codecs.register_error("test.cjktest", myreplace)
self.assertEqual(self.encode('abcd' + self.unmappedunicode + 'efgh',
'test.cjktest'), (b'abcdREPLACEDgh', 9))
def test_callback_index_outofbound(self):
def myreplace(exc):
return ('TERM', 100)
codecs.register_error("test.cjktest", myreplace)
self.assertRaises(IndexError, self.encode, self.unmappedunicode,
'test.cjktest')
def test_incrementalencoder(self):
UTF8Reader = codecs.getreader('utf-8')
for sizehint in [None] + list(range(1, 33)) + \
[64, 128, 256, 512, 1024]:
istream = UTF8Reader(BytesIO(self.tstring[1]))
ostream = BytesIO()
encoder = self.incrementalencoder()
while 1:
if sizehint is not None:
data = istream.read(sizehint)
else:
data = istream.read()
if not data:
break
e = encoder.encode(data)
ostream.write(e)
self.assertEqual(ostream.getvalue(), self.tstring[0])
def test_incrementaldecoder(self):
UTF8Writer = codecs.getwriter('utf-8')
for sizehint in [None, -1] + list(range(1, 33)) + \
[64, 128, 256, 512, 1024]:
istream = BytesIO(self.tstring[0])
ostream = UTF8Writer(BytesIO())
decoder = self.incrementaldecoder()
while 1:
data = istream.read(sizehint)
if not data:
break
else:
u = decoder.decode(data)
ostream.write(u)
self.assertEqual(ostream.getvalue(), self.tstring[1])
def test_incrementalencoder_error_callback(self):
inv = self.unmappedunicode
e = self.incrementalencoder()
self.assertRaises(UnicodeEncodeError, e.encode, inv, True)
e.errors = 'ignore'
self.assertEqual(e.encode(inv, True), b'')
e.reset()
def tempreplace(exc):
return ('called', exc.end)
codecs.register_error('test.incremental_error_callback', tempreplace)
e.errors = 'test.incremental_error_callback'
self.assertEqual(e.encode(inv, True), b'called')
# again
e.errors = 'ignore'
self.assertEqual(e.encode(inv, True), b'')
def test_streamreader(self):
UTF8Writer = codecs.getwriter('utf-8')
for name in ["read", "readline", "readlines"]:
for sizehint in [None, -1] + list(range(1, 33)) + \
[64, 128, 256, 512, 1024]:
istream = self.reader(BytesIO(self.tstring[0]))
ostream = UTF8Writer(BytesIO())
func = getattr(istream, name)
while 1:
data = func(sizehint)
if not data:
break
if name == "readlines":
ostream.writelines(data)
else:
ostream.write(data)
self.assertEqual(ostream.getvalue(), self.tstring[1])
def test_streamwriter(self):
readfuncs = ('read', 'readline', 'readlines')
UTF8Reader = codecs.getreader('utf-8')
for name in readfuncs:
for sizehint in [None] + list(range(1, 33)) + \
[64, 128, 256, 512, 1024]:
istream = UTF8Reader(BytesIO(self.tstring[1]))
ostream = self.writer(BytesIO())
func = getattr(istream, name)
while 1:
if sizehint is not None:
data = func(sizehint)
else:
data = func()
if not data:
break
if name == "readlines":
ostream.writelines(data)
else:
ostream.write(data)
self.assertEqual(ostream.getvalue(), self.tstring[0])
class TestBase_Mapping(unittest.TestCase):
pass_enctest = []
pass_dectest = []
supmaps = []
codectests = []
def setUp(self):
try:
self.open_mapping_file().close() # test it to report the error early
except (OSError, HTTPException):
self.skipTest("Could not retrieve "+self.mapfileurl)
def open_mapping_file(self):
return support.open_urlresource(self.mapfileurl)
def test_mapping_file(self):
if self.mapfileurl.endswith('.xml'):
self._test_mapping_file_ucm()
else:
self._test_mapping_file_plain()
def _test_mapping_file_plain(self):
unichrs = lambda s: ''.join(map(chr, map(eval, s.split('+'))))
urt_wa = {}
with self.open_mapping_file() as f:
for line in f:
if not line:
break
data = line.split('#')[0].strip().split()
if len(data) != 2:
continue
csetval = eval(data[0])
if csetval <= 0x7F:
csetch = bytes([csetval & 0xff])
elif csetval >= 0x1000000:
csetch = bytes([(csetval >> 24), ((csetval >> 16) & 0xff),
((csetval >> 8) & 0xff), (csetval & 0xff)])
elif csetval >= 0x10000:
csetch = bytes([(csetval >> 16), ((csetval >> 8) & 0xff),
(csetval & 0xff)])
elif csetval >= 0x100:
csetch = bytes([(csetval >> 8), (csetval & 0xff)])
else:
continue
unich = unichrs(data[1])
if ord(unich) == 0xfffd or unich in urt_wa:
continue
urt_wa[unich] = csetch
self._testpoint(csetch, unich)
def _test_mapping_file_ucm(self):
with self.open_mapping_file() as f:
ucmdata = f.read()
uc = re.findall('<a u="([A-F0-9]{4})" b="([0-9A-F ]+)"/>', ucmdata)
for uni, coded in uc:
unich = chr(int(uni, 16))
codech = bytes(int(c, 16) for c in coded.split())
self._testpoint(codech, unich)
def test_mapping_supplemental(self):
for mapping in self.supmaps:
self._testpoint(*mapping)
def _testpoint(self, csetch, unich):
if (csetch, unich) not in self.pass_enctest:
self.assertEqual(unich.encode(self.encoding), csetch)
if (csetch, unich) not in self.pass_dectest:
self.assertEqual(str(csetch, self.encoding), unich)
def test_errorhandle(self):
for source, scheme, expected in self.codectests:
if isinstance(source, bytes):
func = source.decode
else:
func = source.encode
if expected:
if isinstance(source, bytes):
result = func(self.encoding, scheme)
self.assertTrue(type(result) is str, type(result))
self.assertEqual(result, expected,
'%a.decode(%r, %r)=%a != %a'
% (source, self.encoding, scheme, result,
expected))
else:
result = func(self.encoding, scheme)
self.assertTrue(type(result) is bytes, type(result))
self.assertEqual(result, expected,
'%a.encode(%r, %r)=%a != %a'
% (source, self.encoding, scheme, result,
expected))
else:
self.assertRaises(UnicodeError, func, self.encoding, scheme)
def load_teststring(name):
dir = os.path.join(os.path.dirname(__file__), 'cjkencodings')
with open(os.path.join(dir, name + '.txt'), 'rb') as f:
encoded = f.read()
with open(os.path.join(dir, name + '-utf8.txt'), 'rb') as f:
utf8 = f.read()
return encoded, utf8
| gpl-2.0 | -6,616,056,198,958,550,000 | 37.522546 | 82 | 0.514425 | false |
caffeinate/test-pylot | FlaskGunicornSqlAlchemy/test.py | 1 | 1341 | '''
Created on 19 Jun 2015
@author: si
'''
import json
import unittest
from flask_app import create_app, db
from config import TestConfig as Config
SHOW_LOG_MESSAGES = False
if not SHOW_LOG_MESSAGES:
# when local log is switched off, also hide logs from app below CRITICAL
import logging
logging.disable(logging.ERROR)
class FooTest(unittest.TestCase):
def setUp(self):
self.config = Config()
self.app = create_app(self.config)
self.test_client = self.app.test_client()
with self.app.app_context():
db.create_all()
def tearDown(self):
self.config.drop_db()
def log(self, msg):
if SHOW_LOG_MESSAGES:
print msg
def test_empty_root(self):
rv = self.test_client.get('/')
assert 'Hello' in rv.data
def test_add_foo(self):
testUrl = '/foo/'
with self.app.app_context():
#create_permissions_universe(db)
raw = { 'title' : 'hellofoo' }
d = json.dumps(raw)
rv = self.test_client.post(testUrl,
data=d,
content_type='application/json')
response = json.loads(rv.data)
assert response['success'] == True
if __name__ == '__main__':
unittest.main()
| mit | 9,216,450,153,285,915,000 | 22.526316 | 76 | 0.561521 | false |
abhilashnta/edx-platform | common/test/acceptance/pages/lms/teams.py | 18 | 1553 | # -*- coding: utf-8 -*-
"""
Teams page.
"""
from .course_page import CoursePage
from ..common.paging import PaginatedUIMixin
TOPIC_CARD_CSS = 'div.wrapper-card-core'
BROWSE_BUTTON_CSS = 'a.nav-item[data-index="1"]'
class TeamsPage(CoursePage):
"""
Teams page/tab.
"""
url_path = "teams"
def is_browser_on_page(self):
""" Checks if teams page is being viewed """
return self.q(css='body.view-teams').present
def get_body_text(self):
""" Returns the current dummy text. This will be changed once there is more content on the page. """
main_page_content_css = '.page-content-main'
self.wait_for(
lambda: len(self.q(css=main_page_content_css).text) == 1,
description="Body text is present"
)
return self.q(css=main_page_content_css).text[0]
def browse_topics(self):
""" View the Browse tab of the Teams page. """
self.q(css=BROWSE_BUTTON_CSS).click()
class BrowseTopicsPage(CoursePage, PaginatedUIMixin):
"""
The 'Browse' tab of the Teams page.
"""
url_path = "teams/#browse"
def is_browser_on_page(self):
"""Check if the Browse tab is being viewed."""
button_classes = self.q(css=BROWSE_BUTTON_CSS).attrs('class')
if len(button_classes) == 0:
return False
return 'is-active' in button_classes[0]
@property
def topic_cards(self):
"""Return a list of the topic cards present on the page."""
return self.q(css=TOPIC_CARD_CSS).results
| agpl-3.0 | -6,341,567,472,234,809,000 | 27.236364 | 108 | 0.613651 | false |
atopuzov/nitro-python | nssrc/com/citrix/netscaler/nitro/resource/config/aaa/aaasession_args.py | 3 | 2291 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class aaasession_args :
ur""" Provides additional arguments required for fetching the aaasession resource.
"""
def __init__(self) :
self._username = ""
self._groupname = ""
self._iip = ""
self._netmask = ""
@property
def username(self) :
ur"""Name of the AAA user.<br/>Minimum length = 1.
"""
try :
return self._username
except Exception as e:
raise e
@username.setter
def username(self, username) :
ur"""Name of the AAA user.<br/>Minimum length = 1
"""
try :
self._username = username
except Exception as e:
raise e
@property
def groupname(self) :
ur"""Name of the AAA group.<br/>Minimum length = 1.
"""
try :
return self._groupname
except Exception as e:
raise e
@groupname.setter
def groupname(self, groupname) :
ur"""Name of the AAA group.<br/>Minimum length = 1
"""
try :
self._groupname = groupname
except Exception as e:
raise e
@property
def iip(self) :
ur"""IP address or the first address in the intranet IP range.<br/>Minimum length = 1.
"""
try :
return self._iip
except Exception as e:
raise e
@iip.setter
def iip(self, iip) :
ur"""IP address or the first address in the intranet IP range.<br/>Minimum length = 1
"""
try :
self._iip = iip
except Exception as e:
raise e
@property
def netmask(self) :
ur"""Subnet mask for the intranet IP range.<br/>Minimum length = 1.
"""
try :
return self._netmask
except Exception as e:
raise e
@netmask.setter
def netmask(self, netmask) :
ur"""Subnet mask for the intranet IP range.<br/>Minimum length = 1
"""
try :
self._netmask = netmask
except Exception as e:
raise e
| apache-2.0 | 5,817,614,043,970,136,000 | 22.377551 | 89 | 0.667394 | false |
CodeLionX/CommentSearchEngine | cse/reader/CommentReader.py | 1 | 5284 | import csv
import os
from cse.reader.ArticleMappingReader import ArticleMappingReader
from cse.reader.AuthorMappingReader import AuthorMappingReader
class CommentReader(object):
    def __init__(self, commentsFilepath, articlesFilepath, authorsFilepath, delimiter=',', ignoreMapping=True):
        self.__delimiter = delimiter
        self.__ignoreMapping = ignoreMapping
        self.__commentsFile = None
        self.__commentReader = None
        self.__commentsFilepath = commentsFilepath
        if not self.__ignoreMapping:
            self.__authorsReader = AuthorMappingReader(authorsFilepath)
            self.__articlesReader = ArticleMappingReader(articlesFilepath)
self.__startSeekPointer = 0
self.__iterMode = False
def open(self):
        if not os.path.isfile(self.__commentsFilepath):
            raise Exception("comments file not found!")
self.__commentsFile = open(self.__commentsFilepath, 'r', newline='', encoding="UTF-8")
self.__commentReader = csv.reader(self.__commentsFile, delimiter=self.__delimiter)
if not self.__ignoreMapping:
self.__articlesReader.open()
return self
def startSeekPointer(self):
return self.__startSeekPointer
def currentSeekPointer(self):
return self.__commentsFile.tell()
def readline(self, pointer, skipArticleMapping=True):
if self.__iterMode:
# save seek pointer and restore it to allow this
raise IOError("CommentReader in iteration mode. File seeking not possible!")
# read comments file contents
self.__commentsFile.seek(pointer)
line = self.__commentsFile.readline()
iterRow = next(csv.reader([line], delimiter=self.__delimiter))
if not (skipArticleMapping or self.__ignoreMapping):
# setup article iterator
iter(self.__articlesReader)
# load first article mapping
next(self.__articlesReader)
        return self.__parseIterRow(iterRow, skipArticleMapping=skipArticleMapping)
def close(self):
self.__commentsFile.close()
if not self.__ignoreMapping:
self.__articlesReader.close()
def __silentParseToInt(self, data, default):
try:
return int(data)
except ValueError:
return default
def __parseIterRow(self, row, skipArticleMapping=True):
commentId = int(row[0])
articleId = int(row[1])
articleUrl = ""
author = row[2]
text = row[3].replace("\\n", "\n")
timestamp = row[4]
parentId = self.__silentParseToInt(row[5], None)
upvotes = 0 #self.__silentParseToInt(row[6], 0) # currently unused information
downvotes = 0 #self.__silentParseToInt(row[7], 0) # currently unused information
if not self.__ignoreMapping:
author = self.__authorsReader.lookupAuthorname(row[2])
if not (skipArticleMapping or self.__ignoreMapping):
# sequentially load article mapping
# if there are some articles without comments we skip these articles
while self.__articlesReader.currentArticleId() != articleId:
try:
next(self.__articlesReader)
except StopIteration as e:
# restart article iterator
iter(self.__articlesReader)
next(self.__articlesReader)
print("!-- restarted article id mapping iterator --!")
print(" searching article", articleId, author, commentId)
articleUrl = self.__articlesReader.currentArticleUrl()
return {
"commentId": commentId,
"article_url": articleUrl,
"article_id": articleId,
"comment_author": author,
"comment_text" : text,
"timestamp" : timestamp,
"parent_comment_id" : parentId,
"upvotes" : upvotes,
"downvotes": downvotes
}
def __iter__(self):
self.__commentsFile.seek(0)
self.__iterMode = True
# skip csv header in iteration mode:
self.__commentsFile.readline()
self.__startSeekPointer = self.__commentsFile.tell()
if not self.__ignoreMapping:
# setup article iterator
iter(self.__articlesReader)
# load first article mapping
next(self.__articlesReader)
return self
def __next__(self):
try:
line = self.__commentsFile.readline()
if not line:
raise StopIteration("Empty line")
except StopIteration as stop:
self.__iterMode = False
raise stop
iterRow = next(csv.reader([line], delimiter=self.__delimiter))
try:
return self.__parseIterRow(iterRow, skipArticleMapping=False)
except ValueError:
return self.__next__()
def __enter__(self):
return self.open()
def __exit__(self, type, value, traceback):
self.close()
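# Expected comments.csv column layout, as consumed by __parseIterRow above:
#     commentId, articleId, author, text, timestamp, parentId, upvotes, downvotes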
if __name__ == "__main__":
with CommentReader("data/comments.csv", "data/articleIds.csv", "data/authorMapping.csv") as reader:
for comment in reader:
print(comment["commentId"])
| mit | 4,552,779,042,184,910,300 | 31.024242 | 112 | 0.596139 | false |
jgmanzanas/CMNT_004_15 | project-addons/transportation/__init__.py | 1 | 1134 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Pexego Sistemas Informáticos All Rights Reserved
# $Jesús Ventosinos Mayor <[email protected]>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import transportation
from . import res_partner
from . import sale
from . import rotation
from . import stock
from . import wizard | agpl-3.0 | 1,017,638,475,695,935,600 | 40.962963 | 78 | 0.628092 | false |
theAeon/Private-Peer-Domain-Syndication | ppds/config.py | 1 | 6516 | '''contains configuration class- applies and stores configuration'''
import sys
import json
import os
import shutil
import requests
import ppds.repository
def testrepo(repo):
    '''send http request to repolist on host; returns 'down' if unreachable'''
    for scheme in ('http://', 'https://'):
        try:
            repourl = scheme + repo
            request = requests.get("%s/ppdslist.json" % repourl,
                                   allow_redirects=False)
            request.raise_for_status()
            return None
        except (requests.exceptions.ConnectionError,
                requests.exceptions.HTTPError,
                requests.exceptions.MissingSchema):
            continue
    return 'down'
class Configuration:
# pylint: disable=too-many-instance-attributes
'''everything to do with configuration in a convenient class'''
def __init__(self, mode, isroot, args):
'''define all aspects of configuration (datalocation hardcoded)'''
self.notice = """
DO NOT CHANGE THIS FILE UNLESS YOU ABSOLUTELY KNOW WHAT YOU'RE DOING
"""
self.repositories = []
self.hostfile = ''
self.defaultdomain = 'repo.ppds.me'
self.repoobjectdict = {}
self.repopriority = {}
self.datafolder = None
self.autoconfig()
if not os.path.exists(self.datafolder):
if mode == 'cli':
if isroot and "--f" not in args:
print('''
Please run as a non-root user to generate user files
Use --f to override''')
sys.exit(1)
print("Writing default data directory...")
            print("Creating default configuration file...")
self.patchlocation = '%s/hosts.patch' % self.datafolder
os.makedirs(self.datafolder)
self.save(mode, isroot, args)
self.makerepofolders(isroot, args)
self.load()
def printdict(self):
'''debug'''
print(self.__dict__)
def autoconfig(self):
'''detect platform and hosts file location (windows may be wrong
append default repo (repo.ppds.me)(pls no stealerino my domainerino)'''
if sys.platform == 'darwin' or sys.platform == 'linux':
self.hostfile = '/etc/'
self.datafolder = '%s/.config/ppds' % os.getenv("HOME")
elif sys.platform == 'win32':
root = os.getenv("systemroot")
self.hostfile = root + "\\System32\\drivers\\etc\\"
home = os.getenv('appdata')
self.datafolder = home + "\\ppds"
# self.hostfile = '%\SystemRoot%\\System32\\drivers\\etc\\'
else:
self.hostfile = str(input("Enter Plaintext Hostfile Location: "))
self.repositories.append(self.defaultdomain)
def save(self, mode, isroot, args):
'''dump config to json'''
if isroot and "--f" not in args:
print("""
Please run as a non-root user to generate user files
Use --f to override""")
sys.exit(1)
        if not self.repoobjectdict and not self.repopriority:
if os.path.isfile('%s/config.json' % self.datafolder):
if mode == 'cli':
check = str(input('Overwrite config? (y/n): '))
if check == 'y':
os.remove('%s/config.json' % self.datafolder)
else:
return 'cancelled'
else:
return 'notempty'
cfg = open('%s/config.json' % self.datafolder, 'w+')
json.dump(self.__dict__, cfg)
cfg.close()
return 'created'
def load(self):
'''load config from config.json'''
if os.path.isfile('%s/config.json' % self.datafolder):
cfg = open('%s/config.json' % self.datafolder, 'r+')
self.__dict__ = json.load(cfg)
cfg.close()
else:
return 'No File'
def makerepofolders(self, isroot, args):
'''make folders for all repos in the repository list'''
if isroot and "--f" not in args:
print("""
Please run as a non-root user to generate user files
Use --f to override""")
sys.exit(1)
if not os.path.exists('%s/repos' % self.datafolder):
os.mkdir('%s/repos' % self.datafolder)
for entry in self.repositories:
if not os.path.exists('%s/repos/%s/' % (self.datafolder, entry)):
os.makedirs('%s/repos/%s/' % (self.datafolder, entry))
if not os.path.exists('%s/repos/%s/ppdslist.json'
% (self.datafolder, entry)):
with open('%s/repos/%s/ppdslist.json'
% (self.datafolder, entry), "w+") as filev:
filev.write('{}')
def addrepo(self, repo, isroot, args):
'''add repo to list, checking for server status'''
if testrepo(repo) == 'down':
return "failure"
self.repositories.append(repo)
self.makerepofolders(isroot, args)
def forceaddrepo(self, repo, isroot, args):
'''add repo to list regardless of server status'''
self.repositories.append(repo)
self.makerepofolders(isroot, args)
def removerepo(self, repo):
'''removes repo from list'''
if repo in self.repositories:
self.repositories.remove(repo)
shutil.rmtree('%s/repos/%s/' % (self.datafolder, repo))
else:
return 'repo does not exist'
def initrepolist(self):
'''adds repository class to list
unload before saving or modifying please'''
self.definerepopriority()
for item in self.repositories:
self.repoobjectdict[item] = ppds.repository.Repository(item,
self)
def unloadrepolist(self):
'''clears repository classes'''
self.repoobjectdict = {}
def definerepopriority(self):
''' assigns each repo a number based on order in list '''
self.repopriority = dict((name, order) for order, name in
enumerate(self.repositories))
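# Usage sketch (hypothetical repo host; arguments follow __init__ above):
#     cfg = Configuration(mode='cli', isroot=False, args=[])
#     cfg.addrepo('repo.example.org', isroot=False, args=[])
#     cfg.save('cli', False, [])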
| gpl-3.0 | -2,574,981,858,352,770,000 | 38.017964 | 79 | 0.552179 | false |
chrisxue815/leetcode_python | problems/test_0127_bfs.py | 1 | 1427 | import collections
import unittest
from typing import List
import utils
# O(n) time. O(n) space. BFS.
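# Idea: bucket words by wildcard patterns ('h.t' covers 'hot', 'hat', ...);
# two words are one letter apart iff they share a bucket, so BFS over these
# buckets finds the shortest ladder without comparing every pair of words.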
class Solution:
def ladderLength(self, beginWord: str, endWord: str, wordList: List[str]) -> int:
if endWord not in wordList:
return 0
graph = collections.defaultdict(set)
for word in wordList:
for i in range(len(word)):
wildcard = word[:i] + '.' + word[i + 1:]
graph[wildcard].add(word)
visited = set()
q = collections.deque()
q.append((1, beginWord))
while q:
            distance, curr = q.popleft()  # FIFO pop keeps the search breadth-first
if curr in visited:
continue
visited.add(curr)
distance += 1
for i in range(len(curr)):
nxt_wildcard = curr[:i] + '.' + curr[i + 1:]
for nxt in graph[nxt_wildcard]:
if nxt == endWord:
return distance
if nxt not in visited:
q.append((distance, nxt))
return 0
class Test(unittest.TestCase):
def test(self):
cases = utils.load_test_json(__file__).test_cases
for case in cases:
args = str(case.args)
actual = Solution().ladderLength(**case.args.__dict__)
self.assertEqual(case.expected, actual, msg=args)
if __name__ == '__main__':
unittest.main()
| unlicense | -9,142,651,164,870,475,000 | 25.425926 | 85 | 0.50946 | false |
nuagenetworks/vspk-python | vspk/v5_0/fetchers/nunsgatewaymonitors_fetcher.py | 2 | 2163 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from bambou import NURESTFetcher
class NUNSGatewayMonitorsFetcher(NURESTFetcher):
""" Represents a NUNSGatewayMonitors fetcher
Notes:
This fetcher enables to fetch NUNSGatewayMonitor objects.
See:
bambou.NURESTFetcher
"""
@classmethod
def managed_class(cls):
""" Return NUNSGatewayMonitor class that is managed.
Returns:
.NUNSGatewayMonitor: the managed class
"""
from .. import NUNSGatewayMonitor
return NUNSGatewayMonitor
| bsd-3-clause | -4,801,858,764,833,780,000 | 39.830189 | 86 | 0.731854 | false |
pombredanne/pyjs | pyjs/runners/sessionhistory.py | 6 | 3046 | # Copyright (C) 2007, One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
import gobject
import xpcom
from xpcom.components import interfaces
class HistoryListener(gobject.GObject):
_com_interfaces_ = interfaces.nsISHistoryListener
__gsignals__ = {
'session-history-changed': (gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
([int])),
'session-link-changed': (gobject.SIGNAL_RUN_FIRST, gobject.TYPE_NONE,
([str]))
}
def __init__(self):
gobject.GObject.__init__(self)
self._wrapped_self = xpcom.server.WrapObject( \
self, interfaces.nsISHistoryListener)
weak_ref = xpcom.client.WeakReference(self._wrapped_self)
def setup(self, web_navigation):
self._session_history = web_navigation.sessionHistory
self._session_history.addSHistoryListener(self._wrapped_self)
def OnHistoryGoBack(self, back_uri):
logging.debug("OnHistoryGoBack: %s" % back_uri.spec)
self.emit('session-link-changed', back_uri.spec)
self.emit('session-history-changed', self._session_history.index - 1)
return True
def OnHistoryGoForward(self, forward_uri):
logging.debug("OnHistoryGoForward: %s" % forward_uri.spec)
self.emit('session-link-changed', forward_uri.spec)
self.emit('session-history-changed', self._session_history.index + 1)
return True
def OnHistoryGotoIndex(self, index, goto_uri):
logging.debug("OnHistoryGotoIndex: %i %s" % (index, goto_uri.spec))
self.emit('session-link-changed', goto_uri.spec)
self.emit('session-history-changed', index)
return True
def OnHistoryNewEntry(self, new_uri):
logging.debug("OnHistoryNewEntry: %s" % new_uri.spec)
self.emit('session-link-changed', new_uri.spec)
self.emit('session-history-changed', self._session_history.index + 1)
def OnHistoryPurge(self, num_entries):
logging.debug("OnHistoryPurge: %i" % num_entries)
#self.emit('session-history-changed')
return True
def OnHistoryReload(self, reload_uri, reload_flags):
self.emit('session-link-changed', reload_uri.spec)
logging.debug("OnHistoryReload: %s" % reload_uri.spec)
return True
| apache-2.0 | -1,808,784,363,263,737,300 | 39.078947 | 77 | 0.665791 | false |
lbdreyer/cartopy | lib/cartopy/tests/mpl/test_caching.py | 1 | 8343 | # (C) British Crown Copyright 2011 - 2012, Met Office
#
# This file is part of cartopy.
#
# cartopy is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# cartopy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with cartopy. If not, see <http://www.gnu.org/licenses/>.
import gc
from owslib.wmts import WebMapTileService
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib.collections import PatchCollection
from matplotlib.path import Path
import shapely.geometry
import six
import cartopy.crs as ccrs
from cartopy.mpl.feature_artist import FeatureArtist
from cartopy.io.ogc_clients import WMTSRasterSource
import cartopy.io.shapereader
import cartopy.mpl.geoaxes as cgeoaxes
import cartopy.mpl.patch
from cartopy.examples.waves import sample_data
class CallCounter(object):
"""
Exposes a context manager which can count the number of calls to a specific
function. (useful for cache checking!)
Internally, the target function is replaced with a new one created
by this context manager which then increments ``self.count`` every
time it is called.
Example usage::
show_counter = CallCounter(plt, 'show')
with show_counter:
plt.show()
plt.show()
plt.show()
print show_counter.count # <--- outputs 3
"""
def __init__(self, parent, function_name):
self.count = 0
self.parent = parent
self.function_name = function_name
self.orig_fn = getattr(parent, function_name)
def __enter__(self):
def replacement_fn(*args, **kwargs):
self.count += 1
return self.orig_fn(*args, **kwargs)
setattr(self.parent, self.function_name, replacement_fn)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
setattr(self.parent, self.function_name, self.orig_fn)
def test_coastline_loading_cache():
# a5caae040ee11e72a62a53100fe5edc355304419 added coastline caching.
# This test ensures it is working.
# Create coastlines to ensure they are cached.
ax1 = plt.subplot(2, 1, 1, projection=ccrs.PlateCarree())
ax1.coastlines()
plt.draw()
# Create another instance of the coastlines and count
# the number of times shapereader.Reader is created.
counter = CallCounter(cartopy.io.shapereader.Reader, '__init__')
with counter:
ax2 = plt.subplot(2, 1, 1, projection=ccrs.Robinson())
ax2.coastlines()
plt.draw()
assert counter.count == 0, ('The shapereader Reader class was created {} '
'times, indicating that the caching is not '
'working.'.format(counter.count))
plt.close()
def test_shapefile_transform_cache():
# a5caae040ee11e72a62a53100fe5edc355304419 added shapefile mpl
# geometry caching based on geometry object id. This test ensures
# it is working.
coastline_path = cartopy.io.shapereader.natural_earth(resolution="50m",
category='physical',
name='coastline')
geoms = cartopy.io.shapereader.Reader(coastline_path).geometries()
# Use the first 10 of them.
geoms = tuple(geoms)[:10]
n_geom = len(geoms)
ax = plt.axes(projection=ccrs.Robinson())
# Empty the cache.
FeatureArtist._geometry_to_path_cache.clear()
assert len(FeatureArtist._geometry_to_path_cache) == 0
counter = CallCounter(ax.projection, 'project_geometry')
with counter:
ax.add_geometries(geoms, ccrs.PlateCarree())
ax.add_geometries(geoms, ccrs.PlateCarree())
ax.add_geometries(geoms[:], ccrs.PlateCarree())
plt.draw()
# Without caching the count would have been
# n_calls * n_geom, but should now be just n_geom.
    assert counter.count == n_geom, ('The given geometry was transformed too '
                                     'many times (expected: {}; got {}) - the'
                                     ' caching is not working.'
                                     ''.format(n_geom, counter.count))
# Check the cache has an entry for each geometry.
assert len(FeatureArtist._geometry_to_path_cache) == n_geom
# Check that the cache is empty again once we've dropped all references
# to the source paths.
plt.clf()
del geoms
gc.collect()
assert len(FeatureArtist._geometry_to_path_cache) == 0
plt.close()
def test_contourf_transform_path_counting():
ax = plt.axes(projection=ccrs.Robinson())
plt.draw()
# Capture the size of the cache before our test.
gc.collect()
initial_cache_size = len(cgeoaxes._PATH_TRANSFORM_CACHE)
path_to_geos_counter = CallCounter(cartopy.mpl.patch, 'path_to_geos')
with path_to_geos_counter:
x, y, z = sample_data((30, 60))
cs = plt.contourf(x, y, z, 5, transform=ccrs.PlateCarree())
n_geom = sum([len(c.get_paths()) for c in cs.collections])
del cs
if not six.PY3:
del c
plt.draw()
# Before the performance enhancement, the count would have been 2 * n_geom,
# but should now be just n_geom.
msg = ('The given geometry was transformed too many times (expected: %s; '
'got %s) - the caching is not working.'
'' % (n_geom, path_to_geos_counter.count))
assert path_to_geos_counter.count == n_geom, msg
# Check the cache has an entry for each geometry.
assert len(cgeoaxes._PATH_TRANSFORM_CACHE) == initial_cache_size + n_geom
# Check that the cache is empty again once we've dropped all references
# to the source paths.
plt.clf()
gc.collect()
assert len(cgeoaxes._PATH_TRANSFORM_CACHE) == initial_cache_size
plt.close()
def test_wmts_tile_caching():
image_cache = WMTSRasterSource._shared_image_cache
image_cache.clear()
assert len(image_cache) == 0
url = 'http://map1c.vis.earthdata.nasa.gov/wmts-geo/wmts.cgi'
wmts = WebMapTileService(url)
layer_name = 'MODIS_Terra_CorrectedReflectance_TrueColor'
source = WMTSRasterSource(wmts, layer_name)
gettile_counter = CallCounter(wmts, 'gettile')
crs = ccrs.PlateCarree()
extent = (-180, 180, -90, 90)
resolution = (20, 10)
with gettile_counter:
source.fetch_raster(crs, extent, resolution)
n_tiles = 2
assert gettile_counter.count == n_tiles, ('Too many tile requests - '
'expected {}, got {}.'.format(
n_tiles,
gettile_counter.count)
)
gc.collect()
assert len(image_cache) == 1
assert len(image_cache[wmts]) == 1
tiles_key = (layer_name, '0')
assert len(image_cache[wmts][tiles_key]) == n_tiles
# Second time around we shouldn't request any more tiles so the
# call count will stay the same.
with gettile_counter:
source.fetch_raster(crs, extent, resolution)
assert gettile_counter.count == n_tiles, ('Too many tile requests - '
'expected {}, got {}.'.format(
n_tiles,
gettile_counter.count)
)
gc.collect()
assert len(image_cache) == 1
assert len(image_cache[wmts]) == 1
tiles_key = (layer_name, '0')
assert len(image_cache[wmts][tiles_key]) == n_tiles
# Once there are no live references the weak-ref cache should clear.
del source, wmts, gettile_counter
gc.collect()
assert len(image_cache) == 0
if __name__ == '__main__':
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
| lgpl-3.0 | 2,896,544,417,070,608,400 | 34.961207 | 79 | 0.620161 | false |
Micutio/ComplexAutomatonBase | cab/ca/ca_rect.py | 1 | 9174 | """
This module contains the class for a CA with rectangular cells.
Moore and von-Neumann neighborhoods are available.
"""
from typing import Dict, Tuple, Union
import cab.ca.ca as cab_ca
import cab.abm.agent as cab_agent
import cab.ca.cell as cab_cell
__author__: str = 'Michael Wagner'
class CARect(cab_ca.CabCA):
def __init__(self, cab_sys, proto_cell: cab_cell.CellRect = None):
"""
Initializes and returns the cellular automaton.
The CA is a dictionary and not a list of lists
:returns The initialized CA.
"""
super().__init__(cab_sys, proto_cell)
self.ca_grid: Dict[Tuple[int, int], cab_cell.CellRect] = dict()
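        # Cells are stored in a dict keyed by integer (x, y) grid coordinates
        # rather than a nested list, which keeps neighbor lookups uniform.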
self.sys = cab_sys
self.grid_height: int = self.sys.gc.GRID_HEIGHT
self.grid_width: int = self.sys.gc.GRID_WIDTH
self.height: int = int(self.grid_height / self.sys.gc.CELL_SIZE)
self.width: int = int(self.grid_width / self.sys.gc.CELL_SIZE)
self.cell_size: int = self.sys.gc.CELL_SIZE
self.use_moore_neighborhood: bool = self.sys.gc.USE_MOORE_NEIGHBORHOOD
self.use_borders: bool = self.sys.gc.USE_CA_BORDERS
self.proto_cell: cab_cell.CellRect = None
if proto_cell is None:
for j in range(0, self.height):
for i in range(0, self.width):
self.ca_grid[i, j] = cab_cell.CellRect(i, j, self.sys.gc)
else:
self.proto_cell = proto_cell
for j in range(0, self.height):
for i in range(0, self.width):
self.ca_grid[i, j] = proto_cell.clone(i, j)
if self.use_moore_neighborhood:
self.init_moore()
self.init_moore_borders()
else:
self.init_von_neumann()
self.init_von_neumann_borders()
def cycle_automaton(self):
"""
This method updates the cellular automaton
"""
self.update_cells_from_neighborhood()
self.update_cells_state()
def update_cells_from_neighborhood(self):
for cell in self.ca_grid.values():
cell.sense_neighborhood()
def update_cells_state(self):
"""
After executing update_neighs this is the actual update of the cell itself
"""
for cell in self.ca_grid.values():
cell.update()
def get_agent_neighborhood(self, agent_x, agent_y, dist) ->\
Dict[Tuple[int, int], Tuple[cab_cell.CellRect, Union[bool, cab_agent.CabAgent]]]:
"""
Creates a dictionary {'position': (cell, [agents on that cell])}
for the calling agent to get an overview over its immediate surrounding.
"""
x = int(agent_x / self.cell_size)
y = int(agent_y / self.cell_size)
neighborhood = {}
other_agents = self.sys.abm.agent_locations
for i in range(-1 - dist, 2 + dist):
for j in range(-1 - dist, 2 + dist):
grid_x = x + i
grid_y = y + j
                # Skip the agent's own cell (offset (0, 0)), not grid cell (0, 0).
                if (grid_x, grid_y) in self.ca_grid and not (i == 0 and j == 0):
a = self.ca_grid[grid_x, grid_y]
if (grid_x, grid_y) not in other_agents:
b = False
else:
b = other_agents[grid_x, grid_y]
neighborhood[grid_x, grid_y] = (a, b)
return neighborhood
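
    # Illustrative use (a sketch only; `ca`, `ax` and `ay` are hypothetical
    # names, not part of this module): an agent at pixel position (ax, ay)
    # with perception distance 1 could inspect its surroundings via
    #   hood = ca.get_agent_neighborhood(ax, ay, 1)
    #   for (gx, gy), (cell, agent) in hood.items():
    #       ...  # `agent` is False when no agent occupies that cell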
# Individual methods for this specific CA
def init_von_neumann(self):
"""
Looping over all cells to gather the neighbor information they need to update.
This method uses Von-Neumann-Neighborhood (except for borders which are dealt
with by borders_von_neumann)
This method is used for initializing neighbor references in the cells
"""
for y in range(1, self.height - 1):
for x in range(1, self.width - 1):
neighbors = [self.ca_grid[x, (y - 1)],
self.ca_grid[x, (y + 1)],
self.ca_grid[(x - 1), y],
self.ca_grid[(x + 1), y]]
self.ca_grid[x, y].set_neighbors(neighbors)
def init_von_neumann_borders(self):
"""
Going through all border-regions of the automaton to update them.
This method is used for initializing neighbor references in the cells
"""
w = self.width
h = self.height
for y in range(1, self.height - 1):
neighbors_vn = [
self.ca_grid[0, (y - 1)], self.ca_grid[0, (y + 1)], self.ca_grid[1, y]]
self.ca_grid[0, y].set_neighbors(neighbors_vn)
neighbors_vn = [self.ca_grid[(
w - 1), (y - 1)], self.ca_grid[(w - 1), (y + 1)], self.ca_grid[(w - 2), y]]
self.ca_grid[(w - 1), y].set_neighbors(neighbors_vn)
for x in range(1, self.width - 1):
neighbors_vn = [self.ca_grid[x, 1],
self.ca_grid[(x - 1), 0], self.ca_grid[(x + 1), 0]]
self.ca_grid[x, 0].set_neighbors(neighbors_vn)
neighbors_vn = [
self.ca_grid[x, (h - 2)], self.ca_grid[(x - 1), (h - 1)], self.ca_grid[(x + 1), (h - 1)]]
self.ca_grid[x, (h - 1)].set_neighbors(neighbors_vn)
# top left corner
neighbors_vn = [self.ca_grid[0, 1], self.ca_grid[1, 0]]
self.ca_grid[0, 0].set_neighbors(neighbors_vn)
# top right corner
neighbors_vn = [self.ca_grid[(w - 1), 1], self.ca_grid[(w - 2), 0]]
self.ca_grid[(w - 1), 0].set_neighbors(neighbors_vn)
# bottom left corner
neighbors_vn = [self.ca_grid[0, (h - 2)], self.ca_grid[1, (h - 1)]]
self.ca_grid[0, (h - 1)].set_neighbors(neighbors_vn)
# bottom right corner
neighbors_vn = [
self.ca_grid[(w - 1), (h - 2)], self.ca_grid[(w - 2), (h - 1)]]
self.ca_grid[(w - 1), (h - 1)].set_neighbors(neighbors_vn)
def init_moore(self):
"""
Looping over all cells to gather the neighbor information they need to update.
This method uses Moore_neighborhood. (except for borders which are dealt
with by borders_moore)
This method is used for initializing neighbor references in the cells
"""
for y in range(1, self.height - 1):
for x in range(1, self.width - 1):
neighbors = [self.ca_grid[x, (y - 1)],
self.ca_grid[x, (y + 1)],
self.ca_grid[(x - 1), y],
self.ca_grid[(x + 1), y],
self.ca_grid[(x - 1), (y - 1)], # Top Left
self.ca_grid[(x + 1), (y - 1)], # Top Right
self.ca_grid[(x - 1), (y + 1)], # Bottom Left
self.ca_grid[(x + 1), (y + 1)]] # Bottom Right
self.ca_grid[x, y].set_neighbors(neighbors)
def init_moore_borders(self):
"""
Going through all border-regions of the automaton to update them.
This method is used for initializing neighbor references in the cells
"""
w = self.width
h = self.height
for y in range(1, self.height - 1):
neighbors_mo = [self.ca_grid[0, (y - 1)], self.ca_grid[0, (y + 1)], self.ca_grid[1, y],
self.ca_grid[1, (y - 1)], self.ca_grid[1, (y + 1)]]
self.ca_grid[0, y].set_neighbors(neighbors_mo)
neighbors_mo = [self.ca_grid[(w - 1), (y - 1)], self.ca_grid[(w - 1), (y + 1)], self.ca_grid[(w - 2), y],
self.ca_grid[(w - 2), (y - 1)], self.ca_grid[(w - 2), (y + 1)]]
self.ca_grid[(w - 1), y].set_neighbors(neighbors_mo)
for x in range(1, self.width - 1):
neighbors_mo = [self.ca_grid[x, 1], self.ca_grid[(x - 1), 0], self.ca_grid[(x + 1), 0],
self.ca_grid[(x - 1), 1], self.ca_grid[(x + 1), 1]]
self.ca_grid[x, 0].set_neighbors(neighbors_mo)
neighbors_mo = [self.ca_grid[x, (h - 2)], self.ca_grid[(x - 1), (h - 1)], self.ca_grid[(x + 1), (h - 1)],
self.ca_grid[(x - 1), (h - 2)], self.ca_grid[(x + 1), (h - 2)]]
self.ca_grid[x, (h - 1)].set_neighbors(neighbors_mo)
# top left corner
neighbors_mo = [self.ca_grid[0, 1],
self.ca_grid[1, 0], self.ca_grid[1, 1]]
self.ca_grid[0, 0].set_neighbors(neighbors_mo)
# top right corner
neighbors_mo = [
self.ca_grid[(w - 1), 1], self.ca_grid[(w - 2), 0], self.ca_grid[(w - 2), 1]]
self.ca_grid[(w - 1), 0].set_neighbors(neighbors_mo)
# bottom left corner
neighbors_mo = [
self.ca_grid[0, (h - 2)], self.ca_grid[1, (h - 1)], self.ca_grid[1, (h - 2)]]
self.ca_grid[0, (h - 1)].set_neighbors(neighbors_mo)
# bottom right corner
neighbors_mo = [self.ca_grid[(
w - 1), (h - 2)], self.ca_grid[(w - 2), (h - 1)], self.ca_grid[(w - 2), (h - 2)]]
self.ca_grid[(w - 1), (h - 1)].set_neighbors(neighbors_mo)
| mit | 395,515,731,538,101,200 | 41.669767 | 117 | 0.516351 | false |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Pillow-2.9.0/winbuild/config.py | 2 | 3822 | import os
SF_MIRROR = 'http://iweb.dl.sourceforge.net'
pythons = {#'26': 7,
'27': 7,
#'32': 7,
'33': 7.1,
'34': 7.1}
VIRT_BASE = "c:/vp/"
X64_EXT = os.environ.get('X64_EXT', "x64")
libs = {
'zlib': {
'url': 'http://zlib.net/zlib128.zip',
'hash': 'md5:126f8676442ffbd97884eb4d6f32afb4',
'dir': 'zlib-1.2.8',
},
'jpeg': {
'url': 'http://www.ijg.org/files/jpegsr9a.zip',
'hash': 'md5:a34f3c82760270ee1e1885b15b90a72e', # not found - generated by wiredfool
'dir': 'jpeg-9a',
},
'tiff': {
'url': 'ftp://ftp.remotesensing.org/pub/libtiff/tiff-4.0.4.zip',
'hash': 'md5:8f538a34156188f9a8dcddb679c65d1e',
'dir': 'tiff-4.0.4',
},
'freetype': {
'url': 'http://download.savannah.gnu.org/releases/freetype/freetype-2.6.tar.gz',
'hash': 'md5:1d733ea6c1b7b3df38169fbdbec47d2b',
'dir': 'freetype-2.6',
},
'lcms': {
'url': SF_MIRROR+'/project/lcms/lcms/2.7/lcms2-2.7.zip',
'hash': 'sha1:7ff1a5b721ca719760ba6eb4ec6f38d5e65381cf',
'dir': 'lcms2-2.7',
},
'tcl-8.5': {
'url': SF_MIRROR+'/project/tcl/Tcl/8.5.18/tcl8518-src.zip',
'hash': 'sha1:4c2aed9043088c630a4c795265e2738ef1b4db3b',
'dir': '',
},
'tk-8.5': {
'url': SF_MIRROR+'/project/tcl/Tcl/8.5.18/tk8518-src.zip',
'hash': 'sha1:273f55148777413774aa722ecad25cabda1e31ae',
'dir': '',
'version':'8.5.18',
},
'tcl-8.6': {
'url': SF_MIRROR+'/project/tcl/Tcl/8.6.4/tcl864-src.zip',
'hash': 'md5:35748d2fc61e08a2fdb23b85c6f8c4a0',
'dir': '',
},
'tk-8.6': {
'url': SF_MIRROR+'/project/tcl/Tcl/8.6.4/tk864-src.zip',
'hash': 'md5:111d45061a69e7f5250b6ec8ca7c4f35',
'dir': '',
'version':'8.6.4',
},
'webp': {
'url': 'http://downloads.webmproject.org/releases/webp/libwebp-0.4.3.tar.gz',
'hash': 'sha1:1c307a61c4d0018620b4ba9a58e8f48a8d6640ef',
'dir': 'libwebp-0.4.3',
},
'openjpeg': {
'url': SF_MIRROR+'/project/openjpeg/openjpeg/2.1.0/openjpeg-2.1.0.tar.gz',
'hash': 'md5:f6419fcc233df84f9a81eb36633c6db6',
'dir': 'openjpeg-2.1.0',
},
}
bin_libs = {
'openjpeg': {
'filename': 'openjpeg-2.0.0-win32-x86.zip',
'hash': 'sha1:xxx',
'version': '2.0'
},
}
compilers = {
(7, 64): {
'env_version': 'v7.0',
'vc_version': '2008',
'env_flags': '/x64 /xp',
'inc_dir': 'msvcr90-x64',
'platform': 'x64',
'webp_platform': 'x64',
},
(7, 32): {
'env_version': 'v7.0',
'vc_version': '2008',
'env_flags': '/x86 /xp',
'inc_dir': 'msvcr90-x32',
'platform': 'Win32',
'webp_platform': 'x86',
},
(7.1, 64): {
'env_version': 'v7.1',
'vc_version': '2010',
'env_flags': '/x64 /vista',
'inc_dir': 'msvcr10-x64',
'platform': 'x64',
'webp_platform': 'x64',
},
(7.1, 32): {
'env_version': 'v7.1',
'vc_version': '2010',
'env_flags': '/x86 /vista',
'inc_dir': 'msvcr10-x32',
'platform': 'Win32',
'webp_platform': 'x86',
},
}
def pyversion_fromEnv():
py = os.environ['PYTHON']
py_version = '27'
for k in pythons.keys():
if k in py:
py_version = k
break
if '64' in py:
py_version = '%s%s' % (py_version, X64_EXT)
return py_version
def compiler_fromEnv():
py = os.environ['PYTHON']
for k, v in pythons.items():
if k in py:
compiler_version = v
break
bit = 32
if '64' in py:
bit = 64
return compilers[(compiler_version, bit)]
| mit | 7,292,043,135,636,486,000 | 25.358621 | 93 | 0.502878 | false |
zaina/nova | nova/tests/functional/v3/test_cloudpipe.py | 24 | 3794 | # Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid as uuid_lib
from oslo_config import cfg
from nova.cloudpipe import pipelib
from nova.network import api as network_api
from nova.tests.functional.v3 import api_sample_base
from nova.tests.unit.image import fake
CONF = cfg.CONF
CONF.import_opt('vpn_image_id', 'nova.cloudpipe.pipelib')
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.extensions')
class CloudPipeSampleTest(api_sample_base.ApiSampleTestBaseV3):
ADMIN_API = True
extension_name = "os-cloudpipe"
# TODO(gmann): Overriding '_api_version' till all functional tests
# are merged between v2 and v2.1. After that base class variable
# itself can be changed to 'v2'
_api_version = 'v2'
def _get_flags(self):
f = super(CloudPipeSampleTest, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.cloudpipe.Cloudpipe')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.cloudpipe_update.Cloudpipe_update')
return f
def setUp(self):
super(CloudPipeSampleTest, self).setUp()
def get_user_data(self, project_id):
"""Stub method to generate user data for cloudpipe tests."""
return "VVNFUiBEQVRB\n"
def network_api_get(self, context, network_uuid):
"""Stub to get a valid network and its information."""
return {'vpn_public_address': '127.0.0.1',
'vpn_public_port': 22}
self.stubs.Set(pipelib.CloudPipe, 'get_encoded_zip', get_user_data)
self.stubs.Set(network_api.API, "get",
network_api_get)
def generalize_subs(self, subs, vanilla_regexes):
subs['project_id'] = '[0-9a-f-]+'
return subs
def test_cloud_pipe_create(self):
# Get api samples of cloud pipe extension creation.
self.flags(vpn_image_id=fake.get_valid_image_id())
project = {'project_id': str(uuid_lib.uuid4().hex)}
response = self._do_post('os-cloudpipe', 'cloud-pipe-create-req',
project)
subs = self._get_regexes()
subs.update(project)
subs['image_id'] = CONF.vpn_image_id
self._verify_response('cloud-pipe-create-resp', subs, response, 200)
return project
def test_cloud_pipe_list(self):
# Get api samples of cloud pipe extension get request.
project = self.test_cloud_pipe_create()
response = self._do_get('os-cloudpipe')
subs = self._get_regexes()
subs.update(project)
subs['image_id'] = CONF.vpn_image_id
self._verify_response('cloud-pipe-get-resp', subs, response, 200)
def test_cloud_pipe_update(self):
subs = {'vpn_ip': '192.168.1.1',
'vpn_port': 2000}
response = self._do_put('os-cloudpipe/configure-project',
'cloud-pipe-update-req',
subs)
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, "")
| apache-2.0 | 2,481,565,830,314,478,000 | 38.520833 | 78 | 0.630206 | false |
TheGentlemanOctopus/thegentlemanoctopus | octopus_code/core/octopus/patterns/lavaLampPattern2.py | 1 | 1250 |
from pattern import Pattern
import time
import numpy as np
class LavaLampPattern2(Pattern):
def __init__(self):
self.register_param("freq", 0, 1, 0.2)
self.register_param("speed_r", 0, 10, 2)
self.register_param("speed_g", 0, 10, 1)
self.register_param("speed_b", 0, 10, 1.5)
self.register_param("blob_speed", 0, 10, 1)
self.register_param("time_warp", 0, 3, 0.1)
        self.t = time.time()
        self.last_update = self.t
def next_frame(self, octopus, data):
level = data.eq[3]
        # The level gives a little speed boost: advance the pattern clock
        # faster while the audio level is high.
        now = time.time()
        self.t += (now - self.last_update) * (1.0 + self.time_warp * level)
        self.last_update = now
# Pull out x,y,z
pixels = octopus.pixels()
x = np.array([pixel.location[0] for pixel in pixels])
y = np.array([pixel.location[1] for pixel in pixels])
z = np.array([pixel.location[2] for pixel in pixels])
# Sine Time
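        # 0.5*(1 + sin(...)) maps each sine wave from [-1, 1] into [0, 1],
        # so the channels below land in the usual 0-255 colour range.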
        r = 255*0.5*(1+np.sin(2*np.pi*self.freq*x + self.t*self.speed_r))
g = 0.5*255*0.5*(1+np.sin(2*np.pi*self.freq*y + self.t*self.speed_g))
ones = np.ones(len(z))
b = level*255*ones
#g = g * 0.6 + (r+b) * 0.2
for i in range(len(pixels)):
pixels[i].color = (int(r[i]), int(g[i]), int(b[i]))
| gpl-3.0 | -7,673,507,445,046,682,000 | 28.761905 | 77 | 0.5512 | false |
alexandrujuncu/sos | sos/plugins/upstart.py | 11 | 1765 | # Copyright (C) 2012 Red Hat, Inc., Bryn M. Reeves <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class Upstart(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
"""Upstart init system
"""
plugin_name = 'upstart'
profiles = ('system', 'services', 'boot')
packages = ('upstart',)
def setup(self):
self.add_cmd_output([
'initctl --system list',
'initctl --system version',
'init --version',
"ls -l /etc/init/"
])
# Job Configuration Files
self.add_copy_spec([
'/etc/init.conf',
'/etc/event.d/*',
'/etc/init/*.conf'
])
# State file
self.add_copy_spec('/var/log/upstart/upstart.state')
# Log files
self.add_copy_spec_limit('/var/log/upstart/*',
sizelimit=self.get_option('log_size'))
# Session Jobs (running Upstart as a Session Init)
self.add_copy_spec('/usr/share/upstart/')
# vim: set et ts=4 sw=4 :
| gpl-2.0 | 7,508,279,425,462,348,000 | 32.301887 | 72 | 0.633428 | false |
mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/config/authorization/authorizationpolicy_aaauser_binding.py | 1 | 5051 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class authorizationpolicy_aaauser_binding(base_resource) :
""" Binding class showing the aaauser that can be bound to authorizationpolicy.
"""
def __init__(self) :
self._boundto = ""
self._priority = 0
self._name = ""
self.___count = 0
@property
def boundto(self) :
"""The entity name to which policy is bound.
"""
try :
return self._boundto
except Exception as e:
raise e
@boundto.setter
def boundto(self, boundto) :
"""The entity name to which policy is bound.
"""
try :
self._boundto = boundto
except Exception as e:
raise e
@property
def name(self) :
"""Name of the authorization policy.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the authorization policy.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def priority(self) :
try :
return self._priority
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(authorizationpolicy_aaauser_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.authorizationpolicy_aaauser_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
""" Use this API to fetch authorizationpolicy_aaauser_binding resources.
"""
try :
obj = authorizationpolicy_aaauser_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
""" Use this API to fetch filtered set of authorizationpolicy_aaauser_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = authorizationpolicy_aaauser_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
""" Use this API to count authorizationpolicy_aaauser_binding resources configued on NetScaler.
"""
try :
obj = authorizationpolicy_aaauser_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
""" Use this API to count the filtered set of authorizationpolicy_aaauser_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = authorizationpolicy_aaauser_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class authorizationpolicy_aaauser_binding_response(base_response) :
def __init__(self, length=1) :
self.authorizationpolicy_aaauser_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.authorizationpolicy_aaauser_binding = [authorizationpolicy_aaauser_binding() for _ in range(length)]
| apache-2.0 | 6,528,671,830,296,188,000 | 28.028736 | 137 | 0.702435 | false |
dlspano/scripts | python/mysql_backup/mysql_backup.py | 2 | 6244 | #!/usr/bin/env python3
import argparse
import paramiko
import logging
import os
from datetime import datetime
LOG = logging.getLogger(__name__)
SSH_USER = "centos"
def backup_database(ssh, database, directory):
"""
    Use paramiko to run mysqldump on the remote host
    :param ssh:
    :param database:
    :param directory:
    :return path:
"""
backup_time = datetime.now().strftime('%m-%d-%Y-%H:%M:%S')
path = '{0}/{1}-{2}.sql'.format(directory, database, backup_time)
mysqldump_cmd = "sudo bash -c 'mysqldump {0} > {1}'".format(database,
path)
try:
_, stdout, stderr = ssh.exec_command(mysqldump_cmd)
exit_status = stdout.channel.recv_exit_status()
# We should get an exit status of 1 if the path doesn't exist
if exit_status > 0:
LOG.error('Command exit status'
' {0} {1}'.format(exit_status, stderr.read().decode()))
return None
return path
except paramiko.ssh_exception.SSHException as e:
LOG.error('Connection to host failed with error'
'{0}'.format(e))
def compress_db_backup(ssh, path):
"""
Compress backup file with remote host's gzip command
:param ssh:
:param path:
:return:
"""
compress_cmd = 'sudo gzip {0}'.format(path)
compressed_path = '{0}.gz'.format(path)
file_list_output = ''
try:
_, stdout, _ = ssh.exec_command(compress_cmd)
LOG.info('stdout: {0}'.format(stdout.read().decode()))
_, stdout, _ = ssh.exec_command('ls {0}'.format(compressed_path))
file_list_output = stdout.read().decode()
LOG.info('file_list: {0}'.format(file_list_output))
except paramiko.ssh_exception.SSHException as e:
LOG.error('Connection to host failed with error'
'{0}'.format(e))
return compressed_path if compressed_path in file_list_output else None
def create_connection(hostname, username):
"""
Create a connection to the remote host
:param hostname:
:param username:
:return:
"""
LOG.info('Trying to connect')
ssh = paramiko.SSHClient()
ssh.load_system_host_keys()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname=hostname, username=username, timeout=180,
look_for_keys=True)
return ssh
def create_local_path(path):
"""
Create local path for backups
:param path:
:return:
"""
if not os.path.exists(path):
os.makedirs(path)
def create_remote_path(ssh, path):
"""
Create remote backup path if it doesn't exist
:param ssh:
:param path:
:return:
"""
create_path_cmd = 'sudo mkdir -p {0}'.format(path)
try:
_, stdout, stderr = ssh.exec_command(create_path_cmd)
exit_status = stdout.channel.recv_exit_status()
# We should get an exit status of 1 if the path doesn't exist
if exit_status > 0:
LOG.error('Command exit status'
' {0} {1}'.format(exit_status, stderr.read().decode()))
return None
return path
except paramiko.ssh_exception.SSHException as e:
LOG.error('Connection to host failed with error'
'{0}'.format(e))
def get_backup_file(ssh, local_path, remote_path):
"""
Retrieve backup file from MySQL host
:param ssh:
:param local_path:
:param remote_path:
:return:
"""
sftp = ssh.open_sftp()
backup_file = remote_path.split('/')[-1]
local_path = '/'.join([local_path, backup_file])
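    # Read the whole remote dump into memory before writing it locally;
    # fine for modest dump sizes, large dumps would want a chunked copy.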
with sftp.open(remote_path, mode='rb') as remote_file:
contents = remote_file.read()
with open(local_path, 'wb') as local_file:
local_file.write(contents)
def remote_cleanup(ssh, remote_path):
"""
Cleanup remote host backup directory
:param ssh:
:param remote_path:
:return:
"""
cleanup_cmd = 'sudo rm -f {0}'.format(remote_path)
try:
_, stdout, stderr = ssh.exec_command(cleanup_cmd)
exit_status = stdout.channel.recv_exit_status()
# We should get an exit status of 1 if the path doesn't exist
if exit_status > 0:
LOG.error('Command exit status'
' {0} {1}'.format(exit_status, stderr.read().decode()))
return False
return True
except paramiko.ssh_exception.SSHException as e:
LOG.error('Connection to host failed with error'
'{0}'.format(e))
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'--local-dir', action='store',
default='/tmp',
help="The backup directory on the local host")
parser.add_argument(
'--remote-dir', action='store',
default='/tmp',
help="The backup directory on the remote database host")
parser.add_argument(
'--database', action='store',
required=True,
help="The database you want to backup")
parser.add_argument(
'--server', action='store',
required=True,
help="The database server ip/hostname")
parser.add_argument(
'-v', '--verbose', action='count', default=0,
help="Increase verbosity (specify multiple times for more)")
args = parser.parse_args()
log_level = logging.INFO
if args.verbose >= 1:
log_level = logging.DEBUG
    log_format = '%(asctime)s - %(levelname)s - %(message)s'
    logging.basicConfig(format=log_format, datefmt='%m-%d %H:%M', level=log_level)
ssh = create_connection(args.server, SSH_USER)
create_local_path(args.local_dir)
create_remote_path(ssh, args.remote_dir)
db_backup = backup_database(ssh, args.database, args.remote_dir)
if db_backup:
compressed_db_backup = compress_db_backup(ssh, db_backup)
if compressed_db_backup:
get_backup_file(ssh, args.local_dir, compressed_db_backup)
cleanup = remote_cleanup(ssh, compressed_db_backup)
LOG.debug('Cleanup finished with a status of {0}'.format(cleanup))
if __name__ == '__main__':
main()
| gpl-3.0 | -8,992,226,170,535,014,000 | 28.733333 | 78 | 0.599776 | false |
ZwickyTransientFacility/ztf_sim | ztf_sim/Scheduler.py | 1 | 8313 | """Core scheduler classes."""
import configparser
from collections import defaultdict
import logging
import numpy as np
from astropy.time import Time
import astropy.units as u
from .QueueManager import ListQueueManager, GreedyQueueManager, GurobiQueueManager
from .ObsLogger import ObsLogger
from .configuration import SchedulerConfiguration
from .constants import BASE_DIR, PROGRAM_IDS, EXPOSURE_TIME, READOUT_TIME
from .utils import block_index, block_use_fraction
from .utils import next_12deg_evening_twilight, next_12deg_morning_twilight
class Scheduler(object):
def __init__(self, scheduler_config_file_fullpath,
run_config_file_fullpath, other_queue_configs = None,
output_path = BASE_DIR+'../sims/'):
self.logger = logging.getLogger(__name__)
self.scheduler_config = SchedulerConfiguration(
scheduler_config_file_fullpath)
self.queue_configs = self.scheduler_config.build_queue_configs()
self.queues = self.scheduler_config.build_queues(self.queue_configs)
self.timed_queues_tonight = []
self.set_queue('default')
self.run_config = configparser.ConfigParser()
self.run_config.read(run_config_file_fullpath)
if 'log_name' in self.run_config['scheduler']:
log_name = self.run_config['scheduler']['log_name']
else:
log_name = self.scheduler_config.config['run_name']
# initialize sqlite history
self.obs_log = ObsLogger(log_name,
output_path = output_path,
clobber=self.run_config['scheduler'].getboolean('clobber_db'),)
def set_queue(self, queue_name):
if queue_name not in self.queues:
raise ValueError(f'Requested queue {queue_name} not available!')
self.Q = self.queues[queue_name]
def add_queue(self, queue_name, queue, clobber=True):
if clobber or (queue_name not in self.queues):
self.queues[queue_name] = queue
else:
raise ValueError(f"Queue {queue_name} already exists!")
def delete_queue(self, queue_name):
if (queue_name in self.queues):
if self.Q.queue_name == queue_name:
self.set_queue('default')
del self.queues[queue_name]
else:
raise ValueError(f"Queue {queue_name} does not exist!")
def find_block_use_tonight(self, time_now):
# also sets up timed_queues_tonight
# start of the night
mjd_today = np.floor(time_now.mjd).astype(int)
# Look for timed queues that will be valid tonight,
# to exclude from the nightly solution
self.timed_queues_tonight = []
today = Time(mjd_today, format='mjd')
tomorrow = Time(mjd_today + 1, format='mjd')
block_start = block_index(today)[0]
block_stop = block_index(tomorrow)[0]
block_use = defaultdict(float)
# compute fraction of twilight blocks not available
evening_twilight = next_12deg_evening_twilight(today)
morning_twilight = next_12deg_morning_twilight(today)
evening_twilight_block = block_index(evening_twilight)[0]
frac_evening_twilight = block_use_fraction(
evening_twilight_block, today, evening_twilight)
block_use[evening_twilight_block] = frac_evening_twilight
self.logger.debug(f'{frac_evening_twilight} of block {evening_twilight_block} is before 12 degree twilight')
morning_twilight_block = block_index(morning_twilight)[0]
frac_morning_twilight = block_use_fraction(
morning_twilight_block, morning_twilight, tomorrow)
block_use[morning_twilight_block] = frac_morning_twilight
        self.logger.debug(f'{frac_morning_twilight} of block {morning_twilight_block} is after 12 degree twilight')
for qq_name, qq in self.queues.items():
if qq.queue_name in ['default', 'fallback']:
continue
if qq.validity_window is not None:
qq_block_use = qq.compute_block_use()
is_tonight = False
# sum block use
for block, frac in qq_block_use.items():
if (block_start <= block <= block_stop):
if frac > 0:
is_tonight = True
self.logger.debug(f'{frac} of block {block} used by queue {qq.queue_name}')
block_use[block] += frac
if block_use[block] > 1:
                            self.logger.warning(f'Too many observations for block {block}: {block_use[block]}')
block_use[block] = 1.
if is_tonight:
self.timed_queues_tonight.append(qq_name)
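
        # block_use maps block index -> fraction of that block already
        # consumed by twilight and/or timed queues (clipped to 1.0 above).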
return block_use
def count_timed_observations_tonight(self):
# determine how many equivalent obs are in timed queues
timed_obs = {prog:0 for prog in PROGRAM_IDS}
if len(self.timed_queues_tonight) == 0:
return timed_obs
for qq in self.timed_queues_tonight:
queue = self.queues[qq].queue.copy()
if 'n_repeats' not in queue.columns:
queue['n_repeats'] = 1.
queue['total_time'] = (queue['exposure_time'] +
READOUT_TIME.to(u.second).value)*queue['n_repeats']
net = queue[['program_id','total_time']].groupby('program_id').agg(np.sum)
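            # Convert each program's total requested time into an equivalent
            # number of standard (exposure + readout) observations.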
count_equivalent = np.round(net['total_time']/(EXPOSURE_TIME + READOUT_TIME).to(u.second).value).astype(int).to_dict()
for k, v in count_equivalent.items():
timed_obs[k] += v
return timed_obs
def check_for_TOO_queue_and_switch(self, time_now):
# check if a TOO queue is now valid
for qq_name, qq in self.queues.items():
if qq.is_TOO:
if qq.is_valid(time_now):
# switch if the current queue is not a TOO
if (not self.Q.is_TOO) and len(qq.queue):
self.set_queue(qq_name)
# or if the current TOO queue is empty
if ((self.Q.is_TOO) and (len(self.Q.queue) == 0)
and len(qq.queue)):
self.set_queue(qq_name)
def check_for_timed_queue_and_switch(self, time_now):
# drop out of a timed queue if it's no longer valid
if self.Q.queue_name != 'default':
if not self.Q.is_valid(time_now):
self.set_queue('default')
# only switch from default or fallback queues
if self.Q.queue_name in ['default', 'fallback']:
# check if a timed queue is now valid
for qq_name, qq in self.queues.items():
if (qq.validity_window is not None) and (qq.is_valid(time_now)):
if (qq.queue_type == 'list'):
# list queues should have items in them
if len(qq.queue):
self.set_queue(qq_name)
else:
# don't have a good way to check length of non-list
# queues before nightly assignments
if qq.requests_in_window:
self.set_queue(qq_name)
def remove_empty_and_expired_queues(self, time_now):
queues_for_deletion = []
for qq_name, qq in self.queues.items():
if qq.queue_name in ['default', 'fallback']:
continue
if qq.validity_window is not None:
if qq.validity_window[1] < time_now:
self.logger.info(f'Deleting expired queue {qq_name}')
queues_for_deletion.append(qq_name)
continue
if (qq.queue_type == 'list') and (len(qq.queue) == 0):
self.logger.info(f'Deleting empty queue {qq_name}')
queues_for_deletion.append(qq_name)
# ensure we don't have duplicate values
queues_for_deletion = set(queues_for_deletion)
for qq_name in queues_for_deletion:
self.delete_queue(qq_name)
| bsd-3-clause | 5,210,332,803,409,682,000 | 39.950739 | 130 | 0.571154 | false |
khkaminska/bokeh | bokeh/server/tests/test_wsmanager.py | 9 | 2774 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2015, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from bokeh import protocol
from bokeh import session
from bokeh.server.app import bokeh_app
from bokeh.util.testing import BokehServerTestCase
ws_address = "ws://localhost:5006/bokeh/sub"
class TestSubscribeWebSocket(BokehServerTestCase):
def test_basic_subscribe(self):
self.sess1 = session.Session()
self.sess1.use_doc('first')
self.sess2 = session.Session()
self.sess2.use_doc('second')
#connect sock to defaultdoc
#connect sock2 to defaultdoc
#connect sock3 to defaultdoc2
firstid = self.sess1.docid
secondid = self.sess2.docid
firsttoken = self.sess1.apikey
secondtoken = self.sess2.apikey
import websocket
sock = websocket.WebSocket()
connect(sock, ws_address, 'bokehplot:%s' % firstid, firsttoken)
sock2 = websocket.WebSocket()
connect(sock2, ws_address, 'bokehplot:%s' % firstid, firsttoken)
sock3 = websocket.WebSocket()
connect(sock3, ws_address, 'bokehplot:%s' % secondid, secondtoken)
#make sure sock and sock2 receive message
bokeh_app.publisher.send('bokehplot:%s' % firstid, 'hello!')
msg = sock.recv()
assert msg == 'bokehplot:%s:hello!' % firstid
msg = sock2.recv()
assert msg == 'bokehplot:%s:hello!' % firstid
# send messages on 2 topics, make sure that sockets receive
# the right messages
bokeh_app.publisher.send('bokehplot:%s' % firstid, 'hello2!')
bokeh_app.publisher.send('bokehplot:%s' % secondid, 'hello3!')
msg = sock.recv()
assert msg == 'bokehplot:%s:hello2!' % firstid
msg = sock2.recv()
assert msg == 'bokehplot:%s:hello2!' % firstid
msg = sock3.recv()
assert msg == 'bokehplot:%s:hello3!' % secondid
def connect(sock, addr, topic, auth):
# TODO (bev) increasing timeout due to failing TravisCI tests
# investigate if this is the real solution or if there is a
# deeper problem
sock.timeout = 4.0
sock.connect(addr)
msgobj = dict(msgtype='subscribe',
topic=topic,
auth=auth
)
sock.send(protocol.serialize_json(msgobj))
msg = sock.recv()
msg = msg.split(":", 2)[-1]
msgobj = protocol.deserialize_json(msg)
assert msgobj['status'][:2] == ['subscribesuccess', topic]
| bsd-3-clause | 5,393,376,359,212,422,000 | 38.070423 | 78 | 0.602019 | false |
akaihola/PyChecker | pychecker2/test.py | 11 | 1470 | import os
import sys
import unittest
import glob
def test(modules, verbosity):
for m in modules:
s = unittest.defaultTestLoader.loadTestsFromName(m)
result = unittest.TextTestRunner(verbosity=verbosity).run(s)
if not result.wasSuccessful():
return 1
return 0
def _modules(root):
modules = []
files = glob.glob(os.path.join(root, 'utest', '*.py'))
files.sort()
for fname in files:
fname = os.path.split(fname)[1] # remove path
module = 'pychecker2.utest.' + os.path.splitext(fname)[0]
if not module.endswith('_'): # ignore __init__
modules.append(module)
return modules
class Usage(Exception): pass
def main(args):
import getopt
verbosity = 1
try:
opts, files = getopt.getopt(args, 'v')
for opt, arg in opts:
if opt == '-v':
verbosity += 1
else:
raise Usage('unknown option ' + opt)
except getopt.GetoptError, detail:
raise Usage(str(detail))
root = os.path.dirname(os.path.realpath(sys.argv[0]))
pychecker2 = os.path.split(root)[0]
sys.path.append(pychecker2)
return test(_modules(root), verbosity)
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except Usage, error:
err = sys.stderr
print >>err, "Error: " + str(error)
print >>err, "Usage: %s [-v]" % sys.argv[0]
sys.exit(1)
| bsd-3-clause | 3,999,784,883,007,276,000 | 25.727273 | 68 | 0.57551 | false |
pmdp/GIW | mongodb-2/gr11_mongoengine.py | 1 | 6555 | # -*- coding: utf-8 -*-
from mongoengine import *
db = connect('giw_mongoengine')
class CreditCard(EmbeddedDocument):
name = StringField(required=True, min_length=3, max_length=40)
number = StringField(required=True, regex="^(\d{16})$")
month = StringField(required=True, regex="^(0[1-9]|1[0-2])$")
year = StringField(required=True, regex='^\d{2}$')
cvv = StringField(required=True, regex='^\d{3}$')
class Item (Document):
barcode = StringField(required=True, unique=True, regex="^\d{13}$")
name = StringField(required=True, min_length=3, max_length=30)
category = IntField(required=True, min_value=0)
categories_list = ListField(IntField(min_value=0))
def clean(self):
reverse = self.barcode[::-1]
control_digit = int(self.barcode[-1])
reverse = reverse[1:]
#print("Reverse sin control: ", reverse)
#print("Digito de control: ", control_digit)
sum = 0
for i, digit in enumerate(reverse):
if (i+1) % 2 == 0:
sum += int(digit)
else:
sum += int(digit) * 3
digit = (10 - (sum % 10)) % 10
#print(u"El digito de control debería ser: ", digit)
if control_digit != digit:
raise ValidationError(u"El dígito de control EAN-13 no es válido")
#Si tiene categorías secundarias comprobar que la principal está en la primera posición
if self.categories_list and (self.categories_list[0] != self.category):
raise ValidationError(u"La categoría principal no está en la primera posición de las secundarias")
class OrderLines(EmbeddedDocument):
quantity = IntField(required=True)
price = FloatField(required=True, min_value=0.0)
name = StringField(required=True, min_length=3, max_length=30)
total_price = FloatField(required=True, min_value=0.0)
item = ReferenceField(Item, required=True)
def clean(self):
        if (float(self.quantity) * float(self.price)) != float(self.total_price):
            raise ValidationError(u"The line total price does not match the unit price times the quantity")
        if self.name != self.item.name:
            raise ValidationError(u"The order line name does not match the name of the referenced item")
class Order(Document):
total_price = FloatField(required=True, min_value=0.0)
order_date = ComplexDateTimeField(required=True)
order_lines = EmbeddedDocumentListField(OrderLines, required=True)
def clean(self):
sum = 0.0
for ol in list(self.order_lines):
sum += float(ol.total_price)
        if float(self.total_price) != sum:
            raise ValidationError(u"The order total does not match the sum of all its lines")
class User(Document):
dni = StringField(required=True, unique=True, regex="^(([X-Z]{1})(\d{7})([A-Z]{1}))|((\d{8})([A-Z]{1}))$")
name = StringField(required=True, min_length=3, max_length=30)
surname = StringField(required=True, min_length=3, max_length=30)
second_surname = StringField(min_length=3, max_length=30)
birthdate = StringField(required=True, regex='^([0-9]{4})([-])([0-9]{2})([-])([0-9]{2})$')
    last_access = ListField(ComplexDateTimeField())
credit_cards = EmbeddedDocumentListField(CreditCard)
orders = ListField(ReferenceField(Order, reverse_delete_rule=PULL))
    def clean(self):
        # Letters ordered by the remainder of dividing the DNI number by 23
        letters = "TRWAGMYFPDXBNJZSQVHLCKE"
        # If the document is a foreigner's NIE (it starts with a letter)
        if self.dni[0].isalpha():
            if self.dni[0] == 'X':
                v = '0'
            elif self.dni[0] == 'Y':
                v = '1'
            elif self.dni[0] == 'Z':
                v = '2'
            if self.dni[1:8].isdigit() and (letters[int(v + self.dni[1:8]) % 23] != self.dni[8]):
                raise ValidationError(u"The NIE letter is not valid")
        # If it is a NIF
        else:
            # Take the first 8 digits of the DNI
            if self.dni[:8].isdigit() and (letters[int(self.dni[:8]) % 23] != self.dni[8]):
                raise ValidationError(u"The NIF letter is not valid")
def insertar():
    # Items, order lines and orders
i1 = Item(barcode="1234567890418", name="pan", category=2, categories_list=[2, 4, 5])
i2 = Item(barcode="7684846473780", name="cebolla", category=2)
i3 = Item(barcode="7456573483840", name="oreo", category=3)
i4 = Item(barcode="6667753988647", name="helado", category=4)
i1.save()
i2.save()
i3.save()
i4.save()
ol1 = OrderLines(quantity=2, price=0.55, name="pan", total_price=1.1, item=i1)
ol2 = OrderLines(quantity=3, price=0.32, name="cebolla", total_price=0.96, item=i2)
ol3 = OrderLines(quantity=1, price=2.0, name="oreo", total_price=2.0, item=i3)
ol4 = OrderLines(quantity=7, price=3.0, name="helado", total_price=21.0, item=i4)
o1 = Order(total_price=2.06, order_date="2016,12,15,12,34,21,888283", order_lines=[ol1, ol2])
o2 = Order(total_price=23, order_date="2016,12,20,12,34,21,888283", order_lines=[ol3, ol4])
o3 = Order(total_price=24.1, order_date="2016,12,20,12,34,21,888283", order_lines=[ol1, ol3, ol4])
o4 = Order(total_price=21.96, order_date="2016,12,20,12,34,21,888283", order_lines=[ol4, ol2])
o1.save()
o2.save()
o3.save()
o4.save()
    # Credit cards and users
c1 = CreditCard(name='Pedro', number='1234567891234567', month='02', year='20', cvv='455')
c2 = CreditCard(name='María', number='7684874647484837', month='11', year='17', cvv='345')
c3 = CreditCard(name='Irving', number='0383537847236284', month='12', year='22', cvv='566')
p = User(dni='08264947N', name='Pedro', surname='Hernandez', birthdate='1993-06-20',
credit_cards=[c1, c2], orders=[o1, o2])
p1 = User(dni='Z7334448Y', name='Irving', surname='Mendez', birthdate='1993-06-20',
credit_cards=[c3], orders=[o3, o4])
p.save()
p1.save()
    # Check that deleting an order also removes it from the user's order list
    print(u"Deleting order o1")
    nP = len(User.objects.get(name="Pedro").orders)
    print(u"Number of Pedro's orders before deleting:", nP)
    o1.delete()
    nP2 = len(User.objects.get(name="Pedro").orders)
    print(u"Number of Pedro's orders after deleting:", nP2)
if __name__ == "__main__":
db.drop_database('giw_mongoengine')
insertar() | gpl-3.0 | -5,478,443,918,519,179,000 | 42.838926 | 126 | 0.629 | false |