repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values)
---|---|---|---|---|---|
ThinkingBridge/platform_external_chromium_org
|
chrome/common/extensions/docs/server2/manifest_data_source_test.py
|
23
|
2819
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import unittest
from compiled_file_system import CompiledFileSystem
from manifest_data_source import ManifestDataSource
from object_store_creator import ObjectStoreCreator
from test_file_system import TestFileSystem
file_system = TestFileSystem({
"_manifest_features.json": json.dumps({
'req0': {
'extension_types': ['platform_app', 'extension']
},
'req1': {
'extension_types': 'all'
},
'opt0': {
'extension_types': ['extension']
},
'opt1': {
'extension_types': ['hosted_app']
},
'free0': {
'extension_types': ['platform_app']
},
'free1': {
'extension_types': ['platform_app', 'hosted_app', 'extension']
},
'only0': {
'extension_types': 'all'
},
'only1': {
'extension_types': ['platform_app']
},
'rec0': {
'extension_types': ['extension']
},
'rec1': {
'extension_types': ['platform_app', 'extension']
}
}),
"manifest.json": json.dumps({
'required': [
{
'name': 'req0',
'example': 'Extension'
},
{'name': 'req1'}
],
'only_one': [
{'name': 'only0'},
{'name': 'only1'}
],
'recommended': [
{'name': 'rec0'},
{'name': 'rec1'}
],
'optional': [
{'name': 'opt0'},
{'name': 'opt1'}
]
})
})
class ManifestDataSourceTest(unittest.TestCase):
def testCreateManifestData(self):
expected_extensions = {
'required': [
{
'name': 'req0',
'example': 'Extension'
},
{'name': 'req1'}
],
'recommended': [
{'name': 'rec0'},
{'name': 'rec1'}
],
'only_one': [
{'name': 'only0'}
],
'optional': [
{'name': 'free1'},
{
'name': 'opt0',
'is_last': True
}
]
}
expected_apps = {
'required': [
{
'name': 'req0',
'example': 'Application'
},
{'name': 'req1'}
],
'recommended': [
{'name': 'rec1'}
],
'only_one': [
{'name': 'only0'},
{'name': 'only1'}
],
'optional': [
{'name': 'free0'},
{
'name': 'free1',
'is_last': True
}
]
}
mds = ManifestDataSource(
CompiledFileSystem.Factory(file_system, ObjectStoreCreator.ForTest()),
file_system, 'manifest.json', '_manifest_features.json')
self.assertEqual(expected_extensions, mds.get('extensions'))
self.assertEqual(expected_apps, mds.get('apps'))
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
feigaochn/leetcode
|
p116_populating_next_right_pointers_in_each_node.py
|
2
|
2293
|
# coding: utf-8
# author: Fei Gao <[email protected]>
# Problem: populating next right pointers in each node
#
# Given a binary tree
#
# struct TreeLinkNode {
# TreeLinkNode *left;
# TreeLinkNode *right;
# TreeLinkNode *next;
# }
#
# Populate each next pointer to point to its next right node. If there is
# no next right node, the next pointer should be set to NULL.
# Initially, all next pointers are set to NULL.
#
# Note:
#
# You may only use constant extra space.
# You may assume that it is a perfect binary tree (ie, all leaves are at
# the same level, and every parent has two children).
#
# For example,
# Given the following perfect binary tree,
#
# 1
# / \
# 2 3
# / \ / \
# 4 5 6 7
#
# After calling your function, the tree should look like:
#
# 1 -> NULL
# / \
# 2 -> 3 -> NULL
# / \ / \
# 4->5->6->7 -> NULL
#
# Tags:
#
# Tree
# Depth-first Search
#
# Similar Problems:
#
# (H) Populating Next Right Pointers in Each Node II
# (M) Binary Tree Right Side View
# Definition for binary tree with next pointer.
class TreeLinkNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
self.next = None
class Solution(object):
def connect(self, root):
"""
:type root: TreeLinkNode
:rtype: None
"""
if not root: return root
next_level = [root]
while next_level:
cur_level = next_level[::]
next_level = []
for idx, node in enumerate(cur_level):
node.next = cur_level[idx + 1] if idx + 1 < len(cur_level) else None
if node.left: next_level.append(node.left)
if node.right: next_level.append(node.right)
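# The level-list approach above is easy to follow but keeps O(n) extra
# state, while the problem statement asks for constant extra space. A
# minimal constant-space sketch for a perfect binary tree (hypothetical
# alternative; the class name SolutionO1 is not part of the original file):
class SolutionO1(object):
    def connect(self, root):
        # Walk down the leftmost spine; on each level, wire the children
        # of every node using the next pointers already set one level up.
        head = root
        while head and head.left:
            node = head
            while node:
                node.left.next = node.right
                node.right.next = node.next.left if node.next else None
                node = node.next
            head = head.left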
def main():
    solver = Solution()
    # Build the perfect binary tree from the problem statement:
    #         1
    #        / \
    #       2   3
    #      / \ / \
    #     4  5 6  7
    nodes = [TreeLinkNode(i) for i in range(8)]
    nodes[1].left, nodes[1].right = nodes[2], nodes[3]
    nodes[2].left, nodes[2].right = nodes[4], nodes[5]
    nodes[3].left, nodes[3].right = nodes[6], nodes[7]
    solver.connect(nodes[1])
    # Print each level's next-pointer chain, e.g. "2 -> 3 -> NULL".
    level = nodes[1]
    while level:
        chain, node = [], level
        while node:
            chain.append(str(node.val))
            node = node.next
        print(' -> '.join(chain) + ' -> NULL')
        level = level.left
if __name__ == '__main__':
main()
|
mit
|
nikste/tensorflow
|
tensorflow/compiler/tests/lstm.py
|
132
|
6452
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A simple LSTM layer with benchmarks.
This sets up a simple LSTM (Long Short Term Memory) layer, unrolled to a fixed
length sequence. The only deviation from standard LSTM cells is that
activations are clipped, inspired by the GNMT machine translation model.
The GNMT paper has more details: https://arxiv.org/abs/1609.08144
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
def Clip(x):
"""Clips x to the range [-1., 1.]."""
return math_ops.maximum(math_ops.minimum(x, 1.), -1.)
def LSTMCellWeightsShape(num_inputs, num_nodes):
"""Returns the shape of the weights for a single LSTM cell."""
# Dimension 0 accounts for combining x with the previous m state.
# Dimension 1 accounts for the in value and the (in, forget, out) gates.
return [num_inputs + num_nodes, 4 * num_nodes]
def LSTMCell(weights, m_prev, c_prev, x, pad):
"""Unrolls a single LSTM cell with clipped activations forward by one step.
Args:
weights: Weight matrix with shape LSTMCellWeightsShape.
m_prev: Previous m states with shape [batch_size, num_nodes].
c_prev: Previous c states with shape [batch_size, num_nodes].
x: Input with shape [batch_size, num_inputs].
pad: Padding with shape [batch_size, 1]. Each padding value is either
0 or 1, where 1 indicates padding; i.e. the input is shorter than the
sequence length, and the (m, c) states should simply be passed through
from the previous states.
Returns:
The next (m, c) states, each with shape [batch_size, num_nodes].
"""
# Apply weights to the input and previous hidden state.
# The matmul here is the "big" operation.
xm = array_ops.concat([x, m_prev], 1)
xmw = math_ops.matmul(xm, weights)
# Element-wise ops for the standard LSTM cell, with clipped activations.
# XLA can fuse these operations into a single loop.
in_value, in_gate, forget_gate, out_gate = array_ops.split(
value=xmw, num_or_size_splits=4, axis=1)
in_value = math_ops.tanh(in_value)
in_gate = math_ops.sigmoid(in_gate)
forget_gate = math_ops.sigmoid(forget_gate)
out_gate = math_ops.sigmoid(out_gate)
c_next = Clip(Clip(forget_gate * c_prev) + Clip(in_gate * in_value))
m_next = Clip(out_gate * c_next)
# Account for padding.
c_next = c_prev * pad + c_next * (1.0 - pad)
m_next = m_prev * pad + m_next * (1.0 - pad)
return m_next, c_next
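# Shape sketch (hypothetical sizes, for orientation): with batch_size=4,
# num_inputs=8 and num_nodes=16, LSTMCell consumes x: [4, 8], m_prev and
# c_prev: [4, 16], pad: [4, 1], weights: [8 + 16, 4 * 16] = [24, 64], and
# returns m_next and c_next, each of shape [4, 16].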
def LSTMLayer(cell_name, weights, m, c, x_seq, pad_seq):
"""Unrolls a layer of LSTM cells forward by the sequence length.
The sequence length is determined by the length of x_seq and pad_seq, which
must be the same.
Args:
cell_name: Base name of each cell.
weights: Weight matrix with shape LSTMCellWeightsShape.
m: Initial m states with shape [batch_size, num_nodes].
c: Initial c states with shape [batch_size, num_nodes].
x_seq: List of inputs, each with shape [batch_size, num_inputs].
The length of the list is the sequence length.
pad_seq: List of paddings, each with shape [batch_size, 1].
The length of the list is the sequence length.
Each padding value is either 0 or 1, where 1 indicates padding;
i.e. the input is shorter than the sequence length.
Returns:
List of per-sequence-step outputs, each with shape [batch_size, num_nodes].
Raises:
ValueError: If len(x_seq) != len(pad_seq).
"""
if len(x_seq) != len(pad_seq):
raise ValueError('length of x_seq(%d) != pad_seq(%d)' %
(len(x_seq), len(pad_seq)))
out_seq = []
for seq in range(len(x_seq)):
with ops.name_scope('%s_%d' % (cell_name, seq)):
m, c = LSTMCell(weights, m, c, x_seq[seq], pad_seq[seq])
out_seq.append(array_ops.identity(m, name='out'))
return out_seq
def RandomVar(shape, name=None):
"""Returns a variable of the given shape initialized to random values."""
return variables.Variable(
random_ops.random_uniform(shape), dtype=dtypes.float32, name=name)
def RandomInputs(batch_size, seq_length, num_inputs):
"""Returns randomly initialized (x_seq, pad_seq) sequences."""
x_seq = []
pad_seq = []
with ops.name_scope('inputs'):
for seq in range(seq_length):
x_seq.append(RandomVar([batch_size, num_inputs], name='x_seq_%d' % seq))
# Real padding values are always a sequence of 0 followed by a
# sequence of 1, but random values are fine for benchmarking.
pad_seq.append(RandomVar([batch_size, 1], name='pad_seq_%d' % seq))
return x_seq, pad_seq
def BuildLSTMLayer(batch_size, seq_length, num_inputs, num_nodes):
"""Builds a single LSTM layer with random weights and inputs.
Args:
batch_size: Inputs are fed in batches of this size.
seq_length: The sequence length to unroll the LSTM layer.
num_inputs: Dimension of inputs that are fed into each LSTM cell.
num_nodes: The number of nodes in each LSTM cell.
Returns:
(out_seq, weights) pair. The out_seq is a list of per-sequence-step
outputs, each with shape [batch_size, num_nodes]. The weights are a list of
weight variables that may be trained.
"""
weights = RandomVar(
LSTMCellWeightsShape(num_inputs, num_nodes), name='weights')
m = array_ops.zeros([batch_size, num_nodes], name='init_m')
c = array_ops.zeros([batch_size, num_nodes], name='init_c')
x_seq, pad_seq = RandomInputs(batch_size, seq_length, num_inputs)
out_seq = LSTMLayer('lstm', weights, m, c, x_seq, pad_seq)
return out_seq, [weights]
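# A minimal driver sketch (not part of the original file; assumes TF 1.x
# graph mode and the same private module paths imported above):
#
#   from tensorflow.python.client import session as session_lib
#
#   out_seq, params = BuildLSTMLayer(
#       batch_size=4, seq_length=3, num_inputs=8, num_nodes=16)
#   with session_lib.Session() as sess:
#     sess.run(variables.global_variables_initializer())
#     outputs = sess.run(out_seq)  # list of 3 arrays, each of shape [4, 16]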
|
apache-2.0
|
nhenezi/kuma
|
kuma/search/decorators.py
|
26
|
1278
|
import logging
from django.db.models.signals import pre_delete
from elasticsearch.exceptions import ConnectionError
from kuma.wiki.signals import render_done
from .signals import render_done_handler, pre_delete_handler
log = logging.getLogger('kuma.search.decorators')
def requires_good_connection(fun):
"""Decorator that logs an error on connection issues
9 out of 10 doctors say that connection errors are usually because
ES_URLS is set wrong. This catches those errors and helps you out
with fixing it.
"""
def _requires_good_connection(*args, **kwargs):
try:
return fun(*args, **kwargs)
except ConnectionError:
log.error('Either your Elasticsearch process is not quite '
          'ready to rumble, is not running at all, or ES_URLS '
          'is set wrong in your settings_local.py file.')
return _requires_good_connection
def register_live_index(model_cls):
"""Register a model and index for auto indexing."""
uid = str(model_cls) + 'live_indexing'
render_done.connect(render_done_handler, model_cls, dispatch_uid=uid)
pre_delete.connect(pre_delete_handler, model_cls, dispatch_uid=uid)
# Enable this to be used as decorator.
return model_cls
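# Usage sketch (hypothetical call sites, not part of this module):
#
#   @requires_good_connection
#   def reindex_everything():
#       ...  # anything that talks to Elasticsearch
#
#   @register_live_index
#   class Document(models.Model):
#       ...  # indexed on render_done, unindexed on pre_delete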
|
mpl-2.0
|
mattuuh7/incubator-airflow
|
airflow/example_dags/docker_copy_data.py
|
22
|
3304
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
This sample "listen to directory". move the new file and print it, using docker-containers.
The following operators are being used: DockerOperator, BashOperator & ShortCircuitOperator.
TODO: Review the workflow, change it accordingly to to your environment & enable the code.
'''
# from __future__ import print_function
#
# from airflow import DAG
# import airflow
# from datetime import datetime, timedelta
# from airflow.operators import BashOperator
# from airflow.operators import ShortCircuitOperator
# from airflow.operators.docker_operator import DockerOperator
#
# default_args = {
# 'owner': 'airflow',
# 'depends_on_past': False,
# 'start_date': datetime.now(),
# 'email': ['[email protected]'],
# 'email_on_failure': False,
# 'email_on_retry': False,
# 'retries': 1,
# 'retry_delay': timedelta(minutes=5),
# }
#
# dag = DAG(
# 'docker_sample_copy_data', default_args=default_args, schedule_interval=timedelta(minutes=10))
#
# locate_file_cmd = """
# sleep 10
# find {{params.source_location}} -type f -printf "%f\n" | head -1
# """
#
# t_view = BashOperator(
# task_id='view_file',
# bash_command=locate_file_cmd,
# xcom_push=True,
# params={'source_location': '/your/input_dir/path'},
# dag=dag)
#
#
# def is_data_available(*args, **kwargs):
# ti = kwargs['ti']
# data = ti.xcom_pull(key=None, task_ids='view_file')
# return not data == ''
#
#
# t_is_data_available = ShortCircuitOperator(
# task_id='check_if_data_available',
# provide_context=True,
# python_callable=is_data_available,
# dag=dag)
#
# t_move = DockerOperator(
# api_version='1.19',
# docker_url='tcp://localhost:2375', # replace it with swarm/docker endpoint
# image='centos:latest',
# network_mode='bridge',
# volumes=['/your/host/input_dir/path:/your/input_dir/path',
# '/your/host/output_dir/path:/your/output_dir/path'],
# command='./entrypoint.sh',
# task_id='move_data',
# xcom_push=True,
# params={'source_location': '/your/input_dir/path',
# 'target_location': '/your/output_dir/path'},
# dag=dag)
#
# print_templated_cmd = """
# cat {{ ti.xcom_pull('move_data') }}
# """
#
# t_print = DockerOperator(
# api_version='1.19',
# docker_url='tcp://localhost:2375',
# image='centos:latest',
# volumes=['/your/host/output_dir/path:/your/output_dir/path'],
# command=print_templated_cmd,
# task_id='print',
# dag=dag)
#
# t_view.set_downstream(t_is_data_available)
# t_is_data_available.set_downstream(t_move)
# t_move.set_downstream(t_print)
|
apache-2.0
|
JohnVCS/fossologyFunTime
|
newsrc/nodeDict.py
|
1
|
1062
|
#!/usr/bin/python
# Copyright (C) 2016 Jesse Moseman, and John Carlo B. Viernes IV
#
# This file is part of fossologyFunTime.
#
# fossologyFunTime is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# fossologyFunTime is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with fossologyFunTime. If not, see <http://www.gnu.org/licenses/>.
#
# SPDX-License-Identifier: GPL-2.0+
import networkx
graph = networkx.read_graphml("test.graphml")
nodesDict = dict(graph.nodes(data="NodeLabel"))
edgeLabels = []
for e1, e2 in graph.edges():
    edgeLabels.append((nodesDict[e1]['label'], nodesDict[e2]['label']))
for e in edgeLabels:
    print(e)
|
gpl-2.0
|
abhijo89/Django-facebook
|
docs/docs_env/Lib/encodings/cp855.py
|
593
|
34106
|
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP855.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp855',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x0452, # CYRILLIC SMALL LETTER DJE
0x0081: 0x0402, # CYRILLIC CAPITAL LETTER DJE
0x0082: 0x0453, # CYRILLIC SMALL LETTER GJE
0x0083: 0x0403, # CYRILLIC CAPITAL LETTER GJE
0x0084: 0x0451, # CYRILLIC SMALL LETTER IO
0x0085: 0x0401, # CYRILLIC CAPITAL LETTER IO
0x0086: 0x0454, # CYRILLIC SMALL LETTER UKRAINIAN IE
0x0087: 0x0404, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
0x0088: 0x0455, # CYRILLIC SMALL LETTER DZE
0x0089: 0x0405, # CYRILLIC CAPITAL LETTER DZE
0x008a: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
0x008b: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
0x008c: 0x0457, # CYRILLIC SMALL LETTER YI
0x008d: 0x0407, # CYRILLIC CAPITAL LETTER YI
0x008e: 0x0458, # CYRILLIC SMALL LETTER JE
0x008f: 0x0408, # CYRILLIC CAPITAL LETTER JE
0x0090: 0x0459, # CYRILLIC SMALL LETTER LJE
0x0091: 0x0409, # CYRILLIC CAPITAL LETTER LJE
0x0092: 0x045a, # CYRILLIC SMALL LETTER NJE
0x0093: 0x040a, # CYRILLIC CAPITAL LETTER NJE
0x0094: 0x045b, # CYRILLIC SMALL LETTER TSHE
0x0095: 0x040b, # CYRILLIC CAPITAL LETTER TSHE
0x0096: 0x045c, # CYRILLIC SMALL LETTER KJE
0x0097: 0x040c, # CYRILLIC CAPITAL LETTER KJE
0x0098: 0x045e, # CYRILLIC SMALL LETTER SHORT U
0x0099: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U
0x009a: 0x045f, # CYRILLIC SMALL LETTER DZHE
0x009b: 0x040f, # CYRILLIC CAPITAL LETTER DZHE
0x009c: 0x044e, # CYRILLIC SMALL LETTER YU
0x009d: 0x042e, # CYRILLIC CAPITAL LETTER YU
0x009e: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
0x009f: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
0x00a0: 0x0430, # CYRILLIC SMALL LETTER A
0x00a1: 0x0410, # CYRILLIC CAPITAL LETTER A
0x00a2: 0x0431, # CYRILLIC SMALL LETTER BE
0x00a3: 0x0411, # CYRILLIC CAPITAL LETTER BE
0x00a4: 0x0446, # CYRILLIC SMALL LETTER TSE
0x00a5: 0x0426, # CYRILLIC CAPITAL LETTER TSE
0x00a6: 0x0434, # CYRILLIC SMALL LETTER DE
0x00a7: 0x0414, # CYRILLIC CAPITAL LETTER DE
0x00a8: 0x0435, # CYRILLIC SMALL LETTER IE
0x00a9: 0x0415, # CYRILLIC CAPITAL LETTER IE
0x00aa: 0x0444, # CYRILLIC SMALL LETTER EF
0x00ab: 0x0424, # CYRILLIC CAPITAL LETTER EF
0x00ac: 0x0433, # CYRILLIC SMALL LETTER GHE
0x00ad: 0x0413, # CYRILLIC CAPITAL LETTER GHE
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x0445, # CYRILLIC SMALL LETTER HA
0x00b6: 0x0425, # CYRILLIC CAPITAL LETTER HA
0x00b7: 0x0438, # CYRILLIC SMALL LETTER I
0x00b8: 0x0418, # CYRILLIC CAPITAL LETTER I
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x0439, # CYRILLIC SMALL LETTER SHORT I
0x00be: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x043a, # CYRILLIC SMALL LETTER KA
0x00c7: 0x041a, # CYRILLIC CAPITAL LETTER KA
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x00a4, # CURRENCY SIGN
0x00d0: 0x043b, # CYRILLIC SMALL LETTER EL
0x00d1: 0x041b, # CYRILLIC CAPITAL LETTER EL
0x00d2: 0x043c, # CYRILLIC SMALL LETTER EM
0x00d3: 0x041c, # CYRILLIC CAPITAL LETTER EM
0x00d4: 0x043d, # CYRILLIC SMALL LETTER EN
0x00d5: 0x041d, # CYRILLIC CAPITAL LETTER EN
0x00d6: 0x043e, # CYRILLIC SMALL LETTER O
0x00d7: 0x041e, # CYRILLIC CAPITAL LETTER O
0x00d8: 0x043f, # CYRILLIC SMALL LETTER PE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x041f, # CYRILLIC CAPITAL LETTER PE
0x00de: 0x044f, # CYRILLIC SMALL LETTER YA
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x042f, # CYRILLIC CAPITAL LETTER YA
0x00e1: 0x0440, # CYRILLIC SMALL LETTER ER
0x00e2: 0x0420, # CYRILLIC CAPITAL LETTER ER
0x00e3: 0x0441, # CYRILLIC SMALL LETTER ES
0x00e4: 0x0421, # CYRILLIC CAPITAL LETTER ES
0x00e5: 0x0442, # CYRILLIC SMALL LETTER TE
0x00e6: 0x0422, # CYRILLIC CAPITAL LETTER TE
0x00e7: 0x0443, # CYRILLIC SMALL LETTER U
0x00e8: 0x0423, # CYRILLIC CAPITAL LETTER U
0x00e9: 0x0436, # CYRILLIC SMALL LETTER ZHE
0x00ea: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
0x00eb: 0x0432, # CYRILLIC SMALL LETTER VE
0x00ec: 0x0412, # CYRILLIC CAPITAL LETTER VE
0x00ed: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
0x00ee: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
0x00ef: 0x2116, # NUMERO SIGN
0x00f0: 0x00ad, # SOFT HYPHEN
0x00f1: 0x044b, # CYRILLIC SMALL LETTER YERU
0x00f2: 0x042b, # CYRILLIC CAPITAL LETTER YERU
0x00f3: 0x0437, # CYRILLIC SMALL LETTER ZE
0x00f4: 0x0417, # CYRILLIC CAPITAL LETTER ZE
0x00f5: 0x0448, # CYRILLIC SMALL LETTER SHA
0x00f6: 0x0428, # CYRILLIC CAPITAL LETTER SHA
0x00f7: 0x044d, # CYRILLIC SMALL LETTER E
0x00f8: 0x042d, # CYRILLIC CAPITAL LETTER E
0x00f9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
0x00fa: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
0x00fb: 0x0447, # CYRILLIC SMALL LETTER CHE
0x00fc: 0x0427, # CYRILLIC CAPITAL LETTER CHE
0x00fd: 0x00a7, # SECTION SIGN
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\u0452' # 0x0080 -> CYRILLIC SMALL LETTER DJE
u'\u0402' # 0x0081 -> CYRILLIC CAPITAL LETTER DJE
u'\u0453' # 0x0082 -> CYRILLIC SMALL LETTER GJE
u'\u0403' # 0x0083 -> CYRILLIC CAPITAL LETTER GJE
u'\u0451' # 0x0084 -> CYRILLIC SMALL LETTER IO
u'\u0401' # 0x0085 -> CYRILLIC CAPITAL LETTER IO
u'\u0454' # 0x0086 -> CYRILLIC SMALL LETTER UKRAINIAN IE
u'\u0404' # 0x0087 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
u'\u0455' # 0x0088 -> CYRILLIC SMALL LETTER DZE
u'\u0405' # 0x0089 -> CYRILLIC CAPITAL LETTER DZE
u'\u0456' # 0x008a -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0406' # 0x008b -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0457' # 0x008c -> CYRILLIC SMALL LETTER YI
u'\u0407' # 0x008d -> CYRILLIC CAPITAL LETTER YI
u'\u0458' # 0x008e -> CYRILLIC SMALL LETTER JE
u'\u0408' # 0x008f -> CYRILLIC CAPITAL LETTER JE
u'\u0459' # 0x0090 -> CYRILLIC SMALL LETTER LJE
u'\u0409' # 0x0091 -> CYRILLIC CAPITAL LETTER LJE
u'\u045a' # 0x0092 -> CYRILLIC SMALL LETTER NJE
u'\u040a' # 0x0093 -> CYRILLIC CAPITAL LETTER NJE
u'\u045b' # 0x0094 -> CYRILLIC SMALL LETTER TSHE
u'\u040b' # 0x0095 -> CYRILLIC CAPITAL LETTER TSHE
u'\u045c' # 0x0096 -> CYRILLIC SMALL LETTER KJE
u'\u040c' # 0x0097 -> CYRILLIC CAPITAL LETTER KJE
u'\u045e' # 0x0098 -> CYRILLIC SMALL LETTER SHORT U
u'\u040e' # 0x0099 -> CYRILLIC CAPITAL LETTER SHORT U
u'\u045f' # 0x009a -> CYRILLIC SMALL LETTER DZHE
u'\u040f' # 0x009b -> CYRILLIC CAPITAL LETTER DZHE
u'\u044e' # 0x009c -> CYRILLIC SMALL LETTER YU
u'\u042e' # 0x009d -> CYRILLIC CAPITAL LETTER YU
u'\u044a' # 0x009e -> CYRILLIC SMALL LETTER HARD SIGN
u'\u042a' # 0x009f -> CYRILLIC CAPITAL LETTER HARD SIGN
u'\u0430' # 0x00a0 -> CYRILLIC SMALL LETTER A
u'\u0410' # 0x00a1 -> CYRILLIC CAPITAL LETTER A
u'\u0431' # 0x00a2 -> CYRILLIC SMALL LETTER BE
u'\u0411' # 0x00a3 -> CYRILLIC CAPITAL LETTER BE
u'\u0446' # 0x00a4 -> CYRILLIC SMALL LETTER TSE
u'\u0426' # 0x00a5 -> CYRILLIC CAPITAL LETTER TSE
u'\u0434' # 0x00a6 -> CYRILLIC SMALL LETTER DE
u'\u0414' # 0x00a7 -> CYRILLIC CAPITAL LETTER DE
u'\u0435' # 0x00a8 -> CYRILLIC SMALL LETTER IE
u'\u0415' # 0x00a9 -> CYRILLIC CAPITAL LETTER IE
u'\u0444' # 0x00aa -> CYRILLIC SMALL LETTER EF
u'\u0424' # 0x00ab -> CYRILLIC CAPITAL LETTER EF
u'\u0433' # 0x00ac -> CYRILLIC SMALL LETTER GHE
u'\u0413' # 0x00ad -> CYRILLIC CAPITAL LETTER GHE
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\u0445' # 0x00b5 -> CYRILLIC SMALL LETTER HA
u'\u0425' # 0x00b6 -> CYRILLIC CAPITAL LETTER HA
u'\u0438' # 0x00b7 -> CYRILLIC SMALL LETTER I
u'\u0418' # 0x00b8 -> CYRILLIC CAPITAL LETTER I
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u0439' # 0x00bd -> CYRILLIC SMALL LETTER SHORT I
u'\u0419' # 0x00be -> CYRILLIC CAPITAL LETTER SHORT I
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u043a' # 0x00c6 -> CYRILLIC SMALL LETTER KA
u'\u041a' # 0x00c7 -> CYRILLIC CAPITAL LETTER KA
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa4' # 0x00cf -> CURRENCY SIGN
u'\u043b' # 0x00d0 -> CYRILLIC SMALL LETTER EL
u'\u041b' # 0x00d1 -> CYRILLIC CAPITAL LETTER EL
u'\u043c' # 0x00d2 -> CYRILLIC SMALL LETTER EM
u'\u041c' # 0x00d3 -> CYRILLIC CAPITAL LETTER EM
u'\u043d' # 0x00d4 -> CYRILLIC SMALL LETTER EN
u'\u041d' # 0x00d5 -> CYRILLIC CAPITAL LETTER EN
u'\u043e' # 0x00d6 -> CYRILLIC SMALL LETTER O
u'\u041e' # 0x00d7 -> CYRILLIC CAPITAL LETTER O
u'\u043f' # 0x00d8 -> CYRILLIC SMALL LETTER PE
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\u041f' # 0x00dd -> CYRILLIC CAPITAL LETTER PE
u'\u044f' # 0x00de -> CYRILLIC SMALL LETTER YA
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\u042f' # 0x00e0 -> CYRILLIC CAPITAL LETTER YA
u'\u0440' # 0x00e1 -> CYRILLIC SMALL LETTER ER
u'\u0420' # 0x00e2 -> CYRILLIC CAPITAL LETTER ER
u'\u0441' # 0x00e3 -> CYRILLIC SMALL LETTER ES
u'\u0421' # 0x00e4 -> CYRILLIC CAPITAL LETTER ES
u'\u0442' # 0x00e5 -> CYRILLIC SMALL LETTER TE
u'\u0422' # 0x00e6 -> CYRILLIC CAPITAL LETTER TE
u'\u0443' # 0x00e7 -> CYRILLIC SMALL LETTER U
u'\u0423' # 0x00e8 -> CYRILLIC CAPITAL LETTER U
u'\u0436' # 0x00e9 -> CYRILLIC SMALL LETTER ZHE
u'\u0416' # 0x00ea -> CYRILLIC CAPITAL LETTER ZHE
u'\u0432' # 0x00eb -> CYRILLIC SMALL LETTER VE
u'\u0412' # 0x00ec -> CYRILLIC CAPITAL LETTER VE
u'\u044c' # 0x00ed -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u042c' # 0x00ee -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u2116' # 0x00ef -> NUMERO SIGN
u'\xad' # 0x00f0 -> SOFT HYPHEN
u'\u044b' # 0x00f1 -> CYRILLIC SMALL LETTER YERU
u'\u042b' # 0x00f2 -> CYRILLIC CAPITAL LETTER YERU
u'\u0437' # 0x00f3 -> CYRILLIC SMALL LETTER ZE
u'\u0417' # 0x00f4 -> CYRILLIC CAPITAL LETTER ZE
u'\u0448' # 0x00f5 -> CYRILLIC SMALL LETTER SHA
u'\u0428' # 0x00f6 -> CYRILLIC CAPITAL LETTER SHA
u'\u044d' # 0x00f7 -> CYRILLIC SMALL LETTER E
u'\u042d' # 0x00f8 -> CYRILLIC CAPITAL LETTER E
u'\u0449' # 0x00f9 -> CYRILLIC SMALL LETTER SHCHA
u'\u0429' # 0x00fa -> CYRILLIC CAPITAL LETTER SHCHA
u'\u0447' # 0x00fb -> CYRILLIC SMALL LETTER CHE
u'\u0427' # 0x00fc -> CYRILLIC CAPITAL LETTER CHE
u'\xa7' # 0x00fd -> SECTION SIGN
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a4: 0x00cf, # CURRENCY SIGN
0x00a7: 0x00fd, # SECTION SIGN
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ad: 0x00f0, # SOFT HYPHEN
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x0401: 0x0085, # CYRILLIC CAPITAL LETTER IO
0x0402: 0x0081, # CYRILLIC CAPITAL LETTER DJE
0x0403: 0x0083, # CYRILLIC CAPITAL LETTER GJE
0x0404: 0x0087, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
0x0405: 0x0089, # CYRILLIC CAPITAL LETTER DZE
0x0406: 0x008b, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
0x0407: 0x008d, # CYRILLIC CAPITAL LETTER YI
0x0408: 0x008f, # CYRILLIC CAPITAL LETTER JE
0x0409: 0x0091, # CYRILLIC CAPITAL LETTER LJE
0x040a: 0x0093, # CYRILLIC CAPITAL LETTER NJE
0x040b: 0x0095, # CYRILLIC CAPITAL LETTER TSHE
0x040c: 0x0097, # CYRILLIC CAPITAL LETTER KJE
0x040e: 0x0099, # CYRILLIC CAPITAL LETTER SHORT U
0x040f: 0x009b, # CYRILLIC CAPITAL LETTER DZHE
0x0410: 0x00a1, # CYRILLIC CAPITAL LETTER A
0x0411: 0x00a3, # CYRILLIC CAPITAL LETTER BE
0x0412: 0x00ec, # CYRILLIC CAPITAL LETTER VE
0x0413: 0x00ad, # CYRILLIC CAPITAL LETTER GHE
0x0414: 0x00a7, # CYRILLIC CAPITAL LETTER DE
0x0415: 0x00a9, # CYRILLIC CAPITAL LETTER IE
0x0416: 0x00ea, # CYRILLIC CAPITAL LETTER ZHE
0x0417: 0x00f4, # CYRILLIC CAPITAL LETTER ZE
0x0418: 0x00b8, # CYRILLIC CAPITAL LETTER I
0x0419: 0x00be, # CYRILLIC CAPITAL LETTER SHORT I
0x041a: 0x00c7, # CYRILLIC CAPITAL LETTER KA
0x041b: 0x00d1, # CYRILLIC CAPITAL LETTER EL
0x041c: 0x00d3, # CYRILLIC CAPITAL LETTER EM
0x041d: 0x00d5, # CYRILLIC CAPITAL LETTER EN
0x041e: 0x00d7, # CYRILLIC CAPITAL LETTER O
0x041f: 0x00dd, # CYRILLIC CAPITAL LETTER PE
0x0420: 0x00e2, # CYRILLIC CAPITAL LETTER ER
0x0421: 0x00e4, # CYRILLIC CAPITAL LETTER ES
0x0422: 0x00e6, # CYRILLIC CAPITAL LETTER TE
0x0423: 0x00e8, # CYRILLIC CAPITAL LETTER U
0x0424: 0x00ab, # CYRILLIC CAPITAL LETTER EF
0x0425: 0x00b6, # CYRILLIC CAPITAL LETTER HA
0x0426: 0x00a5, # CYRILLIC CAPITAL LETTER TSE
0x0427: 0x00fc, # CYRILLIC CAPITAL LETTER CHE
0x0428: 0x00f6, # CYRILLIC CAPITAL LETTER SHA
0x0429: 0x00fa, # CYRILLIC CAPITAL LETTER SHCHA
0x042a: 0x009f, # CYRILLIC CAPITAL LETTER HARD SIGN
0x042b: 0x00f2, # CYRILLIC CAPITAL LETTER YERU
0x042c: 0x00ee, # CYRILLIC CAPITAL LETTER SOFT SIGN
0x042d: 0x00f8, # CYRILLIC CAPITAL LETTER E
0x042e: 0x009d, # CYRILLIC CAPITAL LETTER YU
0x042f: 0x00e0, # CYRILLIC CAPITAL LETTER YA
0x0430: 0x00a0, # CYRILLIC SMALL LETTER A
0x0431: 0x00a2, # CYRILLIC SMALL LETTER BE
0x0432: 0x00eb, # CYRILLIC SMALL LETTER VE
0x0433: 0x00ac, # CYRILLIC SMALL LETTER GHE
0x0434: 0x00a6, # CYRILLIC SMALL LETTER DE
0x0435: 0x00a8, # CYRILLIC SMALL LETTER IE
0x0436: 0x00e9, # CYRILLIC SMALL LETTER ZHE
0x0437: 0x00f3, # CYRILLIC SMALL LETTER ZE
0x0438: 0x00b7, # CYRILLIC SMALL LETTER I
0x0439: 0x00bd, # CYRILLIC SMALL LETTER SHORT I
0x043a: 0x00c6, # CYRILLIC SMALL LETTER KA
0x043b: 0x00d0, # CYRILLIC SMALL LETTER EL
0x043c: 0x00d2, # CYRILLIC SMALL LETTER EM
0x043d: 0x00d4, # CYRILLIC SMALL LETTER EN
0x043e: 0x00d6, # CYRILLIC SMALL LETTER O
0x043f: 0x00d8, # CYRILLIC SMALL LETTER PE
0x0440: 0x00e1, # CYRILLIC SMALL LETTER ER
0x0441: 0x00e3, # CYRILLIC SMALL LETTER ES
0x0442: 0x00e5, # CYRILLIC SMALL LETTER TE
0x0443: 0x00e7, # CYRILLIC SMALL LETTER U
0x0444: 0x00aa, # CYRILLIC SMALL LETTER EF
0x0445: 0x00b5, # CYRILLIC SMALL LETTER HA
0x0446: 0x00a4, # CYRILLIC SMALL LETTER TSE
0x0447: 0x00fb, # CYRILLIC SMALL LETTER CHE
0x0448: 0x00f5, # CYRILLIC SMALL LETTER SHA
0x0449: 0x00f9, # CYRILLIC SMALL LETTER SHCHA
0x044a: 0x009e, # CYRILLIC SMALL LETTER HARD SIGN
0x044b: 0x00f1, # CYRILLIC SMALL LETTER YERU
0x044c: 0x00ed, # CYRILLIC SMALL LETTER SOFT SIGN
0x044d: 0x00f7, # CYRILLIC SMALL LETTER E
0x044e: 0x009c, # CYRILLIC SMALL LETTER YU
0x044f: 0x00de, # CYRILLIC SMALL LETTER YA
0x0451: 0x0084, # CYRILLIC SMALL LETTER IO
0x0452: 0x0080, # CYRILLIC SMALL LETTER DJE
0x0453: 0x0082, # CYRILLIC SMALL LETTER GJE
0x0454: 0x0086, # CYRILLIC SMALL LETTER UKRAINIAN IE
0x0455: 0x0088, # CYRILLIC SMALL LETTER DZE
0x0456: 0x008a, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
0x0457: 0x008c, # CYRILLIC SMALL LETTER YI
0x0458: 0x008e, # CYRILLIC SMALL LETTER JE
0x0459: 0x0090, # CYRILLIC SMALL LETTER LJE
0x045a: 0x0092, # CYRILLIC SMALL LETTER NJE
0x045b: 0x0094, # CYRILLIC SMALL LETTER TSHE
0x045c: 0x0096, # CYRILLIC SMALL LETTER KJE
0x045e: 0x0098, # CYRILLIC SMALL LETTER SHORT U
0x045f: 0x009a, # CYRILLIC SMALL LETTER DZHE
0x2116: 0x00ef, # NUMERO SIGN
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
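### Usage sketch
# Round-trip example (assuming the codec is registered on the search path,
# as the stdlib encodings package does; Python 2 string literals):
#
#   u'\u041f'.encode('cp855')   # -> '\xdd' (CYRILLIC CAPITAL LETTER PE)
#   '\xdd'.decode('cp855')      # -> u'\u041f'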
|
bsd-3-clause
|
ubic135/odoo-design
|
addons/hr_holidays/report/hr_holidays_report.py
|
341
|
2369
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields,osv
class hr_holidays_remaining_leaves_user(osv.osv):
_name = "hr.holidays.remaining.leaves.user"
_description = "Total holidays by type"
_auto = False
_columns = {
'name': fields.char('Employee'),
'no_of_leaves': fields.integer('Remaining leaves'),
'user_id': fields.many2one('res.users', 'User'),
'leave_type': fields.char('Leave Type'),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'hr_holidays_remaining_leaves_user')
cr.execute("""
CREATE or REPLACE view hr_holidays_remaining_leaves_user as (
SELECT
min(hrs.id) as id,
rr.name as name,
sum(hrs.number_of_days) as no_of_leaves,
rr.user_id as user_id,
hhs.name as leave_type
FROM
hr_holidays as hrs, hr_employee as hre,
resource_resource as rr,hr_holidays_status as hhs
WHERE
hrs.employee_id = hre.id and
hre.resource_id = rr.id and
hhs.id = hrs.holiday_status_id
GROUP BY
rr.name,rr.user_id,hhs.name
)
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
AstroPrint/AstroBox
|
src/astroprint/printfiles/gcode.py
|
1
|
10242
|
# coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <[email protected]> based on work by David Braam"
__author__ = "Daniel Arroyo. 3DaGogo, Inc <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import os
import logging
import math
import base64
import zlib
from octoprint.settings import settings
from octoprint.events import eventManager, Events
from astroprint.printfiles import PrintFilesManager, MetadataAnalyzer, FileDestinations, AnalysisAborted
from astroprint.util.gCodeAnalyzer import GCodeAnalyzer
class PrintFileManagerGcode(PrintFilesManager):
name = 'gcode'
fileFormat = 'gcode'
SUPPORTED_EXTENSIONS = ["gcode", "gco", "g"]
def __init__(self):
self._logger = logging.getLogger(__name__)
self._metadataAnalyzer = GcodeMetadataAnalyzer(getPathCallback=self.getAbsolutePath, loadedCallback=self._onMetadataAnalysisFinished)
super(PrintFileManagerGcode, self).__init__()
class GcodeMetadataAnalyzer(MetadataAnalyzer):
def __init__(self, getPathCallback, loadedCallback):
self._logger = logging.getLogger(__name__)
self._gcode = None
super(GcodeMetadataAnalyzer, self).__init__(getPathCallback, loadedCallback)
def pause(self):
super(GcodeMetadataAnalyzer, self).pause()
if self._gcode is not None:
self._logger.debug("Aborting running analysis, will restart when Gcode analyzer is resumed")
self._gcode.abort()
def _analyzeFile(self, filename):
path = self._getPathCallback(filename)
if path is None or not os.path.exists(path):
return
self._currentFile = filename
self._currentProgress = 0
try:
self._logger.debug("Starting analysis of file %s" % filename)
eventManager().fire(Events.METADATA_ANALYSIS_STARTED, {"file": filename})
self._gcode = GcodeInterpreter(self._loadedCallback,self._currentFile)
self._gcode.progressCallback = self._onParsingProgress
self._gcode.load(path)
finally:
self._gcode = None
self._currentProgress = None
self._currentFile = None
class GcodeInterpreter(object):
def __init__(self,loadedCallback,currentFile):
self._logger = logging.getLogger(__name__)
self.layerList = None
self.extrusionAmount = [0]
self.extrusionVolume = [0]
self.totalMoveTimeMinute = 0
self.filename = None
self.progressCallback = None
self._loadedCallback = loadedCallback
self._currentFile = currentFile
self._abort = False
self._filamentDiameter = 0
def cbGCodeAnalyzerReady(self,timePerLayers,totalPrintTime,layerCount,size,layer_height,total_filament,parent):
self.progressCallback(100.0)
self.layerList = timePerLayers
self.totalMoveTimeMinute = totalPrintTime/60
self.layerCount = layerCount
self.size = size
self.layer_height = layer_height
self.total_filament = None  # total_filament does not carry any useful information
self._logger.debug("Analysis of file %s finished, notifying callback" % self.filename)
parent._loadedCallback(parent._currentFile, parent)
def cbGCodeAnalyzerException(self,parameters):
self._logger.warn("There was a problem using /usr/bin/astroprint/GCodeAnalyzer... using alternative algorithm")
with open(parameters['filename'], "r") as f:
self._load(f)
self._logger.debug("Analysis of file %s finished, notifying callback" % parameters['filename'])
parameters['parent']._loadedCallback(parameters['parent']._currentFile, parameters['parent'])
def load(self, filename):
if os.path.isfile(filename):
self.filename = filename
self._fileSize = os.stat(filename).st_size
self.progressCallback(0.0)
GCodeAnalyzer(self.filename, False, self.cbGCodeAnalyzerReady, self.cbGCodeAnalyzerException, self).makeCalcs()
def abort(self):
self._abort = True
def _load(self, gcodeFile):
filePos = 0
pos = [0.0, 0.0, 0.0]
posOffset = [0.0, 0.0, 0.0]
currentE = [0.0]
totalExtrusion = [0.0]
maxExtrusion = [0.0]
currentExtruder = 0
totalMoveTimeMinute = 0.0
absoluteE = True
scale = 1.0
posAbs = True
feedRateXY = settings().getFloat(["printerParameters", "movementSpeed", "x"])
offsets = settings().get(["printerParameters", "extruderOffsets"])
for line in gcodeFile:
if self._abort:
raise AnalysisAborted()
filePos += 1
try:
if self.progressCallback is not None and (filePos % 1000 == 0):
if isinstance(gcodeFile, (file)):
self.progressCallback(float(gcodeFile.tell()) / float(self._fileSize))
elif isinstance(gcodeFile, (list)):
self.progressCallback(float(filePos) / float(len(gcodeFile)))
except:
pass
if ';' in line:
comment = line[line.find(';')+1:].strip()
if comment.startswith("filament_diameter"):
self._filamentDiameter = float(comment.split("=", 1)[1].strip())
elif comment.startswith("CURA_PROFILE_STRING"):
curaOptions = self._parseCuraProfileString(comment)
if "filament_diameter" in curaOptions:
try:
self._filamentDiameter = float(curaOptions["filament_diameter"])
except:
self._filamentDiameter = 0.0
line = line[0:line.find(';')]
G = self._getCodeInt(line, 'G')
M = self._getCodeInt(line, 'M')
T = self._getCodeInt(line, 'T')
if G is not None:
if G == 0 or G == 1: #Move
x = self._getCodeFloat(line, 'X')
y = self._getCodeFloat(line, 'Y')
z = self._getCodeFloat(line, 'Z')
e = self._getCodeFloat(line, 'E')
f = self._getCodeFloat(line, 'F')
oldPos = pos
pos = pos[:]
if posAbs:
if x is not None:
pos[0] = x * scale + posOffset[0]
if y is not None:
pos[1] = y * scale + posOffset[1]
if z is not None:
pos[2] = z * scale + posOffset[2]
else:
if x is not None:
pos[0] += x * scale
if y is not None:
pos[1] += y * scale
if z is not None:
pos[2] += z * scale
if f is not None:
feedRateXY = f
moveType = 'move'
if e is not None:
if absoluteE:
e -= currentE[currentExtruder]
if e > 0.0:
moveType = 'extrude'
if e < 0.0:
moveType = 'retract'
totalExtrusion[currentExtruder] += e
currentE[currentExtruder] += e
if totalExtrusion[currentExtruder] > maxExtrusion[currentExtruder]:
maxExtrusion[currentExtruder] = totalExtrusion[currentExtruder]
else:
e = 0.0
if x is not None or y is not None or z is not None:
diffX = oldPos[0] - pos[0]
diffY = oldPos[1] - pos[1]
totalMoveTimeMinute += math.sqrt(diffX * diffX + diffY * diffY) / feedRateXY
elif moveType == "extrude":
diffX = oldPos[0] - pos[0]
diffY = oldPos[1] - pos[1]
time1 = math.sqrt(diffX * diffX + diffY * diffY) / feedRateXY
time2 = abs(e / feedRateXY)
totalMoveTimeMinute += max(time1, time2)
elif moveType == "retract":
totalMoveTimeMinute += abs(e / feedRateXY)
if moveType == 'move' and oldPos[2] != pos[2]:
if oldPos[2] > pos[2] and abs(oldPos[2] - pos[2]) > 5.0 and pos[2] < 1.0:
oldPos[2] = 0.0
elif G == 4: #Delay
S = self._getCodeFloat(line, 'S')
if S is not None:
totalMoveTimeMinute += S / 60.0
P = self._getCodeFloat(line, 'P')
if P is not None:
totalMoveTimeMinute += P / 60.0 / 1000.0
elif G == 20: #Units are inches
scale = 25.4
elif G == 21: #Units are mm
scale = 1.0
elif G == 28: #Home
x = self._getCodeFloat(line, 'X')
y = self._getCodeFloat(line, 'Y')
z = self._getCodeFloat(line, 'Z')
center = [0.0,0.0,0.0]
if x is None and y is None and z is None:
pos = center
else:
pos = pos[:]
if x is not None:
pos[0] = center[0]
if y is not None:
pos[1] = center[1]
if z is not None:
pos[2] = center[2]
elif G == 90: #Absolute position
posAbs = True
elif G == 91: #Relative position
posAbs = False
elif G == 92:
x = self._getCodeFloat(line, 'X')
y = self._getCodeFloat(line, 'Y')
z = self._getCodeFloat(line, 'Z')
e = self._getCodeFloat(line, 'E')
if e is not None:
currentE[currentExtruder] = e
if x is not None:
posOffset[0] = pos[0] - x
if y is not None:
posOffset[1] = pos[1] - y
if z is not None:
posOffset[2] = pos[2] - z
elif M is not None:
if M == 82: #Absolute E
absoluteE = True
elif M == 83: #Relative E
absoluteE = False
elif T is not None:
posOffset[0] -= offsets[currentExtruder]["x"] if currentExtruder < len(offsets) else 0
posOffset[1] -= offsets[currentExtruder]["y"] if currentExtruder < len(offsets) else 0
currentExtruder = T
posOffset[0] += offsets[currentExtruder]["x"] if currentExtruder < len(offsets) else 0
posOffset[1] += offsets[currentExtruder]["y"] if currentExtruder < len(offsets) else 0
if len(currentE) <= currentExtruder:
for i in range(len(currentE), currentExtruder + 1):
currentE.append(0.0)
if len(maxExtrusion) <= currentExtruder:
for i in range(len(maxExtrusion), currentExtruder + 1):
maxExtrusion.append(0.0)
if len(totalExtrusion) <= currentExtruder:
for i in range(len(totalExtrusion), currentExtruder + 1):
totalExtrusion.append(0.0)
if self.progressCallback is not None:
self.progressCallback(100.0)
self.extrusionAmount = maxExtrusion
self.extrusionVolume = [0] * len(maxExtrusion)
for i in range(len(maxExtrusion)):
radius = self._filamentDiameter / 2
self.extrusionVolume[i] = (self.extrusionAmount[i] * (math.pi * radius * radius)) / 1000
self.totalMoveTimeMinute = totalMoveTimeMinute
def _parseCuraProfileString(self, comment):
return {key: value for (key, value) in map(lambda x: x.split("=", 1), zlib.decompress(base64.b64decode(comment[len("CURA_PROFILE_STRING:"):])).split("\b"))}
def _getCodeInt(self, line, code):
n = line.find(code) + 1
if n < 1:
return None
m = line.find(' ', n)
try:
if m < 0:
return int(line[n:])
return int(line[n:m])
except:
return None
def _getCodeFloat(self, line, code):
n = line.find(code) + 1
if n < 1:
return None
m = line.find(' ', n)
try:
if m < 0:
return float(line[n:])
return float(line[n:m])
except:
return None
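# Parsing sketch for the two helpers above (hypothetical input line, not
# part of this module):
#
#   line = 'G1 X10.5 Y-3 E0.42 F1200'
#   self._getCodeInt(line, 'G')    # -> 1
#   self._getCodeFloat(line, 'X')  # -> 10.5
#   self._getCodeFloat(line, 'F')  # -> 1200.0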
|
agpl-3.0
|
dacjames/scrapy
|
scrapy/spiders/feed.py
|
151
|
5441
|
"""
This module implements the XMLFeedSpider which is the recommended spider to use
for scraping from an XML feed.
See documentation in docs/topics/spiders.rst
"""
from scrapy.spiders import Spider
from scrapy.utils.iterators import xmliter, csviter
from scrapy.utils.spider import iterate_spider_output
from scrapy.selector import Selector
from scrapy.exceptions import NotConfigured, NotSupported
class XMLFeedSpider(Spider):
"""
This class intends to be the base class for spiders that scrape
from XML feeds.
You can choose whether to parse the file using the 'iternodes' iterator, an
'xml' selector, or an 'html' selector. In most cases, it's convenient to
use iternodes, since it's faster and cleaner.
"""
iterator = 'iternodes'
itertag = 'item'
namespaces = ()
def process_results(self, response, results):
"""This overridable method is called for each result (item or request)
returned by the spider, and it's intended to perform any last-minute
processing required before returning the results to the framework core,
for example setting the item GUIDs. It receives a list of results and
the response which originated those results. It must return a list of
results (Items or Requests).
"""
return results
def adapt_response(self, response):
"""You can override this function in order to make any changes you want
to into the feed before parsing it. This function must return a
response.
"""
return response
def parse_node(self, response, selector):
"""This method must be overriden with your custom spider functionality"""
if hasattr(self, 'parse_item'): # backward compatibility
return self.parse_item(response, selector)
raise NotImplementedError
def parse_nodes(self, response, nodes):
"""This method is called for the nodes matching the provided tag name
        (itertag). Receives the response and a Selector for each node.
        It relies on parse_node being overridden; otherwise, your spider won't work.
This method must return either a BaseItem, a Request, or a list
containing any of them.
"""
for selector in nodes:
ret = iterate_spider_output(self.parse_node(response, selector))
for result_item in self.process_results(response, ret):
yield result_item
def parse(self, response):
if not hasattr(self, 'parse_node'):
raise NotConfigured('You must define parse_node method in order to scrape this XML feed')
response = self.adapt_response(response)
if self.iterator == 'iternodes':
nodes = self._iternodes(response)
elif self.iterator == 'xml':
selector = Selector(response, type='xml')
self._register_namespaces(selector)
nodes = selector.xpath('//%s' % self.itertag)
elif self.iterator == 'html':
selector = Selector(response, type='html')
self._register_namespaces(selector)
nodes = selector.xpath('//%s' % self.itertag)
else:
raise NotSupported('Unsupported node iterator')
return self.parse_nodes(response, nodes)
def _iternodes(self, response):
for node in xmliter(response, self.itertag):
self._register_namespaces(node)
yield node
def _register_namespaces(self, selector):
for (prefix, uri) in self.namespaces:
selector.register_namespace(prefix, uri)
class CSVFeedSpider(Spider):
"""Spider for parsing CSV feeds.
It receives a CSV file in a response; iterates through each of its rows,
and calls parse_row with a dict containing each field's data.
You can set some options regarding the CSV file, such as the delimiter, quotechar
and the file's headers.
"""
delimiter = None # When this is None, python's csv module's default delimiter is used
quotechar = None # When this is None, python's csv module's default quotechar is used
headers = None
def process_results(self, response, results):
"""This method has the same purpose as the one in XMLFeedSpider"""
return results
def adapt_response(self, response):
"""This method has the same purpose as the one in XMLFeedSpider"""
return response
def parse_row(self, response, row):
"""This method must be overriden with your custom spider functionality"""
raise NotImplementedError
def parse_rows(self, response):
"""Receives a response and a dict (representing each row) with a key for
each provided (or detected) header of the CSV file. This spider also
gives the opportunity to override adapt_response and
        process_results methods for pre- and post-processing purposes.
"""
for row in csviter(response, self.delimiter, self.headers, self.quotechar):
ret = iterate_spider_output(self.parse_row(response, row))
for result_item in self.process_results(response, ret):
yield result_item
def parse(self, response):
if not hasattr(self, 'parse_row'):
raise NotConfigured('You must define parse_row method in order to scrape this CSV feed')
response = self.adapt_response(response)
return self.parse_rows(response)
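# Usage sketch (not part of the original module; the spider names, URLs and
# field names below are hypothetical):
#
#     class ExampleXMLSpider(XMLFeedSpider):
#         name = 'example-xml'
#         start_urls = ['http://example.com/feed.xml']
#         itertag = 'item'
#
#         def parse_node(self, response, node):
#             return {'title': node.xpath('title/text()').extract()}
#
#     class ExampleCSVSpider(CSVFeedSpider):
#         name = 'example-csv'
#         start_urls = ['http://example.com/feed.csv']
#         headers = ['id', 'name']
#
#         def parse_row(self, response, row):
#             return {'id': row['id'], 'name': row['name']}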
|
bsd-3-clause
|
broesamle/servo
|
components/script/dom/bindings/codegen/parser/tests/test_constructor.py
|
102
|
5283
|
import WebIDL
def WebIDLTest(parser, harness):
def checkArgument(argument, QName, name, type, optional, variadic):
harness.ok(isinstance(argument, WebIDL.IDLArgument),
"Should be an IDLArgument")
harness.check(argument.identifier.QName(), QName, "Argument has the right QName")
harness.check(argument.identifier.name, name, "Argument has the right name")
harness.check(str(argument.type), type, "Argument has the right return type")
harness.check(argument.optional, optional, "Argument has the right optional value")
harness.check(argument.variadic, variadic, "Argument has the right variadic value")
def checkMethod(method, QName, name, signatures,
static=True, getter=False, setter=False, creator=False,
deleter=False, legacycaller=False, stringifier=False,
chromeOnly=False):
harness.ok(isinstance(method, WebIDL.IDLMethod),
"Should be an IDLMethod")
harness.ok(method.isMethod(), "Method is a method")
harness.ok(not method.isAttr(), "Method is not an attr")
harness.ok(not method.isConst(), "Method is not a const")
harness.check(method.identifier.QName(), QName, "Method has the right QName")
harness.check(method.identifier.name, name, "Method has the right name")
harness.check(method.isStatic(), static, "Method has the correct static value")
harness.check(method.isGetter(), getter, "Method has the correct getter value")
harness.check(method.isSetter(), setter, "Method has the correct setter value")
harness.check(method.isCreator(), creator, "Method has the correct creator value")
harness.check(method.isDeleter(), deleter, "Method has the correct deleter value")
harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value")
harness.check(method.isStringifier(), stringifier, "Method has the correct stringifier value")
harness.check(method.getExtendedAttribute("ChromeOnly") is not None, chromeOnly, "Method has the correct value for ChromeOnly")
harness.check(len(method.signatures()), len(signatures), "Method has the correct number of signatures")
sigpairs = zip(method.signatures(), signatures)
for (gotSignature, expectedSignature) in sigpairs:
(gotRetType, gotArgs) = gotSignature
(expectedRetType, expectedArgs) = expectedSignature
harness.check(str(gotRetType), expectedRetType,
"Method has the expected return type.")
for i in range(0, len(gotArgs)):
(QName, name, type, optional, variadic) = expectedArgs[i]
checkArgument(gotArgs[i], QName, name, type, optional, variadic)
parser.parse("""
[Constructor]
interface TestConstructorNoArgs {
};
[Constructor(DOMString name)]
interface TestConstructorWithArgs {
};
[Constructor(object foo), Constructor(boolean bar)]
interface TestConstructorOverloads {
};
""")
results = parser.finish()
harness.check(len(results), 3, "Should be three productions")
harness.ok(isinstance(results[0], WebIDL.IDLInterface),
"Should be an IDLInterface")
harness.ok(isinstance(results[1], WebIDL.IDLInterface),
"Should be an IDLInterface")
harness.ok(isinstance(results[2], WebIDL.IDLInterface),
"Should be an IDLInterface")
checkMethod(results[0].ctor(), "::TestConstructorNoArgs::constructor",
"constructor", [("TestConstructorNoArgs (Wrapper)", [])])
checkMethod(results[1].ctor(), "::TestConstructorWithArgs::constructor",
"constructor",
[("TestConstructorWithArgs (Wrapper)",
[("::TestConstructorWithArgs::constructor::name", "name", "String", False, False)])])
checkMethod(results[2].ctor(), "::TestConstructorOverloads::constructor",
"constructor",
[("TestConstructorOverloads (Wrapper)",
[("::TestConstructorOverloads::constructor::foo", "foo", "Object", False, False)]),
("TestConstructorOverloads (Wrapper)",
[("::TestConstructorOverloads::constructor::bar", "bar", "Boolean", False, False)])])
parser = parser.reset()
parser.parse("""
[ChromeConstructor()]
interface TestChromeConstructor {
};
""")
results = parser.finish()
harness.check(len(results), 1, "Should be one production")
harness.ok(isinstance(results[0], WebIDL.IDLInterface),
"Should be an IDLInterface")
checkMethod(results[0].ctor(), "::TestChromeConstructor::constructor",
"constructor", [("TestChromeConstructor (Wrapper)", [])],
chromeOnly=True)
parser = parser.reset()
threw = False
try:
parser.parse("""
[Constructor(),
ChromeConstructor(DOMString a)]
interface TestChromeConstructor {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Can't have both a Constructor and a ChromeConstructor")
|
mpl-2.0
|
AzamYahya/shogun
|
examples/undocumented/python_modular/graphical/converter_ffsep_bss.py
|
26
|
1095
|
"""
Blind Source Separation using the FFSep Algorithm with Shogun
Based on the example from scikit-learn
http://scikit-learn.org/
Kevin Hughes 2013
"""
import numpy as np
import pylab as pl
from modshogun import RealFeatures
from modshogun import FFSep
# Generate sample data
np.random.seed(0)
n_samples = 2000
time = np.linspace(0, 10, n_samples)
# Source Signals
s1 = np.sin(2 * time) # sin wave
s2 = np.sign(np.sin(3 * time)) # square wave
S = np.c_[s1, s2]
S += 0.2 * np.random.normal(size=S.shape) # add noise
# Standardize data
S /= S.std(axis=0)
S = S.T
# Mixing Matrix
A = np.array([[1, 0.5], [0.5, 1]])
# Mix Signals
X = np.dot(A,S)
mixed_signals = RealFeatures(X)
# Separating
ffsep = FFSep()
signals = ffsep.apply(mixed_signals)
S_ = signals.get_feature_matrix()
A_ = ffsep.get_mixing_matrix()
# Plot results
pl.figure()
pl.subplot(3, 1, 1)
pl.plot(S.T)
pl.title('True Sources')
pl.subplot(3, 1, 2)
pl.plot(X.T)
pl.title('Mixed Sources')
pl.subplot(3, 1, 3)
pl.plot(S_.T)
pl.title('Estimated Sources')
pl.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.36)
pl.show()
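# Optional sanity check (a sketch; assumes get_mixing_matrix() returns the
# inverse of the estimated unmixing matrix, so that X ~= A_ . S_):
#
#     assert np.allclose(X, np.dot(A_, S_))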
|
gpl-3.0
|
mquandalle/rethinkdb
|
external/v8_3.30.33.16/build/gyp/test/mac/gyptest-xctest.py
|
221
|
1196
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that xctest targets are correctly configured.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['xcode'])
# Ignore this test if Xcode 5 is not installed
import subprocess
job = subprocess.Popen(['xcodebuild', '-version'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
out, err = job.communicate()
if job.returncode != 0:
raise Exception('Error %d running xcodebuild' % job.returncode)
xcode_version, build_number = out.splitlines()
# Convert the version string from 'Xcode 5.0' to ['5','0'].
xcode_version = xcode_version.split()[-1].split('.')
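  # Note: this compares lists of strings lexicographically, so ['4', '6']
  # sorts before ['5']; the check is only valid while Xcode major versions
  # stay single-digit.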
if xcode_version < ['5']:
test.pass_test()
CHDIR = 'xctest'
test.run_gyp('test.gyp', chdir=CHDIR)
test.build('test.gyp', chdir=CHDIR, arguments=['-scheme', 'classes', 'test'])
test.built_file_must_match('tests.xctest/Contents/Resources/resource.txt',
'foo\n', chdir=CHDIR)
test.pass_test()
|
agpl-3.0
|
vighneshbirodkar/scikit-image
|
doc/examples/color_exposure/plot_adapt_rgb.py
|
9
|
4535
|
"""
=========================================
Adapting gray-scale filters to RGB images
=========================================
There are many filters that are designed to work with gray-scale images but not
with color images. To simplify the process of creating functions that can adapt
to RGB images, scikit-image provides the ``adapt_rgb`` decorator.
To actually use the ``adapt_rgb`` decorator, you have to decide how you want to
adapt the RGB image for use with the gray-scale filter. There are two
pre-defined handlers:
``each_channel``
Pass each of the RGB channels to the filter one-by-one, and stitch the
results back into an RGB image.
``hsv_value``
Convert the RGB image to HSV and pass the value channel to the filter.
The filtered result is inserted back into the HSV image and converted
back to RGB.
Below, we demonstrate the use of ``adapt_rgb`` on a couple of gray-scale
filters:
"""
from skimage.color.adapt_rgb import adapt_rgb, each_channel, hsv_value
from skimage import filters
@adapt_rgb(each_channel)
def sobel_each(image):
return filters.sobel(image)
@adapt_rgb(hsv_value)
def sobel_hsv(image):
return filters.sobel(image)
######################################################################
# We can use these functions as we would normally use them, but now they work
# with both gray-scale and color images. Let's plot the results with a color
# image:
from skimage import data
from skimage.exposure import rescale_intensity
import matplotlib.pyplot as plt
image = data.astronaut()
fig = plt.figure(figsize=(14, 7))
ax_each = fig.add_subplot(121, adjustable='box-forced')
ax_hsv = fig.add_subplot(122, sharex=ax_each, sharey=ax_each,
adjustable='box-forced')
# We use 1 - sobel_each(image)
# but this will not work if image is not normalized
ax_each.imshow(rescale_intensity(1 - sobel_each(image)))
ax_each.set_xticks([]), ax_each.set_yticks([])
ax_each.set_title("Sobel filter computed\n on individual RGB channels")
# We use 1 - sobel_hsv(image) but this will not work if image is not normalized
ax_hsv.imshow(rescale_intensity(1 - sobel_hsv(image)))
ax_hsv.set_xticks([]), ax_hsv.set_yticks([])
ax_hsv.set_title("Sobel filter computed\n on (V)alue converted image (HSV)")
######################################################################
# Notice that the result for the value-filtered image preserves the color of
# the original image, but the channel-filtered image combines the channels in
# a more surprising way. In other common cases, smoothing for example, the
# channel-filtered image will produce a better result than the value-filtered
# image.
#
# You can also create your own handler functions for ``adapt_rgb``. To do so,
# just create a function with the following signature::
#
# def handler(image_filter, image, *args, **kwargs):
# # Manipulate RGB image here...
# image = image_filter(image, *args, **kwargs)
# # Manipulate filtered image here...
# return image
#
# Note that ``adapt_rgb`` handlers are written for filters where the image is
# the first argument.
#
# As a very simple example, we can just convert any RGB image to grayscale
# and then return the filtered result:
from skimage.color import rgb2gray
def as_gray(image_filter, image, *args, **kwargs):
gray_image = rgb2gray(image)
return image_filter(gray_image, *args, **kwargs)
######################################################################
# It's important to create a signature that uses ``*args`` and ``**kwargs``
# to pass arguments along to the filter so that the decorated function is
# allowed to have any number of positional and keyword arguments.
#
# Finally, we can use this handler with ``adapt_rgb`` just as before:
@adapt_rgb(as_gray)
def sobel_gray(image):
return filters.sobel(image)
fig = plt.figure(figsize=(7, 7))
ax = fig.add_subplot(111, sharex=ax_each, sharey=ax_each,
adjustable='box-forced')
# We use 1 - sobel_gray(image)
# but this will not work if image is not normalized
ax.imshow(rescale_intensity(1 - sobel_gray(image)), cmap=plt.cm.gray)
ax.set_xticks([]), ax.set_yticks([])
ax.set_title("Sobel filter computed\n on the converted grayscale image")
plt.show()
######################################################################
#
# .. note::
#
# A very simple check of the array shape is used for detecting RGB
# images, so ``adapt_rgb`` is not recommended for functions that support
# 3D volumes or color images in non-RGB spaces.
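#
#     For instance (a hypothetical illustration), a ``(3, 3, 3)`` float
#     array could equally be a tiny RGB image or a small 3D volume, and
#     ``adapt_rgb`` would treat it as RGB.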
|
bsd-3-clause
|
40423107/2017springcd_hw
|
plugin/liquid_tags/test_flickr.py
|
278
|
2466
|
from . import flickr
try:
from unittest.mock import patch
except ImportError:
from mock import patch
import os
import pytest
import re
PLUGIN_DIR = os.path.dirname(__file__)
TEST_DATA_DIR = os.path.join(PLUGIN_DIR, 'test_data')
@pytest.mark.parametrize('input,expected', [
('18873146680 large "test 1"',
dict(photo_id='18873146680',
size='large',
alt='test 1')),
('18873146680 large \'test 1\'',
dict(photo_id='18873146680',
size='large',
alt='test 1')),
('18873143536360 medium "test number two"',
dict(photo_id='18873143536360',
size='medium',
alt='test number two')),
('18873143536360 small "test number 3"',
dict(photo_id='18873143536360',
size='small',
alt='test number 3')),
('18873143536360 "test 4"',
dict(photo_id='18873143536360',
size=None,
alt='test 4')),
('18873143536360',
dict(photo_id='18873143536360',
size=None,
alt=None)),
('123456 small',
dict(photo_id='123456',
size='small',
alt=None))
])
def test_regex(input, expected):
assert re.match(flickr.PARSE_SYNTAX, input).groupdict() == expected
@pytest.mark.parametrize('input,expected', [
(['1', 'server1', '1', 'secret1', 'small'],
'https://farm1.staticflickr.com/server1/1_secret1_n.jpg'),
(['2', 'server2', '2', 'secret2', 'medium'],
'https://farm2.staticflickr.com/server2/2_secret2_c.jpg'),
(['3', 'server3', '3', 'secret3', 'large'],
'https://farm3.staticflickr.com/server3/3_secret3_b.jpg')
])
def test_source_url(input, expected):
assert flickr.source_url(
input[0], input[1], input[2], input[3], input[4]) == expected
@patch('liquid_tags.flickr.urlopen')
def test_generate_html(mock_urlopen):
# mock the return to deliver the flickr.json file instead
with open(TEST_DATA_DIR + '/flickr.json', 'rb') as f:
mock_urlopen.return_value.read.return_value = f.read()
attrs = dict(
photo_id='1234567',
size='large',
alt='this is a test'
)
expected = ('<a href="https://www.flickr.com/photos/'
'marvinxsteadfast/18841055371/">'
'<img src="https://farm6.staticflickr.com/5552/1234567_'
'17ac287217_b.jpg" alt="this is a test"></a>')
assert flickr.generate_html(attrs, 'abcdef') == expected
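# The @patch decorator above swaps liquid_tags.flickr.urlopen for a mock, so
# generate_html() reads the canned flickr.json fixture instead of calling the
# real Flickr API; the second argument ('abcdef', presumably an API key) is a
# dummy test value.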
|
agpl-3.0
|
codysoyland/django-grappelli
|
grappelli/sites.py
|
1
|
9327
|
# coding: utf-8
import re
from django import http, template
from django.contrib.admin import ModelAdmin
from django.contrib.admin import actions
from django.contrib.auth import authenticate, login
from django.db.models.base import ModelBase
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response
from django.utils.functional import update_wrapper
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy, ugettext as _
from django.views.decorators.cache import never_cache
from django.conf import settings
try:
set
except NameError:
from sets import Set as set # Python 2.3 fallback
from django.contrib.admin.sites import AdminSite
class GrappelliSite(AdminSite):
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the root() method can then be used as a Django view function
that presents a full admin interface for the collection of registered models.
"""
def __init__(self, name=None, app_name='admin'):
self._registry = {} # model_class class -> admin_class instance
self.root_path = None
if name is None:
self.name = 'admin'
else:
self.name = name
self.app_name = app_name
self._actions = {'delete_selected': actions.delete_selected}
self._global_actions = self._actions.copy()
self.groups = {}
self.collections = {}
self.group_template = ""
self.collection_template = ""
def index(self, request, extra_context=None):
"""
Displays the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_dict = {}
user = request.user
for model, model_admin in self._registry.items():
app_label = model._meta.app_label
has_module_perms = user.has_module_perms(app_label)
if has_module_perms:
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True in perms.values():
try:
order = model_admin.order
except:
order = 0
model_dict = {
'order': order,
'name': capfirst(model._meta.verbose_name_plural),
'admin_url': mark_safe('%s/%s/' % (app_label, model.__name__.lower())),
'perms': perms,
}
if app_label in app_dict:
app_dict[app_label]['models'].append(model_dict)
else:
app_dict[app_label] = {
'app_label': app_label,
'name': app_label.title(),
'app_url': app_label + '/',
'has_module_perms': has_module_perms,
'models': [model_dict],
}
# Sort the apps alphabetically.
app_list = app_dict.values()
app_list.sort(lambda x, y: cmp(x['name'], y['name']))
# First: Sort the models alphabetically within each app.
# Second: Sort the models according to their order-attribute.
for app in app_list:
app['models'].sort(lambda x, y: cmp(x['name'], y['name']))
app['models'].sort(lambda x, y: cmp(x['order'], y['order']))
        # remember the admin index URL in the session
if not request.session.get('grappelli'):
request.session['grappelli'] = {}
request.session['grappelli']['home'] = request.get_full_path()
request.session.modified = True
# Assign Apps to Groups
group_list = {}
custom_app_list = app_list
for k,v in self.groups.items():
if request.GET.get("g") and k != int(request.GET.get("g")):
continue
if request.GET.get("c") and k not in self.collections[int(request.GET.get("c"))]['groups']:
continue
group_list[k] = v
application_list = []
for app in v['apps']:
try:
application_list.append(app_dict[app])
# remove assigned app from custom app_list
custom_app_list = [d for d in custom_app_list if d.get('app_label') != app]
except:
pass
if len(application_list):
group_list[k]['applications'] = application_list
else:
group_list[k]['applications'] = ""
# Subsections for Groups and Collections
# set template and title
# clear app_list
if request.GET.get("g"):
try:
title = group_list[int(request.GET.get("g"))]['title']
except:
title = _('Site administration')
try:
group_template = group_list[int(request.GET.get("g"))]['template']
except:
group_template = None
tpl = group_template or self.group_template or "admin/index_group.html"
custom_app_list = []
elif request.GET.get("c"):
try:
title = self.collections[int(request.GET.get("c"))]['title']
except:
title = _('Site administration')
try:
collection_template = self.collections[int(request.GET.get("c"))]['template']
except:
collection_template = None
tpl = collection_template or self.collection_template or "admin/index_collection.html"
custom_app_list = []
else:
title = _('Site administration')
tpl = self.index_template or "admin/index.html"
        # Reset group_list if the user has no permissions
if not app_list:
group_list = {}
context = {
'title': title,
'app_list': custom_app_list,
'root_path': self.root_path,
'group_list': group_list
}
context.update(extra_context or {})
context_instance = template.RequestContext(request, current_app=self.name)
return render_to_response(tpl, context,
context_instance=context_instance
)
index = never_cache(index)
def app_index(self, request, app_label, extra_context=None):
user = request.user
has_module_perms = user.has_module_perms(app_label)
app_dict = {}
for model, model_admin in self._registry.items():
if app_label == model._meta.app_label:
if has_module_perms:
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True in perms.values():
try:
order = model_admin.order
except:
order = 0
model_dict = {
'order': order,
'name': capfirst(model._meta.verbose_name_plural),
'admin_url': '%s/' % model.__name__.lower(),
'perms': perms,
}
if app_dict:
                            app_dict['models'].append(model_dict)
else:
# First time around, now that we know there's
# something to display, add in the necessary meta
# information.
app_dict = {
'name': app_label.title(),
'app_url': '',
'has_module_perms': has_module_perms,
'models': [model_dict],
}
if not app_dict:
raise http.Http404('The requested admin page does not exist.')
# Sort the models alphabetically within each app.
app_dict['models'].sort(lambda x, y: cmp(x['name'], y['name']))
app_dict['models'].sort(lambda x, y: cmp(x['order'], y['order']))
context = {
'title': _('%s administration') % capfirst(app_label),
'app_list': [app_dict],
'root_path': self.root_path,
}
context.update(extra_context or {})
context_instance = template.RequestContext(request, current_app=self.name)
return render_to_response(self.app_index_template or ('admin/%s/app_index.html' % app_label,
'admin/app_index.html'), context,
context_instance=context_instance
)
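# Usage sketch (the model, admin and group names are hypothetical; register()
# is inherited from AdminSite):
#
#     site = GrappelliSite(name='grappelli')
#     site.register(SomeModel, SomeModelAdmin)
#     # Bundle apps under one index heading; ?g=1 on the index URL then
#     # renders only this group.
#     site.groups = {1: {'title': 'Administration', 'apps': ('auth', 'sites')}}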
|
bsd-3-clause
|
kvaps/vdsm
|
vdsm_hooks/hostusb/after_vm_destroy.py
|
1
|
2558
|
#!/usr/bin/python
import os
import re
import sys
import traceback
import hooking
'''
after_vm_destroy:
restore the original ownership of the usb device
'''
HOOK_HOSTUSB_PATH = '/var/run/vdsm/hooks/hostusb-permissions'
def get_owner(devpath):
    uid = gid = -1
    content = ''
    if not os.path.isfile(HOOK_HOSTUSB_PATH):
        return uid, gid
    with open(HOOK_HOSTUSB_PATH, 'r') as f:
        for line in f:
            if len(line) > 0 and line.split(':')[0] == devpath:
                entry = line.split(':')
                uid = entry[1]
                gid = entry[2]
            elif len(line) > 0:
                # lines read from the file keep their trailing newline,
                # so append them as-is instead of adding another one
                content += line
    if uid != -1:
        with open(HOOK_HOSTUSB_PATH, 'w') as f:
            f.writelines(content)
    return uid, gid
# !TODO:
# merge chown with before_vm_start.py
# maybe put it in hooks.py?
def chown(busid, deviceid):
devid = busid + ':' + deviceid
command = ['lsusb', '-s', devid]
retcode, out, err = hooking.execCmd(command, raw=True)
if retcode != 0:
sys.stderr.write('hostusb: cannot find usb device: %s\n' % devid)
sys.exit(2)
devpath = '/dev/bus/usb/' + out[4:7] + '/' + out[15:18]
uid, gid = get_owner(devpath)
if uid == -1:
sys.stderr.write('hostusb after_vm_destroy: cannot find devpath: %s '
'in file: %s\n' % (devpath, HOOK_HOSTUSB_PATH))
return
# we don't use os.chown because we need sudo
owner = str(uid) + ':' + str(gid)
command = ['/bin/chown', owner, devpath]
retcode, out, err = hooking.execCmd(command, sudo=True, raw=True)
if retcode != 0:
sys.stderr.write('hostusb after_vm_destroy: error chown %s to %s, '
'err = %s\n' % (devpath, owner, err))
sys.exit(2)
if 'hostusb' in os.environ:
try:
        regex = re.compile('^0x[0-9A-Fa-f]{4}$')
for usb in os.environ['hostusb'].split('&'):
busid, deviceid = usb.split(':')
if len(regex.findall(busid)) != 1 or \
len(regex.findall(deviceid)) != 1:
sys.stderr.write('hostusb after_vm_destroy: bad input, '
'expected format for bus and '
'device, input: %s:%s\n' %
(busid, deviceid))
sys.exit(2)
chown(busid, deviceid)
except:
sys.stderr.write('hostusb after_vm_destroy: [unexpected error]: %s\n' %
traceback.format_exc())
sys.exit(2)
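# Expected environment format (an illustrative, hypothetical value):
#   hostusb=0x1D6B:0x0002&0x8087:0x0024
# i.e. '&'-separated 'bus:device' pairs, each component a 0x-prefixed
# 4-digit hex id matching the regex check above.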
|
gpl-2.0
|
manojgudi/sandhi
|
modules/gr36/gnuradio-core/src/python/gnuradio/gr/prefs.py
|
13
|
3739
|
#
# Copyright 2006,2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import gnuradio_core as gsp
_prefs_base = gsp.gr_prefs
import ConfigParser
import os
import os.path
import sys
import glob
def _user_prefs_filename():
return os.path.expanduser('~/.gnuradio/config.conf')
def _sys_prefs_dirname():
return gsp.prefsdir()
def _bool(x):
"""
    Try to coerce x to True or False
"""
if isinstance(x, bool):
return x
if isinstance(x, (float, int)):
return bool(x)
raise TypeError, x
class _prefs(_prefs_base):
"""
Derive our 'real class' from the stubbed out base class that has support
for SWIG directors. This allows C++ code to magically and transparently
invoke the methods in this python class.
"""
def __init__(self):
_prefs_base.__init__(self)
self.cp = ConfigParser.RawConfigParser()
self.__getattr__ = lambda self, name: getattr(self.cp, name)
def _sys_prefs_filenames(self):
dir = _sys_prefs_dirname()
try:
fnames = glob.glob(os.path.join(dir, '*.conf'))
except (IOError, OSError):
return []
fnames.sort()
return fnames
def _read_files(self):
filenames = self._sys_prefs_filenames()
filenames.append(_user_prefs_filename())
#print "filenames: ", filenames
self.cp.read(filenames)
# ----------------------------------------------------------------
# These methods override the C++ virtual methods of the same name
# ----------------------------------------------------------------
def has_section(self, section):
return self.cp.has_section(section)
def has_option(self, section, option):
return self.cp.has_option(section, option)
def get_string(self, section, option, default_val):
try:
return self.cp.get(section, option)
except:
return default_val
def get_bool(self, section, option, default_val):
try:
return self.cp.getboolean(section, option)
except:
return default_val
def get_long(self, section, option, default_val):
try:
return self.cp.getint(section, option)
except:
return default_val
def get_double(self, section, option, default_val):
try:
return self.cp.getfloat(section, option)
except:
return default_val
# ----------------------------------------------------------------
# End override of C++ virtual methods
# ----------------------------------------------------------------
_prefs_db = _prefs()
# if GR_DONT_LOAD_PREFS is set, don't load them.
# (make check uses this to avoid interactions.)
if os.getenv("GR_DONT_LOAD_PREFS", None) is None:
_prefs_db._read_files()
_prefs_base.set_singleton(_prefs_db) # tell C++ what instance to use
def prefs():
"""
Return the global preference data base
"""
return _prefs_db
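# Usage sketch (the section and option names are hypothetical):
#
#     p = prefs()
#     if p.has_section("audio"):
#         device = p.get_string("audio", "device", "default")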
|
gpl-3.0
|
Technocaveman/There-is-no-Third-Step
|
node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/templates.py
|
291
|
56003
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.templates
~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for various template engines' markup.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexers.web import \
PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer
from pygments.lexers.agile import PythonLexer, PerlLexer
from pygments.lexers.compiled import JavaLexer
from pygments.lexers.jvm import TeaLangLexer
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
include, using, this
from pygments.token import Error, Punctuation, \
Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
from pygments.util import html_doctype_matches, looks_like_xml
__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
'ColdfusionHtmlLexer', 'VelocityLexer', 'VelocityHtmlLexer',
'VelocityXmlLexer', 'SspLexer', 'TeaTemplateLexer', 'LassoHtmlLexer',
'LassoXmlLexer', 'LassoCssLexer', 'LassoJavascriptLexer']
class ErbLexer(Lexer):
"""
Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
lexer.
    Just highlights ruby code between the preprocessor directives; other data
is left untouched by the lexer.
All options are also forwarded to the `RubyLexer`.
"""
name = 'ERB'
aliases = ['erb']
mimetypes = ['application/x-ruby-templating']
_block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)
def __init__(self, **options):
from pygments.lexers.agile import RubyLexer
self.ruby_lexer = RubyLexer(**options)
Lexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
"""
        Since ERB doesn't allow "<%" and other tags inside of ruby
        blocks, we can use a split-based approach here; it only fails
        on input that ERB itself would reject.
"""
tokens = self._block_re.split(text)
tokens.reverse()
state = idx = 0
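        # The split alternates between text and delimiters: state 0 expects
        # plain text, state 1 a block start tag, and state 2 a closing
        # '%>' / '-%>' tag.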
try:
while True:
# text
if state == 0:
val = tokens.pop()
yield idx, Other, val
idx += len(val)
state = 1
# block starts
elif state == 1:
tag = tokens.pop()
# literals
if tag in ('<%%', '%%>'):
yield idx, Other, tag
idx += 3
state = 0
# comment
elif tag == '<%#':
yield idx, Comment.Preproc, tag
val = tokens.pop()
yield idx + 3, Comment, val
idx += 3 + len(val)
state = 2
# blocks or output
elif tag in ('<%', '<%=', '<%-'):
yield idx, Comment.Preproc, tag
idx += len(tag)
data = tokens.pop()
r_idx = 0
for r_idx, r_token, r_value in \
self.ruby_lexer.get_tokens_unprocessed(data):
yield r_idx + idx, r_token, r_value
idx += len(data)
state = 2
elif tag in ('%>', '-%>'):
yield idx, Error, tag
idx += len(tag)
state = 0
# % raw ruby statements
else:
yield idx, Comment.Preproc, tag[0]
r_idx = 0
for r_idx, r_token, r_value in \
self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
yield idx + 1 + r_idx, r_token, r_value
idx += len(tag)
state = 0
# block ends
elif state == 2:
tag = tokens.pop()
if tag not in ('%>', '-%>'):
yield idx, Other, tag
else:
yield idx, Comment.Preproc, tag
idx += len(tag)
state = 0
except IndexError:
return
def analyse_text(text):
if '<%' in text and '%>' in text:
return 0.4
class SmartyLexer(RegexLexer):
"""
Generic `Smarty <http://smarty.php.net/>`_ template lexer.
    Just highlights smarty code between the preprocessor directives; other
data is left untouched by the lexer.
"""
name = 'Smarty'
aliases = ['smarty']
filenames = ['*.tpl']
mimetypes = ['application/x-smarty']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
(r'[^{]+', Other),
(r'(\{)(\*.*?\*)(\})',
bygroups(Comment.Preproc, Comment, Comment.Preproc)),
(r'(\{php\})(.*?)(\{/php\})',
bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
Comment.Preproc)),
(r'(\{)(/?[a-zA-Z_][a-zA-Z0-9_]*)(\s*)',
bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
(r'\{', Comment.Preproc, 'smarty')
],
'smarty': [
(r'\s+', Text),
(r'\}', Comment.Preproc, '#pop'),
(r'#[a-zA-Z_][a-zA-Z0-9_]*#', Name.Variable),
(r'\$[a-zA-Z_][a-zA-Z0-9_]*(\.[a-zA-Z0-9_]+)*', Name.Variable),
(r'[~!%^&*()+=|\[\]:;,.<>/?{}@-]', Operator),
(r'(true|false|null)\b', Keyword.Constant),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Attribute)
]
}
def analyse_text(text):
rv = 0.0
if re.search('\{if\s+.*?\}.*?\{/if\}', text):
rv += 0.15
if re.search('\{include\s+file=.*?\}', text):
rv += 0.15
if re.search('\{foreach\s+.*?\}.*?\{/foreach\}', text):
rv += 0.15
if re.search('\{\$.*?\}', text):
rv += 0.01
return rv
class VelocityLexer(RegexLexer):
"""
Generic `Velocity <http://velocity.apache.org/>`_ template lexer.
    Just highlights velocity directives and variable references; other
data is left untouched by the lexer.
"""
name = 'Velocity'
aliases = ['velocity']
filenames = ['*.vm','*.fhtml']
flags = re.MULTILINE | re.DOTALL
identifier = r'[a-zA-Z_][a-zA-Z0-9_]*'
tokens = {
'root': [
(r'[^{#$]+', Other),
(r'(#)(\*.*?\*)(#)',
bygroups(Comment.Preproc, Comment, Comment.Preproc)),
(r'(##)(.*?$)',
bygroups(Comment.Preproc, Comment)),
(r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
'directiveparams'),
(r'(#\{?)(' + identifier + r')(\}|\b)',
bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
(r'\$\{?', Punctuation, 'variable')
],
'variable': [
(identifier, Name.Variable),
(r'\(', Punctuation, 'funcparams'),
(r'(\.)(' + identifier + r')',
bygroups(Punctuation, Name.Variable), '#push'),
(r'\}', Punctuation, '#pop'),
(r'', Other, '#pop')
],
'directiveparams': [
(r'(&&|\|\||==?|!=?|[-<>+*%&\|\^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
Operator),
(r'\[', Operator, 'rangeoperator'),
(r'\b' + identifier + r'\b', Name.Function),
include('funcparams')
],
'rangeoperator': [
(r'\.\.', Operator),
include('funcparams'),
(r'\]', Operator, '#pop')
],
'funcparams': [
(r'\$\{?', Punctuation, 'variable'),
(r'\s+', Text),
(r',', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r"\b[0-9]+\b", Number),
(r'(true|false|null)\b', Keyword.Constant),
(r'\(', Punctuation, '#push'),
(r'\)', Punctuation, '#pop')
]
}
def analyse_text(text):
rv = 0.0
if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
rv += 0.25
if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
rv += 0.15
if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
rv += 0.15
if re.search(r'\$\{?[a-zA-Z_][a-zA-Z0-9_]*(\([^)]*\))?'
r'(\.[a-zA-Z0-9_]+(\([^)]*\))?)*\}?', text):
rv += 0.01
return rv
class VelocityHtmlLexer(DelegatingLexer):
"""
    Subclass of the `VelocityLexer` that highlights unlexed data
with the `HtmlLexer`.
"""
name = 'HTML+Velocity'
aliases = ['html+velocity']
alias_filenames = ['*.html','*.fhtml']
mimetypes = ['text/html+velocity']
def __init__(self, **options):
super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
**options)
class VelocityXmlLexer(DelegatingLexer):
"""
    Subclass of the `VelocityLexer` that highlights unlexed data
with the `XmlLexer`.
"""
name = 'XML+Velocity'
aliases = ['xml+velocity']
alias_filenames = ['*.xml','*.vm']
mimetypes = ['application/xml+velocity']
def __init__(self, **options):
super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
**options)
def analyse_text(text):
rv = VelocityLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.5
return rv
class DjangoLexer(RegexLexer):
"""
Generic `django <http://www.djangoproject.com/documentation/templates/>`_
and `jinja <http://wsgiarea.pocoo.org/jinja/>`_ template lexer.
    It just highlights django/jinja code between the preprocessor directives;
other data is left untouched by the lexer.
"""
name = 'Django/Jinja'
aliases = ['django', 'jinja']
mimetypes = ['application/x-django-templating', 'application/x-jinja']
flags = re.M | re.S
tokens = {
'root': [
(r'[^{]+', Other),
(r'\{\{', Comment.Preproc, 'var'),
# jinja/django comments
(r'\{[*#].*?[*#]\}', Comment),
# django comments
(r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
Comment, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# raw jinja blocks
(r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
Text, Comment.Preproc, Text, Keyword, Text,
Comment.Preproc)),
# filter blocks
(r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
'block'),
(r'(\{%)(-?\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Comment.Preproc, Text, Keyword), 'block'),
(r'\{', Other)
],
'varnames': [
(r'(\|)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Operator, Text, Name.Function)),
(r'(is)(\s+)(not)?(\s+)?([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Keyword, Text, Keyword, Text, Name.Function)),
(r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
(r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
Keyword),
(r'(loop|block|super|forloop)\b', Name.Builtin),
(r'[a-zA-Z][a-zA-Z0-9_-]*', Name.Variable),
(r'\.[a-zA-Z0-9_]+', Name.Variable),
(r':?"(\\\\|\\"|[^"])*"', String.Double),
(r":?'(\\\\|\\'|[^'])*'", String.Single),
(r'([{}()\[\]+\-*/,:~]|[><=]=?)', Operator),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
],
'var': [
(r'\s+', Text),
(r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
include('varnames')
],
'block': [
(r'\s+', Text),
(r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
include('varnames'),
(r'.', Punctuation)
]
}
def analyse_text(text):
rv = 0.0
if re.search(r'\{%\s*(block|extends)', text) is not None:
rv += 0.4
if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
rv += 0.1
if re.search(r'\{\{.*?\}\}', text) is not None:
rv += 0.1
return rv
class MyghtyLexer(RegexLexer):
"""
Generic `myghty templates`_ lexer. Code that isn't Myghty
markup is yielded as `Token.Other`.
*New in Pygments 0.6.*
.. _myghty templates: http://www.myghty.org/
"""
name = 'Myghty'
aliases = ['myghty']
filenames = ['*.myt', 'autodelegate']
mimetypes = ['application/x-myghty']
tokens = {
'root': [
(r'\s+', Text),
(r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
bygroups(Name.Tag, Text, Name.Function, Name.Tag,
using(this), Name.Tag)),
(r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
bygroups(Name.Tag, Name.Function, Name.Tag,
using(PythonLexer), Name.Tag)),
(r'(<&[^|])(.*?)(,.*?)?(&>)',
bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
(r'</&>', Name.Tag),
(r'(<%!?)(.*?)(%>)(?s)',
bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
(r'(?<=^)#[^\n]*(\n|\Z)', Comment),
(r'(?<=^)(%)([^\n]*)(\n|\Z)',
bygroups(Name.Tag, using(PythonLexer), Other)),
(r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=[%#]) | # an eval or comment line
(?=</?[%&]) | # a substitution or block or
# call start or end
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)""", bygroups(Other, Operator)),
]
}
class MyghtyHtmlLexer(DelegatingLexer):
"""
    Subclass of the `MyghtyLexer` that highlights unlexed data
with the `HtmlLexer`.
*New in Pygments 0.6.*
"""
name = 'HTML+Myghty'
aliases = ['html+myghty']
mimetypes = ['text/html+myghty']
def __init__(self, **options):
super(MyghtyHtmlLexer, self).__init__(HtmlLexer, MyghtyLexer,
**options)
class MyghtyXmlLexer(DelegatingLexer):
"""
    Subclass of the `MyghtyLexer` that highlights unlexed data
with the `XmlLexer`.
*New in Pygments 0.6.*
"""
name = 'XML+Myghty'
aliases = ['xml+myghty']
mimetypes = ['application/xml+myghty']
def __init__(self, **options):
super(MyghtyXmlLexer, self).__init__(XmlLexer, MyghtyLexer,
**options)
class MyghtyJavascriptLexer(DelegatingLexer):
"""
    Subclass of the `MyghtyLexer` that highlights unlexed data
with the `JavascriptLexer`.
*New in Pygments 0.6.*
"""
name = 'JavaScript+Myghty'
aliases = ['js+myghty', 'javascript+myghty']
mimetypes = ['application/x-javascript+myghty',
'text/x-javascript+myghty',
                 'text/javascript+myghty']
def __init__(self, **options):
super(MyghtyJavascriptLexer, self).__init__(JavascriptLexer,
MyghtyLexer, **options)
class MyghtyCssLexer(DelegatingLexer):
"""
    Subclass of the `MyghtyLexer` that highlights unlexed data
with the `CssLexer`.
*New in Pygments 0.6.*
"""
name = 'CSS+Myghty'
aliases = ['css+myghty']
mimetypes = ['text/css+myghty']
def __init__(self, **options):
super(MyghtyCssLexer, self).__init__(CssLexer, MyghtyLexer,
**options)
class MasonLexer(RegexLexer):
"""
Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
    Mason markup is highlighted as HTML.
.. _mason templates: http://www.masonhq.com/
*New in Pygments 1.4.*
"""
name = 'Mason'
aliases = ['mason']
filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
mimetypes = ['application/x-mason']
tokens = {
'root': [
(r'\s+', Text),
(r'(<%doc>)(.*?)(</%doc>)(?s)',
bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
(r'(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
bygroups(Name.Tag, Text, Name.Function, Name.Tag,
using(this), Name.Tag)),
(r'(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)',
bygroups(Name.Tag, Name.Function, Name.Tag,
using(PerlLexer), Name.Tag)),
(r'(<&[^|])(.*?)(,.*?)?(&>)(?s)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'(<&\|)(.*?)(,.*?)?(&>)(?s)',
bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
(r'</&>', Name.Tag),
(r'(<%!?)(.*?)(%>)(?s)',
bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
(r'(?<=^)#[^\n]*(\n|\Z)', Comment),
(r'(?<=^)(%)([^\n]*)(\n|\Z)',
bygroups(Name.Tag, using(PerlLexer), Other)),
(r"""(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=[%#]) | # an eval or comment line
(?=</?[%&]) | # a substitution or block or
# call start or end
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)""", bygroups(using(HtmlLexer), Operator)),
]
}
def analyse_text(text):
rv = 0.0
if re.search('<&', text) is not None:
rv = 1.0
return rv
class MakoLexer(RegexLexer):
"""
Generic `mako templates`_ lexer. Code that isn't Mako
markup is yielded as `Token.Other`.
*New in Pygments 0.7.*
.. _mako templates: http://www.makotemplates.org/
"""
name = 'Mako'
aliases = ['mako']
filenames = ['*.mao']
mimetypes = ['application/x-mako']
tokens = {
'root': [
(r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
bygroups(Text, Comment.Preproc, Keyword, Other)),
(r'(\s*)(%)([^\n]*)(\n|\Z)',
bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
(r'(\s*)(##[^\n]*)(\n|\Z)',
bygroups(Text, Comment.Preproc, Other)),
(r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
(r'(<%)([\w\.\:]+)',
bygroups(Comment.Preproc, Name.Builtin), 'tag'),
(r'(</%)([\w\.\:]+)(>)',
bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
(r'<%(?=([\w\.\:]+))', Comment.Preproc, 'ondeftags'),
(r'(<%(?:!?))(.*?)(%>)(?s)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(\$\{)(.*?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'''(?sx)
(.+?) # anything, followed by:
(?:
(?<=\n)(?=%|\#\#) | # an eval or comment line
(?=\#\*) | # multiline comment
(?=</?%) | # a python block
# call start or end
(?=\$\{) | # a substitution
(?<=\n)(?=\s*%) |
# - don't consume
(\\\n) | # an escaped newline
\Z # end of string
)
''', bygroups(Other, Operator)),
(r'\s+', Text),
],
'ondeftags': [
(r'<%', Comment.Preproc),
(r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
include('tag'),
],
'tag': [
(r'((?:\w+)\s*=)(\s*)(".*?")',
bygroups(Name.Attribute, Text, String)),
(r'/?\s*>', Comment.Preproc, '#pop'),
(r'\s+', Text),
],
'attr': [
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
class MakoHtmlLexer(DelegatingLexer):
"""
Subclass of the `MakoLexer` that highlights unlexed data
with the `HtmlLexer`.
*New in Pygments 0.7.*
"""
name = 'HTML+Mako'
aliases = ['html+mako']
mimetypes = ['text/html+mako']
def __init__(self, **options):
super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
**options)
class MakoXmlLexer(DelegatingLexer):
"""
    Subclass of the `MakoLexer` that highlights unlexed data
with the `XmlLexer`.
*New in Pygments 0.7.*
"""
name = 'XML+Mako'
aliases = ['xml+mako']
mimetypes = ['application/xml+mako']
def __init__(self, **options):
super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
**options)
class MakoJavascriptLexer(DelegatingLexer):
"""
    Subclass of the `MakoLexer` that highlights unlexed data
with the `JavascriptLexer`.
*New in Pygments 0.7.*
"""
name = 'JavaScript+Mako'
aliases = ['js+mako', 'javascript+mako']
mimetypes = ['application/x-javascript+mako',
'text/x-javascript+mako',
'text/javascript+mako']
def __init__(self, **options):
super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
MakoLexer, **options)
class MakoCssLexer(DelegatingLexer):
"""
    Subclass of the `MakoLexer` that highlights unlexed data
with the `CssLexer`.
*New in Pygments 0.7.*
"""
name = 'CSS+Mako'
aliases = ['css+mako']
mimetypes = ['text/css+mako']
def __init__(self, **options):
super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
**options)
# Genshi and Cheetah lexers courtesy of Matt Good.
class CheetahPythonLexer(Lexer):
"""
Lexer for handling Cheetah's special $ tokens in Python syntax.
"""
def get_tokens_unprocessed(self, text):
pylexer = PythonLexer(**self.options)
for pos, type_, value in pylexer.get_tokens_unprocessed(text):
if type_ == Token.Error and value == '$':
type_ = Comment.Preproc
yield pos, type_, value
class CheetahLexer(RegexLexer):
"""
Generic `cheetah templates`_ lexer. Code that isn't Cheetah
markup is yielded as `Token.Other`. This also works for
`spitfire templates`_ which use the same syntax.
.. _cheetah templates: http://www.cheetahtemplate.org/
.. _spitfire templates: http://code.google.com/p/spitfire/
"""
name = 'Cheetah'
aliases = ['cheetah', 'spitfire']
filenames = ['*.tmpl', '*.spt']
mimetypes = ['application/x-cheetah', 'application/x-spitfire']
tokens = {
'root': [
(r'(##[^\n]*)$',
(bygroups(Comment))),
(r'#[*](.|\n)*?[*]#', Comment),
(r'#end[^#\n]*(?:#|$)', Comment.Preproc),
(r'#slurp$', Comment.Preproc),
(r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
(bygroups(Comment.Preproc, using(CheetahPythonLexer),
Comment.Preproc))),
# TODO support other Python syntax like $foo['bar']
(r'(\$)([a-zA-Z_][a-zA-Z0-9_\.]*[a-zA-Z0-9_])',
bygroups(Comment.Preproc, using(CheetahPythonLexer))),
(r'(\$\{!?)(.*?)(\})(?s)',
bygroups(Comment.Preproc, using(CheetahPythonLexer),
Comment.Preproc)),
(r'''(?sx)
(.+?) # anything, followed by:
(?:
(?=[#][#a-zA-Z]*) | # an eval comment
(?=\$[a-zA-Z_{]) | # a substitution
\Z # end of string
)
''', Other),
(r'\s+', Text),
],
}
class CheetahHtmlLexer(DelegatingLexer):
"""
    Subclass of the `CheetahLexer` that highlights unlexed data
with the `HtmlLexer`.
"""
name = 'HTML+Cheetah'
aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
mimetypes = ['text/html+cheetah', 'text/html+spitfire']
def __init__(self, **options):
super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
**options)
class CheetahXmlLexer(DelegatingLexer):
"""
    Subclass of the `CheetahLexer` that highlights unlexed data
with the `XmlLexer`.
"""
name = 'XML+Cheetah'
aliases = ['xml+cheetah', 'xml+spitfire']
mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
def __init__(self, **options):
super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer,
**options)
class CheetahJavascriptLexer(DelegatingLexer):
"""
    Subclass of the `CheetahLexer` that highlights unlexed data
with the `JavascriptLexer`.
"""
name = 'JavaScript+Cheetah'
aliases = ['js+cheetah', 'javascript+cheetah',
'js+spitfire', 'javascript+spitfire']
mimetypes = ['application/x-javascript+cheetah',
'text/x-javascript+cheetah',
'text/javascript+cheetah',
'application/x-javascript+spitfire',
'text/x-javascript+spitfire',
'text/javascript+spitfire']
def __init__(self, **options):
super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
CheetahLexer, **options)
class GenshiTextLexer(RegexLexer):
"""
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
templates.
"""
name = 'Genshi Text'
aliases = ['genshitext']
mimetypes = ['application/x-genshi-text', 'text/x-genshi']
tokens = {
'root': [
(r'[^#\$\s]+', Other),
(r'^(\s*)(##.*)$', bygroups(Text, Comment)),
(r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
include('variable'),
(r'[#\$\s]', Other),
],
'directive': [
(r'\n', Text, '#pop'),
(r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
(r'(choose|when|with)([^\S\n]+)(.*)',
bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
(r'(choose|otherwise)\b', Keyword, '#pop'),
(r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
],
'variable': [
(r'(?<!\$)(\$\{)(.+?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)',
Name.Variable),
]
}
class GenshiMarkupLexer(RegexLexer):
"""
Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
`GenshiLexer`.
"""
flags = re.DOTALL
tokens = {
'root': [
(r'[^<\$]+', Other),
(r'(<\?python)(.*?)(\?>)',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
# yield style and script blocks as Other
(r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
(r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
(r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
include('variable'),
(r'[<\$]', Other),
],
'pytag': [
(r'\s+', Text),
(r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'pyattr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'pyattr': [
('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
(r'[^\s>]+', String, '#pop'),
],
'tag': [
(r'\s+', Text),
(r'py:[a-zA-Z0-9_-]+\s*=', Name.Attribute, 'pyattr'),
(r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
('"', String, 'attr-dstring'),
("'", String, 'attr-sstring'),
(r'[^\s>]*', String, '#pop')
],
'attr-dstring': [
('"', String, '#pop'),
include('strings'),
("'", String)
],
'attr-sstring': [
("'", String, '#pop'),
include('strings'),
("'", String)
],
'strings': [
('[^"\'$]+', String),
include('variable')
],
'variable': [
(r'(?<!\$)(\$\{)(.+?)(\})',
bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
(r'(?<!\$)(\$)([a-zA-Z_][a-zA-Z0-9_\.]*)',
Name.Variable),
]
}
class HtmlGenshiLexer(DelegatingLexer):
"""
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
`kid <http://kid-templating.org/>`_ kid HTML templates.
"""
name = 'HTML+Genshi'
aliases = ['html+genshi', 'html+kid']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+genshi']
def __init__(self, **options):
super(HtmlGenshiLexer, self).__init__(HtmlLexer, GenshiMarkupLexer,
**options)
def analyse_text(text):
rv = 0.0
if re.search('\$\{.*?\}', text) is not None:
rv += 0.2
if re.search('py:(.*?)=["\']', text) is not None:
rv += 0.2
return rv + HtmlLexer.analyse_text(text) - 0.01
class GenshiLexer(DelegatingLexer):
"""
A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
`kid <http://kid-templating.org/>`_ kid XML templates.
"""
name = 'Genshi'
aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
filenames = ['*.kid']
alias_filenames = ['*.xml']
mimetypes = ['application/x-genshi', 'application/x-kid']
def __init__(self, **options):
super(GenshiLexer, self).__init__(XmlLexer, GenshiMarkupLexer,
**options)
def analyse_text(text):
rv = 0.0
if re.search('\$\{.*?\}', text) is not None:
rv += 0.2
if re.search('py:(.*?)=["\']', text) is not None:
rv += 0.2
return rv + XmlLexer.analyse_text(text) - 0.01
class JavascriptGenshiLexer(DelegatingLexer):
"""
A lexer that highlights javascript code in genshi text templates.
"""
name = 'JavaScript+Genshi Text'
aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
'javascript+genshi']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+genshi',
'text/x-javascript+genshi',
'text/javascript+genshi']
def __init__(self, **options):
super(JavascriptGenshiLexer, self).__init__(JavascriptLexer,
GenshiTextLexer,
**options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
class CssGenshiLexer(DelegatingLexer):
"""
A lexer that highlights CSS definitions in genshi text templates.
"""
name = 'CSS+Genshi Text'
aliases = ['css+genshitext', 'css+genshi']
alias_filenames = ['*.css']
mimetypes = ['text/css+genshi']
def __init__(self, **options):
super(CssGenshiLexer, self).__init__(CssLexer, GenshiTextLexer,
**options)
def analyse_text(text):
return GenshiLexer.analyse_text(text) - 0.05
class RhtmlLexer(DelegatingLexer):
"""
Subclass of the ERB lexer that highlights the unlexed data with the
html lexer.
Nested Javascript and CSS is highlighted too.
"""
name = 'RHTML'
aliases = ['rhtml', 'html+erb', 'html+ruby']
filenames = ['*.rhtml']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+ruby']
def __init__(self, **options):
super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
# one more than the XmlErbLexer returns
rv += 0.5
return rv
class XmlErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights data outside preprocessor
directives with the `XmlLexer`.
"""
name = 'XML+Ruby'
aliases = ['xml+erb', 'xml+ruby']
alias_filenames = ['*.xml']
mimetypes = ['application/xml+ruby']
def __init__(self, **options):
super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options)
def analyse_text(text):
rv = ErbLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
"""
name = 'CSS+Ruby'
aliases = ['css+erb', 'css+ruby']
alias_filenames = ['*.css']
mimetypes = ['text/css+ruby']
def __init__(self, **options):
super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
class JavascriptErbLexer(DelegatingLexer):
"""
Subclass of `ErbLexer` which highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Ruby'
aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+ruby',
'text/x-javascript+ruby',
'text/javascript+ruby']
def __init__(self, **options):
super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer,
**options)
def analyse_text(text):
return ErbLexer.analyse_text(text) - 0.05
class HtmlPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
Nested Javascript and CSS is highlighted too.
"""
name = 'HTML+PHP'
aliases = ['html+php']
filenames = ['*.phtml']
alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
'*.php[345]']
mimetypes = ['application/x-php',
'application/x-httpd-php', 'application/x-httpd-php3',
'application/x-httpd-php4', 'application/x-httpd-php5']
def __init__(self, **options):
super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlPhpLexer(DelegatingLexer):
"""
    Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
"""
name = 'XML+PHP'
aliases = ['xml+php']
alias_filenames = ['*.xml', '*.php', '*.php[345]']
mimetypes = ['application/xml+php']
def __init__(self, **options):
super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options)
def analyse_text(text):
rv = PhpLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
"""
name = 'CSS+PHP'
aliases = ['css+php']
alias_filenames = ['*.css']
mimetypes = ['text/css+php']
def __init__(self, **options):
super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options)
def analyse_text(text):
return PhpLexer.analyse_text(text) - 0.05
class JavascriptPhpLexer(DelegatingLexer):
"""
Subclass of `PhpLexer` which highlights unmatched data with the
`JavascriptLexer`.
"""
name = 'JavaScript+PHP'
aliases = ['js+php', 'javascript+php']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+php',
'text/x-javascript+php',
'text/javascript+php']
def __init__(self, **options):
super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer,
**options)
def analyse_text(text):
return PhpLexer.analyse_text(text)
class HtmlSmartyLexer(DelegatingLexer):
"""
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `HtmlLexer`.
    Nested Javascript and CSS are highlighted too.
"""
name = 'HTML+Smarty'
aliases = ['html+smarty']
alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
mimetypes = ['text/html+smarty']
def __init__(self, **options):
super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`XmlLexer`.
"""
name = 'XML+Smarty'
aliases = ['xml+smarty']
alias_filenames = ['*.xml', '*.tpl']
mimetypes = ['application/xml+smarty']
def __init__(self, **options):
super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options)
def analyse_text(text):
rv = SmartyLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`CssLexer`.
"""
name = 'CSS+Smarty'
aliases = ['css+smarty']
alias_filenames = ['*.css', '*.tpl']
mimetypes = ['text/css+smarty']
def __init__(self, **options):
super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
class JavascriptSmartyLexer(DelegatingLexer):
"""
Subclass of the `SmartyLexer` that highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Smarty'
aliases = ['js+smarty', 'javascript+smarty']
alias_filenames = ['*.js', '*.tpl']
mimetypes = ['application/x-javascript+smarty',
'text/x-javascript+smarty',
'text/javascript+smarty']
def __init__(self, **options):
super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer,
**options)
def analyse_text(text):
return SmartyLexer.analyse_text(text) - 0.05
class HtmlDjangoLexer(DelegatingLexer):
"""
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `HtmlLexer`.
    Nested Javascript and CSS are highlighted too.
"""
name = 'HTML+Django/Jinja'
aliases = ['html+django', 'html+jinja', 'htmldjango']
alias_filenames = ['*.html', '*.htm', '*.xhtml']
mimetypes = ['text/html+django', 'text/html+jinja']
def __init__(self, **options):
super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
if html_doctype_matches(text):
rv += 0.5
return rv
class XmlDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`XmlLexer`.
"""
name = 'XML+Django/Jinja'
aliases = ['xml+django', 'xml+jinja']
alias_filenames = ['*.xml']
mimetypes = ['application/xml+django', 'application/xml+jinja']
def __init__(self, **options):
super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options)
def analyse_text(text):
rv = DjangoLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class CssDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`CssLexer`.
"""
name = 'CSS+Django/Jinja'
aliases = ['css+django', 'css+jinja']
alias_filenames = ['*.css']
mimetypes = ['text/css+django', 'text/css+jinja']
def __init__(self, **options):
super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
class JavascriptDjangoLexer(DelegatingLexer):
"""
Subclass of the `DjangoLexer` that highlights unlexed data with the
`JavascriptLexer`.
"""
name = 'JavaScript+Django/Jinja'
aliases = ['js+django', 'javascript+django',
'js+jinja', 'javascript+jinja']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+django',
'application/x-javascript+jinja',
'text/x-javascript+django',
'text/x-javascript+jinja',
'text/javascript+django',
'text/javascript+jinja']
def __init__(self, **options):
super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer,
**options)
def analyse_text(text):
return DjangoLexer.analyse_text(text) - 0.05
class JspRootLexer(RegexLexer):
"""
    Base for the `JspLexer`. Yields `Token.Other` for areas outside of
    JSP tags.
*New in Pygments 0.7.*
"""
tokens = {
'root': [
(r'<%\S?', Keyword, 'sec'),
# FIXME: I want to make these keywords but still parse attributes.
(r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
Keyword),
(r'[^<]+', Other),
(r'<', Other),
],
'sec': [
(r'%>', Keyword, '#pop'),
# note: '\w\W' != '.' without DOTALL.
(r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
],
}
class JspLexer(DelegatingLexer):
"""
Lexer for Java Server Pages.
*New in Pygments 0.7.*
"""
name = 'Java Server Page'
aliases = ['jsp']
filenames = ['*.jsp']
mimetypes = ['application/x-jsp']
def __init__(self, **options):
super(JspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = JavaLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
if '<%' in text and '%>' in text:
rv += 0.1
return rv
class EvoqueLexer(RegexLexer):
"""
For files using the Evoque templating system.
*New in Pygments 1.1.*
"""
name = 'Evoque'
aliases = ['evoque']
filenames = ['*.evoque']
mimetypes = ['application/x-evoque']
flags = re.DOTALL
tokens = {
'root': [
(r'[^#$]+', Other),
(r'#\[', Comment.Multiline, 'comment'),
(r'\$\$', Other),
# svn keywords
(r'\$\w+:[^$\n]*\$', Comment.Multiline),
# directives: begin, end
(r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
String, Punctuation)),
# directives: evoque, overlay
# see doc for handling first name arg: /directives/evoque/
#+ minor inconsistency: the "name" in e.g. $overlay{name=site_base}
# should be using(PythonLexer), not passed out as String
(r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
r'(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
String, using(PythonLexer), Punctuation)),
# directives: if, for, prefer, test
(r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
bygroups(Punctuation, Name.Builtin, Punctuation, None,
using(PythonLexer), Punctuation)),
# directive clauses (no {} expression)
(r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
# expressions
(r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
bygroups(Punctuation, None, using(PythonLexer),
Name.Builtin, None, None, Punctuation)),
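            # Note: (?(2)%) is a conditional group; it requires a closing %
            # only when the opening ${% variant (group 2) matched.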
(r'#', Other),
],
'comment': [
(r'[^\]#]', Comment.Multiline),
(r'#\[', Comment.Multiline, '#push'),
(r'\]#', Comment.Multiline, '#pop'),
(r'[\]#]', Comment.Multiline)
],
}
class EvoqueHtmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`HtmlLexer`.
*New in Pygments 1.1.*
"""
name = 'HTML+Evoque'
aliases = ['html+evoque']
filenames = ['*.html']
mimetypes = ['text/html+evoque']
def __init__(self, **options):
super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
**options)
class EvoqueXmlLexer(DelegatingLexer):
"""
Subclass of the `EvoqueLexer` that highlights unlexed data with the
`XmlLexer`.
*New in Pygments 1.1.*
"""
name = 'XML+Evoque'
aliases = ['xml+evoque']
filenames = ['*.xml']
mimetypes = ['application/xml+evoque']
def __init__(self, **options):
super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
**options)
class ColdfusionLexer(RegexLexer):
"""
Coldfusion statements
"""
name = 'cfstatement'
aliases = ['cfs']
filenames = []
mimetypes = []
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
(r'//.*', Comment),
(r'\+\+|--', Operator),
(r'[-+*/^&=!]', Operator),
(r'<=|>=|<|>', Operator),
(r'mod\b', Operator),
(r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
(r'\|\||&&', Operator),
(r'"', String.Double, 'string'),
# There is a special rule for allowing html in single quoted
# strings, evidently.
(r"'.*?'", String.Single),
(r'\d+', Number),
(r'(if|else|len|var|case|default|break|switch)\b', Keyword),
(r'([A-Za-z_$][A-Za-z0-9_.]*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
(r'[A-Za-z_$][A-Za-z0-9_.]*', Name.Variable),
(r'[()\[\]{};:,.\\]', Punctuation),
(r'\s+', Text),
],
'string': [
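            # Inside a CFML double-quoted string, "" is an escaped quote and
            # #expr# interpolates an expression.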
(r'""', String.Double),
(r'#.+?#', String.Interp),
(r'[^"#]+', String.Double),
(r'#', String.Double),
(r'"', String.Double, '#pop'),
],
}
class ColdfusionMarkupLexer(RegexLexer):
"""
Coldfusion markup only
"""
name = 'Coldfusion'
aliases = ['cf']
filenames = []
mimetypes = []
tokens = {
'root': [
(r'[^<]+', Other),
include('tags'),
(r'<[^<>]*', Other),
],
'tags': [
(r'(?s)<!---.*?--->', Comment.Multiline),
(r'(?s)<!--.*?-->', Comment),
(r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
(r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
# negative lookbehind is for strings with embedded >
(r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
r'mailpart|mail|header|content|zip|image|lock|argument|try|'
r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
],
'cfoutput': [
(r'[^#<]+', Other),
(r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
Punctuation)),
#(r'<cfoutput.*?>', Name.Builtin, '#push'),
(r'</cfoutput.*?>', Name.Builtin, '#pop'),
include('tags'),
(r'(?s)<[^<>]*', Other),
(r'#', Other),
],
}
class ColdfusionHtmlLexer(DelegatingLexer):
"""
Coldfusion markup in html
"""
name = 'Coldfusion HTML'
aliases = ['cfm']
filenames = ['*.cfm', '*.cfml', '*.cfc']
mimetypes = ['application/x-coldfusion']
def __init__(self, **options):
super(ColdfusionHtmlLexer, self).__init__(HtmlLexer, ColdfusionMarkupLexer,
**options)
class SspLexer(DelegatingLexer):
"""
Lexer for Scalate Server Pages.
*New in Pygments 1.4.*
"""
name = 'Scalate Server Page'
aliases = ['ssp']
filenames = ['*.ssp']
mimetypes = ['application/x-ssp']
def __init__(self, **options):
super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)
def analyse_text(text):
rv = 0.0
if re.search('val \w+\s*:', text):
rv += 0.6
if looks_like_xml(text):
rv += 0.2
if '<%' in text and '%>' in text:
rv += 0.1
return rv
class TeaTemplateRootLexer(RegexLexer):
"""
    Base for the `TeaTemplateLexer`. Yields `Token.Other` for areas outside
    of code blocks.
*New in Pygments 1.5.*
"""
tokens = {
'root': [
(r'<%\S?', Keyword, 'sec'),
(r'[^<]+', Other),
(r'<', Other),
],
'sec': [
(r'%>', Keyword, '#pop'),
# note: '\w\W' != '.' without DOTALL.
(r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
],
}
class TeaTemplateLexer(DelegatingLexer):
"""
Lexer for `Tea Templates <http://teatrove.org/>`_.
*New in Pygments 1.5.*
"""
name = 'Tea'
aliases = ['tea']
filenames = ['*.tea']
mimetypes = ['text/x-tea']
def __init__(self, **options):
super(TeaTemplateLexer, self).__init__(XmlLexer,
TeaTemplateRootLexer, **options)
def analyse_text(text):
rv = TeaLangLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
if '<%' in text and '%>' in text:
rv += 0.1
return rv
class LassoHtmlLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`HtmlLexer`.
    Nested JavaScript and CSS are also highlighted.
*New in Pygments 1.6.*
"""
name = 'HTML+Lasso'
aliases = ['html+lasso']
alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
'*.incl', '*.inc', '*.las']
mimetypes = ['text/html+lasso',
'application/x-httpd-lasso',
'application/x-httpd-lasso[89]']
def __init__(self, **options):
super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
if re.search(r'<\w+>', text, re.I):
rv += 0.2
if html_doctype_matches(text):
rv += 0.5
return rv
class LassoXmlLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`XmlLexer`.
*New in Pygments 1.6.*
"""
name = 'XML+Lasso'
aliases = ['xml+lasso']
alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
'*.incl', '*.inc', '*.las']
mimetypes = ['application/xml+lasso']
def __init__(self, **options):
super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.01
if looks_like_xml(text):
rv += 0.4
return rv
class LassoCssLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`CssLexer`.
*New in Pygments 1.6.*
"""
name = 'CSS+Lasso'
aliases = ['css+lasso']
alias_filenames = ['*.css']
mimetypes = ['text/css+lasso']
def __init__(self, **options):
options['requiredelimiters'] = True
super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
if re.search(r'\w+:.+?;', text):
rv += 0.1
if 'padding:' in text:
rv += 0.1
return rv
class LassoJavascriptLexer(DelegatingLexer):
"""
Subclass of the `LassoLexer` which highlights unhandled data with the
`JavascriptLexer`.
*New in Pygments 1.6.*
"""
name = 'JavaScript+Lasso'
aliases = ['js+lasso', 'javascript+lasso']
alias_filenames = ['*.js']
mimetypes = ['application/x-javascript+lasso',
'text/x-javascript+lasso',
'text/javascript+lasso']
def __init__(self, **options):
options['requiredelimiters'] = True
super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
**options)
def analyse_text(text):
rv = LassoLexer.analyse_text(text) - 0.05
if 'function' in text:
rv += 0.2
return rv
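

# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hypothetical demonstration of one of the delegating lexers
# defined above; it assumes the Pygments package is importable so that
# `highlight` and `TerminalFormatter` are available.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample = '<page><% int n = 1 + 1; %></page>'
    # JspLexer hands the <% ... %> sections to JavaLexer and delegates
    # the surrounding markup to XmlLexer.
    print(highlight(sample, JspLexer(), TerminalFormatter()))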
|
mit
|
WhiteMagic/JoystickGremlin
|
gremlin/ui/dialogs.py
|
1
|
45199
|
# -*- coding: utf-8; -*-
# Copyright (C) 2015 - 2019 Lionel Ott
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
import sys
import winreg
from PyQt5 import QtCore, QtGui, QtWidgets
import dill
import gremlin
from . import common, ui_about
class OptionsUi(common.BaseDialogUi):
"""UI allowing the configuration of a variety of options."""
def __init__(self, parent=None):
"""Creates a new options UI instance.
:param parent the parent of this widget
"""
super().__init__(parent)
# Actual configuration object being managed
self.config = gremlin.config.Configuration()
self.setMinimumWidth(400)
self.setWindowTitle("Options")
self.main_layout = QtWidgets.QVBoxLayout(self)
self.tab_container = QtWidgets.QTabWidget()
self.main_layout.addWidget(self.tab_container)
self._create_general_page()
self._create_profile_page()
self._create_hidguardian_page()
def _create_general_page(self):
"""Creates the general options page."""
self.general_page = QtWidgets.QWidget()
self.general_layout = QtWidgets.QVBoxLayout(self.general_page)
# Highlight input option
self.highlight_input = QtWidgets.QCheckBox(
"Highlight currently used input"
)
self.highlight_input.clicked.connect(self._highlight_input)
self.highlight_input.setChecked(self.config.highlight_input)
# Switch to highlighted device
self.highlight_device = QtWidgets.QCheckBox(
"Highlight swaps device tabs"
)
self.highlight_device.clicked.connect(self._highlight_device)
self.highlight_device.setChecked(self.config.highlight_device)
# Close to system tray option
self.close_to_systray = QtWidgets.QCheckBox(
"Closing minimizes to system tray"
)
self.close_to_systray.clicked.connect(self._close_to_systray)
self.close_to_systray.setChecked(self.config.close_to_tray)
# Activate profile on launch
self.activate_on_launch = QtWidgets.QCheckBox(
"Activate profile on launch"
)
self.activate_on_launch.clicked.connect(self._activate_on_launch)
self.activate_on_launch.setChecked(self.config.activate_on_launch)
# Start minimized option
self.start_minimized = QtWidgets.QCheckBox(
"Start Joystick Gremlin minimized"
)
self.start_minimized.clicked.connect(self._start_minimized)
self.start_minimized.setChecked(self.config.start_minimized)
# Start on user login
self.start_with_windows = QtWidgets.QCheckBox(
"Start Joystick Gremlin with Windows"
)
self.start_with_windows.clicked.connect(self._start_windows)
self.start_with_windows.setChecked(self._start_windows_enabled())
# Show message on mode change
self.show_mode_change_message = QtWidgets.QCheckBox(
"Show message when changing mode"
)
self.show_mode_change_message.clicked.connect(
self._show_mode_change_message
)
self.show_mode_change_message.setChecked(
self.config.mode_change_message
)
# Default action selection
self.default_action_layout = QtWidgets.QHBoxLayout()
self.default_action_label = QtWidgets.QLabel("Default action")
self.default_action_dropdown = QtWidgets.QComboBox()
self.default_action_layout.addWidget(self.default_action_label)
self.default_action_layout.addWidget(self.default_action_dropdown)
self._init_action_dropdown()
self.default_action_layout.addStretch()
# Macro axis polling rate
self.macro_axis_polling_layout = QtWidgets.QHBoxLayout()
self.macro_axis_polling_label = \
QtWidgets.QLabel("Macro axis polling rate")
self.macro_axis_polling_value = common.DynamicDoubleSpinBox()
self.macro_axis_polling_value.setRange(0.001, 1.0)
self.macro_axis_polling_value.setSingleStep(0.05)
self.macro_axis_polling_value.setDecimals(3)
self.macro_axis_polling_value.setValue(
self.config.macro_axis_polling_rate
)
self.macro_axis_polling_value.valueChanged.connect(
self._macro_axis_polling_rate
)
self.macro_axis_polling_layout.addWidget(self.macro_axis_polling_label)
self.macro_axis_polling_layout.addWidget(self.macro_axis_polling_value)
self.macro_axis_polling_layout.addStretch()
# Macro axis minimum change value
self.macro_axis_minimum_change_layout = QtWidgets.QHBoxLayout()
self.macro_axis_minimum_change_label = \
QtWidgets.QLabel("Macro axis minimum change value")
self.macro_axis_minimum_change_value = common.DynamicDoubleSpinBox()
self.macro_axis_minimum_change_value.setRange(0.00001, 1.0)
self.macro_axis_minimum_change_value.setSingleStep(0.01)
self.macro_axis_minimum_change_value.setDecimals(5)
self.macro_axis_minimum_change_value.setValue(
self.config.macro_axis_minimum_change_rate
)
self.macro_axis_minimum_change_value.valueChanged.connect(
self._macro_axis_minimum_change_value
)
self.macro_axis_minimum_change_layout.addWidget(
self.macro_axis_minimum_change_label
)
self.macro_axis_minimum_change_layout.addWidget(
self.macro_axis_minimum_change_value
)
self.macro_axis_minimum_change_layout.addStretch()
self.general_layout.addWidget(self.highlight_input)
self.general_layout.addWidget(self.highlight_device)
self.general_layout.addWidget(self.close_to_systray)
self.general_layout.addWidget(self.activate_on_launch)
self.general_layout.addWidget(self.start_minimized)
self.general_layout.addWidget(self.start_with_windows)
self.general_layout.addWidget(self.show_mode_change_message)
self.general_layout.addLayout(self.default_action_layout)
self.general_layout.addLayout(self.macro_axis_polling_layout)
self.general_layout.addLayout(self.macro_axis_minimum_change_layout)
self.general_layout.addStretch()
self.tab_container.addTab(self.general_page, "General")
def _create_profile_page(self):
"""Creates the profile options page."""
self.profile_page = QtWidgets.QWidget()
self.profile_page_layout = QtWidgets.QVBoxLayout(self.profile_page)
# Autoload profile option
self.autoload_checkbox = QtWidgets.QCheckBox(
"Automatically load profile based on current application"
)
self.autoload_checkbox.clicked.connect(self._autoload_profiles)
self.autoload_checkbox.setChecked(self.config.autoload_profiles)
self.keep_last_autoload_checkbox = QtWidgets.QCheckBox(
"Keep profile active on focus loss"
)
self.keep_last_autoload_checkbox.setToolTip("""If this option is off, profiles that have been configured to load automatically when an application gains focus
will deactivate when that application loses focus.
If this option is on, the last active profile will remain active until a different profile is loaded.""")
self.keep_last_autoload_checkbox.clicked.connect(self._keep_last_autoload)
self.keep_last_autoload_checkbox.setChecked(self.config.keep_last_autoload)
self.keep_last_autoload_checkbox.setEnabled(self.config.autoload_profiles)
# Executable dropdown list
self.executable_layout = QtWidgets.QHBoxLayout()
self.executable_label = QtWidgets.QLabel("Executable")
self.executable_selection = QtWidgets.QComboBox()
self.executable_selection.setMinimumWidth(300)
self.executable_selection.currentTextChanged.connect(
self._show_executable
)
self.executable_add = QtWidgets.QPushButton()
self.executable_add.setIcon(QtGui.QIcon("gfx/button_add.png"))
self.executable_add.clicked.connect(self._new_executable)
self.executable_remove = QtWidgets.QPushButton()
self.executable_remove.setIcon(QtGui.QIcon("gfx/button_delete.png"))
self.executable_remove.clicked.connect(self._remove_executable)
self.executable_edit = QtWidgets.QPushButton()
self.executable_edit.setIcon(QtGui.QIcon("gfx/button_edit.png"))
self.executable_edit.clicked.connect(self._edit_executable)
self.executable_list = QtWidgets.QPushButton()
self.executable_list.setIcon(QtGui.QIcon("gfx/list_show.png"))
self.executable_list.clicked.connect(self._list_executables)
self.executable_layout.addWidget(self.executable_label)
self.executable_layout.addWidget(self.executable_selection)
self.executable_layout.addWidget(self.executable_add)
self.executable_layout.addWidget(self.executable_remove)
self.executable_layout.addWidget(self.executable_edit)
self.executable_layout.addWidget(self.executable_list)
self.executable_layout.addStretch()
self.profile_layout = QtWidgets.QHBoxLayout()
self.profile_field = QtWidgets.QLineEdit()
self.profile_field.textChanged.connect(self._update_profile)
self.profile_field.editingFinished.connect(self._update_profile)
self.profile_select = QtWidgets.QPushButton()
self.profile_select.setIcon(QtGui.QIcon("gfx/button_edit.png"))
self.profile_select.clicked.connect(self._select_profile)
self.profile_layout.addWidget(self.profile_field)
self.profile_layout.addWidget(self.profile_select)
self.profile_page_layout.addWidget(self.autoload_checkbox)
self.profile_page_layout.addWidget(self.keep_last_autoload_checkbox)
self.profile_page_layout.addLayout(self.executable_layout)
self.profile_page_layout.addLayout(self.profile_layout)
self.profile_page_layout.addStretch()
self.tab_container.addTab(self.profile_page, "Profiles")
self.populate_executables()
def _create_hidguardian_page(self):
self.hg_page = QtWidgets.QWidget()
self.hg_page_layout = QtWidgets.QVBoxLayout(self.hg_page)
# Display instructions for non admin users
if not gremlin.util.is_user_admin():
label = QtWidgets.QLabel(
"In order to use HidGuardian to both specify the devices to "
"hide via HidGuardian as well as have Gremlin see them, "
"Gremlin has to be run as Administrator."
)
label.setStyleSheet("QLabel { background-color : '#FFF4B0'; }")
label.setWordWrap(True)
label.setFrameShape(QtWidgets.QFrame.Box)
label.setMargin(10)
self.hg_page_layout.addWidget(label)
else:
# Get list of devices affected by HidGuardian
hg = gremlin.hid_guardian.HidGuardian()
hg_device_list = hg.get_device_list()
self.hg_device_layout = QtWidgets.QGridLayout()
self.hg_device_layout.addWidget(
QtWidgets.QLabel("<b>Device Name</b>"), 0, 0
)
self.hg_device_layout.addWidget(
QtWidgets.QLabel("<b>Hidden</b>"), 0, 1
)
devices = gremlin.joystick_handling.joystick_devices()
devices_added = []
for i, dev in enumerate(devices):
# Don't add vJoy to this list
if dev.name == "vJoy Device":
continue
# For identical VID / PID devices only add one instance
vid_pid_key = (dev.vendor_id, dev.product_id)
if vid_pid_key in devices_added:
continue
# Set checkbox state based on whether or not HidGuardian tracks
# the device. Add a callback with pid/vid to add / remove said
# device from the list of devices handled by HidGuardian
self.hg_device_layout.addWidget(
QtWidgets.QLabel(dev.name), i+1, 0
)
checkbox = QtWidgets.QCheckBox("")
checkbox.setChecked(vid_pid_key in hg_device_list)
checkbox.stateChanged.connect(self._create_hg_cb(dev))
self.hg_device_layout.addWidget(checkbox, i+1, 1)
devices_added.append(vid_pid_key)
self.hg_page_layout.addLayout(self.hg_device_layout)
self.hg_page_layout.addStretch()
label = QtWidgets.QLabel(
"After making changes to the devices hidden by HidGuardian "
"the devices that should now be hidden or shown to other"
"applications need to be unplugged and plugged back in for "
"the changes to take effect."
)
label.setStyleSheet("QLabel { background-color : '#FFF4B0'; }")
label.setWordWrap(True)
label.setFrameShape(QtWidgets.QFrame.Box)
label.setMargin(10)
self.hg_page_layout.addWidget(label)
self.tab_container.addTab(self.hg_page, "HidGuardian")
def closeEvent(self, event):
"""Closes the calibration window.
:param event the close event
"""
self.config.save()
super().closeEvent(event)
def populate_executables(self, executable_name=None):
"""Populates the profile drop down menu.
:param executable_name name of the executable to pre select
"""
self.profile_field.textChanged.disconnect(self._update_profile)
self.executable_selection.clear()
executable_list = self.config.get_executable_list()
for path in executable_list:
self.executable_selection.addItem(path)
self.profile_field.textChanged.connect(self._update_profile)
# Select the provided executable if it exists, otherwise the first one
# in the list
index = 0
if executable_name is not None and executable_name in executable_list:
index = self.executable_selection.findText(executable_name)
self.executable_selection.setCurrentIndex(index)
def _autoload_profiles(self, clicked):
"""Stores profile autoloading preference.
:param clicked whether or not the checkbox is ticked
"""
self.keep_last_autoload_checkbox.setEnabled(clicked)
self.config.autoload_profiles = clicked
self.config.save()
def _keep_last_autoload(self, clicked):
"""Stores keep last autoload preference.
:param clicked whether or not the checkbox is ticked
"""
self.config.keep_last_autoload = clicked
self.config.save()
def _activate_on_launch(self, clicked):
"""Stores activation of profile on launch preference.
:param clicked whether or not the checkbox is ticked
"""
self.config.activate_on_launch = clicked
self.config.save()
def _close_to_systray(self, clicked):
"""Stores closing to system tray preference.
:param clicked whether or not the checkbox is ticked
"""
self.config.close_to_tray = clicked
self.config.save()
def _start_minimized(self, clicked):
"""Stores start minimized preference.
:param clicked whether or not the checkbox is ticked
"""
self.config.start_minimized = clicked
self.config.save()
def _start_windows(self, clicked):
"""Set registry entry to launch Joystick Gremlin on login.
:param clicked True if launch should happen on login, False otherwise
"""
if clicked:
path = os.path.abspath(sys.argv[0])
subprocess.run(
'reg add "HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Run" /V "Joystick Gremlin" /t REG_SZ /F /D "{}"'.format(path)
)
else:
subprocess.run(
'reg delete "HKEY_CURRENT_USER\Software\Microsoft\Windows\CurrentVersion\Run" /F /V "Joystick Gremlin"'
)
self.activateWindow()
def _start_windows_enabled(self):
"""Returns whether or not Gremlin should launch on login.
:return True if Gremlin launches on login, False otherwise
"""
key_handle = winreg.OpenKey(
winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Run"
)
key_info = winreg.QueryInfoKey(key_handle)
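        # QueryInfoKey returns (subkey count, value count, last modified);
        # index 1 is the number of values stored under the Run key.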
for i in range(key_info[1]):
value_info = winreg.EnumValue(key_handle, i)
if value_info[0] == "Joystick Gremlin":
return True
return False
def _highlight_input(self, clicked):
"""Stores preference for input highlighting.
:param clicked whether or not the checkbox is ticked
"""
self.config.highlight_input = clicked
self.config.save()
def _highlight_device(self, clicked):
"""Stores preference for device highlighting.
:param clicked whether or not the checkbox is ticked
"""
self.config.highlight_device = clicked
self.config.save()
def _list_executables(self):
"""Shows a list of executables for the user to pick."""
self.executable_list_view = ProcessWindow()
self.executable_list_view.process_selected.connect(self._add_executable)
self.executable_list_view.show()
def _add_executable(self, fname):
"""Adds the provided executable to the list of configurations.
:param fname the executable for which to add a mapping
"""
if fname not in self.config.get_executable_list():
self.config.set_profile(fname, "")
self.populate_executables(fname)
else:
self.executable_selection.setCurrentIndex(
self.executable_selection.findText(fname)
)
def _edit_executable(self):
"""Allows editing the path of an executable."""
new_text, flag = QtWidgets.QInputDialog.getText(
self,
"Change Executable / RegExp",
"Change the executable text or enter a regular expression to use.",
QtWidgets.QLineEdit.Normal,
self.executable_selection.currentText()
)
# If the user did click on ok update the entry
old_entry = self.executable_selection.currentText()
if flag:
if old_entry not in self.config.get_executable_list():
self._add_executable(new_text)
else:
self.config.set_profile(
new_text,
self.config.get_profile(old_entry)
)
self.config.remove_profile(old_entry)
self.populate_executables(new_text)
def _new_executable(self):
"""Prompts the user to select a new executable to add to the
profile.
"""
fname, _ = QtWidgets.QFileDialog.getOpenFileName(
None,
"Path to executable",
"C:\\",
"Executable (*.exe)"
)
if fname != "":
self._add_executable(fname)
def _remove_executable(self):
"""Removes the current executable from the configuration."""
self.config.remove_profile(self.executable_selection.currentText())
self.populate_executables()
def _select_profile(self):
"""Displays a file selection dialog for a profile.
If a valid file is selected the mapping from executable to
profile is updated.
"""
fname, _ = QtWidgets.QFileDialog.getOpenFileName(
None,
"Path to executable",
gremlin.util.userprofile_path(),
"Profile (*.xml)"
)
if fname != "":
self.profile_field.setText(fname)
self.config.set_profile(
self.executable_selection.currentText(),
self.profile_field.text()
)
def _show_executable(self, exec_path):
"""Displays the profile associated with the given executable.
        :param exec_path path to the executable to show
"""
self.profile_field.setText(self.config.get_profile(exec_path))
def _show_mode_change_message(self, clicked):
"""Stores the user's preference for mode change notifications.
:param clicked whether or not the checkbox is ticked"""
self.config.mode_change_message = clicked
self.config.save()
def _update_profile(self):
"""Updates the profile associated with the current executable."""
self.config.set_profile(
self.executable_selection.currentText(),
self.profile_field.text()
)
def _init_action_dropdown(self):
"""Initializes the action selection dropdown menu."""
plugins = gremlin.plugin_manager.ActionPlugins()
for act in sorted(plugins.repository.values(), key=lambda x: x.name):
self.default_action_dropdown.addItem(act.name)
self.default_action_dropdown.setCurrentText(self.config.default_action)
self.default_action_dropdown.currentTextChanged.connect(
self._update_default_action
)
def _update_default_action(self, value):
"""Updates the config with the newly selected action name.
:param value the name of the newly selected action
"""
self.config.default_action = value
self.config.save()
def _macro_axis_polling_rate(self, value):
"""Updates the config with the newly set polling rate.
:param value the new polling rate
"""
self.config.macro_axis_polling_rate = value
self.config.save()
def _macro_axis_minimum_change_value(self, value):
"""Updates the config with the newly set minimum change value.
:param value the new minimum change value
"""
self.config.macro_axis_minimum_change_rate = value
def _create_hg_cb(self, *params):
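        # Bind the device to its own lambda so every checkbox updates the
        # matching HidGuardian entry (same pattern as the _create_*_cb
        # helpers in ModeManagerUi).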
return lambda x: self._update_hg_device(x, *params)
def _update_hg_device(self, state, device):
hg = gremlin.hid_guardian.HidGuardian()
if state == QtCore.Qt.Checked:
hg.add_device(device.vendor_id, device.product_id)
else:
hg.remove_device(device.vendor_id, device.product_id)
class ProcessWindow(common.BaseDialogUi):
"""Displays active processes in a window for the user to select."""
# Signal emitted when the user selects a process
process_selected = QtCore.pyqtSignal(str)
def __init__(self, parent=None):
"""Creates a new instance.
:param parent the parent of the widget
"""
super().__init__(parent)
self.setWindowTitle("Process List")
self.setMinimumWidth(400)
self.setMinimumHeight(600)
self.main_layout = QtWidgets.QVBoxLayout(self)
self.list_model = QtCore.QStringListModel()
self.list_model.setStringList(
gremlin.process_monitor.list_current_processes()
)
self.list_view = QtWidgets.QListView()
self.list_view.setModel(self.list_model)
self.list_view.setEditTriggers(
QtWidgets.QAbstractItemView.NoEditTriggers
)
self.main_layout.addWidget(self.list_view)
self.select_button = QtWidgets.QPushButton("Select")
self.select_button.clicked.connect(self._select)
self.main_layout.addWidget(self.select_button)
def _select(self):
"""Emits the process_signal when the select button is pressed."""
self.process_selected.emit(self.list_view.currentIndex().data())
self.close()
class LogWindowUi(common.BaseDialogUi):
"""Window displaying log file content."""
def __init__(self, parent=None):
"""Creates a new instance.
:param parent the parent of this widget
"""
super().__init__(parent)
self.setWindowTitle("Log Viewer")
self.setMinimumWidth(600)
self.main_layout = QtWidgets.QVBoxLayout(self)
self.tab_container = QtWidgets.QTabWidget()
self.main_layout.addWidget(self.tab_container)
self._ui_elements = {}
self._create_log_display(
os.path.join(gremlin.util.userprofile_path(), "system.log"),
"System"
)
self._create_log_display(
os.path.join(gremlin.util.userprofile_path(), "user.log"),
"User"
)
self.watcher = gremlin.util.FileWatcher([
os.path.join(gremlin.util.userprofile_path(), "system.log"),
os.path.join(gremlin.util.userprofile_path(), "user.log")
])
self.watcher.file_changed.connect(self._reload)
def closeEvent(self, event):
"""Handles closing of the window.
:param event the closing event
"""
self.watcher.stop()
super().closeEvent(event)
def _create_log_display(self, fname, title):
"""Creates a new tab displaying log file contents.
:param fname path to the file whose content to display
:param title the title of the tab
"""
page = QtWidgets.QWidget()
layout = QtWidgets.QVBoxLayout(page)
log_display = QtWidgets.QTextEdit()
log_display.setText(open(fname).read())
button = QtWidgets.QPushButton("Clear log")
button.clicked.connect(lambda: self._clear_log(fname))
layout.addWidget(log_display)
layout.addWidget(button)
self._ui_elements[fname] = {
"page": page,
"layout": layout,
"button": button,
"log_display": log_display
}
self.tab_container.addTab(
self._ui_elements[fname]["page"],
title
)
def _clear_log(self, fname):
"""Clears the specified log file.
:param fname path to the file to clear
"""
open(fname, "w").close()
def _reload(self, fname):
"""Reloads the content of tab displaying the given file.
:param fname name of the file whose content to update
"""
widget = self._ui_elements[fname]["log_display"]
widget.setText(open(fname).read())
widget.verticalScrollBar().setValue(
widget.verticalScrollBar().maximum()
)
class AboutUi(common.BaseDialogUi):
"""Widget which displays information about the application."""
def __init__(self, parent=None):
"""Creates a new about widget.
This creates a simple widget which shows version information
and various software licenses.
:param parent parent of this widget
"""
super().__init__(parent)
self.ui = ui_about.Ui_About()
self.ui.setupUi(self)
self.ui.about.setHtml(
open(gremlin.util.resource_path("about/about.html")).read()
)
self.ui.jg_license.setHtml(
open(gremlin.util.resource_path("about/joystick_gremlin.html")).read()
)
license_list = [
"about/third_party_licenses.html",
"about/modernuiicons.html",
"about/pyqt.html",
"about/pywin32.html",
"about/qt5.html",
"about/reportlab.html",
"about/vjoy.html",
]
third_party_licenses = ""
for fname in license_list:
third_party_licenses += open(gremlin.util.resource_path(fname)).read()
third_party_licenses += "<hr>"
self.ui.third_party_licenses.setHtml(third_party_licenses)
class ModeManagerUi(common.BaseDialogUi):
"""Enables the creation of modes and configuring their inheritance."""
# Signal emitted when mode configuration changes
modes_changed = QtCore.pyqtSignal()
def __init__(self, profile_data, parent=None):
"""Creates a new instance.
        :param profile_data the profile data whose modes are being
            configured
:param parent the parent of this widget
"""
super().__init__(parent)
self._profile = profile_data
self.setWindowTitle("Mode Manager")
self.mode_dropdowns = {}
self.mode_rename = {}
self.mode_delete = {}
self.mode_callbacks = {}
self._create_ui()
# Disable keyboard event handler
el = gremlin.event_handler.EventListener()
el.keyboard_hook.stop()
def closeEvent(self, event):
"""Emits the closed event when this widget is being closed.
:param event the close event details
"""
# Re-enable keyboard event handler
el = gremlin.event_handler.EventListener()
el.keyboard_hook.start()
super().closeEvent(event)
def _create_ui(self):
"""Creates the required UII elements."""
self.main_layout = QtWidgets.QVBoxLayout(self)
self.mode_layout = QtWidgets.QGridLayout()
self.main_layout.addLayout(self.mode_layout)
self.main_layout.addStretch()
self.add_button = QtWidgets.QPushButton("Add Mode")
self.add_button.clicked.connect(self._add_mode_cb)
label = QtWidgets.QLabel(
"Modes are by default self contained configurations. Specifying "
"a parent for a mode causes the the mode \"inherits\" all actions "
"defined in the parent, unless the mode configures its own actions "
"for specific inputs."
)
label.setStyleSheet("QLabel { background-color : '#FFF4B0'; }")
label.setWordWrap(True)
label.setFrameShape(QtWidgets.QFrame.Box)
label.setMargin(10)
self.main_layout.addWidget(label)
self.main_layout.addWidget(self.add_button)
self._populate_mode_layout()
def _populate_mode_layout(self):
"""Generates the mode layout UI displaying the different modes."""
# Clear potentially existing content
common.clear_layout(self.mode_layout)
self.mode_dropdowns = {}
self.mode_rename = {}
self.mode_delete = {}
self.mode_callbacks = {}
# Obtain mode names and the mode they inherit from
mode_list = {}
for device in self._profile.devices.values():
for mode in device.modes.values():
if mode.name not in mode_list:
# FIXME: somewhere a mode's name is not set
if mode.name is None:
continue
mode_list[mode.name] = mode.inherit
# Add header information
self.mode_layout.addWidget(QtWidgets.QLabel("<b>Name</b>"), 0, 0)
self.mode_layout.addWidget(QtWidgets.QLabel("<b>Parent</b>"), 0, 1)
# Create UI element for each mode
row = 1
for mode, inherit in sorted(mode_list.items()):
self.mode_layout.addWidget(QtWidgets.QLabel(mode), row, 0)
self.mode_dropdowns[mode] = QtWidgets.QComboBox()
self.mode_dropdowns[mode].addItem("None")
self.mode_dropdowns[mode].setMinimumContentsLength(20)
for name in sorted(mode_list.keys()):
if name != mode:
self.mode_dropdowns[mode].addItem(name)
self.mode_callbacks[mode] = self._create_inheritance_change_cb(mode)
self.mode_dropdowns[mode].currentTextChanged.connect(
self.mode_callbacks[mode]
)
self.mode_dropdowns[mode].setCurrentText(inherit)
# Rename mode button
self.mode_rename[mode] = QtWidgets.QPushButton(
QtGui.QIcon("gfx/button_edit.png"), ""
)
self.mode_layout.addWidget(self.mode_rename[mode], row, 2)
self.mode_rename[mode].clicked.connect(
self._create_rename_mode_cb(mode)
)
# Delete mode button
self.mode_delete[mode] = QtWidgets.QPushButton(
QtGui.QIcon("gfx/mode_delete"), ""
)
self.mode_layout.addWidget(self.mode_delete[mode], row, 3)
self.mode_delete[mode].clicked.connect(
self._create_delete_mode_cb(mode)
)
self.mode_layout.addWidget(self.mode_dropdowns[mode], row, 1)
row += 1
def _create_inheritance_change_cb(self, mode):
"""Returns a lambda function callback to change the inheritance of
a mode.
This is required as otherwise lambda functions created within a
function do not behave as desired.
:param mode the mode for which the callback is being created
:return customized lambda function
"""
return lambda x: self._change_mode_inheritance(mode, x)
def _create_rename_mode_cb(self, mode):
"""Returns a lambda function callback to rename a mode.
This is required as otherwise lambda functions created within a
function do not behave as desired.
:param mode the mode for which the callback is being created
:return customized lambda function
"""
return lambda: self._rename_mode(mode)
def _create_delete_mode_cb(self, mode):
"""Returns a lambda function callback to delete the given mode.
This is required as otherwise lambda functions created within a
function do not behave as desired.
:param mode the mode to remove
:return lambda function to perform the removal
"""
return lambda: self._delete_mode(mode)
def _change_mode_inheritance(self, mode, inherit):
"""Updates the inheritance information of a given mode.
:param mode the mode to update
:param inherit the name of the mode this mode inherits from
"""
# Check if this inheritance would cause a cycle, turning the
# tree structure into a graph
has_inheritance_cycle = False
if inherit != "None":
all_modes = list(self._profile.devices.values())[0].modes
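            # All devices share the same mode hierarchy, so the first
            # device's modes are representative for the cycle check.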
cur_mode = inherit
while all_modes[cur_mode].inherit is not None:
if all_modes[cur_mode].inherit == mode:
has_inheritance_cycle = True
break
cur_mode = all_modes[cur_mode].inherit
# Update the inheritance information in the profile
if not has_inheritance_cycle:
for name, device in self._profile.devices.items():
if inherit == "None":
inherit = None
device.ensure_mode_exists(mode)
device.modes[mode].inherit = inherit
self.modes_changed.emit()
def _rename_mode(self, mode_name):
"""Asks the user for the new name for the given mode.
If the user provided name for the mode is invalid the
renaming is aborted and no change made.
        :param mode_name the current name of the mode to rename
"""
# Retrieve new name from the user
name, user_input = QtWidgets.QInputDialog.getText(
self,
"Mode name",
"",
QtWidgets.QLineEdit.Normal,
mode_name
)
if user_input:
if name in gremlin.profile.mode_list(self._profile):
gremlin.util.display_error(
"A mode with the name \"{}\" already exists".format(name)
)
else:
# Update the renamed mode in each device
for device in self._profile.devices.values():
device.modes[name] = device.modes[mode_name]
device.modes[name].name = name
del device.modes[mode_name]
# Update inheritance information
for mode in device.modes.values():
if mode.inherit == mode_name:
mode.inherit = name
self.modes_changed.emit()
self._populate_mode_layout()
def _delete_mode(self, mode_name):
"""Removes the specified mode.
Performs an update of the inheritance of all modes that inherited
from the deleted mode.
:param mode_name the name of the mode to delete
"""
# Obtain mode from which the mode we want to delete inherits
parent_of_deleted = None
for mode in list(self._profile.devices.values())[0].modes.values():
if mode.name == mode_name:
parent_of_deleted = mode.inherit
        # Assign the inherited mode of the deleted one to all modes that
        # inherit from the mode to be deleted
for device in self._profile.devices.values():
for mode in device.modes.values():
if mode.inherit == mode_name:
mode.inherit = parent_of_deleted
# Remove the mode from the profile
for device in self._profile.devices.values():
del device.modes[mode_name]
# Update the ui
self._populate_mode_layout()
self.modes_changed.emit()
def _add_mode_cb(self, checked):
"""Asks the user for a new mode to add.
If the user provided name for the mode is invalid no mode is
added.
:param checked flag indicating whether or not the checkbox is active
"""
name, user_input = QtWidgets.QInputDialog.getText(None, "Mode name", "")
if user_input:
if name in gremlin.profile.mode_list(self._profile):
gremlin.util.display_error(
"A mode with the name \"{}\" already exists".format(name)
)
else:
for device in self._profile.devices.values():
new_mode = gremlin.profile.Mode(device)
new_mode.name = name
device.modes[name] = new_mode
self.modes_changed.emit()
self._populate_mode_layout()
class DeviceInformationUi(common.BaseDialogUi):
"""Widget which displays information about all connected joystick
devices."""
def __init__(self, parent=None):
"""Creates a new instance.
:param parent the parent widget
"""
super().__init__(parent)
self.devices = gremlin.joystick_handling.joystick_devices()
self.setWindowTitle("Device Information")
self.main_layout = QtWidgets.QGridLayout(self)
self.main_layout.addWidget(QtWidgets.QLabel("<b>Name</b>"), 0, 0)
self.main_layout.addWidget(QtWidgets.QLabel("<b>Axes</b>"), 0, 1)
self.main_layout.addWidget(QtWidgets.QLabel("<b>Buttons</b>"), 0, 2)
self.main_layout.addWidget(QtWidgets.QLabel("<b>Hats</b>"), 0, 3)
self.main_layout.addWidget(QtWidgets.QLabel("<b>Vendor ID</b>"), 0, 4)
self.main_layout.addWidget(QtWidgets.QLabel("<b>Product ID</b>"), 0, 5)
self.main_layout.addWidget(QtWidgets.QLabel("<b>GUID"), 0, 6)
for i, entry in enumerate(self.devices):
self.main_layout.addWidget(
QtWidgets.QLabel(entry.name), i+1, 0
)
self.main_layout.addWidget(
QtWidgets.QLabel(str(entry.axis_count)), i+1, 1
)
self.main_layout.addWidget(
QtWidgets.QLabel(str(entry.button_count)), i+1, 2
)
self.main_layout.addWidget(
QtWidgets.QLabel(str(entry.hat_count)), i+1, 3
)
self.main_layout.addWidget(
QtWidgets.QLabel("{:04X}".format(entry.vendor_id)), i+1, 4
)
self.main_layout.addWidget(
QtWidgets.QLabel("{:04X}".format(entry.product_id)), i+1, 5
)
guid_field = QtWidgets.QLineEdit()
guid_field.setText(str(entry.device_guid))
guid_field.setReadOnly(True)
guid_field.setMinimumWidth(230)
guid_field.setMaximumWidth(230)
self.main_layout.addWidget(guid_field, i+1, 6)
self.close_button = QtWidgets.QPushButton("Close")
self.close_button.clicked.connect(lambda: self.close())
self.main_layout.addWidget(self.close_button, len(self.devices)+1, 3)
class SwapDevicesUi(common.BaseDialogUi):
"""UI Widget that allows users to swap identical devices."""
def __init__(self, profile, parent=None):
"""Creates a new instance.
:param profile the current profile
:param parent the parent of this widget
"""
super().__init__(parent)
self.profile = profile
# Create UI elements
self.setWindowTitle("Swap Devices")
self.main_layout = QtWidgets.QVBoxLayout(self)
self._create_swap_ui()
def _create_swap_ui(self):
"""Displays possible groups of swappable devices."""
common.clear_layout(self.main_layout)
profile_modifier = gremlin.profile.ProfileModifier(self.profile)
device_list = profile_modifier.device_information_list()
device_layout = QtWidgets.QGridLayout()
for i, data in enumerate(device_list):
# Ignore the keyboard
if data.device_guid == dill.GUID_Keyboard:
continue
# Ignore devices with no remappable entries
if (data.containers + data.conditions + data.merge_axis) == 0:
continue
            # UI elements for this device
name = QtWidgets.QLabel(data.name)
name.setAlignment(QtCore.Qt.AlignTop)
labels = QtWidgets.QLabel("Containers\nConditions\nMerge Axis")
counts = QtWidgets.QLabel("{:d}\n{:d}\n{:d}".format(
data.containers, data.conditions, data.merge_axis
))
counts.setAlignment(QtCore.Qt.AlignRight)
record_button = QtWidgets.QPushButton(
"Assigned to: {} - {}".format(data.device_guid, data.name)
)
record_button.clicked.connect(
self._create_request_user_input_cb(data.device_guid)
)
            # Combine labels and counts into its own layout
layout = QtWidgets.QHBoxLayout()
layout.addWidget(labels)
layout.addWidget(counts)
layout.addStretch()
# Put everything together
device_layout.addWidget(name, i, 0)
device_layout.addLayout(layout, i, 1)
device_layout.addWidget(record_button, i, 2, QtCore.Qt.AlignTop)
self.main_layout.addLayout(device_layout)
self.main_layout.addStretch()
def _create_request_user_input_cb(self, device_guid):
"""Creates the callback handling user device selection.
:param device_guid GUID of the associated device
:return callback function for user input selection handling
"""
return lambda: self._request_user_input(
lambda event: self._user_input_cb(event, device_guid)
)
def _user_input_cb(self, event, device_guid):
"""Processes input events to update the UI and model.
:param event the input event to process
:param device_guid GUID of the selected device
"""
profile_modifier = gremlin.profile.ProfileModifier(self.profile)
profile_modifier.change_device_guid(
device_guid,
event.device_guid
)
self._create_swap_ui()
def _request_user_input(self, callback):
"""Prompts the user for the input to bind to this item.
:param callback function to call with the accepted input
"""
self.input_dialog = common.InputListenerWidget(
callback,
[
gremlin.common.InputType.JoystickAxis,
gremlin.common.InputType.JoystickButton,
gremlin.common.InputType.JoystickHat
],
return_kb_event=False,
multi_keys=False
)
# Display the dialog centered in the middle of the UI
root = self
while root.parent():
root = root.parent()
geom = root.geometry()
        # setGeometry expects ints; the divisions yield floats under Python 3
        self.input_dialog.setGeometry(
            int(geom.x() + geom.width() / 2 - 150),
            int(geom.y() + geom.height() / 2 - 75),
            300,
            150
        )
self.input_dialog.show()
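

# --- Illustrative usage sketch (not part of the original module) ---
# A hypothetical way to open one of the dialogs above standalone; it
# assumes a working Joystick Gremlin checkout where the `gremlin`
# package and its resources (log files, gfx icons) can be found.
if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    window = LogWindowUi()
    window.show()
    sys.exit(app.exec_())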
|
gpl-3.0
|
lthurlow/Boolean-Constrained-Routing
|
networkx-1.8.1/build/lib/networkx/algorithms/isolate.py
|
46
|
1669
|
# encoding: utf-8
"""
Functions for identifying isolate (degree zero) nodes.
"""
# Copyright (C) 2004-2011 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
import networkx as nx
__author__ = """\n""".join(['Drew Conway <[email protected]>',
'Aric Hagberg <[email protected]>'])
__all__=['is_isolate','isolates']
def is_isolate(G,n):
"""Determine of node n is an isolate (degree zero).
Parameters
----------
G : graph
A networkx graph
n : node
A node in G
Returns
-------
isolate : bool
True if n has no neighbors, False otherwise.
Examples
--------
>>> G=nx.Graph()
>>> G.add_edge(1,2)
>>> G.add_node(3)
>>> nx.is_isolate(G,2)
False
>>> nx.is_isolate(G,3)
True
"""
return G.degree(n)==0
def isolates(G):
"""Return list of isolates in the graph.
Isolates are nodes with no neighbors (degree zero).
Parameters
----------
G : graph
A networkx graph
Returns
-------
isolates : list
List of isolate nodes.
Examples
--------
>>> G = nx.Graph()
>>> G.add_edge(1,2)
>>> G.add_node(3)
>>> nx.isolates(G)
[3]
To remove all isolates in the graph use
>>> G.remove_nodes_from(nx.isolates(G))
>>> G.nodes()
[1, 2]
    For digraphs, isolates have zero in-degree and zero out-degree.
>>> G = nx.DiGraph([(0,1),(1,2)])
>>> G.add_node(3)
>>> nx.isolates(G)
[3]
"""
return [n for (n,d) in G.degree_iter() if d==0]
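

# --- Illustrative usage sketch (not part of the original module) ---
# A quick self-check of the two helpers above; it assumes this networkx
# version (1.x, which still provides degree_iter) is importable.
if __name__ == '__main__':
    G = nx.Graph()
    G.add_edge(1, 2)
    G.add_node(3)
    assert not is_isolate(G, 1)
    assert is_isolate(G, 3)
    assert isolates(G) == [3]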
|
mit
|
moio/spacewalk
|
backend/server/test/unit-test/test_rhnLib_timestamp.py
|
1
|
2399
|
#!/usr/bin/python
#
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
#
#
import sys
import time
import unittest
from spacewalk.common import rhnLib
class Tests(unittest.TestCase):
def _test_timestamp_1(self):
        # Start with some timestamp, and verify that
        # timestamp(strftime(t)) == t
t = 85345
increment = 123456
while t < time.time() + increment:
is_eq, t1, tstr, t2 = self._test(t)
#self.assertEqual(t, t2, "%s %s %s %s" % (t, t2, ttuple, tstr))
if not is_eq:
print "%s %s %s" % (t1, t2, tstr)
t = t + increment
def _str(self, t):
tformat = "%Y-%m-%d %H:%M:%S"
ttuple = time.localtime(t)
return time.strftime(tformat, ttuple)
def _test(self, t, dstshift=0):
t = int(t)
tstr = self._str(t)
t2 = int(rhnLib.timestamp(tstr))
return (t+dstshift == t2), t, tstr, t2
def _test_timestamp_2(self):
y = 1969
while y < 2015:
y = y + 1
# Guess that year's time switch
tlist = [y, 10, 31, 1, 41, 37, 0, 0, -1]
t = time.mktime(tlist)
tlist = list(time.localtime(t))
            # Last Sunday of October (the pre-2007 US DST switch)
tlist[2] = tlist[2] - (1 + tlist[6]) % 7
t = int(time.mktime(tlist))
is_eq, t1, tstr, t2 = self._test(t)
if not is_eq:
print "%s %s %s" % (t, t2, tstr)
def test_timestamp_3(self):
t = 57739297
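        # localtime(t)[8] is tm_isdst (1 if DST was in effect at t) and
        # time.daylight is 1 if the local zone defines DST at all; their
        # difference shifts the expected value by one hour when they
        # disagree.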
dstshift = (time.localtime(t)[8] - time.daylight) * 3600
is_eq, t1, tstr, t2 = self._test(t, dstshift)
self.failUnless(is_eq, "Failed: %s, %s" % (t1, t2))
def _test_timestamp_4(self):
return self.test_timestamp_3()
if __name__ == '__main__':
sys.exit(unittest.main() or 0)
|
gpl-2.0
|
Marshal-82/pi_video_looper
|
Adafruit_Video_Looper/usb_drive_mounter.py
|
8
|
3078
|
# Copyright 2015 Adafruit Industries.
# Author: Tony DiCola
# License: GNU GPLv2, see LICENSE.txt
import glob
import subprocess
import time
import pyudev
class USBDriveMounter(object):
"""Service for automatically mounting attached USB drives."""
def __init__(self, root='/mnt/usbdrive', readonly=False):
"""Create an instance of the USB drive mounter service. Root is an
optional parameter which specifies the location and file name prefix for
mounted drives (a number will be appended to each mounted drive file
name). Readonly is a boolean that indicates if the drives should be
mounted as read-only or not (default false, writable).
"""
self._root = root
self._readonly = readonly
self._context = pyudev.Context()
def remove_all(self):
"""Unmount and remove mount points for all mounted drives."""
for path in glob.glob(self._root + '*'):
subprocess.call(['umount', '-l', path])
subprocess.call(['rm', '-r', path])
def mount_all(self):
"""Mount all attached USB drives. Readonly is a boolean that specifies
if the drives should be mounted read only (defaults to false).
"""
self.remove_all()
# Enumerate USB drive partitions by path like /dev/sda1, etc.
nodes = [x.device_node for x in self._context.list_devices(subsystem='block',
DEVTYPE='partition') \
if 'ID_BUS' in x and x['ID_BUS'] == 'usb']
# Mount each drive under the mount root.
for i, node in enumerate(nodes):
path = self._root + str(i)
subprocess.call(['mkdir', path])
args = ['mount']
if self._readonly:
args.append('-r')
args.extend([node, path])
subprocess.check_call(args)
def start_monitor(self):
"""Initialize monitoring of USB drive changes."""
self._monitor = pyudev.Monitor.from_netlink(self._context)
self._monitor.filter_by('block', 'partition')
self._monitor.start()
def poll_changes(self):
"""Check for changes to USB drives. Returns true if there was a USB
drive change, otherwise false.
"""
# Look for a drive change.
device = self._monitor.poll(0)
# If a USB drive changed (added/remove) remount all drives.
if device is not None and device['ID_BUS'] == 'usb':
return True
# Else nothing changed.
return False
if __name__ == '__main__':
# Run as a service that mounts all USB drives as read-only under the default
# path of /mnt/usbdrive*.
drive_mounter = USBDriveMounter(readonly=True)
drive_mounter.mount_all()
drive_mounter.start_monitor()
    print 'Listening for USB drive changes (press Ctrl-C to quit)...'
while True:
if drive_mounter.poll_changes():
print 'USB drives changed!'
drive_mounter.mount_all()
        # Sleep briefly between polls so the loop doesn't spin at full CPU.
        time.sleep(2)
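# Programmatic usage sketch (the root path below is an assumption, not a
# project default):
#     mounter = USBDriveMounter(root='/media/usb', readonly=False)
#     mounter.mount_all()
#     mounter.start_monitor()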
|
gpl-2.0
|
Denisolt/Tensorflow_Chat_Bot
|
local/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/contrib/appengine.py
|
360
|
7937
|
from __future__ import absolute_import
import logging
import os
import warnings
from ..exceptions import (
HTTPError,
HTTPWarning,
MaxRetryError,
ProtocolError,
TimeoutError,
SSLError
)
from ..packages.six import BytesIO
from ..request import RequestMethods
from ..response import HTTPResponse
from ..util.timeout import Timeout
from ..util.retry import Retry
try:
from google.appengine.api import urlfetch
except ImportError:
urlfetch = None
log = logging.getLogger(__name__)
class AppEnginePlatformWarning(HTTPWarning):
pass
class AppEnginePlatformError(HTTPError):
pass
class AppEngineManager(RequestMethods):
"""
Connection manager for Google App Engine sandbox applications.
This manager uses the URLFetch service directly instead of using the
emulated httplib, and is subject to URLFetch limitations as described in
the App Engine documentation here:
https://cloud.google.com/appengine/docs/python/urlfetch
Notably it will raise an AppEnginePlatformError if:
* URLFetch is not available.
* If you attempt to use this on GAEv2 (Managed VMs), as full socket
support is available.
* If a request size is more than 10 megabytes.
    * If a response size is more than 32 megabytes.
* If you use an unsupported request method such as OPTIONS.
Beyond those cases, it will raise normal urllib3 errors.
"""
def __init__(self, headers=None, retries=None, validate_certificate=True):
if not urlfetch:
raise AppEnginePlatformError(
"URLFetch is not available in this environment.")
if is_prod_appengine_mvms():
raise AppEnginePlatformError(
"Use normal urllib3.PoolManager instead of AppEngineManager"
"on Managed VMs, as using URLFetch is not necessary in "
"this environment.")
warnings.warn(
"urllib3 is using URLFetch on Google App Engine sandbox instead "
"of sockets. To use sockets directly instead of URLFetch see "
"https://urllib3.readthedocs.io/en/latest/contrib.html.",
AppEnginePlatformWarning)
RequestMethods.__init__(self, headers)
self.validate_certificate = validate_certificate
self.retries = retries or Retry.DEFAULT
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
# Return False to re-raise any potential exceptions
return False
def urlopen(self, method, url, body=None, headers=None,
retries=None, redirect=True, timeout=Timeout.DEFAULT_TIMEOUT,
**response_kw):
retries = self._get_retries(retries, redirect)
try:
response = urlfetch.fetch(
url,
payload=body,
method=method,
headers=headers or {},
allow_truncated=False,
follow_redirects=(
redirect and
retries.redirect != 0 and
retries.total),
deadline=self._get_absolute_timeout(timeout),
validate_certificate=self.validate_certificate,
)
except urlfetch.DeadlineExceededError as e:
raise TimeoutError(self, e)
except urlfetch.InvalidURLError as e:
if 'too large' in str(e):
raise AppEnginePlatformError(
"URLFetch request too large, URLFetch only "
"supports requests up to 10mb in size.", e)
raise ProtocolError(e)
except urlfetch.DownloadError as e:
if 'Too many redirects' in str(e):
raise MaxRetryError(self, url, reason=e)
raise ProtocolError(e)
except urlfetch.ResponseTooLargeError as e:
raise AppEnginePlatformError(
"URLFetch response too large, URLFetch only supports"
"responses up to 32mb in size.", e)
except urlfetch.SSLCertificateError as e:
raise SSLError(e)
except urlfetch.InvalidMethodError as e:
raise AppEnginePlatformError(
"URLFetch does not support method: %s" % method, e)
http_response = self._urlfetch_response_to_http_response(
response, **response_kw)
# Check for redirect response
if (http_response.get_redirect_location() and
retries.raise_on_redirect and redirect):
raise MaxRetryError(self, url, "too many redirects")
# Check if we should retry the HTTP response.
if retries.is_forced_retry(method, status_code=http_response.status):
retries = retries.increment(
method, url, response=http_response, _pool=self)
log.info("Forced retry: %s", url)
retries.sleep()
return self.urlopen(
method, url,
body=body, headers=headers,
retries=retries, redirect=redirect,
timeout=timeout, **response_kw)
return http_response
def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
if is_prod_appengine():
# Production GAE handles deflate encoding automatically, but does
# not remove the encoding header.
content_encoding = urlfetch_resp.headers.get('content-encoding')
if content_encoding == 'deflate':
del urlfetch_resp.headers['content-encoding']
transfer_encoding = urlfetch_resp.headers.get('transfer-encoding')
# We have a full response's content,
# so let's make sure we don't report ourselves as chunked data.
if transfer_encoding == 'chunked':
encodings = transfer_encoding.split(",")
encodings.remove('chunked')
urlfetch_resp.headers['transfer-encoding'] = ','.join(encodings)
return HTTPResponse(
# In order for decoding to work, we must present the content as
# a file-like object.
body=BytesIO(urlfetch_resp.content),
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
**response_kw
)
def _get_absolute_timeout(self, timeout):
if timeout is Timeout.DEFAULT_TIMEOUT:
return 5 # 5s is the default timeout for URLFetch.
if isinstance(timeout, Timeout):
if timeout._read is not timeout._connect:
warnings.warn(
"URLFetch does not support granular timeout settings, "
"reverting to total timeout.", AppEnginePlatformWarning)
return timeout.total
return timeout
def _get_retries(self, retries, redirect):
if not isinstance(retries, Retry):
retries = Retry.from_int(
retries, redirect=redirect, default=self.retries)
if retries.connect or retries.read or retries.redirect:
warnings.warn(
"URLFetch only supports total retries and does not "
"recognize connect, read, or redirect retry parameters.",
AppEnginePlatformWarning)
return retries
def is_appengine():
return (is_local_appengine() or
is_prod_appengine() or
is_prod_appengine_mvms())
def is_appengine_sandbox():
return is_appengine() and not is_prod_appengine_mvms()
def is_local_appengine():
return ('APPENGINE_RUNTIME' in os.environ and
'Development/' in os.environ['SERVER_SOFTWARE'])
def is_prod_appengine():
return ('APPENGINE_RUNTIME' in os.environ and
'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
not is_prod_appengine_mvms())
def is_prod_appengine_mvms():
return os.environ.get('GAE_VM', False) == 'true'
|
gpl-3.0
|
anoopcs9/samba
|
python/samba/kcc/ldif_import_export.py
|
10
|
14000
|
# LDIF helper functions for the samba_kcc tool
#
# Copyright (C) Dave Craft 2011
# Copyright (C) Andrew Bartlett 2015
#
# Andrew Bartlett's alleged work performed by his underlings Douglas
# Bagnall and Garming Sam.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from samba import Ldb, ldb, read_and_sub_file
from samba.auth import system_session
from samba.samdb import SamDB
from samba.common import dsdb_Dn
class LdifError(Exception):
pass
def write_search_result(samdb, f, res):
for msg in res:
lstr = samdb.write_ldif(msg, ldb.CHANGETYPE_NONE)
f.write("%s" % lstr)
def ldif_to_samdb(dburl, lp, ldif_file, forced_local_dsa=None):
"""Routine to import all objects and attributes that are relevent
to the KCC algorithms from a previously exported LDIF file.
The point of this function is to allow a programmer/debugger to
import an LDIF file with non-security relevent information that
was previously extracted from a DC database. The LDIF file is used
to create a temporary abbreviated database. The KCC algorithm can
then run against this abbreviated database for debug or test
verification that the topology generated is computationally the
same between different OSes and algorithms.
:param dburl: path to the temporary abbreviated db to create
:param ldif_file: path to the ldif file to import
"""
if os.path.exists(dburl):
raise LdifError("Specify a database (%s) that doesn't already exist." %
dburl)
# Use ["modules:"] as we are attempting to build a sam
    # database as opposed to starting one here.
tmpdb = Ldb(url=dburl, session_info=system_session(),
lp=lp, options=["modules:"])
tmpdb.transaction_start()
try:
data = read_and_sub_file(ldif_file, None)
tmpdb.add_ldif(data, None)
if forced_local_dsa:
tmpdb.modify_ldif("""dn: @ROOTDSE
changetype: modify
replace: dsServiceName
dsServiceName: CN=NTDS Settings,%s
""" % forced_local_dsa)
tmpdb.add_ldif("""dn: @MODULES
@LIST: rootdse,extended_dn_in,extended_dn_out_ldb,objectguid
-
""")
except Exception, estr:
tmpdb.transaction_cancel()
raise LdifError("Failed to import %s: %s" % (ldif_file, estr))
tmpdb.transaction_commit()
# We have an abbreviated list of options here because we have built
# an abbreviated database. We use the rootdse and extended-dn
# modules only during this re-open
samdb = SamDB(url=dburl, session_info=system_session(), lp=lp)
return samdb
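# Usage sketch for ldif_to_samdb (paths are placeholders; lp is the LoadParm
# object the caller already holds):
#     tmpdb = ldif_to_samdb("/tmp/kcc_debug.ldb", lp, "/tmp/kcc_export.ldif")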
def samdb_to_ldif_file(samdb, dburl, lp, creds, ldif_file):
"""Routine to extract all objects and attributes that are relevent
to the KCC algorithms from a DC database.
The point of this function is to allow a programmer/debugger to
extract an LDIF file with non-security relevent information from
a DC database. The LDIF file can then be used to "import" via
the import_ldif() function this file into a temporary abbreviated
database. The KCC algorithm can then run against this abbreviated
database for debug or test verification that the topology generated
is computationally the same between different OSes and algorithms.
:param dburl: LDAP database URL to extract info from
:param ldif_file: output LDIF file name to create
"""
try:
samdb = SamDB(url=dburl,
session_info=system_session(),
credentials=creds, lp=lp)
except ldb.LdbError, (enum, estr):
raise LdifError("Unable to open sam database (%s) : %s" %
(dburl, estr))
if os.path.exists(ldif_file):
raise LdifError("Specify a file (%s) that doesn't already exist." %
ldif_file)
try:
f = open(ldif_file, "w")
except IOError as ioerr:
raise LdifError("Unable to open (%s) : %s" % (ldif_file, str(ioerr)))
try:
# Query Partitions
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"objectSid",
"Enabled",
"systemFlags",
"dnsRoot",
"nCName",
"msDS-NC-Replica-Locations",
"msDS-NC-RO-Replica-Locations"]
sstr = "CN=Partitions,%s" % samdb.get_config_basedn()
res = samdb.search(base=sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=crossRef)")
# Write partitions output
write_search_result(samdb, f, res)
# Query cross reference container
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"fSMORoleOwner",
"systemFlags",
"msDS-Behavior-Version",
"msDS-EnabledFeature"]
sstr = "CN=Partitions,%s" % samdb.get_config_basedn()
res = samdb.search(base=sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=crossRefContainer)")
# Write cross reference container output
write_search_result(samdb, f, res)
# Query Sites
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"systemFlags"]
sstr = "CN=Sites,%s" % samdb.get_config_basedn()
sites = samdb.search(base=sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=site)")
# Write sites output
write_search_result(samdb, f, sites)
# Query NTDS Site Settings
for msg in sites:
sitestr = str(msg.dn)
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"interSiteTopologyGenerator",
"interSiteTopologyFailover",
"schedule",
"options"]
sstr = "CN=NTDS Site Settings,%s" % sitestr
res = samdb.search(base=sstr, scope=ldb.SCOPE_BASE,
attrs=attrs)
# Write Site Settings output
write_search_result(samdb, f, res)
# Naming context list
nclist = []
# Query Directory Service Agents
for msg in sites:
sstr = str(msg.dn)
ncattrs = ["hasMasterNCs",
"msDS-hasMasterNCs",
"hasPartialReplicaNCs",
"msDS-HasDomainNCs",
"msDS-hasFullReplicaNCs",
"msDS-HasInstantiatedNCs"]
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"invocationID",
"options",
"msDS-isRODC",
"msDS-Behavior-Version"]
res = samdb.search(base=sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs + ncattrs,
expression="(objectClass=nTDSDSA)")
# Spin thru all the DSAs looking for NC replicas
# and build a list of all possible Naming Contexts
# for subsequent retrieval below
for msg in res:
for k in msg.keys():
if k in ncattrs:
for value in msg[k]:
# Some of these have binary DNs so
                            # use dsdb_Dn to split out relevant parts
dsdn = dsdb_Dn(samdb, value)
dnstr = str(dsdn.dn)
if dnstr not in nclist:
nclist.append(dnstr)
# Write DSA output
write_search_result(samdb, f, res)
# Query NTDS Connections
for msg in sites:
sstr = str(msg.dn)
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"options",
"whenCreated",
"enabledConnection",
"schedule",
"transportType",
"fromServer",
"systemFlags"]
res = samdb.search(base=sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=nTDSConnection)")
# Write NTDS Connection output
write_search_result(samdb, f, res)
# Query Intersite transports
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"options",
"name",
"bridgeheadServerListBL",
"transportAddressAttribute"]
sstr = "CN=Inter-Site Transports,CN=Sites,%s" % \
samdb.get_config_basedn()
res = samdb.search(sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=interSiteTransport)")
# Write inter-site transport output
write_search_result(samdb, f, res)
# Query siteLink
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"systemFlags",
"options",
"schedule",
"replInterval",
"siteList",
"cost"]
sstr = "CN=Sites,%s" % \
samdb.get_config_basedn()
res = samdb.search(sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=siteLink)",
controls=['extended_dn:0'])
# Write siteLink output
write_search_result(samdb, f, res)
# Query siteLinkBridge
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"siteLinkList"]
sstr = "CN=Sites,%s" % samdb.get_config_basedn()
res = samdb.search(sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=siteLinkBridge)")
# Write siteLinkBridge output
write_search_result(samdb, f, res)
# Query servers containers
# Needed for samdb.server_site_name()
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"systemFlags"]
sstr = "CN=Sites,%s" % samdb.get_config_basedn()
res = samdb.search(sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=serversContainer)")
# Write servers container output
write_search_result(samdb, f, res)
# Query servers
# Needed because some transport interfaces refer back to
# attributes found in the server object. Also needed
# so extended-dn will be happy with dsServiceName in rootDSE
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"systemFlags",
"dNSHostName",
"mailAddress"]
sstr = "CN=Sites,%s" % samdb.get_config_basedn()
res = samdb.search(sstr, scope=ldb.SCOPE_SUBTREE,
attrs=attrs,
expression="(objectClass=server)")
# Write server output
write_search_result(samdb, f, res)
# Query Naming Context replicas
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"objectSid",
"fSMORoleOwner",
"msDS-Behavior-Version",
"repsFrom",
"repsTo"]
for sstr in nclist:
res = samdb.search(sstr, scope=ldb.SCOPE_BASE,
attrs=attrs)
# Write naming context output
write_search_result(samdb, f, res)
# Query rootDSE replicas
attrs = ["objectClass",
"objectGUID",
"cn",
"whenChanged",
"rootDomainNamingContext",
"configurationNamingContext",
"schemaNamingContext",
"defaultNamingContext",
"dsServiceName"]
sstr = ""
res = samdb.search(sstr, scope=ldb.SCOPE_BASE,
attrs=attrs)
# Record the rootDSE object as a dn as it
# would appear in the base ldb file. We have
# to save it this way because we are going to
# be importing as an abbreviated database.
res[0].dn = ldb.Dn(samdb, "@ROOTDSE")
# Write rootdse output
write_search_result(samdb, f, res)
except ldb.LdbError, (enum, estr):
raise LdifError("Error processing (%s) : %s" % (sstr, estr))
f.close()
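# Usage sketch for samdb_to_ldif_file (URL, credentials and paths are
# placeholders); the exported file can then be re-imported with
# ldif_to_samdb() above:
#     samdb_to_ldif_file(None, "ldap://dc1.example.com", lp, creds,
#                        "/tmp/kcc_export.ldif")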
|
gpl-3.0
|
UTNkar/moore
|
src/materialize/templatetags/materialize.py
|
1
|
1928
|
from django import template
from django.template import loader
from django.utils.translation import gettext_lazy as _
register = template.Library()
def get_widget_name(field):
return field.field.widget.__class__.__name__
def append_classes(field, widget=''):
field.field.label_suffix = ''
classes = field.field.widget.attrs.get('class', '')
classes += ' validate'
if field.errors:
classes += ' invalid'
if widget == 'Textarea':
classes += ' materialize-textarea'
if widget == 'DateInput':
classes += ' datepicker'
field.field.widget.attrs['class'] = classes
def render_field(templ, field, label=None, prefix=None):
if field == '':
return None
t = loader.get_template(templ)
if label is not None:
field.label = _(label)
c = {
'field': field,
'prefix': prefix,
}
return t.render(c)
@register.simple_tag
def materialize_field(field, prefix=None, label=None):
if field == '':
return
widget = get_widget_name(field)
t = ''
# TODO: DateTimeInput
if widget in ['EmailInput', 'DateInput', 'DateTimeInput', 'NumberInput',
'PasswordInput', 'Select', 'Textarea', 'TextInput',
'URLInput']:
append_classes(field, widget)
t = 'materialize/form/input.html'
elif widget == 'CheckboxInput':
t = 'materialize/form/p_input.html'
elif widget == 'SelectMultiple':
w = field.field.widget
w.template_name = 'materialize/form/multiple_input.html'
t = 'materialize/form/input.html'
else:
raise NotImplementedError('Widget %s not yet supported' % widget)
return render_field(t, field, label, prefix)
@register.inclusion_tag('materialize/pagination.html')
def materialize_pagination(page, url):
return {
'page': page,
'paginator': page.paginator,
'url': url,
}
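# Template usage sketch (form field and page object names are assumptions):
#     {% load materialize %}
#     {% materialize_field form.email label="E-mail" %}
#     {% materialize_pagination page_obj request.path %}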
|
agpl-3.0
|
splav/servo
|
components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py
|
8
|
2886
|
import WebIDL
def WebIDLTest(parser, harness):
parser.parse("""
[NoInterfaceObject]
interface TestExtendedAttr {
[Unforgeable] readonly attribute byte b;
};
""")
results = parser.finish()
parser = parser.reset()
parser.parse("""
[Pref="foo.bar",Pref=flop]
interface TestExtendedAttr {
[Pref="foo.bar"] attribute byte b;
};
""")
results = parser.finish()
parser = parser.reset()
parser.parse("""
interface TestLenientThis {
[LenientThis] attribute byte b;
};
""")
results = parser.finish()
harness.ok(results[0].members[0].hasLenientThis(),
"Should have a lenient this")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface TestLenientThis2 {
[LenientThis=something] attribute byte b;
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "[LenientThis] must take no arguments")
parser = parser.reset()
parser.parse("""
interface TestClamp {
void testClamp([Clamp] long foo);
void testNotClamp(long foo);
};
""")
results = parser.finish()
    # Pull the first argument out of the arglist of the first (and
# only) signature.
harness.ok(results[0].members[0].signatures()[0][1][0].type.hasClamp(),
"Should be clamped")
harness.ok(not results[0].members[1].signatures()[0][1][0].type.hasClamp(),
"Should not be clamped")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface TestClamp2 {
void testClamp([Clamp=something] long foo);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "[Clamp] must take no arguments")
parser = parser.reset()
parser.parse("""
interface TestEnforceRange {
void testEnforceRange([EnforceRange] long foo);
void testNotEnforceRange(long foo);
};
""")
results = parser.finish()
    # Pull the first argument out of the arglist of the first (and
# only) signature.
harness.ok(results[0].members[0].signatures()[0][1][0].type.hasEnforceRange(),
"Should be enforceRange")
harness.ok(not results[0].members[1].signatures()[0][1][0].type.hasEnforceRange(),
"Should not be enforceRange")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface TestEnforceRange2 {
void testEnforceRange([EnforceRange=something] long foo);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "[EnforceRange] must take no arguments")
|
mpl-2.0
|
benpatterson/edx-platform
|
lms/djangoapps/verify_student/admin.py
|
48
|
1958
|
from ratelimitbackend import admin
from verify_student.models import (
SoftwareSecurePhotoVerification,
VerificationStatus,
SkippedReverification,
)
class SoftwareSecurePhotoVerificationAdmin(admin.ModelAdmin):
"""
Admin for the SoftwareSecurePhotoVerification table.
"""
list_display = ('id', 'user', 'status', 'receipt_id', 'submitted_at', 'updated_at')
raw_id_fields = ('user', 'reviewing_user')
search_fields = (
'receipt_id',
)
class VerificationStatusAdmin(admin.ModelAdmin):
"""
Admin for the VerificationStatus table.
"""
list_display = ('timestamp', 'user', 'status', 'checkpoint')
readonly_fields = ()
search_fields = ('checkpoint__checkpoint_location', 'user__username')
raw_id_fields = ('user',)
def get_readonly_fields(self, request, obj=None):
"""When editing an existing record, all fields should be read-only.
VerificationStatus records should be immutable; to change the user's
status, create a new record with the updated status and a more
recent timestamp.
"""
if obj:
return self.readonly_fields + ('status', 'checkpoint', 'user', 'response', 'error')
return self.readonly_fields
class SkippedReverificationAdmin(admin.ModelAdmin):
"""Admin for the SkippedReverification table. """
list_display = ('created_at', 'user', 'course_id', 'checkpoint')
raw_id_fields = ('user',)
readonly_fields = ('user', 'course_id')
search_fields = ('user__username', 'course_id', 'checkpoint__checkpoint_location')
def has_add_permission(self, request):
"""Skipped verifications can't be created in Django admin. """
return False
admin.site.register(SoftwareSecurePhotoVerification, SoftwareSecurePhotoVerificationAdmin)
admin.site.register(SkippedReverification, SkippedReverificationAdmin)
admin.site.register(VerificationStatus, VerificationStatusAdmin)
|
agpl-3.0
|
trondhindenes/ansible
|
lib/ansible/modules/windows/win_environment.py
|
35
|
2820
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Jon Hawkesworth (@jhawkesworth) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_environment
version_added: '2.0'
short_description: Modify environment variables on Windows hosts
description:
  - Uses the .NET Environment class to set or remove environment variables at the User, Machine, or Process level.
- User level environment variables will be set, but not available until the user has logged off and on again.
options:
state:
description:
- Set to C(present) to ensure environment variable is set.
- Set to C(absent) to ensure it is removed.
choices: [ absent, present ]
default: present
name:
description:
- The name of the environment variable.
required: yes
value:
description:
- The value to store in the environment variable.
- Must be set when C(state=present) and cannot be an empty string.
- Can be omitted for C(state=absent).
level:
description:
- The level at which to set the environment variable.
- Use C(machine) to set for all users.
- Use C(user) to set for the current user that ansible is connected as.
- Use C(process) to set for the current process. Probably not that useful.
choices: [ machine, user, process ]
required: yes
author:
- Jon Hawkesworth (@jhawkesworth)
notes:
- This module is best-suited for setting the entire value of an
environment variable. For safe element-based management of
path-like environment vars, use the M(win_path) module.
- This module does not broadcast change events.
This means that the minority of windows applications which can have
their environment changed without restarting will not be notified and
therefore will need restarting to pick up new environment settings.
User level environment variables will require the user to log out
and in again before they become available.
'''
EXAMPLES = r'''
- name: Set an environment variable for all users
win_environment:
state: present
name: TestVariable
value: Test value
level: machine
- name: Remove an environment variable for the current user
win_environment:
state: absent
name: TestVariable
level: user
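# For PATH-like variables the notes above recommend M(win_path); a sketch
# (the directory value is an assumption):
- name: Ensure a directory is on the machine-wide PATH
  win_path:
    elements:
    - C:\Program Files\MyTool\bin
    state: present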
'''
RETURN = r'''
before_value:
  description: the value of the environment key before a change; null if it did not exist
returned: always
type: string
sample: C:\Windows\System32
value:
  description: the value the environment key has been set to; null if removed
returned: always
type: string
sample: C:\Program Files\jdk1.8
'''
|
gpl-3.0
|
annajordanous/network-analysis
|
get_minimal.py
|
2
|
5752
|
import add_data as ad
import get_soundcloud_data as gsd
import genre_relationships as gr
import sqlite3
class DbHandler(object):
def __init__(self,db_name):
self.conn = sqlite3.connect(db_name)
self.curs = self.conn.cursor()
def create_table(self,table_name):
ad.create_table(self.curs,table_name)
def create_tables_if_needed(self,table_list):
present = gr.check_tables(self.curs,table_list)
for n,t in enumerate(present):
if not t: self.create_table(table_list[n])
def write(self,table_name,data):
ad.insert_tuple_data_set_into_DB(self.curs,table_name,data)
self.conn.commit()
def collected_users(self):
self.curs.execute('SELECT id FROM users')
return [r[0] for r in self.curs.fetchall()]
def collected_followers(self,user):
self.curs.execute('SELECT * FROM x_follows_y '
'WHERE followed=?',(user,))
return len(self.curs.fetchall())
def collected_followings(self,user):
self.curs.execute('SELECT * FROM x_follows_y '
'WHERE follower=?',(user,))
return len(self.curs.fetchall())
def total_followers(self,user):
return self.curs.execute('SELECT "followers_count" '
'FROM users WHERE id=?',(user,)).next()[0]
def total_followings(self,user):
return self.curs.execute('SELECT "followings_count" '
'FROM users WHERE id=?',(user,)).next()[0]
class DataHandler(object):
table = ''
vars = []
def __init__(self,db_handler,limit):
self.db_handler = db_handler
self.limit = limit
self.data = []
def clear(self):
self.data = []
def save(self):
self.db_handler.write(self.table,self.data)
self.clear()
def add_record(self,r):
if len(self.data) >= self.limit: self.save()
class UserData(DataHandler):
table = 'users'
vars=ad.att_list(ad.att_string(ad.tables[table]))
def __init__(self,db_handler,limit=100):
self.collected = set([])
super(UserData,self).__init__(db_handler,limit)
def extract(self,d,k):
try:
return d[k]
except KeyError:
return None
def add_record(self,id,d):
self.data.append([self.extract(d,v) for v in UserData.vars])
self.collected.add(id)
super(UserData,self).add_record(d)
def update(self,d):
for k,v in d.iteritems():
self.add_record(k,d[k])
class FollowData(DataHandler):
table = 'x_follows_y'
vars=ad.att_list(ad.att_string(ad.tables[table]))
def __init__(self,db_handler,limit=100):
super(FollowData,self).__init__(db_handler,limit)
def add_record(self,t):
self.data.append(t)
super(FollowData,self).add_record(t)
def update(self,s):
for t in s:
self.add_record(t)
def user_dicts(resource):
return {u.obj['id']:u.obj for u in resource.data}
def starting_user(user_id):
return gsd.client.get('/users/' + str(user_id)).obj
def follows_user(user_id):
follows = user_dicts(gsd.client.get('/users/'+ str(user_id) +'/followers'))
x_follows_y = {(x,user_id) for x in follows}
return follows,x_follows_y
def followed_by_user(user_id):
followed_by = user_dicts(gsd.client.get('/users/'+ str(user_id) +'/followings'))
x_follows_y = {(user_id,y) for y in followed_by}
return followed_by,x_follows_y
def from_users(collect_followers,collect_following):
user_data,x_follows_y = {},set([])
for user in collect_followers:
us,xfy = follows_user(user)
user_data.update(us)
x_follows_y.update(xfy)
for user in collect_following:
us,xfy = followed_by_user(user)
user_data.update(us)
x_follows_y.update(xfy)
return user_data, x_follows_y
def collected(dbh,thresh):
cu = dbh.collected_users()
collected_followers = {u for u in cu if dbh.collected_followers(u)
>= (dbh.total_followers(u) * thresh)}
collected_following = {u for u in cu if dbh.collected_followings(u)
>= (dbh.total_followings(u) * thresh)}
return collected_followers,collected_following
def to_collect(user_data,collected_followers,collected_following):
return (user_data.collected - collected_followers,
user_data.collected - collected_following)
def snowb(start_at,dbh,user_data,follow_data,steps,thresh):
user_data.update({start_at:starting_user(start_at)})
collected_followers,collected_following = collected(dbh,thresh)
for i in range(steps):
if i==0:
to_collect_followers,to_collect_following = {start_at},{start_at}
else:
(to_collect_followers,
to_collect_following) = to_collect(user_data,
collected_followers,
collected_following)
ud,xfy = from_users(to_collect_followers,to_collect_following)
user_data.update(ud)
follow_data.update(xfy)
collected_followers.update(to_collect_followers)
collected_following.update(to_collect_following)
return user_data,follow_data
def collect(dbname,start_at,steps=1,thresh=0.5):
dbh = DbHandler(dbname)
dbh.create_tables_if_needed(['users','x_follows_y'])
user_data = UserData(dbh,50)
follow_data = FollowData(dbh,50)
return snowb(start_at,dbh,user_data,follow_data,
steps,thresh)
Slackk = 202195
Sephirot = 81070
Ms_Skyrym = 15899888
def test(steps=1):
return collect('testit7.sqlite',Ms_Skyrym,steps=steps)
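# Collection sketch: snowball one extra step out from a seed id above,
# writing users and follow edges to a fresh sqlite file (the filename is an
# assumption):
#     user_data, follow_data = collect('soundcloud.sqlite', Slackk, steps=2)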
|
gpl-2.0
|
sanger-pathogens/ariba
|
ariba/tests/reference_data_test.py
|
2
|
32281
|
import unittest
import filecmp
import os
import pyfastaq
from ariba import reference_data, sequence_metadata
modules_dir = os.path.dirname(os.path.abspath(reference_data.__file__))
data_dir = os.path.join(modules_dir, 'tests', 'data')
class TestReferenceData(unittest.TestCase):
def test_init_fails(self):
'''Test __init__ fails when it should'''
empty_fasta = os.path.join(data_dir, 'reference_data_init_fails.empty.fa')
empty_tsv = os.path.join(data_dir, 'reference_data_init_fails.empty.tsv')
fasta = os.path.join(data_dir, 'reference_data_init_fails.in.fa')
with self.assertRaises(reference_data.Error):
reference_data.ReferenceData([empty_fasta], [empty_tsv])
reference_data.ReferenceData([fasta], [empty_tsv])
def test_init_ok(self):
'''Test init with good input'''
fasta_in = os.path.join(data_dir, 'reference_data_init_ok.in.fa')
tsv_in = os.path.join(data_dir, 'reference_data_init_ok.in.tsv')
meta1 = sequence_metadata.SequenceMetadata('gene1\t1\t0\tR2S\t.\tconfers killer rabbit resistance')
meta2 = sequence_metadata.SequenceMetadata("gene2\t1\t0\tI42L\t.\tremoves tardigrade's space-living capability")
expected_metadata = {
'gene1': {
'seq_type': 'p',
'variant_only': False,
'n': {},
'p': {1: {meta1}},
'.': set(),
},
'gene2': {
'seq_type': 'p',
'variant_only': False,
'n': {},
'p': {41: {meta2}},
'.': set(),
}
}
ref_data = reference_data.ReferenceData([fasta_in], [tsv_in])
self.assertEqual(expected_metadata, ref_data.metadata)
expected_seqs_dict = {
'gene1': pyfastaq.sequences.Fasta('gene1', 'CATCGTCGTCTATCGTCGTCCTAG'),
'gene2': pyfastaq.sequences.Fasta('gene2', 'AAAAACCCCGGGGTTTT')
}
self.assertEqual(expected_seqs_dict, ref_data.sequences)
self.assertEqual({}, ref_data.ariba_to_original_name)
self.assertEqual({}, ref_data.extra_parameters)
rename_file = os.path.join(data_dir, 'reference_data_init_ok.rename.tsv')
parameters_file = os.path.join(data_dir, 'reference_data_init_ok.params.json')
ref_data = reference_data.ReferenceData([fasta_in], [tsv_in],
rename_file=rename_file, parameters_file=parameters_file)
expected_rename_dict = {'gene1': 'original_gene1', 'gene2': 'original_gene2'}
self.assertEqual(expected_rename_dict, ref_data.ariba_to_original_name)
expected_extra_parameters = {'foo': 'bar', 'spam': 'eggs'}
self.assertEqual(expected_extra_parameters, ref_data.extra_parameters)
def test_load_rename_file(self):
'''Test _load_rename_file'''
infile = os.path.join(data_dir, 'reference_data_load_rename_file.tsv')
got = reference_data.ReferenceData._load_rename_file(infile)
expected = {
'ariba1': 'original1',
'ariba2': 'original2'
}
self.assertEqual(expected, got)
def test_load_metadata_tsv(self):
'''Test _load_metadata_tsv'''
meta1 = sequence_metadata.SequenceMetadata('gene1\t0\t0\tA42G\t.\tfree text')
meta2 = sequence_metadata.SequenceMetadata('gene1\t0\t0\tG13T\t.\tconfers killer rabbit resistance')
meta3 = sequence_metadata.SequenceMetadata("gene2\t1\t1\tI42L\t.\tremoves tardigrade's space-living capability")
expected = {
'gene1': {
'seq_type': 'n',
'variant_only': False,
'n': {12: {meta2}, 41: {meta1}},
'p': {},
'.': set(),
},
'gene2': {
'seq_type': 'p',
'variant_only': True,
'n': {},
'p': {41: {meta3}},
'.': set(),
}
}
got = {}
tsv_file = os.path.join(data_dir, 'reference_data_load_metadata_tsv.tsv')
reference_data.ReferenceData._load_metadata_tsv(tsv_file, got)
self.assertEqual(expected, got)
def test_load_all_metadata_tsvs(self):
'''Test _load_all_metadata_tsvs'''
input_files = [os.path.join(data_dir, 'reference_data_load_all_metadata_tsvs.' + x + '.tsv') for x in ['1', '2']]
meta1 = sequence_metadata.SequenceMetadata('gene1\t0\t0\tA42G\t.\tfree text')
meta2 = sequence_metadata.SequenceMetadata('gene1\t0\t0\tG13T\t.\tconfers killer rabbit resistance')
meta3 = sequence_metadata.SequenceMetadata("gene2\t1\t0\tI42L\t.\tremoves tardigrade's space-living capability")
expected = {
'gene1': {
'seq_type': 'n',
'variant_only': False,
'n': {12: {meta2}, 41: {meta1}},
'p': {},
'.': set(),
},
'gene2': {
'seq_type': 'p',
'variant_only': False,
'n': {},
'p': {41: {meta3}},
'.': set(),
}
}
got = reference_data.ReferenceData._load_all_metadata_tsvs(input_files)
self.assertEqual(expected, got)
def test_load_fasta_file(self):
'''Test _load_fasta_file'''
got = {}
expected = {'seq1': pyfastaq.sequences.Fasta('seq1', 'ACGT')}
filename = os.path.join(data_dir, 'reference_data_load_fasta_file.fa')
reference_data.ReferenceData._load_fasta_file(filename, got)
self.assertEqual(expected, got)
def test_load_all_fasta_files(self):
'''Test _load_all_fasta_files'''
filenames = [os.path.join(data_dir, 'reference_data_load_all_fasta_files.in.' + x) for x in ['1', '2']]
expected = {
'seq1': pyfastaq.sequences.Fasta('seq1', 'ACGT'),
'seq2': pyfastaq.sequences.Fasta('seq2', 'TTTT')
}
got = reference_data.ReferenceData._load_all_fasta_files(filenames)
self.assertEqual(expected, got)
def test_load_input_check_seq_names_ok(self):
'''Test _load_input_files_and_check_seq_names with good input'''
fasta_files = [os.path.join(data_dir, 'reference_data_load_input_check_seq_names.good.fa.' + x) for x in ['1', '2']]
metadata_files = [os.path.join(data_dir, 'reference_data_load_input_check_seq_names.good.csv.' + x) for x in ['1', '2']]
expected_seqs = {
'seq1': pyfastaq.sequences.Fasta('seq1', 'ACGT'),
'seq2': pyfastaq.sequences.Fasta('seq2', 'TTTT')
}
meta1 = sequence_metadata.SequenceMetadata('seq1\t0\t0\tA1G\t.\tfree text')
meta2 = sequence_metadata.SequenceMetadata("seq2\t0\t0\t.\t.\tspam eggs")
expected_meta = {
'seq1': {
'seq_type': 'n',
'variant_only': False,
'n': {0: {meta1}},
'p': {},
'.': set(),
},
'seq2': {
'seq_type': 'n',
'variant_only': False,
'n': {},
'p': {},
'.': {meta2},
}
}
got_seqs, got_meta = reference_data.ReferenceData._load_input_files_and_check_seq_names(fasta_files, metadata_files)
self.assertEqual(expected_seqs, got_seqs)
self.assertEqual(expected_meta, got_meta)
def test_load_input_check_seq_names_bad(self):
'''Test _load_input_files_and_check_seq_names with bad input'''
fasta_files = [os.path.join(data_dir, 'reference_data_load_input_check_seq_names.bad.fa.' + x) for x in ['1', '2']]
metadata_files = [os.path.join(data_dir, 'reference_data_load_input_check_seq_names.bad.csv.' + x) for x in ['1', '2']]
with self.assertRaises(reference_data.Error):
reference_data.ReferenceData._load_input_files_and_check_seq_names(fasta_files, metadata_files)
def test_write_metadata_tsv(self):
'''Test _write_metadata_tsv'''
metadata_tsv_in = os.path.join(data_dir, 'reference_data_write_metadata_tsv.tsv')
metadata_tsv_expected = os.path.join(data_dir, 'reference_data_write_metadata_tsv.expected.tsv')
tmp_tsv = 'tmp.test_write_metadata_tsv.out.tsv'
metadata = reference_data.ReferenceData._load_all_metadata_tsvs([metadata_tsv_in])
reference_data.ReferenceData._write_metadata_tsv(metadata, tmp_tsv)
self.assertTrue(filecmp.cmp(metadata_tsv_expected, tmp_tsv, shallow=False))
os.unlink(tmp_tsv)
def test_write_sequences_to_files(self):
'''Test _write_sequences_to_files'''
sequences = {
'seq1': pyfastaq.sequences.Fasta('seq1', 'ACGT'),
'seq2': pyfastaq.sequences.Fasta('seq2', 'ACGTA'),
'seq3': pyfastaq.sequences.Fasta('seq3', 'ACGTAC'),
'seq4': pyfastaq.sequences.Fasta('seq4', 'ACGTAAA'),
'seq5': pyfastaq.sequences.Fasta('seq5', 'ACGTCCC'),
}
metadata = {
'seq1': {'seq_type': 'n', 'variant_only': False},
'seq2': {'seq_type': 'n', 'variant_only': True},
'seq3': {'seq_type': 'p', 'variant_only': False},
'seq4': {'seq_type': 'p', 'variant_only': True},
'seq5': {'seq_type': 'n', 'variant_only': False},
}
tmp_prefix = 'tmp.test_write_sequences_to_files'
reference_data.ReferenceData._write_sequences_to_files(sequences, metadata, tmp_prefix)
expected_prefix = os.path.join(data_dir, 'reference_data_write_sequences_to_files')
for suffix in ['gene.fa', 'gene.varonly.fa', 'noncoding.fa', 'noncoding.varonly.fa', 'all.fa']:
expected = expected_prefix + '.' + suffix
got = tmp_prefix + '.' + suffix
self.assertTrue(filecmp.cmp(expected, got, shallow=False))
os.unlink(got)
def test_filter_bad_variant_data(self):
'''Test _filter_bad_variant_data'''
fasta_in = os.path.join(data_dir, 'reference_data_filter_bad_data.in.fa')
metadata_tsv = os.path.join(data_dir, 'reference_data_filter_bad_data_metadata.in.tsv')
sequences, metadata = reference_data.ReferenceData._load_input_files_and_check_seq_names([fasta_in], [metadata_tsv])
tmp_prefix = 'tmp.test_filter_bad_variant_data'
got_line_count = reference_data.ReferenceData._filter_bad_variant_data(sequences, metadata, tmp_prefix, set())
expected_prefix = os.path.join(data_dir, 'reference_data_filter_bad_data.expected')
with open(os.path.join(data_dir, 'reference_data_filter_bad_data.expected.check_metadata.log')) as f:
expected_line_count = len(f.readlines())
self.assertEqual(expected_line_count, got_line_count)
for suffix in ['check_metadata.log', 'check_metadata.tsv']:
expected = expected_prefix + '.' + suffix
got = tmp_prefix + '.' + suffix
self.assertTrue(filecmp.cmp(expected, got, shallow=False))
os.unlink(got)
expected_seqs = {}
pyfastaq.tasks.file_to_dict(os.path.join(data_dir, 'reference_data_filter_bad_data.expected.all.fa'), expected_seqs)
self.assertEqual(expected_seqs, sequences)
def test_try_to_get_gene_seq(self):
'''Test _try_to_get_gene_seq'''
tests = [
(pyfastaq.sequences.Fasta('x', 'ACGTG'), None, 'REMOVE\tToo short. Length: 5'),
(pyfastaq.sequences.Fasta('x', 'A' * 100), None, 'REMOVE\tToo long. Length: 100'),
(pyfastaq.sequences.Fasta('x', 'GAGGAGCCG'), None, 'REMOVE\tDoes not look like a gene (tried both strands and all reading frames) GAGGAGCCG'),
(pyfastaq.sequences.Fasta('x', 'ATGTAACCT'), None, 'REMOVE\tDoes not look like a gene (tried both strands and all reading frames) ATGTAACCT'),
(pyfastaq.sequences.Fasta('x', 'ATGCCTTAA'), pyfastaq.sequences.Fasta('x', 'ATGCCTTAA'), 'KEEP\tMade into gene. strand=+, frame=0')
]
for seq, got_seq, message in tests:
self.assertEqual((got_seq, message), reference_data.ReferenceData._try_to_get_gene_seq(seq, 6, 99))
def test_check_noncoding_seq(self):
'''Test _check_noncoding_seq'''
tests = [
(pyfastaq.sequences.Fasta('x', 'A' * 3), False, 'REMOVE\tToo short. Length: 3'),
(pyfastaq.sequences.Fasta('x', 'A' * 21), False, 'REMOVE\tToo long. Length: 21'),
(pyfastaq.sequences.Fasta('x', 'A' * 5), True, None),
(pyfastaq.sequences.Fasta('x', 'A' * 4), True, None),
(pyfastaq.sequences.Fasta('x', 'A' * 20), True, None)
]
for seq, valid, message in tests:
self.assertEqual((valid, message), reference_data.ReferenceData._check_noncoding_seq(seq, 4, 20))
def test_remove_bad_genes(self):
'''Test _remove_bad_genes'''
test_seq_dict = {}
fasta_file = os.path.join(data_dir, 'reference_data_remove_bad_genes.in.fa')
metadata_file = os.path.join(data_dir, 'reference_data_remove_bad_genes.in.tsv')
metadata = reference_data.ReferenceData._load_all_metadata_tsvs([metadata_file])
pyfastaq.tasks.file_to_dict(fasta_file, test_seq_dict)
tmp_log = 'tmp.test_remove_bad_genes.log'
expected_removed = {'g1', 'g2', 'g3', 'g4'}
got_removed = reference_data.ReferenceData._remove_bad_genes(test_seq_dict, metadata, tmp_log, min_gene_length=6, max_gene_length=99)
self.assertEqual(expected_removed, got_removed)
expected_dict = {
'g5': pyfastaq.sequences.Fasta('g5', 'ATGCCTTAA'),
'noncoding1': pyfastaq.sequences.Fasta('noncoding1', 'AAAAAAAAAAAAAAAAAAAAAAA')
}
self.assertEqual(expected_dict, test_seq_dict)
expected_log = os.path.join(data_dir, 'reference_data_test_remove_bad_genes.log')
self.assertTrue(filecmp.cmp(expected_log, tmp_log, shallow=False))
os.unlink(tmp_log)
def test_remove_bad_noncoding_seqs(self):
'''Test _remove_bad_noncoding_seqs'''
test_seq_dict = {}
fasta_file = os.path.join(data_dir, 'reference_data_remove_bad_noncoding.in.fa')
metadata_file = os.path.join(data_dir, 'reference_data_remove_bad_noncoding.in.tsv')
metadata = reference_data.ReferenceData._load_all_metadata_tsvs([metadata_file])
pyfastaq.tasks.file_to_dict(fasta_file, test_seq_dict)
tmp_log = 'tmp.test_remove_bad_noncoding.log'
expected_removed = {'noncoding1','noncoding2'}
got_removed = reference_data.ReferenceData._remove_bad_noncoding_seqs(test_seq_dict, metadata, tmp_log,
min_noncoding_length=6, max_noncoding_length=15)
self.assertEqual(expected_removed, got_removed)
expected_dict = {
'noncoding3': pyfastaq.sequences.Fasta('noncoding3', 'CCCCCC'),
'noncoding4': pyfastaq.sequences.Fasta('noncoding4', 'TTTTTTTTTTTTTTT'),
'noncoding5': pyfastaq.sequences.Fasta('noncoding5', 'AAAAAAAAAAAA')
}
self.assertEqual(expected_dict, test_seq_dict)
expected_log = os.path.join(data_dir, 'reference_data_test_remove_bad_noncoding.log')
self.assertTrue(filecmp.cmp(expected_log, tmp_log, shallow=False))
os.unlink(tmp_log)
def test_new_seq_name(self):
'''Test _new_seq_name'''
tests = [
('name', 'name'),
('name_a', 'name_a'),
('name.a', 'name.a'),
('name-a', 'name_a'),
('name!', 'name_'),
('name:foo', 'name_foo'),
('name:!@foo', 'name___foo'),
]
for name, expected in tests:
self.assertEqual(expected, reference_data.ReferenceData._new_seq_name(name))
def test_seq_names_to_rename_dict(self):
'''Test _seq_names_to_rename_dict'''
names = {
'foo',
'bar!',
'bar:',
'bar,',
'spam',
'eggs,123',
'ab(c1',
'ab(c)2',
'ab[c]3',
'abc;4',
"abc'5",
'abc"6',
'abc|7',
r'''zaphod<>/\b{}[]|!''',
}
got = reference_data.ReferenceData._seq_names_to_rename_dict(names)
expected = {
'bar!': 'bar_',
'bar,': 'bar__1',
'bar:': 'bar__2',
'ab(c1': 'ab_c1',
'ab(c)2': 'ab_c_2',
'ab[c]3': 'ab_c_3',
'abc;4': 'abc_4',
"abc'5": 'abc_5',
'abc"6': 'abc_6',
'abc|7': 'abc_7',
'eggs,123': 'eggs_123',
r'''zaphod<>/\b{}[]|!''': 'zaphod____b______',
}
self.assertEqual(expected, got)
def test_rename_names_in_seq_dict(self):
'''Test _rename_names_in_seq_dict'''
original_seqs = {
'pa abc': pyfastaq.sequences.Fasta('pa abc', 'AAAA'),
'pa 1': pyfastaq.sequences.Fasta('pa 1', 'CCC'),
'vo:': pyfastaq.sequences.Fasta('vo:', 'GGG'),
'nonc': pyfastaq.sequences.Fasta('nonc', 'TTT'),
}
rename_dict = {
'pa abc': 'pa',
'pa 1': 'pa_1',
'vo:': 'vo_',
}
expected = {
'pa': pyfastaq.sequences.Fasta('pa', 'AAAA'),
'pa_1': pyfastaq.sequences.Fasta('pa_1', 'CCC'),
'vo_': pyfastaq.sequences.Fasta('vo_', 'GGG'),
'nonc': pyfastaq.sequences.Fasta('nonc', 'TTT'),
}
got = reference_data.ReferenceData._rename_names_in_seq_dict(original_seqs, rename_dict)
self.assertEqual(expected, got)
def test_rename_metadata_set(self):
'''Test _rename_metadata_set'''
metaset = {
sequence_metadata.SequenceMetadata('foo 1\t1\t0\t.\t.\tdescription'),
sequence_metadata.SequenceMetadata('foo 1\t1\t0\tI42L\t.\tspam eggs')
}
expected = {
sequence_metadata.SequenceMetadata('new_name\t1\t0\t.\t.\tdescription'),
sequence_metadata.SequenceMetadata('new_name\t1\t0\tI42L\t.\tspam eggs')
}
got = reference_data.ReferenceData._rename_metadata_set(metaset, 'new_name')
self.assertEqual(expected, got)
def test_rename_names_in_metadata(self):
'''Test _rename_names_in_metadata'''
meta1 = sequence_metadata.SequenceMetadata('gene1\t0\t0\tA42G\t.\tfree text')
meta2 = sequence_metadata.SequenceMetadata('gene1\t0\t0\tA42T\t.\tfree text2')
meta3 = sequence_metadata.SequenceMetadata('gene1\t0\t0\t.\t.\tfree text3')
meta4 = sequence_metadata.SequenceMetadata('gene1\t0\t0\tG13T\t.\tconfers killer rabbit resistance')
meta5 = sequence_metadata.SequenceMetadata("gene2\t1\t0\tI42L\t.\tremoves tardigrade's space-living capability")
meta1rename = sequence_metadata.SequenceMetadata('new_gene1\t0\t0\tA42G\t.\tfree text')
meta2rename = sequence_metadata.SequenceMetadata('new_gene1\t0\t0\tA42T\t.\tfree text2')
meta3rename = sequence_metadata.SequenceMetadata('new_gene1\t0\t0\t.\t.\tfree text3')
meta4rename = sequence_metadata.SequenceMetadata('new_gene1\t0\t0\tG13T\t.\tconfers killer rabbit resistance')
metadata = {
'gene1': {
'n': {12: {meta4}, 41: {meta1, meta2}},
'p': {},
'.': {meta3},
},
'gene2': {
'n': {},
'p': {41: {meta5}},
'.': set(),
}
}
expected = {
'new_gene1': {
'n': {12: {meta4rename}, 41: {meta1rename, meta2rename}},
'p': {},
'.': {meta3rename},
},
'gene2': {
'n': {},
'p': {41: {meta5}},
'.': set(),
}
}
rename_dict = {'gene1': 'new_gene1'}
got = reference_data.ReferenceData._rename_names_in_metadata(metadata, rename_dict)
self.assertEqual(expected, got)
def test_rename_sequences(self):
'''Test rename_sequences'''
fasta_in = os.path.join(data_dir, 'reference_data_rename_sequences.fa')
tsv_in = os.path.join(data_dir, 'reference_data_rename_sequences_metadata.tsv')
refdata = reference_data.ReferenceData([fasta_in], [tsv_in])
tmp_out = 'tmp.test_rename_sequences.out'
refdata.rename_sequences(tmp_out)
expected_file = os.path.join(data_dir, 'reference_data_test_rename_sequences.out')
self.assertTrue(filecmp.cmp(expected_file, tmp_out, shallow=False))
os.unlink(tmp_out)
meta1 = sequence_metadata.SequenceMetadata('noncoding1\t0\t0\t.\t.\toriginal name "noncoding1 blah"')
meta3 = sequence_metadata.SequenceMetadata('pres_abs1_1\t0\t0\t.\t.\toriginal name "pres_abs1 foo bar spam eggs"')
meta5 = sequence_metadata.SequenceMetadata('pres_abs1\t0\t0\t.\t.\toriginal name "pres\'abs1"')
meta6 = sequence_metadata.SequenceMetadata('pres_abs2\t0\t0\t.\t.\toriginal name "pres_abs2"')
meta7 = sequence_metadata.SequenceMetadata('pres_abs3\t0\t0\t.\t.\toriginal name "pres!abs3"')
meta8 = sequence_metadata.SequenceMetadata('var_only1_2\t0\t0\t.\t.\toriginal name "var_only1 hello"')
meta9 = sequence_metadata.SequenceMetadata('var_only1\t0\t0\t.\t.\toriginal name "var,only1"')
meta10 = sequence_metadata.SequenceMetadata('var_only1_1\t0\t0\t.\t.\toriginal name "var:only1 boo"')
meta11 = sequence_metadata.SequenceMetadata('var_only2\t0\t0\t.\t.\toriginal name "var_only2"')
expected_meta = {
'noncoding1': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta1}},
'pres_abs1_1': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta3}},
'pres_abs1': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta5}},
'pres_abs2': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta6}},
'pres_abs3': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta7}},
'var_only1_2': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta8}},
'var_only1': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta9}},
'var_only1_1': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta10}},
'var_only2': {'seq_type': 'n', 'variant_only': False, 'n': {}, 'p': {}, '.': {meta11}},
}
self.maxDiff = None
self.assertEqual(set(expected_meta.keys()), set(refdata.metadata.keys()))
self.assertEqual(expected_meta, refdata.metadata)
expected_seqs_dict = {
'noncoding1': pyfastaq.sequences.Fasta('noncoding1', 'AAAA'),
'pres_abs1_1': pyfastaq.sequences.Fasta('pres_abs1_1', 'ACGT'),
'pres_abs1': pyfastaq.sequences.Fasta('pres_abs1', 'CCCC'),
'pres_abs2': pyfastaq.sequences.Fasta('pres_abs2', 'TTTT'),
'pres_abs3': pyfastaq.sequences.Fasta('pres_abs3', 'GGGG'),
'var_only1_2': pyfastaq.sequences.Fasta('var_only1_2', 'AAAA'),
'var_only1': pyfastaq.sequences.Fasta('var_only1', 'GGGG'),
'var_only1_1': pyfastaq.sequences.Fasta('var_only1_1', 'CCCC'),
'var_only2': pyfastaq.sequences.Fasta('var_only2', 'TTTT'),
}
self.assertEqual(expected_seqs_dict, refdata.sequences)
expected_rename_dict = {
'pres!abs3': 'pres_abs3',
'pres\'abs1': 'pres_abs1',
'pres_abs1': 'pres_abs1_1',
'var,only1': 'var_only1',
'var:only1': 'var_only1_1',
'var_only1': 'var_only1_2',
}
self.assertEqual(expected_rename_dict, refdata.rename_dict)
def test_sequence_type(self):
'''Test sequence_type'''
fasta_in = os.path.join(data_dir, 'reference_data_sequence_type.in.fa')
tsv_in = os.path.join(data_dir, 'reference_data_sequence_type.in.tsv')
refdata = reference_data.ReferenceData([fasta_in], [tsv_in])
tests = [
('gene', ('p', False)),
('gene.var_only', ('p', True)),
('noncoding', ('n', False)),
('noncoding.var_only', ('n', True)),
]
for name, expected in tests:
self.assertEqual(expected, refdata.sequence_type(name))
def test_sequence(self):
'''Test sequence'''
fasta_in = os.path.join(data_dir, 'reference_data_sequence.in.fa')
tsv_in = os.path.join(data_dir, 'reference_data_sequence.in.tsv')
expected = pyfastaq.sequences.Fasta('seq1', 'ATGTTTTAA')
refdata = reference_data.ReferenceData([fasta_in], [tsv_in])
self.assertEqual(expected, refdata.sequence('seq1'))
def test_all_non_wild_type_variants(self):
'''Test all_non_wild_type_variants'''
tsv_file = os.path.join(data_dir, 'reference_data_test_all_non_wild_type_variants.tsv')
fasta_in = os.path.join(data_dir, 'reference_data_test_all_non_wild_type_variants.ref.fa')
refdata = reference_data.ReferenceData([fasta_in], [tsv_file])
v1 = sequence_metadata.SequenceMetadata('var_only_gene\t1\t1\tP3Q\t.\tref has wild type P')
v2 = sequence_metadata.SequenceMetadata('var_only_gene\t1\t1\tG4I\t.\tref has wild type F')
v3 = sequence_metadata.SequenceMetadata('var_only_gene\t1\t1\tI5V\t.\tref has variant V instead of I')
v4 = sequence_metadata.SequenceMetadata('var_only_gene\t1\t1\tF6I\t.\tref has wild type F')
p1 = sequence_metadata.SequenceMetadata('presence_absence_gene\t1\t0\tN2I\t.\tref has wild type N')
p2 = sequence_metadata.SequenceMetadata('presence_absence_gene\t1\t0\tA4G\t.\tref has variant G instead of A')
n1 = sequence_metadata.SequenceMetadata('non_coding\t0\t0\tA2C\t.\tref has wild type A')
n2 = sequence_metadata.SequenceMetadata('non_coding\t0\t0\tC4T\t.\tref has variant T instead of C')
var_only_expected = {
'n': {},
'p': {2: {v1}, 3: {v2}, 4: {v3}, 5: {v4}}
}
pres_abs_expected = {
'n': {},
'p': {1: {p1}, 3: {p2}},
}
non_coding_expected = {
'n': {1: {n1}, 3: {n2}},
'p': {}
}
self.assertEqual(var_only_expected, refdata.all_non_wild_type_variants('var_only_gene'))
self.assertEqual(pres_abs_expected, refdata.all_non_wild_type_variants('presence_absence_gene'))
self.assertEqual(non_coding_expected, refdata.all_non_wild_type_variants('non_coding'))
self.assertEqual({'n': {}, 'p': {}}, refdata.all_non_wild_type_variants('not_a_known_sequence'))
def test_write_cluster_allocation_file(self):
'''Test write_cluster_allocation_file'''
clusters = {
'0': {'cluster0.1', 'cluster0.2'},
'1': {'cluster1.1', 'cluster1.2'},
'11': {'cluster11.1', 'cluster11.2'},
'2': {'cluster2.1'}
}
tmpfile = 'tmp.test_write_cluster_allocation_file.out'
reference_data.ReferenceData.write_cluster_allocation_file(clusters, tmpfile)
expected_file = os.path.join(data_dir, 'reference_data_test_write_cluster_allocation_file.expected')
self.assertTrue(filecmp.cmp(expected_file, tmpfile, shallow=False))
os.unlink(tmpfile)
def test_cluster_with_cdhit(self):
'''Test cluster_with_cd_hit'''
fasta_in = os.path.join(data_dir, 'reference_data_test_cluster_with_cdhit.in.fa')
tsv_in = os.path.join(data_dir, 'reference_data_test_cluster_with_cdhit.in.tsv')
refdata = reference_data.ReferenceData([fasta_in], [tsv_in])
outprefix = 'tmp.test_cluster_with_cdhit'
expected_clusters = {
'0': {'noncoding1'},
'1': {'presence_absence1', 'presence_absence2'},
'2': {'presence_absence3', 'presence_absence4'},
}
got_clusters = refdata.cluster_with_cdhit(outprefix)
self.assertEqual(expected_clusters, got_clusters)
expected_clusters_file = os.path.join(data_dir, 'reference_data_test_cluster_with_cdhit.expected.clusters.tsv')
got_clusters_file = outprefix + '.clusters.tsv'
self.assertTrue(filecmp.cmp(expected_clusters_file, got_clusters_file, shallow=False))
os.unlink(got_clusters_file)
os.unlink(outprefix + '.all.fa')
os.unlink(outprefix + '.gene.fa')
os.unlink(outprefix + '.gene.varonly.fa')
os.unlink(outprefix + '.noncoding.fa')
os.unlink(outprefix + '.noncoding.varonly.fa')
def test_cluster_w_cdhit_clstrs_file(self):
'''Test cluster_with_cd_hit clusters from file'''
fasta_in = os.path.join(data_dir, 'reference_data_cluster_w_cdhit_clstrs_file.in.fa')
meta_tsv_in = os.path.join(data_dir, 'reference_data_cluster_w_cdhit_clstrs_file.in.meta.tsv')
cluster_tsv_in = os.path.join(data_dir, 'reference_data_cluster_w_cdhit_clstrs_file.in.clstrs.tsv')
refdata = reference_data.ReferenceData([fasta_in], [meta_tsv_in])
outprefix = 'tmp.test_cluster_with_cdhit_clusters_in_file'
expected_clusters = {
'0': {'presence_absence1', 'presence_absence3', 'presence_absence4'},
'1': {'presence_absence2'},
'2': {'noncoding1'},
'3': {'noncoding2'},
}
got_clusters = refdata.cluster_with_cdhit(outprefix, clusters_file=cluster_tsv_in)
self.assertEqual(expected_clusters, got_clusters)
expected_clusters_file = os.path.join(data_dir, 'reference_data_cluster_w_cdhit_clstrs_file.expect.clstrs.tsv')
got_clusters_file = outprefix + '.clusters.tsv'
self.assertTrue(filecmp.cmp(expected_clusters_file, got_clusters_file, shallow=False))
os.unlink(got_clusters_file)
os.unlink(outprefix + '.all.fa')
os.unlink(outprefix + '.gene.fa')
os.unlink(outprefix + '.gene.varonly.fa')
os.unlink(outprefix + '.noncoding.fa')
os.unlink(outprefix + '.noncoding.varonly.fa')
def test_cluster_w_cdhit_nocluster(self):
'''Test cluster_with_cd_hit do not run cdhit'''
fasta_in = os.path.join(data_dir, 'reference_data_cluster_w_cdhit_nocluster.in.fa')
tsv_in = os.path.join(data_dir, 'reference_data_cluster_w_cdhit_nocluster.in.tsv')
refdata = reference_data.ReferenceData([fasta_in], [tsv_in])
outprefix = 'tmp.test_cluster_with_cdhit_nocluster'
expected_clusters = {
'0': {'noncoding1'},
'1': {'noncoding2'},
'2': {'presence_absence1'},
'3': {'presence_absence2'},
'4': {'presence_absence3'},
'5': {'presence_absence4'},
}
got_clusters = refdata.cluster_with_cdhit(outprefix, nocluster=True)
self.assertEqual(expected_clusters, got_clusters)
expected_clusters_file = os.path.join(data_dir, 'reference_data_cluster_w_cdhit_nocluster.expect.tsv')
got_clusters_file = outprefix + '.clusters.tsv'
self.assertTrue(filecmp.cmp(expected_clusters_file, got_clusters_file, shallow=False))
os.unlink(got_clusters_file)
os.unlink(outprefix + '.all.fa')
os.unlink(outprefix + '.gene.fa')
os.unlink(outprefix + '.gene.varonly.fa')
os.unlink(outprefix + '.noncoding.fa')
os.unlink(outprefix + '.noncoding.varonly.fa')
def test_write_seqs_to_fasta(self):
'''Test write_seqs_to_fasta'''
fasta_in = os.path.join(data_dir, 'reference_data_test_write_seqs_to_fasta.in.fa')
tsv_in = os.path.join(data_dir, 'reference_data_test_write_seqs_to_fasta.in.tsv')
refdata = reference_data.ReferenceData([fasta_in], [tsv_in])
expected_outfile = os.path.join(data_dir, 'reference_data_test_write_seqs_to_fasta.expected.fa')
tmpfile = 'tmp.test.reference_data.write_seqs_to_fasta.out.fa'
refdata.write_seqs_to_fasta(tmpfile, {'seq1', 'seq4', 'seq5'})
self.assertTrue(filecmp.cmp(expected_outfile, tmpfile, shallow=False))
os.unlink(tmpfile)
|
gpl-3.0
|
stepos01/ns3-lr-wpan-mlme
|
src/visualizer/visualizer/base.py
|
160
|
3799
|
import ns.point_to_point
import ns.csma
import ns.wifi
import ns.bridge
import ns.internet
import ns.mesh
import ns.wimax
import ns.lte
import gobject
import os.path
import sys
PIXELS_PER_METER = 3.0 # pixels-per-meter, at 100% zoom level
class PyVizObject(gobject.GObject):
__gtype_name__ = "PyVizObject"
def tooltip_query(self, tooltip):
tooltip.set_text("TODO: tooltip for %r" % self)
class Link(PyVizObject):
pass
class InformationWindow(object):
def update(self):
raise NotImplementedError
class NetDeviceTraits(object):
def __init__(self, is_wireless=None, is_virtual=False):
assert is_virtual or is_wireless is not None
self.is_wireless = is_wireless
self.is_virtual = is_virtual
netdevice_traits = {
ns.point_to_point.PointToPointNetDevice: NetDeviceTraits(is_wireless=False),
ns.csma.CsmaNetDevice: NetDeviceTraits(is_wireless=False),
ns.wifi.WifiNetDevice: NetDeviceTraits(is_wireless=True),
ns.bridge.BridgeNetDevice: NetDeviceTraits(is_virtual=True),
ns.internet.LoopbackNetDevice: NetDeviceTraits(is_virtual=True, is_wireless=False),
ns.mesh.MeshPointDevice: NetDeviceTraits(is_virtual=True),
ns.wimax.SubscriberStationNetDevice: NetDeviceTraits(is_wireless=True),
ns.wimax.BaseStationNetDevice: NetDeviceTraits(is_wireless=True),
ns.lte.LteUeNetDevice: NetDeviceTraits(is_wireless=True),
ns.lte.LteEnbNetDevice: NetDeviceTraits(is_wireless=True),
}
def lookup_netdevice_traits(class_type):
try:
return netdevice_traits[class_type]
except KeyError:
sys.stderr.write("WARNING: no NetDeviceTraits registered for device type %r; "
"I will assume this is a non-virtual wireless device, "
"but you should edit %r, variable 'netdevice_traits',"
" to make sure.\n" % (class_type.__name__, __file__))
t = NetDeviceTraits(is_virtual=False, is_wireless=True)
netdevice_traits[class_type] = t
return t
def transform_distance_simulation_to_canvas(d):
return d*PIXELS_PER_METER
def transform_point_simulation_to_canvas(x, y):
return x*PIXELS_PER_METER, y*PIXELS_PER_METER
def transform_distance_canvas_to_simulation(d):
return d/PIXELS_PER_METER
def transform_point_canvas_to_simulation(x, y):
return x/PIXELS_PER_METER, y/PIXELS_PER_METER
plugins = []
plugin_modules = {}
def register_plugin(plugin_init_func, plugin_name=None, plugin_module=None):
"""
Register a plugin.
    @param plugin_init_func: a callable object that will be invoked whenever a
    Visualizer object is created, like this: plugin_init_func(visualizer)
"""
assert callable(plugin_init_func)
plugins.append(plugin_init_func)
if plugin_module is not None:
plugin_modules[plugin_name] = plugin_module
plugins_loaded = False
def load_plugins():
global plugins_loaded
if plugins_loaded:
return
plugins_loaded = True
plugins_dir = os.path.join(os.path.dirname(__file__), 'plugins')
old_path = list(sys.path)
sys.path.insert(0, plugins_dir)
for filename in os.listdir(plugins_dir):
name, ext = os.path.splitext(filename)
if ext != '.py':
continue
try:
plugin_module = __import__(name)
except ImportError, ex:
print >> sys.stderr, "Could not load plugin %r: %s" % (filename, str(ex))
continue
try:
plugin_func = plugin_module.register
except AttributeError:
print >> sys.stderr, "Plugin %r has no 'register' function" % name
else:
#print >> sys.stderr, "Plugin %r registered" % name
register_plugin(plugin_func, name, plugin_module)
sys.path = old_path
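# Illustrative plugin skeleton (a sketch, not part of this module): a file
# dropped into the plugins/ directory only needs a module-level 'register'
# function; load_plugins() above imports the module, finds 'register', and
# passes it to register_plugin(), after which it is invoked as
# register(visualizer) whenever a Visualizer is created.
#
# def register(viz):
#     viz.set_title("my plugin loaded")   # hypothetical Visualizer method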
|
gpl-2.0
|
TedaLIEz/sentry
|
src/sentry/rules/conditions/tagged_event.py
|
25
|
2665
|
"""
sentry.rules.conditions.tagged_event
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from collections import OrderedDict
from django import forms
from sentry.rules.conditions.base import EventCondition
class MatchType(object):
EQUAL = 'eq'
NOT_EQUAL = 'ne'
STARTS_WITH = 'sw'
ENDS_WITH = 'ew'
CONTAINS = 'co'
NOT_CONTAINS = 'nc'
MATCH_CHOICES = OrderedDict([
(MatchType.EQUAL, 'equals'),
(MatchType.NOT_EQUAL, 'does not equal'),
(MatchType.STARTS_WITH, 'starts with'),
(MatchType.ENDS_WITH, 'ends with'),
(MatchType.CONTAINS, 'contains'),
(MatchType.NOT_CONTAINS, 'does not contain'),
])
class TaggedEventForm(forms.Form):
key = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'key'}))
match = forms.ChoiceField(MATCH_CHOICES.items(), widget=forms.Select(
attrs={'style': 'width:150px'},
))
value = forms.CharField(widget=forms.TextInput(attrs={'placeholder': 'value'}))
class TaggedEventCondition(EventCondition):
form_cls = TaggedEventForm
    label = "An event's tags match {key} {match} {value}"
def passes(self, event, state, **kwargs):
key = self.get_option('key')
match = self.get_option('match')
value = self.get_option('value')
if not (key and match and value):
return False
value = value.lower()
key = key.lower()
tags = (v.lower() for k, v in event.get_tags() if k.lower() == key)
if match == MatchType.EQUAL:
for t_value in tags:
if t_value == value:
return True
return False
elif match == MatchType.NOT_EQUAL:
for t_value in tags:
if t_value == value:
return False
return True
elif match == MatchType.STARTS_WITH:
for t_value in tags:
if t_value.startswith(value):
return True
return False
elif match == MatchType.ENDS_WITH:
for t_value in tags:
if t_value.endswith(value):
return True
return False
elif match == MatchType.CONTAINS:
for t_value in tags:
if value in t_value:
return True
return False
elif match == MatchType.NOT_CONTAINS:
for t_value in tags:
if value in t_value:
return False
return True
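# Illustrative (a sketch, not part of the original module): with options
# key='browser', match=MatchType.CONTAINS and value='Chrome', an event
# tagged {'browser': 'Chrome 41.0'} passes -- both the option value and the
# tag values are lower-cased before the substring test above.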
|
bsd-3-clause
|
BT-jmichaud/account-financial-reporting
|
account_financial_report_webkit/__openerp__.py
|
18
|
8410
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Authors: Nicolas Bessi, Guewen Baconnier
# Copyright Camptocamp SA 2011
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Financial Reports - Webkit',
'description': """
Financial Reports - Webkit
==========================
This module adds or replaces the following standard OpenERP financial reports:
- General ledger
- Trial Balance (simple or comparative view)
- Partner ledger
- Partner balance
- Open invoices report
- Aged Partner Balance
Main improvements per report:
-----------------------------
The General ledger: details of all entries posted in your books sorted by
account.
* Filter by account is available in the wizard (no need to go to the
Chart of Accounts to do this anymore) or by View account (the report
will display all regular children accounts) i.e. you can select all
P&L accounts.
* The report only prints accounts with moves OR with a non
null balance. No more endless report with empty accounts (field:
display account is hidden)
* initial balance computation on the fly if no open entry posted
* Thanks to a new checkbox in the account form, you will have the
possibility to centralize any account you like. This means you do
not want to see all entries posted under the account ‘VAT on sales’;
you will only see aggregated amounts by periods.
* Counterpart account is displayed for each transaction (3 accounts max.)
to ease searching.
  * Better ergonomics on the wizard: important information is displayed in
the top part, filters are in the middle, and options are in the
bottom or on a separate tab. There is more specific filtering on
separate tabs. No more unique wizard layout for all financial
reports (we have removed the journal tab for the GL report)
* improved report style
The partner ledger: details of entries relative to payable &
receivable accounts posted in your books sorted by account and
partner.
* Filter by partner now available
  * Now you can see Accounts then Partner with subtotals for each
    account, allowing you to check your data against the trial balance and
    partner balance for instance. Accounts are ordered in the same way as
    in the Chart of Accounts
  * Periods have been added (the date alone is not filled in, since a date
    can fall outside its period)
* Reconciliation code added
* Subtotal by account
* Alphabetical sorting (same as in partner balance)
Open invoice report : other version of the partner ledger showing
unreconciled / partially reconciled entries.
* Possibility to print unreconciled transactions only at any date in
the past (thanks to the new field: `last_rec_date` which computes
the last move line reconciliation date). No more pain to get open
invoices at the last closing date.
* no initial balance computed because the report shows open invoices
from previous years.
The Trial balance: list of accounts with balances
* You can either see the columns: initial balance, debit, credit,
end balance or compare balances over 4 periods of your choice
* You can select the "opening" filter to get the opening trial balance
only
* If you create an extra virtual chart (using consolidated account) of
accounts for your P&L and your balance sheet, you can print your
statutory accounts (with comparison over years for instance)
* If you compare 2 periods, you will get the differences in values and
in percent
The Partner balance: list of account with balances
* Subtotal by account and partner
  * Alphabetical sorting (same as in partner ledger)
Aged Partner Balance: Summary of aged open amount per partner
This report is an accounting tool that helps with various tasks,
such as credit control or computing partner balance provisions.
The aged balance report allows you to print balances per partner
like the trial balance but add an extra information :
* It will split balances into due amounts
  (due date not reached at the end date of the report) and overdue amounts
Overdue data are also split by period.
* For each partner following columns will be displayed:
  * Total balance (all figures must match the partner balance report at
    the same date; this column equals the sum of all following columns)
* Due
* Overdue <= 30 days
* Overdue <= 60 days
* Overdue <= 90 days
* Overdue <= 120 days
* Older
Hypothesis / Constraints of aged partner balance
 * Overdue columns are by default based on a fixed 30-day range. This can
   be changed by modifying the RANGES constant
* All data will be displayed in company currency
 * For partial payments, the payment must appear in the same columns as the
   invoice (except with multiple payment terms)
* Data granularity: partner (will not display figures at invoices level)
* The report aggregate data per account with sub-totals
 * Initial balance must be calculated the same way as
   the partner balance, ignoring the opening entry
   in the special period (idem open invoice report)
* Only accounts with internal type payable or receivable are considered
(idem open invoice report)
* If maturity date is null then use move line date
Limitations:
------------
To run properly, this module requires the `wkhtmltopdf` library for
the pdf rendering (the library path must be set in the System Parameter
`webkit_path`).
Initial balances in these reports are based either on opening entry
posted in the opening period or computed on the fly. So make sure
that your past accounting opening entries are in an opening period.
Initial balances are not computed when using the Date filter (since a
date can be outside its logical period, and the initial balance could
be different when computed by date or by initial balance for the
period). The opening period is assumed to be Jan. 1st of the year
with an opening flag, and the first period of the year must also start
on Jan. 1st.
Totals for amounts in currencies are effective if the partner belongs to
an account with a secondary currency.
HTML headers and footers are deactivated for these reports because of
an issue in wkhtmltopdf
(http://code.google.com/p/wkhtmltopdf/issues/detail?id=656) Instead,
the header and footer are created as text with arguments passed to
wkhtmltopdf. The texts are defined inside the report classes.
""",
'version': '1.1.0',
'author': "Camptocamp,Odoo Community Association (OCA)",
'license': 'AGPL-3',
'category': 'Finance',
'website': 'http://www.camptocamp.com',
'images': [
'images/ledger.png', ],
'depends': ['account',
'report_webkit'],
'demo': [],
'data': ['account_view.xml',
'data/financial_webkit_header.xml',
'report/report.xml',
'wizard/wizard.xml',
'wizard/balance_common_view.xml',
'wizard/general_ledger_wizard_view.xml',
'wizard/partners_ledger_wizard_view.xml',
'wizard/trial_balance_wizard_view.xml',
'wizard/partner_balance_wizard_view.xml',
'wizard/open_invoices_wizard_view.xml',
'wizard/aged_partner_balance_wizard.xml',
'wizard/print_journal_view.xml',
'report_menus.xml',
],
# tests order matter
'test': ['tests/general_ledger.yml',
'tests/partner_ledger.yml',
'tests/trial_balance.yml',
'tests/partner_balance.yml',
'tests/open_invoices.yml',
'tests/aged_trial_balance.yml'],
# 'tests/account_move_line.yml'
'active': False,
'installable': True,
'application': True,
}
|
agpl-3.0
|
wolverineav/neutron
|
neutron/extensions/bgp.py
|
3
|
8616
|
# Copyright 2016 Hewlett Packard Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from neutron._i18n import _
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import resource_helper as rh
from neutron.common import exceptions
from neutron.services.bgp.common import constants as bgp_consts
BGP_EXT_ALIAS = 'bgp'
BGP_SPEAKER_RESOURCE_NAME = 'bgp-speaker'
BGP_SPEAKER_BODY_KEY_NAME = 'bgp_speaker'
BGP_PEER_BODY_KEY_NAME = 'bgp_peer'
RESOURCE_ATTRIBUTE_MAP = {
BGP_SPEAKER_RESOURCE_NAME + 's': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True, 'primary_key': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': attr.NAME_MAX_LEN},
'is_visible': True, 'default': ''},
'local_as': {'allow_post': True, 'allow_put': False,
'validate': {'type:range': (bgp_consts.MIN_ASNUM,
bgp_consts.MAX_ASNUM)},
'is_visible': True, 'default': None,
'required_by_policy': False,
'enforce_policy': False},
'ip_version': {'allow_post': True, 'allow_put': False,
'validate': {'type:values': [4, 6]},
'is_visible': True, 'default': None,
'required_by_policy': False,
'enforce_policy': False},
'tenant_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': False,
'validate': {'type:string': attr.TENANT_ID_MAX_LEN},
'is_visible': True},
'peers': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid_list': None},
'is_visible': True, 'default': [],
'required_by_policy': False,
'enforce_policy': True},
'networks': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid_list': None},
'is_visible': True, 'default': [],
'required_by_policy': False,
'enforce_policy': True},
'advertise_floating_ip_host_routes': {
'allow_post': True,
'allow_put': True,
'convert_to': attr.convert_to_boolean,
'validate': {'type:boolean': None},
'is_visible': True, 'default': True,
'required_by_policy': False,
'enforce_policy': True},
'advertise_tenant_networks': {
'allow_post': True,
'allow_put': True,
'convert_to': attr.convert_to_boolean,
'validate': {'type:boolean': None},
'is_visible': True, 'default': True,
'required_by_policy': False,
'enforce_policy': True},
},
'bgp-peers': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True, 'primary_key': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': attr.NAME_MAX_LEN},
'is_visible': True, 'default': ''},
'peer_ip': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'validate': {'type:ip_address': None},
'is_visible': True},
'remote_as': {'allow_post': True, 'allow_put': False,
'validate': {'type:range': (bgp_consts.MIN_ASNUM,
bgp_consts.MAX_ASNUM)},
'is_visible': True, 'default': None,
'required_by_policy': False,
'enforce_policy': False},
'auth_type': {'allow_post': True, 'allow_put': False,
'required_by_policy': True,
'validate': {'type:values':
bgp_consts.SUPPORTED_AUTH_TYPES},
'is_visible': True},
'password': {'allow_post': True, 'allow_put': True,
'required_by_policy': True,
'validate': {'type:string_or_none': None},
'is_visible': False,
'default': None},
'tenant_id': {'allow_post': True, 'allow_put': False,
'required_by_policy': False,
'validate': {'type:string': attr.TENANT_ID_MAX_LEN},
'is_visible': True}
}
}
# Dynamic Routing Exceptions
class BgpSpeakerNotFound(exceptions.NotFound):
message = _("BGP speaker %(id)s could not be found.")
class BgpPeerNotFound(exceptions.NotFound):
message = _("BGP peer %(id)s could not be found.")
class BgpPeerNotAuthenticated(exceptions.NotFound):
message = _("BGP peer %(bgp_peer_id)s not authenticated.")
class BgpSpeakerPeerNotAssociated(exceptions.NotFound):
message = _("BGP peer %(bgp_peer_id)s is not associated with "
"BGP speaker %(bgp_speaker_id)s.")
class BgpSpeakerNetworkNotAssociated(exceptions.NotFound):
message = _("Network %(network_id)s is not associated with "
"BGP speaker %(bgp_speaker_id)s.")
class BgpSpeakerNetworkBindingError(exceptions.Conflict):
message = _("Network %(network_id)s is already bound to BgpSpeaker "
"%(bgp_speaker_id)s.")
class NetworkNotBound(exceptions.NotFound):
message = _("Network %(network_id)s is not bound to a BgpSpeaker.")
class DuplicateBgpPeerIpException(exceptions.Conflict):
_message = _("BGP Speaker %(bgp_speaker_id)s is already configured to "
"peer with a BGP Peer at %(peer_ip)s, it cannot peer with "
"BGP Peer %(bgp_peer_id)s.")
class InvalidBgpPeerMd5Authentication(exceptions.BadRequest):
message = _("A password must be supplied when using auth_type md5.")
class NetworkNotBoundForIpVersion(NetworkNotBound):
message = _("Network %(network_id)s is not bound to a IPv%(ip_version)s "
"BgpSpeaker.")
class Bgp(extensions.ExtensionDescriptor):
@classmethod
def get_name(cls):
return "Neutron BGP Dynamic Routing Extension"
@classmethod
def get_alias(cls):
return BGP_EXT_ALIAS
@classmethod
def get_description(cls):
return("Discover and advertise routes for Neutron prefixes "
"dynamically via BGP")
@classmethod
def get_updated(cls):
return "2014-07-01T15:37:00-00:00"
@classmethod
def get_resources(cls):
plural_mappings = rh.build_plural_mappings(
{}, RESOURCE_ATTRIBUTE_MAP)
attr.PLURALS.update(plural_mappings)
action_map = {BGP_SPEAKER_RESOURCE_NAME:
{'add_bgp_peer': 'PUT',
'remove_bgp_peer': 'PUT',
'add_gateway_network': 'PUT',
'remove_gateway_network': 'PUT',
'get_advertised_routes': 'GET'}}
exts = rh.build_resource_info(plural_mappings,
RESOURCE_ATTRIBUTE_MAP,
BGP_EXT_ALIAS,
action_map=action_map)
return exts
def get_extended_resources(self, version):
if version == "2.0":
return RESOURCE_ATTRIBUTE_MAP
else:
return {}
def update_attributes_map(self, attributes):
super(Bgp, self).update_attributes_map(
attributes, extension_attrs_map=RESOURCE_ATTRIBUTE_MAP)
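# Illustrative request body accepted by this extension (a sketch derived
# from RESOURCE_ATTRIBUTE_MAP above; the concrete values are hypothetical):
#
#   POST /v2.0/bgp-speakers
#   {"bgp_speaker": {"name": "gw-speaker",
#                    "local_as": 64512,
#                    "ip_version": 4,
#                    "tenant_id": "<tenant uuid>"}}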
|
apache-2.0
|
charlesbrandt/medley
|
medley/playlist.py
|
1
|
15002
|
from __future__ import print_function
from __future__ import absolute_import
from builtins import str
from builtins import next
from builtins import object
import os, codecs, re
from .helpers import save_json, load_json
from .content import Content
# Journal and Timestamp are used by Playlist.log_current below; these import
# paths are assumed from the docstring's reference to moments.journal.
from moments.journal import Journal
from moments.timestamp import Timestamp
class Position(object):
"""
more than just a number of an item
or index of a list
we just want to hold a position and length
from this we can determine the number for previous, next
and provide increment and decrement options
loop is tracked here
error checking and representing positions
"""
def __init__(self, length=0, position=0, loop=True):
self._index = position
self._length = length
self.loop = loop
def __int__(self):
return self.position
def __str__(self):
return str(self.position)
    def __repr__(self):
        # __repr__ must return a string, not an int
        return repr(self.position)
def _get_length(self):
return self._length
def _set_length(self, l):
self.change_length(l)
length = property(_get_length, _set_length)
def _get_index(self):
return self._index
def _set_index(self, p):
self._index = self.check(p)
position = property(_get_index, _set_index)
def end(self):
"""
the value for the last object
"""
return self.length-1
def at_end(self):
"""
return a boolean value for if our position is equal to the end
"""
return self.position == self.end()
def change_length(self, length):
"""
position needs to know how long the list is
we can change that later if we don't know the length
"""
self._length = length
#go ahead one just to make sure we weren't beyond the new length
self.decrement()
self.increment()
def check(self, position):
"""
        accept a value for a position
check to make sure it falls within the range of acceptable values
if greater, go to the end
if less than 0, go to the beginning
could consider doing a mod operation and taking the remainder as
the new position.
"""
if position < 0:
return 0
elif position >= 0 and position <= self.end():
return int(position)
else:
return self.end()
def next(self, value=1):
"""
gives the position for the next item
but does not actually increment the index
"""
if self.position+value >= self.length:
if self.loop:
return 0
else:
#staying at the end
#return self.position
#return self.length-1
return self.end()
else:
return self.position+value
def previous(self, value=1):
"""
        gives the position for the previous item
        but does not actually decrement the index
"""
if self.position-value < 0:
if self.loop:
#return self.length-1
return self.end()
else:
#staying at the beginning
#(should be 0 already)
return 0
else:
return self.position-value
def increment(self, value=1):
"""
changes the actual index variable
"""
self.position = self.next(value)
return self.position
def decrement(self, value=1):
"""
changes the actual index variable
"""
self.position = self.previous(value)
return self.position
def debug(self):
"""
return a string representation of current state
"""
result = ''
result += "Position index: %s\n" % self._index
result += "Length: %s\n" % self._length
result += "Loop: %s\n" % self.loop
return result
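# Illustrative behaviour of Position (a sketch, not part of the original):
#
# p = Position(length=3, loop=True)
# p.increment(); p.increment(); p.increment()
# assert int(p) == 0    # wrapped past the end back to the start
# p.position = 5        # out-of-range values are clamped by check()
# assert int(p) == 2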
#previously: (too generic)
#class Items(list):
class PositionList(list):
"""
generic list with a position associated with it
position will get updated with call to update()
otherwise...
changing the position is left to the caller
"""
def __init__(self, items=[], position=0):
list.__init__(self)
self.extend(items)
self._position = Position(len(items), position)
#quick way to access the current item directly
#rather than having get return the value
#if items:
# self.current = self.get()
#else:
# #if nothing was sent, be sure to initialize current later!
# self.current = None
#special case for get_next...
#if we're new, return 0
#otherwise... all other rules apply
self.new = True
#wrap position object, so that we can assign a new position to the list
#as though it were an attribute.
#this simplifies the interface to the list of items.
def _get_position(self):
return self._position
def _set_position(self, p):
self.go(p)
position = property(_get_position, _set_position)
#aka get_current?
#def get(self, position=None):
def current(self):
"""
shortcut for get() without a specific position passed in
"""
return self.get()
def get(self, position=None):
"""
get calls will not change our position
"""
#make sure position's length is always current:
self.update_length()
#print "Received position: %s" % position
#print "Current position: %s" % self._position
#print "Length: %s" % len(self)
#should we update current here? or use current?
if position is None:
#use our current position
return self[int(self._position)]
else:
#checking if position is out of range here:
return self[self._position.check(position)]
#changing the interface to be the same as it is with Position object:
#def get_previous(self):
def previous(self):
"""
get the previous item in the list without changing position
"""
return self.get(self._position.previous())
#def get_next(self):
def __next__(self):
"""
get the next item in the list without changing position
"""
if self.new:
self.new = False
return self.get()
else:
            # call Position.next() directly; Position is not an iterator,
            # so builtins.next(self._position) would raise TypeError
            return self.get(self._position.next())
def go(self, position=None):
"""
go calls will update the local position object
"""
item = self.get(position)
        if position is not None:
#whew! this is a tricky line...
#setting the position object's internal position:
self._position.position = position
#self.current = item
print(self._position.debug())
print("passed position: %s" % position)
return item
#changing the interface to be the same as it is with Position object:
#def go_next(self):
def increment(self):
"""
go to the next item in the list (and change our position accordingly)
"""
        return self.go(self._position.next())
#def go_previous(self):
def decrement(self):
"""
go to the previous item in the list
(and change our position accordingly)
"""
return self.go(self._position.previous())
#maybe rename to update_length to avoid confusion with replace functionality
#def update(self):
def update_length(self):
"""
update the position so it knows our new length
should be called any time items are added or removed to the list
"""
self._position.change_length(len(self))
def replace(self, item):
"""
replace the item in the current position
with the item passed in
"""
self[int(self._position)] = item
def clear(self):
del self[:]
self.update_length()
#aka
## class Sources(Items):
class Playlist(PositionList):
"""
Similar to a collection in that it holds a group of Content objects,
but not geared toward a single source of content.
Also, not specific to any single playlist format (e.g. M3U).
Because it holds Content objects,
there is much more meta data available than a typical playlist
very similar concepts to old mindstream sources module:
/c/medley/medley/sources.py
A generic Playlist object
These may help:
http://docs.python.org/2/library/collections.html
Previously:
A collection of Source objects
and a destination path for the logs generated
aka Playlist, Medialist
consider the best way to handle Segments in a Content object
for Playlist use:
Separate copies of Content in the Playlist for each Segment?
-- be careful not to save that Content object back and overwrite all
previous segments
Playlist reorders list of Segments associated with Content
-- more difficult to split segments of one piece of content in between
segments of another piece of content, within a list
also:
when editing a segment, save changes to main json parent Content
"""
def __init__(self, items=[], log_path=None, debug=False):
PositionList.__init__(self, items)
if log_path is None:
self.log_path = '/c/logs/transfer'
else:
self.log_path = log_path
#whether or not to update a content's source json file
#or just make the changes to the list locally
#
#generally with a playlist you don't want to update the content source
#e.g. subtractively limiting content segments to only favorites...
# wouldn't want to remove those segments from the content source
# just from the current playlist
#
#this should not matter if a content object is edited directly
self.sync_contents = False
self.debug = debug
#save and load:
#use helpers
#save_json(destination, self[:])
#and
#Playlist(load_json(source)) #assuming json contains a list of Contents
#any other format should be used
def set_current(self, item):
"""
if we have item
set the position to be that item
(useful when selecting next item externally)
"""
self.go(self.index(item))
#no need to return anything... already have the item
def add_if_new(self, source):
if not self.has_path(source.path):
self.append(source)
return True
else:
print("Already have: %s" % source.path)
return False
def has_path(self, path):
"""
go through all of our items and see if we have the path
"""
found = False
for i in self:
#print "m3u path: %s" % i.path
#print "chk path: %s" % path
if str(i.path) == str(path):
found = True
break
return found
def save(self, destination):
"""
consider using ContentPointer object here.. (is it useful?)
"""
items = []
for content in self:
json_path = os.path.join(content.path, content.json_source)
items.append( [json_path, content.segment_id] )
save_json(destination, items)
#def load_playlist(fname):
def load(self, fname, all_contents={}):
"""
if you want to keep track of all contents loaded,
pass in a dictionary of all_contents...
load will update that with any new Content objects,
and reuse any existing objects from there
originally from medley.player.list_tree.load_playlist(fname)
expects the playlist to hold:
- the content source path
- the segment id
then loads the content from the source, and selects the correct segment
"""
self.clear()
items = load_json(fname)
#print items
contents = []
for item in items:
if self.debug:
print(item)
print("")
(json_source, segment_id) = item
if json_source in all_contents:
if self.debug:
print("Matched existing Content object with path: %s" % json_source)
content = all_contents[json_source]
else:
try:
if self.debug:
print("loading: %s" % json_source)
content = Content(json_source)
all_contents[json_source] = content
            except:
                print("removing item. could not load: %s" % json_source)
                # skip this entry; otherwise 'content' may be unbound below
                continue
#print json_source
try:
segment = content.get_segment(segment_id)
except:
raise ValueError("Could not locate content... is it still available locally?")
#print segment.to_dict()
#print ""
#print ""
contents.append(segment)
#return Playlist(contents)
self.extend(contents)
#update position_list so it knows
self.update_length()
    def sort_path(self):
        # 'sorter' was undefined here; sort by the stringified path, as the
        # commented-out line originally intended
        self.sort(key=lambda source: str(source.path))
def log_current(self, add_tags=[]):
"""
log that a play was just completed
this is very similar to osbrowser.node log_action?
could move into moments.journal
would need the log path, the file being logged (or file parent path)
and the entry to use
"""
entry = self.now_playing()
entry.tags.union(add_tags)
#log in default log directory
j = Journal()
now = Timestamp(now=True)
log_name = os.path.join(self.log_path , now.filename())
j.from_file(log_name)
j.update_entry(entry)
j.to_file()
# log in action.txt for current media's directory
cur_item = self.current()
parent_path = os.path.dirname(str(cur_item.path))
action = os.path.join(parent_path, 'action.txt')
j2 = Journal()
j2.from_file(action)
j2.update_entry(entry)
j2.to_file()
def now_playing(self):
"""
return an entry for what is playing
"""
cur_item = self.get()
return cur_item.as_moment(new_entry=True)
class ListTree(object):
"""
hold a hierarchy of playlists and collections
"""
def __init__(self):
#a place to keep track of all playlists and collections
#associated with the grouping
#can be hierarchical
self.playlists = []
#the goal is to store these for easy loading later
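# Illustrative round-trip (a sketch, not part of the original module);
# assumes Content objects whose path/json_source/segment_id are set:
#
# pl = Playlist()
# pl.load('shows.json')     # rebuilds Content segments from the saved list
# item = pl.current()       # item at the stored position
# pl.increment()            # advance (wraps around, via the Position object)
# pl.save('shows.json')     # writes [json_path, segment_id] pairs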
|
mit
|
mcarton/thefuck
|
tests/rules/test_fix_file.py
|
3
|
7028
|
# -*- coding: utf-8 -*-
import pytest
import os
from thefuck.rules.fix_file import match, get_new_command
from tests.utils import Command
# (script, file, line, col (or None), stdout, stderr)
tests = (
('gcc a.c', 'a.c', 3, 1, '',
"""
a.c: In function 'main':
a.c:3:1: error: expected expression before '}' token
}
^
"""),
('clang a.c', 'a.c', 3, 1, '',
"""
a.c:3:1: error: expected expression
}
^
"""),
('perl a.pl', 'a.pl', 3, None, '',
"""
syntax error at a.pl line 3, at EOF
Execution of a.pl aborted due to compilation errors.
"""),
('perl a.pl', 'a.pl', 2, None, '',
"""
Search pattern not terminated at a.pl line 2.
"""),
('sh a.sh', 'a.sh', 2, None, '',
"""
a.sh: line 2: foo: command not found
"""),
('zsh a.sh', 'a.sh', 2, None, '',
"""
a.sh:2: command not found: foo
"""),
('bash a.sh', 'a.sh', 2, None, '',
"""
a.sh: line 2: foo: command not found
"""),
('rustc a.rs', 'a.rs', 2, 5, '',
"""
a.rs:2:5: 2:6 error: unexpected token: `+`
a.rs:2 +
^
"""),
('cargo build', 'src/lib.rs', 3, 5, '',
"""
Compiling test v0.1.0 (file:///tmp/fix-error/test)
src/lib.rs:3:5: 3:6 error: unexpected token: `+`
src/lib.rs:3 +
^
Could not compile `test`.
To learn more, run the command again with --verbose.
"""),
('python a.py', 'a.py', 2, None, '',
"""
File "a.py", line 2
+
^
SyntaxError: invalid syntax
"""),
('python a.py', 'a.py', 8, None, '',
"""
Traceback (most recent call last):
File "a.py", line 8, in <module>
match("foo")
File "a.py", line 5, in match
m = re.search(None, command)
File "/usr/lib/python3.4/re.py", line 170, in search
return _compile(pattern, flags).search(string)
File "/usr/lib/python3.4/re.py", line 293, in _compile
raise TypeError("first argument must be string or compiled pattern")
TypeError: first argument must be string or compiled pattern
"""),
(u'python café.py', u'café.py', 8, None, '',
u"""
Traceback (most recent call last):
File "café.py", line 8, in <module>
match("foo")
File "café.py", line 5, in match
m = re.search(None, command)
File "/usr/lib/python3.4/re.py", line 170, in search
return _compile(pattern, flags).search(string)
File "/usr/lib/python3.4/re.py", line 293, in _compile
raise TypeError("first argument must be string or compiled pattern")
TypeError: first argument must be string or compiled pattern
"""),
('ruby a.rb', 'a.rb', 3, None, '',
"""
a.rb:3: syntax error, unexpected keyword_end
"""),
('lua a.lua', 'a.lua', 2, None, '',
"""
lua: a.lua:2: unexpected symbol near '+'
"""),
('fish a.sh', '/tmp/fix-error/a.sh', 2, None, '',
"""
fish: Unknown command 'foo'
/tmp/fix-error/a.sh (line 2): foo
^
"""),
('./a', './a', 2, None, '',
"""
awk: ./a:2: BEGIN { print "Hello, world!" + }
awk: ./a:2: ^ syntax error
"""),
('llc a.ll', 'a.ll', 1, 2, '',
"""
llc: a.ll:1:2: error: expected top-level entity
+
^
"""),
('go build a.go', 'a.go', 1, 2, '',
"""
can't load package:
a.go:1:2: expected 'package', found '+'
"""),
('make', 'Makefile', 2, None, '',
"""
bidule
make: bidule: Command not found
Makefile:2: recipe for target 'target' failed
make: *** [target] Error 127
"""),
('git st', '/home/martin/.config/git/config', 1, None, '',
"""
fatal: bad config file line 1 in /home/martin/.config/git/config
"""),
('node fuck.js asdf qwer', '/Users/pablo/Workspace/barebones/fuck.js', '2', 5, '',
"""
/Users/pablo/Workspace/barebones/fuck.js:2
conole.log(arg); // this should read console.log(arg);
^
ReferenceError: conole is not defined
at /Users/pablo/Workspace/barebones/fuck.js:2:5
at Array.forEach (native)
at Object.<anonymous> (/Users/pablo/Workspace/barebones/fuck.js:1:85)
at Module._compile (module.js:460:26)
at Object.Module._extensions..js (module.js:478:10)
at Module.load (module.js:355:32)
at Function.Module._load (module.js:310:12)
at Function.Module.runMain (module.js:501:10)
at startup (node.js:129:16)
at node.js:814:3
"""),
('pep8', './tests/rules/test_systemctl.py', 17, 80,
"""
./tests/rules/test_systemctl.py:17:80: E501 line too long (93 > 79 characters)
./tests/rules/test_systemctl.py:18:80: E501 line too long (103 > 79 characters)
./tests/rules/test_whois.py:20:80: E501 line too long (89 > 79 characters)
./tests/rules/test_whois.py:22:80: E501 line too long (83 > 79 characters)
""", ''),
('py.test', '/home/thefuck/tests/rules/test_fix_file.py', 218, None,
"""
monkeypatch = <_pytest.monkeypatch.monkeypatch object at 0x7fdb76a25b38>
test = ('fish a.sh', '/tmp/fix-error/a.sh', 2, None, '', "\\nfish: Unknown command 'foo'\\n/tmp/fix-error/a.sh (line 2): foo\\n ^\\n")
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_get_new_command(monkeypatch, test):
> mocker.patch('os.path.isfile', return_value=True)
E NameError: name 'mocker' is not defined
/home/thefuck/tests/rules/test_fix_file.py:218: NameError
""", ''),
)
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_match(mocker, monkeypatch, test):
mocker.patch('os.path.isfile', return_value=True)
monkeypatch.setenv('EDITOR', 'dummy_editor')
assert match(Command(stdout=test[4], stderr=test[5]))
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_no_editor(mocker, monkeypatch, test):
mocker.patch('os.path.isfile', return_value=True)
if 'EDITOR' in os.environ:
monkeypatch.delenv('EDITOR')
assert not match(Command(stdout=test[4], stderr=test[5]))
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_not_file(mocker, monkeypatch, test):
mocker.patch('os.path.isfile', return_value=False)
monkeypatch.setenv('EDITOR', 'dummy_editor')
assert not match(Command(stdout=test[4], stderr=test[5]))
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_get_new_command(mocker, monkeypatch, test):
    mocker.patch('os.path.isfile', return_value=True)
    monkeypatch.setenv('EDITOR', 'dummy_editor')
    cmd = Command(script=test[0], stdout=test[4], stderr=test[5])
    # The commented-out assertion used the stale Settings API; assert via the
    # current API instead, mirroring test_get_new_command_with_settings below.
    assert (get_new_command(cmd) ==
            u'dummy_editor {} +{} && {}'.format(test[1], test[2], test[0]))
@pytest.mark.parametrize('test', tests)
@pytest.mark.usefixtures('no_memoize')
def test_get_new_command_with_settings(mocker, monkeypatch, test, settings):
mocker.patch('os.path.isfile', return_value=True)
monkeypatch.setenv('EDITOR', 'dummy_editor')
cmd = Command(script=test[0], stdout=test[4], stderr=test[5])
settings.fixcolcmd = '{editor} {file} +{line}:{col}'
if test[3]:
assert (get_new_command(cmd) ==
u'dummy_editor {} +{}:{} && {}'.format(test[1], test[2], test[3], test[0]))
else:
assert (get_new_command(cmd) ==
u'dummy_editor {} +{} && {}'.format(test[1], test[2], test[0]))
|
mit
|
windskyer/nova
|
nova/tests/unit/virt/test_imagecache.py
|
63
|
7011
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova import block_device
from nova.compute import vm_states
from nova import context
from nova import objects
from nova import test
from nova.tests.unit import fake_instance
from nova.virt import imagecache
CONF = cfg.CONF
swap_bdm_128 = [block_device.BlockDeviceDict(
{'id': 1, 'instance_uuid': 'fake-instance',
'device_name': '/dev/sdb1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'guest_format': 'swap',
'disk_bus': 'scsi',
'volume_size': 128,
'boot_index': -1})]
swap_bdm_256 = [block_device.BlockDeviceDict(
{'id': 1, 'instance_uuid': 'fake-instance',
'device_name': '/dev/sdb1',
'source_type': 'blank',
'destination_type': 'local',
'delete_on_termination': True,
'guest_format': 'swap',
'disk_bus': 'scsi',
'volume_size': 256,
'boot_index': -1})]
class ImageCacheManagerTests(test.NoDBTestCase):
    def test_configuration_defaults(self):
self.assertEqual(2400, CONF.image_cache_manager_interval)
self.assertEqual('_base', CONF.image_cache_subdirectory_name)
self.assertTrue(CONF.remove_unused_base_images)
self.assertEqual(24 * 3600,
CONF.remove_unused_original_minimum_age_seconds)
def test_cache_manager(self):
cache_manager = imagecache.ImageCacheManager()
self.assertTrue(cache_manager.remove_unused_base_images)
self.assertRaises(NotImplementedError,
cache_manager.update, None, [])
self.assertRaises(NotImplementedError,
cache_manager._get_base)
base_images = cache_manager._list_base_images(None)
self.assertEqual([], base_images['unexplained_images'])
self.assertEqual([], base_images['originals'])
self.assertRaises(NotImplementedError,
cache_manager._age_and_verify_cached_images,
None, [], None)
def test_list_running_instances(self):
instances = [{'image_ref': '1',
'host': CONF.host,
'id': '1',
'uuid': '123',
'vm_state': '',
'task_state': ''},
{'image_ref': '2',
'host': CONF.host,
'id': '2',
'uuid': '456',
'vm_state': '',
'task_state': ''},
{'image_ref': '2',
'kernel_id': '21',
'ramdisk_id': '22',
'host': 'remotehost',
'id': '3',
'uuid': '789',
'vm_state': '',
'task_state': ''}]
all_instances = [fake_instance.fake_instance_obj(None, **instance)
for instance in instances]
image_cache_manager = imagecache.ImageCacheManager()
self.mox.StubOutWithMock(objects.block_device.BlockDeviceMappingList,
'get_by_instance_uuid')
ctxt = context.get_admin_context()
objects.block_device.BlockDeviceMappingList.get_by_instance_uuid(
ctxt, '123').AndReturn(swap_bdm_256)
objects.block_device.BlockDeviceMappingList.get_by_instance_uuid(
ctxt, '456').AndReturn(swap_bdm_128)
objects.block_device.BlockDeviceMappingList.get_by_instance_uuid(
ctxt, '789').AndReturn(swap_bdm_128)
self.mox.ReplayAll()
# The argument here should be a context, but it's mocked out
running = image_cache_manager._list_running_instances(ctxt,
all_instances)
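        # used_images maps image id -> (local_count, remote_count,
        # [instance names]) -- reading assumed from the assertions below.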
self.assertEqual(4, len(running['used_images']))
self.assertEqual((1, 0, ['instance-00000001']),
running['used_images']['1'])
self.assertEqual((1, 1, ['instance-00000002',
'instance-00000003']),
running['used_images']['2'])
self.assertEqual((0, 1, ['instance-00000003']),
running['used_images']['21'])
self.assertEqual((0, 1, ['instance-00000003']),
running['used_images']['22'])
self.assertIn('instance-00000001', running['instance_names'])
self.assertIn('123', running['instance_names'])
self.assertEqual(4, len(running['image_popularity']))
self.assertEqual(1, running['image_popularity']['1'])
self.assertEqual(2, running['image_popularity']['2'])
self.assertEqual(1, running['image_popularity']['21'])
self.assertEqual(1, running['image_popularity']['22'])
self.assertEqual(len(running['used_swap_images']), 2)
self.assertIn('swap_128', running['used_swap_images'])
self.assertIn('swap_256', running['used_swap_images'])
def test_list_resizing_instances(self):
instances = [{'image_ref': '1',
'host': CONF.host,
'id': '1',
'uuid': '123',
'vm_state': vm_states.RESIZED,
'task_state': None}]
all_instances = [fake_instance.fake_instance_obj(None, **instance)
for instance in instances]
image_cache_manager = imagecache.ImageCacheManager()
self.mox.StubOutWithMock(objects.block_device.BlockDeviceMappingList,
'get_by_instance_uuid')
ctxt = context.get_admin_context()
objects.block_device.BlockDeviceMappingList.get_by_instance_uuid(
ctxt, '123').AndReturn(swap_bdm_256)
self.mox.ReplayAll()
running = image_cache_manager._list_running_instances(ctxt,
all_instances)
self.assertEqual(1, len(running['used_images']))
self.assertEqual((1, 0, ['instance-00000001']),
running['used_images']['1'])
self.assertEqual(set(['instance-00000001', '123',
'instance-00000001_resize', '123_resize']),
running['instance_names'])
self.assertEqual(1, len(running['image_popularity']))
self.assertEqual(1, running['image_popularity']['1'])
|
gpl-2.0
|
Scriptkiddi/Ankipubsub-Client
|
pubsub/gui/auto_gen/publish_deck.py
|
1
|
4376
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'publish_deck.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_publishDeckForm(object):
def setupUi(self, publishDeckForm):
publishDeckForm.setObjectName(_fromUtf8("publishDeckForm"))
publishDeckForm.resize(400, 300)
self.groupBox = QtGui.QGroupBox(publishDeckForm)
self.groupBox.setGeometry(QtCore.QRect(9, 19, 381, 71))
self.groupBox.setTitle(_fromUtf8(""))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.comboBox = QtGui.QComboBox(self.groupBox)
self.comboBox.setGeometry(QtCore.QRect(0, 20, 191, 25))
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.label = QtGui.QLabel(self.groupBox)
self.label.setGeometry(QtCore.QRect(0, 0, 121, 16))
self.label.setObjectName(_fromUtf8("label"))
self.label_2 = QtGui.QLabel(self.groupBox)
self.label_2.setGeometry(QtCore.QRect(200, 0, 131, 20))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.publicName = QtGui.QLineEdit(self.groupBox)
self.publicName.setEnabled(False)
self.publicName.setGeometry(QtCore.QRect(200, 20, 181, 25))
self.publicName.setObjectName(_fromUtf8("publicName"))
self.pushButtonPublishDeck = QtGui.QPushButton(publishDeckForm)
self.pushButtonPublishDeck.setGeometry(QtCore.QRect(30, 260, 92, 27))
self.pushButtonPublishDeck.setObjectName(_fromUtf8("pushButtonPublishDeck"))
self.pushButtonCancel = QtGui.QPushButton(publishDeckForm)
self.pushButtonCancel.setGeometry(QtCore.QRect(280, 260, 92, 27))
self.pushButtonCancel.setObjectName(_fromUtf8("pushButtonCancel"))
self.checkBox = QtGui.QCheckBox(publishDeckForm)
self.checkBox.setGeometry(QtCore.QRect(20, 90, 271, 20))
self.checkBox.setObjectName(_fromUtf8("checkBox"))
self.checkBox_2 = QtGui.QCheckBox(publishDeckForm)
self.checkBox_2.setGeometry(QtCore.QRect(20, 150, 311, 20))
self.checkBox_2.setObjectName(_fromUtf8("checkBox_2"))
self.label_3 = QtGui.QLabel(publishDeckForm)
self.label_3.setGeometry(QtCore.QRect(20, 120, 121, 16))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.label_4 = QtGui.QLabel(publishDeckForm)
self.label_4.setGeometry(QtCore.QRect(20, 190, 111, 16))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.readPassword = QtGui.QLineEdit(publishDeckForm)
self.readPassword.setGeometry(QtCore.QRect(150, 110, 113, 25))
self.readPassword.setObjectName(_fromUtf8("readPassword"))
self.writePassword = QtGui.QLineEdit(publishDeckForm)
self.writePassword.setGeometry(QtCore.QRect(150, 180, 113, 25))
self.writePassword.setObjectName(_fromUtf8("writePassword"))
self.retranslateUi(publishDeckForm)
QtCore.QMetaObject.connectSlotsByName(publishDeckForm)
def retranslateUi(self, publishDeckForm):
publishDeckForm.setWindowTitle(_translate("publishDeckForm", "Publish A Deck", None))
self.label.setText(_translate("publishDeckForm", "Select your Deck:", None))
self.label_2.setText(_translate("publishDeckForm", "Set a Public Name:", None))
self.pushButtonPublishDeck.setText(_translate("publishDeckForm", "Publish Deck", None))
self.pushButtonCancel.setText(_translate("publishDeckForm", "Cancel", None))
self.checkBox.setText(_translate("publishDeckForm", "Users need a password to read cards.", None))
self.checkBox_2.setText(_translate("publishDeckForm", "Users need a password to write cards", None))
self.label_3.setText(_translate("publishDeckForm", "Reading Password:", None))
self.label_4.setText(_translate("publishDeckForm", "Writing Password:", None))
|
gpl-3.0
|
TheNikiz/android_kernel_huawei_msm8909
|
scripts/gcc-wrapper.py
|
580
|
3524
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.
import errno
import re
import os
import sys
import subprocess
# Note that gcc uses unicode, which may depend on the locale. TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.
allowed_warnings = set([
"return_address.c:63",
"kprobes.c:1493",
"rcutree.c:1614",
"af_unix.c:893",
"nl80211.c:58",
"jhash.h:137",
"cmpxchg.h:162",
"ping.c:87",
])
# Capture the name of the object file, so we can remove it if a forbidden
# warning is found.
ofile = None
warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
def interpret_warning(line):
"""Decode the message from gcc. The messages we care about have a filename, and a warning"""
line = line.rstrip('\n')
m = warning_re.match(line)
if m and m.group(2) not in allowed_warnings:
print "error, forbidden warning:", m.group(2)
# If there is a warning, remove any object if it exists.
if ofile:
try:
os.remove(ofile)
except OSError:
pass
sys.exit(1)
def run_gcc():
args = sys.argv[1:]
# Look for -o
try:
i = args.index('-o')
global ofile
ofile = args[i+1]
except (ValueError, IndexError):
pass
compiler = sys.argv[0]
try:
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
for line in proc.stderr:
print line,
interpret_warning(line)
result = proc.wait()
except OSError as e:
result = e.errno
if result == errno.ENOENT:
print args[0] + ':',e.strerror
print 'Is your PATH set correctly?'
else:
print ' '.join(args), str(e)
return result
if __name__ == '__main__':
status = run_gcc()
sys.exit(status)
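# Illustrative (a sketch, not part of the original script): for a line like
#   "net/ipv4/ping.c:87:9: warning: unused variable 'rc'"
# warning_re captures group(2) == "ping.c:87", the key that is checked
# against allowed_warnings above.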
|
gpl-2.0
|
gzc/isystem
|
bigdata/spark/sparktest/ml/kmeans.py
|
1
|
1161
|
# -*- coding: utf-8 -*-
from pyspark.mllib.clustering import KMeans, KMeansModel
from numpy import array
from math import sqrt
from pyspark import SparkContext, SparkConf
conf = SparkConf()
conf.setAppName("deep test")#.setMaster("spark://192.168.1.14:7077")#.setExecutorEnv("CLASSPATH", path)
#conf.set("spark.scheduler.mode", "FAIR")
#conf.set("spark.cores.max",44)
#conf.set("spark.executor.memory",'5g')
sc = SparkContext(conf=conf)
# Load and parse the data
data = sc.textFile("kmeans_data.txt")
parsedData = data.map(lambda line: array([float(x) for x in line.split(' ')]))
# Build the model (cluster the data)
clusters = KMeans.train(parsedData, 2, maxIterations=10,
runs=10, initializationMode="random")
# Evaluate clustering by computing Within Set Sum of Squared Errors
def error(point):
center = clusters.centers[clusters.predict(point)]
return sqrt(sum([x**2 for x in (point - center)]))
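# Added note (not in the original): error() returns the Euclidean distance
# to the closest center, so the reduce below sums plain distances rather
# than squared errors, despite the WSSSE name.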
WSSSE = parsedData.map(lambda point: error(point)).reduce(lambda x, y: x + y)
print("Within Set Sum of Squared Error = " + str(WSSSE))
# Save and load model
clusters.save(sc, "myModelPath")
sameModel = KMeansModel.load(sc, "myModelPath")
|
mit
|
yjpark/dotfiles
|
bin/osx/unity-fix-sln.py
|
1
|
3323
|
#!/usr/bin/env python
import os
import sys
import argparse
import glob
from threading import Timer
from blessings import Terminal
term = Terminal()
test_mode = False
verbose_mode = False
watch_delay = 5
def info(msg):
print term.normal + msg
def verbose(msg):
if verbose_mode:
info(msg)
def error(msg):
print term.red + msg
def format_error(err):
return term.red(err)
def format_path(path):
return term.blue(path)
def format_param(param):
return term.yellow(param)
START_TAG = 'GlobalSection(MonoDevelopProperties) = preSolution'
END_TAG = 'EndGlobalSection'
def fix_sln(path):
if not os.path.isfile(path):
error('File Not Exist: ' + format_path(path))
return
else:
verbose('Processing File: ' + format_path(path))
lines = open(path).readlines()
tmp_path = path + '.tmp'
output = open(tmp_path, 'w')
def write_line(line):
output.write('%s' % line)
def comment_line(line):
if line.startswith('#'):
output.write('%s' % line)
return 0
else:
output.write('# %s' % line)
return 1
changed_lines = 0
start_tag_found = False
end_tag_found = False
for line in lines:
if end_tag_found:
write_line(line)
elif start_tag_found:
changed_lines = changed_lines + comment_line(line)
if line.find(END_TAG) >= 0:
end_tag_found = True
elif line.find(START_TAG) >= 0:
start_tag_found = True
changed_lines = changed_lines + comment_line(line)
else:
write_line(line)
output.close()
    if changed_lines > 0:
        verbose('Changed Line Number: ' + format_param('%s' % changed_lines))
        # honor --test: leave the original file untouched and keep the .tmp
        if test_mode:
            verbose('Test Mode, result left in: ' + format_path(tmp_path))
        else:
            os.system('cp %s %s' % (tmp_path, path))
            os.system('rm %s' % tmp_path)
    else:
        verbose('No Need to Change: ' + format_path(path))
        os.system('rm %s' % tmp_path)
def fix_all():
for f in glob.glob('*.sln'):
fix_sln(f)
def watch_stdin():
while True:
try:
line = sys.stdin.readline()
except KeyboardInterrupt:
break
if not line:
break
path = line.replace('\n', '').strip()
verbose('Delay Processing: ' + format_param('%s Seconds -> ' % watch_delay) + format_path(path))
t = Timer(watch_delay, fix_sln, (path, ))
t.start()
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-t', '--test', action='store_true', help='Test Only, Not Overriding Original Files')
parser.add_argument('-w', '--watch', action='store_true', help='Watch Mode, Working with fsw')
parser.add_argument('-a', '--all', action='store_true', help='Processing All Files in The Current Project')
parser.add_argument('file', nargs='*')
args = parser.parse_args()
global test_mode
test_mode = args.test
global verbose_mode
verbose_mode = args.verbose
if args.watch:
watch_stdin()
elif args.all:
fix_all()
elif args.file:
for path in args.file:
fix_sln(path)
else:
info('Please provide the files to process, or use "--all" to process all files')
sys.exit(1)
if __name__ == '__main__':
main()
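# Illustrative usage (a sketch, not part of the original script):
#   unity-fix-sln.py --all        # fix every *.sln in the current directory
#   fsw . | unity-fix-sln.py -w   # assumed: fsw prints changed paths, which
#                                 # watch_stdin() reads line by line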
|
mit
|
tudyzhb/yichui
|
djangotoolbox/db/basecompiler.py
|
5
|
22890
|
import datetime
import random
from django.conf import settings
from django.db.models.fields import NOT_PROVIDED
from django.db.models.query import QuerySet
from django.db.models.sql import aggregates as sqlaggregates
from django.db.models.sql.compiler import SQLCompiler
from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
from django.db.models.sql.where import AND, OR
from django.db.utils import DatabaseError, IntegrityError
from django.utils.tree import Node
EMULATED_OPS = {
'exact': lambda x, y: y in x if isinstance(x, (list, tuple)) else x == y,
'iexact': lambda x, y: x.lower() == y.lower(),
'startswith': lambda x, y: x.startswith(y[0]),
'istartswith': lambda x, y: x.lower().startswith(y[0].lower()),
'isnull': lambda x, y: x is None if y else x is not None,
'in': lambda x, y: x in y,
'lt': lambda x, y: x < y,
'lte': lambda x, y: x <= y,
'gt': lambda x, y: x > y,
'gte': lambda x, y: x >= y,
}
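# Illustrative (a sketch, not part of the original module): prepared lookup
# arguments arrive as sequences, hence the y[0] above, e.g.
#   EMULATED_OPS['startswith']('foobar', ['foo'])   # -> True
#   EMULATED_OPS['isnull'](None, True)              # -> True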
class NonrelQuery(object):
"""
Base class for nonrel queries.
Compilers build a nonrel query when they want to fetch some data.
They work by first allowing sql.compiler.SQLCompiler to partly build
a sql.Query, constructing a NonrelQuery query on top of it, and then
iterating over its results.
This class provides in-memory filtering and ordering and a
framework for converting SQL constraint tree built by Django to a
"representation" more suitable for most NoSQL databases.
TODO: Replace with FetchCompiler, there are too many query concepts
around, and it isn't a good abstraction for NoSQL databases.
TODO: Nonrel currently uses constraint's tree built by Django for
its SQL back-ends to handle filtering. However, Django
intermingles translating its lookup / filtering abstraction
to a logical formula with some preprocessing for joins and
this results in hacks in nonrel. It would be a better to pull
out SQL-specific parts from the constraints preprocessing.
"""
# ----------------------------------------------
# Public API
# ----------------------------------------------
def __init__(self, compiler, fields):
self.compiler = compiler
self.connection = compiler.connection
self.ops = compiler.connection.ops
self.query = compiler.query # sql.Query
self.fields = fields
self._negated = False
def fetch(self, low_mark=0, high_mark=None):
"""
Returns an iterator over some part of query results.
"""
raise NotImplementedError
def count(self, limit=None):
"""
Returns the number of objects that would be returned, if
this query was executed, up to `limit`.
"""
raise NotImplementedError
def delete(self):
"""
Called by NonrelDeleteCompiler after it builds a delete query.
"""
raise NotImplementedError
def order_by(self, ordering):
"""
Reorders query results or execution order. Called by
NonrelCompilers during query building.
:param ordering: A list with (field, ascending) tuples or a
boolean -- use natural ordering, if any, when
the argument is True and its reverse otherwise
"""
raise NotImplementedError
def add_filter(self, field, lookup_type, negated, value):
"""
Adds a single constraint to the query. Called by add_filters for
each constraint leaf in the WHERE tree built by Django.
:param field: Lookup field (instance of Field); field.column
should be used for database keys
:param lookup_type: Lookup name (e.g. "startswith")
:param negated: Is the leaf negated
:param value: Lookup argument, such as a value to compare with;
already prepared for the database
"""
raise NotImplementedError
def add_filters(self, filters):
"""
Converts a constraint tree (sql.where.WhereNode) created by
Django's SQL query machinery to nonrel style filters, calling
add_filter for each constraint.
This assumes the database doesn't support alternatives of
constraints; you should override this method if it does.
TODO: Simulate both conjunctions and alternatives in general;
let GAE override conjunctions so it doesn't have to split
them into multiple queries.
"""
if filters.negated:
self._negated = not self._negated
if not self._negated and filters.connector != AND:
raise DatabaseError("Only AND filters are supported.")
# Remove unneeded children from the tree.
children = self._get_children(filters.children)
if self._negated and filters.connector != OR and len(children) > 1:
raise DatabaseError("When negating a whole filter subgroup "
"(e.g. a Q object) the subgroup filters must "
"be connected via OR, so the non-relational "
"backend can convert them like this: "
"'not (a OR b) => (not a) AND (not b)'.")
# Recursively call the method for internal tree nodes, add a
# filter for each leaf.
for child in children:
if isinstance(child, Node):
self.add_filters(child)
continue
field, lookup_type, value = self._decode_child(child)
self.add_filter(field, lookup_type, self._negated, value)
if filters.negated:
self._negated = not self._negated
# ----------------------------------------------
# Internal API for reuse by subclasses
# ----------------------------------------------
def _decode_child(self, child):
"""
Produces arguments suitable for add_filter from a WHERE tree
leaf (a tuple).
"""
# TODO: Call get_db_prep_lookup directly, constraint.process
# doesn't do much more.
constraint, lookup_type, annotation, value = child
packed, value = constraint.process(lookup_type, value, self.connection)
alias, column, db_type = packed
field = constraint.field
opts = self.query.model._meta
if alias and alias != opts.db_table:
raise DatabaseError("This database doesn't support JOINs "
"and multi-table inheritance.")
# For parent.child_set queries the field held by the constraint
# is the parent's primary key, while the field the filter
# should consider is the child's foreign key field.
if column != field.column:
assert field.primary_key
field = (f for f in opts.fields if f.column == column).next()
assert field.rel is not None
value = self._normalize_lookup_value(
lookup_type, value, field, annotation)
return field, lookup_type, value
def _normalize_lookup_value(self, lookup_type, value, field, annotation):
"""
Undoes preparations done by `Field.get_db_prep_lookup` not
suitable for nonrel back-ends and passes the lookup argument
through nonrel's `value_for_db`.
TODO: Blank `Field.get_db_prep_lookup` and remove this method.
"""
# Undo Field.get_db_prep_lookup putting most values in a list
# (a subclass may override this, so check if it's a list) and
# losing the (True / False) argument to the "isnull" lookup.
if lookup_type not in ('in', 'range', 'year') and \
isinstance(value, (tuple, list)):
if len(value) > 1:
raise DatabaseError("Filter lookup type was %s; expected the "
"filter argument not to be a list. Only "
"'in'-filters can be used with lists." %
lookup_type)
elif lookup_type == 'isnull':
value = annotation
else:
value = value[0]
# Remove percents added by Field.get_db_prep_lookup (useful
# if one were to use the value in a LIKE expression).
if lookup_type in ('startswith', 'istartswith'):
value = value[:-1]
elif lookup_type in ('endswith', 'iendswith'):
value = value[1:]
elif lookup_type in ('contains', 'icontains'):
value = value[1:-1]
# Prepare the value for a database using the nonrel framework.
return self.ops.value_for_db(value, field, lookup_type)
def _get_children(self, children):
"""
Filters out nodes of the given constraint tree not needed for
nonrel queries; checks that the given constraints are supported.
"""
result = []
for child in children:
if isinstance(child, tuple):
constraint, lookup_type, _, value = child
# When doing a lookup using a QuerySet Django would use
# a subquery, but this won't work for nonrel.
# TODO: Add a supports_subqueries feature and let
# Django evaluate subqueries instead of passing
# them as SQL strings (QueryWrappers) to
# filtering.
if isinstance(value, QuerySet):
raise DatabaseError("Subqueries are not supported (yet).")
# Remove leafs that were automatically added by
# sql.Query.add_filter to handle negations of outer
# joins.
if lookup_type == 'isnull' and constraint.field is None:
continue
result.append(child)
return result
def _matches_filters(self, entity, filters):
"""
Checks if an entity returned by the database satisfies
constraints in a WHERE tree (in-memory filtering).
"""
# Filters without rules match everything.
if not filters.children:
return True
result = filters.connector == AND
for child in filters.children:
# Recursively check a subtree,
if isinstance(child, Node):
submatch = self._matches_filters(entity, child)
# Check constraint leaf, emulating a database condition.
else:
field, lookup_type, lookup_value = self._decode_child(child)
entity_value = entity[field.column]
if entity_value is None:
if isinstance(lookup_value, (datetime.datetime, datetime.date,
datetime.time)):
submatch = lookup_type in ('lt', 'lte')
elif lookup_type in (
'startswith', 'contains', 'endswith', 'iexact',
'istartswith', 'icontains', 'iendswith'):
submatch = False
else:
submatch = EMULATED_OPS[lookup_type](
entity_value, lookup_value)
else:
submatch = EMULATED_OPS[lookup_type](
entity_value, lookup_value)
if filters.connector == OR and submatch:
result = True
break
elif filters.connector == AND and not submatch:
result = False
break
if filters.negated:
return not result
return result
def _order_in_memory(self, lhs, rhs):
for field, ascending in self.compiler._get_ordering():
column = field.column
result = cmp(lhs.get(column), rhs.get(column))
if result != 0:
return result if ascending else -result
return 0
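# A hypothetical minimal back-end (a sketch, not part of djangotoolbox)
# showing how a subclass can lean on the in-memory helpers above. The
# list-of-dicts storage is illustrative only; a real back-end translates
# the constraints in add_filter instead of re-walking self.query.where.
class _ListBackedQuery(NonrelQuery):

    def __init__(self, compiler, fields, entities):
        super(_ListBackedQuery, self).__init__(compiler, fields)
        self._entities = entities  # list of {column: value} dicts

    def add_filter(self, field, lookup_type, negated, value):
        pass  # fetch() filters against self.query.where directly

    def order_by(self, ordering):
        pass  # _order_in_memory reads the compiler's ordering itself

    def fetch(self, low_mark=0, high_mark=None):
        results = [e for e in self._entities
                   if self._matches_filters(e, self.query.where)]
        results.sort(cmp=self._order_in_memory)
        return iter(results[low_mark:high_mark])

    def count(self, limit=None):
        return len(list(self.fetch(0, limit)))

    def delete(self):
        del self._entities[:]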
class NonrelCompiler(SQLCompiler):
"""
Base class for data fetching back-end compilers.
Note that nonrel compilers derive from sql.compiler.SQLCompiler and
thus hold a reference to a sql.Query, not a NonrelQuery.
TODO: Separate FetchCompiler from the abstract NonrelCompiler.
"""
def __init__(self, query, connection, using):
"""
Initializes the underlying SQLCompiler.
"""
super(NonrelCompiler, self).__init__(query, connection, using)
self.ops = self.connection.ops
# ----------------------------------------------
# Public API
# ----------------------------------------------
def results_iter(self):
"""
Returns an iterator over the results from executing query given
to this compiler. Called by QuerySet methods.
"""
fields = self.get_fields()
results = self.build_query(fields).fetch(
self.query.low_mark, self.query.high_mark)
for entity in results:
yield self._make_result(entity, fields)
def has_results(self):
return self.get_count(check_exists=True)
def execute_sql(self, result_type=MULTI):
"""
Handles SQL-like aggregate queries. This class only emulates COUNT
by using the abstract NonrelQuery.count method.
"""
aggregates = self.query.aggregate_select.values()
# Simulate a count().
if aggregates:
assert len(aggregates) == 1
aggregate = aggregates[0]
assert isinstance(aggregate, sqlaggregates.Count)
opts = self.query.get_meta()
assert aggregate.col == '*' or \
aggregate.col == (opts.db_table, opts.pk.column)
count = self.get_count()
if result_type is SINGLE:
return [count]
elif result_type is MULTI:
return [[count]]
raise NotImplementedError("The database backend only supports "
"count() queries.")
# ----------------------------------------------
# Additional NonrelCompiler API
# ----------------------------------------------
def _make_result(self, entity, fields):
"""
Decodes values for the given fields from the database entity.
The entity is assumed to be a dict using field database column
names as keys. Decodes values using `value_from_db` as well as
the standard `convert_values`.
"""
result = []
for field in fields:
value = entity.get(field.column, NOT_PROVIDED)
if value is NOT_PROVIDED:
value = field.get_default()
else:
value = self.ops.value_from_db(value, field)
value = self.query.convert_values(value, field,
self.connection)
if value is None and not field.null:
raise IntegrityError("Non-nullable field %s can't be None!" %
field.name)
result.append(value)
return result
def check_query(self):
"""
Checks if the current query is supported by the database.
In general, we expect queries requiring JOINs (many-to-many
relations, abstract model bases, or model spanning filtering),
using DISTINCT (through `QuerySet.distinct()`, which is not
required in most situations) or using the SQL-specific
`QuerySet.extra()` to not work with nonrel back-ends.
"""
if (len([a for a in self.query.alias_map if
self.query.alias_refcount[a]]) > 1 or
self.query.distinct or self.query.extra or self.query.having):
raise DatabaseError("This query is not supported by the database.")
def get_count(self, check_exists=False):
"""
Counts objects matching the current filters / constraints.
:param check_exists: Only check if any object matches
"""
if check_exists:
high_mark = 1
else:
high_mark = self.query.high_mark
return self.build_query().count(high_mark)
def build_query(self, fields=None):
"""
Checks if the underlying SQL query is supported and prepares
a NonrelQuery to be executed on the database.
"""
self.check_query()
if fields is None:
fields = self.get_fields()
query = self.query_class(self, fields)
query.add_filters(self.query.where)
query.order_by(self._get_ordering())
# This at least satisfies the most basic unit tests.
if settings.DEBUG:
self.connection.queries.append({'sql': repr(query)})
return query
def get_fields(self):
"""
Returns fields which should get loaded from the back-end by the
current query.
"""
# We only set this up here because related_select_fields isn't
# populated until execute_sql() has been called.
if self.query.select_fields:
fields = (self.query.select_fields +
self.query.related_select_fields)
else:
fields = self.query.model._meta.fields
# If the field was deferred, exclude it from being passed
# into `resolve_columns` because it wasn't selected.
only_load = self.deferred_to_columns()
if only_load:
db_table = self.query.model._meta.db_table
only_load = dict((k, v) for k, v in only_load.items()
if v or k == db_table)
if len(only_load.keys()) > 1:
raise DatabaseError("Multi-table inheritance is not "
"supported by non-relational DBs %s." %
repr(only_load))
fields = [f for f in fields if db_table in only_load and
f.column in only_load[db_table]]
query_model = self.query.model
if query_model._meta.proxy:
query_model = query_model._meta.proxy_for_model
for field in fields:
if field.model._meta != query_model._meta:
raise DatabaseError("Multi-table inheritance is not "
"supported by non-relational DBs.")
return fields
def _get_ordering(self):
"""
Returns a list of (field, ascending) tuples that the query
results should be ordered by. If there is no field ordering
defined, returns just the standard_ordering (a boolean, needed
for MongoDB "$natural" ordering).
"""
opts = self.query.get_meta()
if not self.query.default_ordering:
ordering = self.query.order_by
else:
ordering = self.query.order_by or opts.ordering
if not ordering:
return self.query.standard_ordering
field_ordering = []
for order in ordering:
if LOOKUP_SEP in order:
raise DatabaseError("Ordering can't span tables on "
"non-relational backends (%s)." % order)
if order == '?':
raise DatabaseError("Randomized ordering isn't supported by "
"the backend.")
ascending = not order.startswith('-')
if not self.query.standard_ordering:
ascending = not ascending
name = order.lstrip('+-')
if name == 'pk':
name = opts.pk.name
field_ordering.append((opts.get_field(name), ascending))
return field_ordering
class NonrelInsertCompiler(NonrelCompiler):
"""
Base class for all compilers that create new entities or objects
in the database. It has to define an execute_sql method because it
is used in place of an SQLInsertCompiler.
TODO: Analyze if it's always true that when field is None we should
use the PK from self.query (check if the column assertion
below ever fails).
"""
def execute_sql(self, return_id=False):
to_insert = []
pk_field = self.query.get_meta().pk
for obj in self.query.objs:
field_values = {}
for field in self.query.fields:
value = field.get_db_prep_save(
getattr(obj, field.attname) if self.query.raw else field.pre_save(obj, obj._state.adding),
connection=self.connection
)
if value is None and not field.null and not field.primary_key:
raise IntegrityError("You can't set %s (a non-nullable "
"field) to None!" % field.name)
# Prepare value for database, note that query.values have
# already passed through get_db_prep_save.
value = self.ops.value_for_db(value, field)
field_values[field.column] = value
to_insert.append(field_values)
key = self.insert(to_insert, return_id=return_id)
# Pass the key value through normal database deconversion.
return self.ops.convert_values(self.ops.value_from_db(key, pk_field), pk_field)
def insert(self, values, return_id):
"""
Creates a new entity to represent a model.
Note that the returned key will go through the same database
deconversions that every value coming from the database does
(`convert_values` and `value_from_db`).
:param values: A list of dicts mapping database column names to
values already prepared for the database (one
dict per model instance)
:param return_id: Whether to return the id or key of the newly
created entity
"""
raise NotImplementedError
class NonrelUpdateCompiler(NonrelCompiler):
def execute_sql(self, result_type):
values = []
for field, _, value in self.query.values:
if hasattr(value, 'prepare_database_save'):
value = value.prepare_database_save(field)
else:
value = field.get_db_prep_save(value,
connection=self.connection)
value = self.ops.value_for_db(value, field)
values.append((field, value))
return self.update(values)
def update(self, values):
"""
Changes an entity that already exists in the database.
:param values: A list of (field, new-value) pairs
"""
raise NotImplementedError
class NonrelDeleteCompiler(NonrelCompiler):
def execute_sql(self, result_type=MULTI):
self.build_query([self.query.get_meta().pk]).delete()
|
bsd-3-clause
|
proxysh/Safejumper-for-Desktop
|
buildmac/Resources/env/lib/python2.7/site-packages/pip/_vendor/retrying.py
|
934
|
9972
|
## Copyright 2013-2014 Ray Holder
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import random
from pip._vendor import six
import sys
import time
import traceback
# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
MAX_WAIT = 1073741823
def retry(*dargs, **dkw):
"""
Decorator function that instantiates the Retrying object
@param *dargs: positional arguments passed to Retrying object
@param **dkw: keyword arguments passed to the Retrying object
"""
# support both @retry and @retry() as valid syntax
if len(dargs) == 1 and callable(dargs[0]):
def wrap_simple(f):
@six.wraps(f)
def wrapped_f(*args, **kw):
return Retrying().call(f, *args, **kw)
return wrapped_f
return wrap_simple(dargs[0])
else:
def wrap(f):
@six.wraps(f)
def wrapped_f(*args, **kw):
return Retrying(*dargs, **dkw).call(f, *args, **kw)
return wrapped_f
return wrap
class Retrying(object):
def __init__(self,
stop=None, wait=None,
stop_max_attempt_number=None,
stop_max_delay=None,
wait_fixed=None,
wait_random_min=None, wait_random_max=None,
wait_incrementing_start=None, wait_incrementing_increment=None,
wait_exponential_multiplier=None, wait_exponential_max=None,
retry_on_exception=None,
retry_on_result=None,
wrap_exception=False,
stop_func=None,
wait_func=None,
wait_jitter_max=None):
self._stop_max_attempt_number = 5 if stop_max_attempt_number is None else stop_max_attempt_number
self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay
self._wait_fixed = 1000 if wait_fixed is None else wait_fixed
self._wait_random_min = 0 if wait_random_min is None else wait_random_min
self._wait_random_max = 1000 if wait_random_max is None else wait_random_max
self._wait_incrementing_start = 0 if wait_incrementing_start is None else wait_incrementing_start
self._wait_incrementing_increment = 100 if wait_incrementing_increment is None else wait_incrementing_increment
self._wait_exponential_multiplier = 1 if wait_exponential_multiplier is None else wait_exponential_multiplier
self._wait_exponential_max = MAX_WAIT if wait_exponential_max is None else wait_exponential_max
self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max
# TODO add chaining of stop behaviors
# stop behavior
stop_funcs = []
if stop_max_attempt_number is not None:
stop_funcs.append(self.stop_after_attempt)
if stop_max_delay is not None:
stop_funcs.append(self.stop_after_delay)
if stop_func is not None:
self.stop = stop_func
elif stop is None:
self.stop = lambda attempts, delay: any(f(attempts, delay) for f in stop_funcs)
else:
self.stop = getattr(self, stop)
# TODO add chaining of wait behaviors
# wait behavior
wait_funcs = [lambda *args, **kwargs: 0]
if wait_fixed is not None:
wait_funcs.append(self.fixed_sleep)
if wait_random_min is not None or wait_random_max is not None:
wait_funcs.append(self.random_sleep)
if wait_incrementing_start is not None or wait_incrementing_increment is not None:
wait_funcs.append(self.incrementing_sleep)
if wait_exponential_multiplier is not None or wait_exponential_max is not None:
wait_funcs.append(self.exponential_sleep)
if wait_func is not None:
self.wait = wait_func
elif wait is None:
self.wait = lambda attempts, delay: max(f(attempts, delay) for f in wait_funcs)
else:
self.wait = getattr(self, wait)
# retry on exception filter
if retry_on_exception is None:
self._retry_on_exception = self.always_reject
else:
self._retry_on_exception = retry_on_exception
# TODO simplify retrying by Exception types
# retry on result filter
if retry_on_result is None:
self._retry_on_result = self.never_reject
else:
self._retry_on_result = retry_on_result
self._wrap_exception = wrap_exception
def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Stop after the previous attempt >= stop_max_attempt_number."""
return previous_attempt_number >= self._stop_max_attempt_number
def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Stop after the time from the first attempt >= stop_max_delay."""
return delay_since_first_attempt_ms >= self._stop_max_delay
def no_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Don't sleep at all before retrying."""
return 0
def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Sleep a fixed amount of time between each retry."""
return self._wait_fixed
def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""Sleep a random amount of time between wait_random_min and wait_random_max"""
return random.randint(self._wait_random_min, self._wait_random_max)
def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
"""
Sleep an incremental amount of time after each attempt, starting at
wait_incrementing_start and incrementing by wait_incrementing_increment
"""
result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1))
if result < 0:
result = 0
return result
def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
exp = 2 ** previous_attempt_number
result = self._wait_exponential_multiplier * exp
if result > self._wait_exponential_max:
result = self._wait_exponential_max
if result < 0:
result = 0
return result
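    # For example, with wait_exponential_multiplier=1000 (values are in
    # milliseconds) the computed sleeps are 2s, 4s, 8s, ... per attempt,
    # capped at wait_exponential_max and floored at 0.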
def never_reject(self, result):
return False
def always_reject(self, result):
return True
def should_reject(self, attempt):
reject = False
if attempt.has_exception:
reject |= self._retry_on_exception(attempt.value[1])
else:
reject |= self._retry_on_result(attempt.value)
return reject
def call(self, fn, *args, **kwargs):
start_time = int(round(time.time() * 1000))
attempt_number = 1
while True:
try:
attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
except:
tb = sys.exc_info()
attempt = Attempt(tb, attempt_number, True)
if not self.should_reject(attempt):
return attempt.get(self._wrap_exception)
delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
if self.stop(attempt_number, delay_since_first_attempt_ms):
if not self._wrap_exception and attempt.has_exception:
# get() on an attempt with an exception should cause it to be raised, but raise just in case
raise attempt.get()
else:
raise RetryError(attempt)
else:
sleep = self.wait(attempt_number, delay_since_first_attempt_ms)
if self._wait_jitter_max:
jitter = random.random() * self._wait_jitter_max
sleep = sleep + max(0, jitter)
time.sleep(sleep / 1000.0)
attempt_number += 1
class Attempt(object):
"""
An Attempt encapsulates a call to a target function that may end as a
normal return value from the function or an Exception depending on what
occurred during the execution.
"""
def __init__(self, value, attempt_number, has_exception):
self.value = value
self.attempt_number = attempt_number
self.has_exception = has_exception
def get(self, wrap_exception=False):
"""
Return the return value of this Attempt instance or raise an Exception.
If wrap_exception is true, this Attempt is wrapped inside of a
RetryError before being raised.
"""
if self.has_exception:
if wrap_exception:
raise RetryError(self)
else:
six.reraise(self.value[0], self.value[1], self.value[2])
else:
return self.value
def __repr__(self):
if self.has_exception:
return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, "".join(traceback.format_tb(self.value[2])))
else:
return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value)
class RetryError(Exception):
"""
A RetryError encapsulates the last Attempt instance right before giving up.
"""
def __init__(self, last_attempt):
self.last_attempt = last_attempt
def __str__(self):
return "RetryError[{0}]".format(self.last_attempt)
|
gpl-2.0
|
intfrr/laikaboss
|
laikaboss/modules/explode_ole.py
|
17
|
3204
|
# Copyright 2015 Lockheed Martin Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import olefile
import StringIO
from laikaboss.objectmodel import ModuleObject, ExternalVars, QuitScanException, \
GlobalScanTimeoutError, GlobalModuleTimeoutError
from laikaboss.util import log_module
from laikaboss.si_module import SI_MODULE
class EXPLODE_OLE(SI_MODULE):
def __init__(self,):
self.module_name = "EXPLODE_OLE"
self.global_search = "GLOBAL_SEARCH"
def _run(self, scanObject, result, depth, args):
moduleResult = []
minFileSize = 0 #Explode everything!
useUnvalidatedFilenames = 0
if 'minFileSize' in args:
try:
minFileSize = int(args['minFileSize'])
except (QuitScanException, GlobalScanTimeoutError, GlobalModuleTimeoutError):
raise
except:
pass
if 'useUnvalidatedFilenames' in args:
try:
useUnvalidatedFilenames = int(args['useUnvalidatedFilenames'])
except (QuitScanException, GlobalScanTimeoutError, GlobalModuleTimeoutError):
raise
except:
pass
file = StringIO.StringIO()
file.write(scanObject.buffer)
file.flush()
file.seek(0)
ole = olefile.OleFileIO(file)
lstStreams = ole.listdir()
numStreams = 0
for stream in lstStreams:
try:
if ole.get_size(stream) >= minFileSize:
numStreams += 1
streamF = ole.openstream(stream)
childBuffer = streamF.read()
if childBuffer:
filename = "e_ole_stream_"+str(numStreams)
try:
u = unicode(str(stream), "utf-8")
filename = u.encode("utf-8")
except (QuitScanException, GlobalScanTimeoutError, GlobalModuleTimeoutError):
raise
except:
pass #keep ole_stream_number as filename
moduleResult.append(ModuleObject(buffer=childBuffer,
externalVars=ExternalVars(filename=filename)))
except (QuitScanException, GlobalScanTimeoutError, GlobalModuleTimeoutError):
raise
except:
log_module("MSG", self.module_name, 0, scanObject, result, "ERROR EXTRACTING STREAM: "+str(stream))
ole.close()
file.close()
return moduleResult
|
apache-2.0
|
quantrocket-llc/quantrocket-client
|
quantrocket/db.py
|
1
|
11911
|
# Copyright 2017 QuantRocket - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import getpass
from quantrocket.houston import houston
from quantrocket.exceptions import DataInsertionError
from quantrocket.cli.utils.output import json_to_cli
def list_databases(services=None, codes=None, detail=False, expand=False):
"""
List databases.
Parameters
----------
services : str, optional
limit to these services
codes : list of str, optional
limit to these codes
detail : bool
return database statistics (default is to return a
flat list of database names)
expand : bool
expand sharded databases to include individual shards
(default is to list sharded databases as a single database)
Returns
-------
dict
dict of lists of databases (one key for PostgreSQL databases and one for
SQLite databases)
Examples
--------
Load database details in a pandas DataFrame:
>>> from quantrocket.db import list_databases
>>> import itertools
>>> import pandas as pd
>>> databases = list_databases(detail=True)
>>> databases = pd.DataFrame.from_records(itertools.chain(databases["sqlite"], databases["postgres"]))
"""
params = {}
if services:
params["services"] = services
if codes:
params["codes"] = codes
if detail:
params["detail"] = detail
if expand:
params["expand"] = expand
response = houston.get("/db/databases", params=params)
houston.raise_for_status_with_json(response)
return response.json()
def _cli_list_databases(*args, **kwargs):
return json_to_cli(list_databases, *args, **kwargs)
def get_s3_config():
"""
Return the current S3 configuration, if any.
See http://qrok.it/h/dbs3 to learn more.
Returns
-------
dict
configuration details
"""
response = houston.get("/db/s3config")
houston.raise_for_status_with_json(response)
# It's possible to get a 204 empty response
if not response.content:
return {}
return response.json()
def set_s3_config(access_key_id=None, secret_access_key=None, bucket=None, region=None):
"""
Set AWS S3 configuration for pushing and pulling databases to and from
S3.
See http://qrok.it/h/dbs3 to learn more.
Credentials are encrypted at rest and never leave your deployment.
Parameters
----------
access_key_id : str, optional
AWS access key ID
secret_access_key : str, optional
AWS secret access key (if omitted and access_key_id is provided,
will be prompted for secret_access_key)
bucket : str, optional
the S3 bucket name to push to/pull from
region : str, optional
the AWS region in which to create the bucket (default us-east-1).
Ignored if the bucket already exists.
Returns
-------
dict
status message
"""
if access_key_id and not secret_access_key:
secret_access_key = getpass.getpass(prompt="Enter AWS Secret Access Key: ")
data = {}
if access_key_id:
data["access_key_id"] = access_key_id
if secret_access_key:
data["secret_access_key"] = secret_access_key
if bucket:
data["bucket"] = bucket
if region:
data["region"] = region
response = houston.put("/db/s3config", data=data)
houston.raise_for_status_with_json(response)
return response.json()
def _cli_get_or_set_s3_config(access_key_id=None, secret_access_key=None,
bucket=None, region=None, *args, **kwargs):
if access_key_id or secret_access_key or bucket or region:
return json_to_cli(set_s3_config, access_key_id, secret_access_key, bucket, region, *args, **kwargs)
else:
return json_to_cli(get_s3_config, *args, **kwargs)
def s3_push_databases(services=None, codes=None):
"""
Push database(s) to Amazon S3.
See http://qrok.it/h/dbs3 to learn more.
Parameters
----------
services : list of str, optional
limit to these services
codes : list of str, optional
limit to these codes
Returns
-------
json
status message
"""
params = {}
if services:
params["services"] = services
if codes:
params["codes"] = codes
response = houston.put("/db/s3", params=params)
houston.raise_for_status_with_json(response)
return response.json()
def _cli_s3_push_databases(*args, **kwargs):
return json_to_cli(s3_push_databases, *args, **kwargs)
def s3_pull_databases(services=None, codes=None, force=False):
"""
Pull database(s) from Amazon S3.
See http://qrok.it/h/dbs3 to learn more.
Parameters
----------
services : list of str, optional
limit to these services
codes : list of str, optional
limit to these codes
force: bool
overwrite existing database if one exists (default is to
fail if one exists)
Returns
-------
json
status message
"""
params = {}
if services:
params["services"] = services
if codes:
params["codes"] = codes
if force:
params["force"] = force
response = houston.get("/db/s3", params=params)
houston.raise_for_status_with_json(response)
return response.json()
def _cli_s3_pull_databases(*args, **kwargs):
return json_to_cli(s3_pull_databases, *args, **kwargs)
def optimize_databases(services=None, codes=None):
"""
Optimize databases to improve performance.
This runs the 'VACUUM' command, which defragments the database and
reclaims disk space.
Parameters
----------
services : list of str, optional
limit to these services
codes : list of str, optional
limit to these codes
Returns
-------
json
status message
"""
params = {}
if codes:
params["codes"] = codes
if services:
params["services"] = services
response = houston.post("/db/optimizations", params=params)
houston.raise_for_status_with_json(response)
return response.json()
def _cli_optimize_databases(*args, **kwargs):
return json_to_cli(optimize_databases, *args, **kwargs)
def connect_sqlite(db_path):
"""
Returns a connection to a SQLite database.
Parameters
----------
db_path : str, required
full path to a SQLite database
Returns
-------
sqlalchemy.engine.Engine
database connection
"""
try:
from sqlalchemy import create_engine
except ImportError:
raise ValueError(
"this function requires sqlalchemy and must be run in a QuantRocket container")
conn = create_engine("sqlite:///{0}".format(db_path),
connect_args={"isolation_level": None})
# Set some speed optimizations
# Hand off writes to the OS and don't wait
conn.execute("PRAGMA synchronous = 0")
# Each page is ~1K; allow ~50MB
conn.execute("PRAGMA cache_size = 50000")
# Store temp tables in memory
conn.execute("PRAGMA temp_store = 2")
# Wait up to 10 seconds rather than instantly failing on SQLITE_BUSY
conn.execute("PRAGMA busy_timeout = 10000")
return conn
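# A hypothetical usage sketch (the database path and table name are
# illustrative, not a documented QuantRocket location):
#
#   import pandas as pd
#   conn = connect_sqlite("/tmp/quantrocket.history.demo.sqlite")
#   prices = pd.read_sql("SELECT * FROM Price", conn)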
def _insert_into(df, table_name, conn, on_conflict):
import time
import subprocess
import os
temp_table_name = "temp_{0}".format(str(time.time()).replace(".", ""))
# Get the db path from the engine object
db_path = conn.url.database
temp_file_name = "/tmp/sqlite_{}.csv".format(temp_table_name)
# Cast booleans to ints or they will load into SQLite as strings
df_bools = df.select_dtypes(['bool'])
if not df_bools.empty:
df.loc[:, df_bools.columns] = df_bools.astype(int)
df.to_csv(temp_file_name, index=False)
# Close connection to avoid Database Is Locked
conn.dispose()
from_cols = [f"NULLIF({col}, '')" for col in df.columns]
queries = """
PRAGMA busy_timeout = 10000;
.bail on
.mode csv
.import {tempfile} {temptable}
INSERT OR {on_conflict} INTO {table} ({into_cols}) SELECT {from_cols} FROM {temptable};
DROP TABLE {temptable};
""".format(
table=table_name,
on_conflict=on_conflict,
into_cols=",".join(df.columns),
from_cols=",".join(from_cols),
tempfile=temp_file_name,
temptable=temp_table_name)
try:
subprocess.check_output(
["sqlite3", db_path],
input=bytes(queries.encode("utf-8")),
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
raise DataInsertionError(e.output)
os.remove(temp_file_name)
def insert_or_fail(df, table_name, conn):
"""
Insert a DataFrame into a SQLite database.
In the case of a duplicate record insertion, the function
will fail.
Parameters
----------
df : DataFrame, required
the DataFrame to insert. All DataFrame columns must
exist in the destination table. The DataFrame index
will not be inserted.
table_name : str, required
the name of the table to insert the DataFrame into.
The table must already exist in the database.
conn : sqlalchemy.engine.Engine, required
a connection object for the SQLite database
Returns
-------
None
Raises
------
quantrocket.exceptions.DataInsertionError
catch-all exception class for errors that occur when writing to the
SQLite database
"""
_insert_into(df, table_name, conn, "FAIL")
def insert_or_replace(df, table_name, conn):
"""
Insert a DataFrame into a SQLite database.
In the case of a duplicate record insertion, the incoming
record will replace the existing record.
Parameters
----------
df : DataFrame, required
the DataFrame to insert. All DataFrame columns must
exist in the destination table. The DataFrame index
will not be inserted.
table_name : str, required
the name of the table to insert the DataFrame into.
The table must already exist in the database.
conn : sqlalchemy.engine.Engine, required
a connection object for the SQLite database
Returns
-------
None
Raises
------
quantrocket.exceptions.DataInsertionError
catch-all exception class for errors that occur when writing to the
SQLite database
"""
_insert_into(df, table_name, conn, "REPLACE")
def insert_or_ignore(df, table_name, conn):
"""
Insert a DataFrame into a SQLite database.
In the case of a duplicate record insertion, the incoming
record will be ignored.
Parameters
----------
df : DataFrame, required
the DataFrame to insert. All DataFrame columns must
exist in the destination table. The DataFrame index
will not be inserted.
table_name : str, required
the name of the table to insert the DataFrame into.
The table must already exist in the database.
conn : sqlalchemy.engine.Engine, required
a connection object for the SQLite database
Returns
-------
None
Raises
------
quantrocket.exceptions.DataInsertionError
catch-all exception class for errors that occur when writing to the
SQLite database
"""
_insert_into(df, table_name, conn, "IGNORE")
|
apache-2.0
|
Jgarcia-IAS/localizacion
|
openerp/addons-extra/odoo-pruebas/odoo-server/addons/hr_attendance/wizard/__init__.py
|
375
|
1073
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_attendance_error
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
waprin/continuous-deployment-demo
|
lib/werkzeug/debug/console.py
|
280
|
5557
|
# -*- coding: utf-8 -*-
"""
werkzeug.debug.console
~~~~~~~~~~~~~~~~~~~~~~
Interactive console support.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD.
"""
import sys
import code
from types import CodeType
from werkzeug.utils import escape
from werkzeug.local import Local
from werkzeug.debug.repr import debug_repr, dump, helper
_local = Local()
class HTMLStringO(object):
"""A StringO version that HTML escapes on write."""
def __init__(self):
self._buffer = []
def isatty(self):
return False
def close(self):
pass
def flush(self):
pass
def seek(self, n, mode=0):
pass
def readline(self):
if len(self._buffer) == 0:
return ''
ret = self._buffer[0]
del self._buffer[0]
return ret
def reset(self):
val = ''.join(self._buffer)
del self._buffer[:]
return val
def _write(self, x):
if isinstance(x, bytes):
x = x.decode('utf-8', 'replace')
self._buffer.append(x)
def write(self, x):
self._write(escape(x))
def writelines(self, x):
self._write(escape(''.join(x)))
class ThreadedStream(object):
"""Thread-local wrapper for sys.stdout for the interactive console."""
def push():
if not isinstance(sys.stdout, ThreadedStream):
sys.stdout = ThreadedStream()
_local.stream = HTMLStringO()
push = staticmethod(push)
def fetch():
try:
stream = _local.stream
except AttributeError:
return ''
return stream.reset()
fetch = staticmethod(fetch)
def displayhook(obj):
try:
stream = _local.stream
except AttributeError:
return _displayhook(obj)
# stream._write bypasses escaping as debug_repr is
# already generating HTML for us.
if obj is not None:
_local._current_ipy.locals['_'] = obj
stream._write(debug_repr(obj))
displayhook = staticmethod(displayhook)
def __setattr__(self, name, value):
raise AttributeError('read only attribute %s' % name)
def __dir__(self):
return dir(sys.__stdout__)
def __getattribute__(self, name):
if name == '__members__':
return dir(sys.__stdout__)
try:
stream = _local.stream
except AttributeError:
stream = sys.__stdout__
return getattr(stream, name)
def __repr__(self):
return repr(sys.__stdout__)
# add the threaded stream as display hook
_displayhook = sys.displayhook
sys.displayhook = ThreadedStream.displayhook
class _ConsoleLoader(object):
def __init__(self):
self._storage = {}
def register(self, code, source):
self._storage[id(code)] = source
# register code objects of wrapped functions too.
for var in code.co_consts:
if isinstance(var, CodeType):
self._storage[id(var)] = source
def get_source_by_code(self, code):
try:
return self._storage[id(code)]
except KeyError:
pass
def _wrap_compiler(console):
compile = console.compile
def func(source, filename, symbol):
code = compile(source, filename, symbol)
console.loader.register(code, source)
return code
console.compile = func
class _InteractiveConsole(code.InteractiveInterpreter):
def __init__(self, globals, locals):
code.InteractiveInterpreter.__init__(self, locals)
self.globals = dict(globals)
self.globals['dump'] = dump
self.globals['help'] = helper
self.globals['__loader__'] = self.loader = _ConsoleLoader()
self.more = False
self.buffer = []
_wrap_compiler(self)
def runsource(self, source):
source = source.rstrip() + '\n'
ThreadedStream.push()
prompt = self.more and '... ' or '>>> '
try:
source_to_eval = ''.join(self.buffer + [source])
if code.InteractiveInterpreter.runsource(self,
source_to_eval, '<debugger>', 'single'):
self.more = True
self.buffer.append(source)
else:
self.more = False
del self.buffer[:]
finally:
output = ThreadedStream.fetch()
return prompt + source + output
def runcode(self, code):
try:
eval(code, self.globals, self.locals)
except Exception:
self.showtraceback()
def showtraceback(self):
from werkzeug.debug.tbtools import get_current_traceback
tb = get_current_traceback(skip=1)
sys.stdout._write(tb.render_summary())
def showsyntaxerror(self, filename=None):
from werkzeug.debug.tbtools import get_current_traceback
tb = get_current_traceback(skip=4)
sys.stdout._write(tb.render_summary())
def write(self, data):
sys.stdout.write(data)
class Console(object):
"""An interactive console."""
def __init__(self, globals=None, locals=None):
if locals is None:
locals = {}
if globals is None:
globals = {}
self._ipy = _InteractiveConsole(globals, locals)
def eval(self, code):
_local._current_ipy = self._ipy
old_sys_stdout = sys.stdout
try:
return self._ipy.runsource(code)
finally:
sys.stdout = old_sys_stdout
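# An illustrative sketch (not part of werkzeug): evaluating a line returns
# the echoed prompt and source plus the HTML transcript captured through
# ThreadedStream / HTMLStringO.
if __name__ == '__main__':
    console = Console({'answer': 42})
    print(console.eval('answer * 2'))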
|
apache-2.0
|
batxes/4c2vhic
|
Six_zebra_models/Six_zebra_models_final_output_0.1_-0.1_13000/Six_zebra_models48389.py
|
2
|
13921
|
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((2576.9, -2154.2, 10182.1), (0.7, 0.7, 0.7), 507.685)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((2409.47, -2893.23, 9950.54), (0.7, 0.7, 0.7), 479.978)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((3467.25, -1262.86, 10103.1), (0.7, 0.7, 0.7), 681.834)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((4725.17, 707.828, 10287.8), (0.7, 0.7, 0.7), 522.532)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((5109.56, 1350.38, 10294), (0, 1, 0), 751.925)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((6510.12, -6.43347, 11160.8), (0.7, 0.7, 0.7), 437.001)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((7618, 1518.65, 10784.9), (0.7, 0.7, 0.7), 710.767)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((9081.16, 1719.3, 11689.1), (0.7, 0.7, 0.7), 762.077)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((9884.71, 3034.06, 12014.7), (0.7, 0.7, 0.7), 726.799)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((11097.7, 4399.26, 11840.9), (0.7, 0.7, 0.7), 885.508)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((10346.4, 5810.31, 12502.9), (0.7, 0.7, 0.7), 778.489)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((11128.2, 6350.95, 14395.7), (0.7, 0.7, 0.7), 790.333)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((11959, 6784.63, 16236.9), (0.7, 0.7, 0.7), 707.721)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((11027.2, 5467.85, 16117.4), (0.7, 0.7, 0.7), 651.166)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((12159.6, 6151.27, 15072.6), (0.7, 0.7, 0.7), 708.61)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((12388.9, 6167.66, 13491.5), (0.7, 0.7, 0.7), 490.595)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((11921.3, 5341.77, 12357.6), (0.7, 0.7, 0.7), 591.565)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((11390.7, 4637.22, 11015.9), (0.7, 0.7, 0.7), 581.287)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((11609.7, 2814.51, 11018.9), (0.7, 0.7, 0.7), 789.529)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((11733.8, 2378.3, 9525.32), (0.7, 0.7, 0.7), 623.587)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((12316.9, 2142.93, 7800.43), (0.7, 0.7, 0.7), 1083.56)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((13586.8, 1811.71, 6687.8), (0.7, 0.7, 0.7), 504.258)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((12406.2, 2601.7, 6879.11), (0.7, 0.7, 0.7), 805.519)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((11314.3, 4351.39, 7385.02), (0.7, 0.7, 0.7), 631.708)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((10947.4, 6415.64, 7878.46), (0.7, 0.7, 0.7), 805.942)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((10836.9, 7446.57, 8087.52), (1, 0.7, 0), 672.697)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((8440.39, 7370.37, 6822.11), (0.7, 0.7, 0.7), 797.863)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((7200.12, 8176.74, 5685.51), (1, 0.7, 0), 735.682)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((7083, 7360.16, 4741.68), (0.7, 0.7, 0.7), 602.14)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((7430.19, 6261.75, 2727.07), (0.7, 0.7, 0.7), 954.796)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((7190.87, 6650.52, 3180.65), (0.7, 0.7, 0.7), 1021.88)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((5930.19, 7147.78, 2972.98), (0.7, 0.7, 0.7), 909.323)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((4139.97, 7191.54, 1570.93), (0.7, 0.7, 0.7), 621.049)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((2904.07, 6849.32, 2279.13), (0.7, 0.7, 0.7), 525.154)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((2087.47, 5795.09, 3014.71), (0.7, 0.7, 0.7), 890.246)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((866.913, 4519.85, 2852.2), (0.7, 0.7, 0.7), 671.216)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((738.614, 2903.05, 2359.65), (0.7, 0.7, 0.7), 662.672)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((2298.81, 2965, 1929.2), (0.7, 0.7, 0.7), 646.682)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((2271.83, 4273.63, 1122.19), (0.7, 0.7, 0.7), 769.945)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((2826.7, 5771.04, 2324.03), (0.7, 0.7, 0.7), 606.92)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((1787.64, 6480.67, 2330.27), (0.7, 0.7, 0.7), 622.571)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((2873.62, 6118.08, 3074.13), (0.7, 0.7, 0.7), 466.865)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((2712.57, 5420.23, 3406.93), (0.7, 0.7, 0.7), 682.933)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((2673.55, 6017.4, 3052.18), (0.7, 0.7, 0.7), 809.326)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((4154.78, 7070.43, 2762.69), (0.7, 0.7, 0.7), 796.72)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((5479.64, 8167.16, 5079.18), (0.7, 0.7, 0.7), 870.026)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((5072.22, 9253.15, 6554.53), (0.7, 0.7, 0.7), 909.577)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((4351.56, 9421.1, 7436.82), (0, 1, 0), 500.536)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((2572.42, 10299.6, 7566.33), (0.7, 0.7, 0.7), 725.276)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((605.589, 12056.7, 7159.53), (0.7, 0.7, 0.7), 570.331)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((316.198, 11368.9, 5628.57), (0.7, 0.7, 0.7), 492.203)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((3091.97, 11312.4, 4707.97), (0, 1, 0), 547.7)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((2834.46, 10576.9, 4839.18), (0.7, 0.7, 0.7), 581.921)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((1538.37, 9466.16, 3980.7), (0.7, 0.7, 0.7), 555.314)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((656.009, 8210.38, 3939.29), (0.7, 0.7, 0.7), 404.219)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((1399.59, 6866.62, 4953.54), (0.7, 0.7, 0.7), 764.234)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
|
gpl-3.0
|
Pike/elmo
|
apps/elmo_commons/tests/test_bleach.py
|
2
|
1492
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""tests for bleach filter"""
from __future__ import absolute_import
from __future__ import unicode_literals
from django.template import Template
from django.template import Context
from elmo.test import TestCase
class BleachFilterTestCase(TestCase):
def test_bleach_safe(self):
template_as_string = """
{% load bleach_filters %}
{{ msg|bleach_safe }}
"""
template = Template(template_as_string)
msg = """
A url first: http://mozilla.org/page?a=b#top
or a link <a href="http://mozilla.com">mozilla.com</a>
nasty stuff: <script>alert('xss')</script>
basic <strong>HTML</strong>
but not so basic: <textarea>
""".strip()
context = Context({'msg': msg})
rendered = template.render(context).strip()
self.assertIn(
'<a href="http://mozilla.org/page?a=b#top" rel="nofollow">'
'http://mozilla.org/page?a=b#top</a>',
rendered)
self.assertIn(
'<a href="http://mozilla.com" rel="nofollow">mozilla.com</a>',
rendered)
        self.assertIn('&lt;script&gt;alert(\'xss\')&lt;/script&gt;', rendered)
        self.assertIn('<strong>HTML</strong>', rendered)
        self.assertIn('&lt;textarea&gt;', rendered)
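    def test_bleach_safe_plain_text(self):
        # A minimal companion sketch, assuming the bleach_safe filter
        # leaves tag-free, URL-free text untouched; the message value
        # here is illustrative only.
        template = Template("""
        {% load bleach_filters %}
        {{ msg|bleach_safe }}
        """)
        context = Context({'msg': 'just some plain text'})
        rendered = template.render(context).strip()
        self.assertIn('just some plain text', rendered)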
|
mpl-2.0
|
xchenum/quantum
|
quantum/plugins/cisco/common/cisco_credentials_v2.py
|
9
|
3007
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Sumit Naiksatam, Cisco Systems, Inc.
import logging as LOG
from quantum.common.utils import find_config_file
from quantum.plugins.cisco.common import cisco_configparser as confp
from quantum.plugins.cisco.common import cisco_constants as const
from quantum.plugins.cisco.common import cisco_exceptions as cexc
from quantum.plugins.cisco.db import network_db_v2 as cdb
LOG.basicConfig(level=LOG.WARN)
LOG.getLogger(const.LOGGER_COMPONENT_NAME)
CREDENTIALS_FILE = find_config_file({'plugin': 'cisco'},
"credentials.ini")
TENANT = const.NETWORK_ADMIN
cp = confp.CiscoConfigParser(CREDENTIALS_FILE)
_creds_dictionary = cp.walk(cp.dummy)
class Store(object):
"""Credential Store"""
@staticmethod
def initialize():
for id in _creds_dictionary.keys():
try:
cdb.add_credential(TENANT, id,
_creds_dictionary[id][const.USERNAME],
_creds_dictionary[id][const.PASSWORD])
except cexc.CredentialAlreadyExists:
# We are quietly ignoring this, since it only happens
# if this class module is loaded more than once, in which
# case, the credentials are already populated
pass
@staticmethod
def put_credential(cred_name, username, password):
"""Set the username and password"""
credential = cdb.add_credential(TENANT, cred_name, username, password)
@staticmethod
def get_username(cred_name):
"""Get the username"""
credential = cdb.get_credential_name(TENANT, cred_name)
return credential[const.CREDENTIAL_USERNAME]
@staticmethod
def get_password(cred_name):
"""Get the password"""
credential = cdb.get_credential_name(TENANT, cred_name)
return credential[const.CREDENTIAL_PASSWORD]
@staticmethod
def get_credential(cred_name):
"""Get the username and password"""
credential = cdb.get_credential_name(TENANT, cred_name)
        return {const.USERNAME: credential[const.CREDENTIAL_USERNAME],
                const.PASSWORD: credential[const.CREDENTIAL_PASSWORD]}
@staticmethod
def delete_credential(cred_name):
"""Delete a credential"""
cdb.remove_credential(TENANT, cred_name)
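# A minimal usage sketch (illustrative only; it assumes a populated
# credentials.ini and a reachable Quantum credential database, and the
# credential name 'nexus-switch' is hypothetical):
#
#   Store.initialize()
#   Store.put_credential('nexus-switch', 'admin', 'secret')
#   username = Store.get_username('nexus-switch')
#   password = Store.get_password('nexus-switch')
#   Store.delete_credential('nexus-switch')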
|
apache-2.0
|
google/earthenterprise
|
earth_enterprise/src/server/wsgi/common/string_utils.py
|
5
|
1113
|
#!/usr/bin/env python2.7
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The string_utils module.
This module contains different string utilities.
"""
# TODO: Currently the SanitizeText() function just removes leading and
# trailing whitespace. Consider adding other sanitizing functionality,
# e.g. HTML-escaping.
def SanitizeText(text):
"""Sanitizes text.
  Function removes leading and trailing whitespace.
Args:
text: input string.
Returns:
sanitized string.
"""
return text.strip(" \t\n\r")
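# Illustrative behavior: SanitizeText("  hello world \t\r\n") returns
# "hello world"; inner whitespace is preserved.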
def main():
pass
if __name__ == "__main__":
main()
|
apache-2.0
|
xychix/gtrcrt
|
sendPacket.py
|
1
|
4229
|
#!/usr/bin/python
# Version 0.1
# This is POC code to prove that we can send out spoofed packets that arrive on the other end while preserving the needed data.
# Ensure a wireshark / tcpdump is listening on the catcher side
# Python and Impacket need to be installed
import socket
import re
import struct
import types
import string
import time
from subprocess import *
import threading
from node import EndNode
from impacket import ImpactDecoder, ImpactPacket
class PingSweep(threading.Thread):
def __init__(self,conf,endnodeList):
threading.Thread.__init__(self)
self.conf = conf
self.endnodeList = endnodeList
def run(self):
conf = self.conf
endnodeList = self.endnodeList
iplist = conf['targets']
for endnodeId in endnodeList:
time.sleep(conf['delay'])
SendICMP(endnodeList[endnodeId].targetIp,endnodeId,0,64)
class TraceEndNode(threading.Thread):
def __init__(self,delay):
threading.Thread.__init__(self)
self.delay = delay
self.endnodes = []
def addEndNode(self,endnode):
self.endnodes.append(endnode)
def run(self):
if len(self.endnodes) > 0:
for endnode in self.endnodes:
for ttl in range(1,(endnode.distance+3)):
time.sleep(self.delay)
hopnr = ttl + (256 * endnode.times_processed)
SendICMP( endnode.targetIp,endnode.endnodeId,hopnr,ttl)
class PingReply:
def __init__(self, packet):
# Use ImpactDecoder to reconstruct the packet hierarchy.
rip = ImpactDecoder.IPDecoder().decode(packet)
# Extract the ICMP packet from its container (the IP packet).
ricmp = rip.child()
self.replyType = ricmp.get_icmp_type()
self.srcIp = rip.get_ip_src()
self.dstIp = rip.get_ip_dst()
self.valid = True
self.recv_ttl = rip.get_ip_ttl()
if ricmp.ICMP_ECHOREPLY == self.replyType:
data = ricmp.get_data_as_string()
self.endnodeId = socket.ntohs(struct.unpack('H',data[4:6])[0])
self.hopNr = socket.ntohs(struct.unpack('H',data[6:8])[0])
elif (ricmp.ICMP_UNREACH == self.replyType) or (ricmp.ICMP_TIMXCEED == self.replyType):
data = ricmp.get_data_as_string()
if len(data) < (36-8):
self.valid = False
return
self.endnodeId = socket.ntohs(struct.unpack('H',data[(32-8):(34-8)])[0])
self.hopNr = socket.ntohs(struct.unpack('H',data[(34-8):(36-8)])[0])
else:
self.valid = False
def SendICMP(dstIP,sessionNr,counter,ttl):
# prepare the IP part
ip = ImpactPacket.IP()
ip.set_ip_dst(dstIP)
#this counter isn't used.
ip.set_ip_id(counter)
ip.set_ip_ttl(ttl)
# prepare the ICMP part
icmp = ImpactPacket.ICMP()
    # the ICMP id carries the session number so it can be recovered
    # from a Destination Unreachable ICMP reply
    icmp.set_icmp_id(sessionNr)
    # the ICMP sequence carries the hop counter so it can be recovered
    # from a Destination Unreachable ICMP reply
    icmp.set_icmp_seq(counter)
    # auto-generate the checksum
icmp.set_icmp_cksum(0)
icmp.auto_checksum = 1
icmp.set_icmp_type(icmp.ICMP_ECHO)
# prepare the payload
# put the target IP and the sequence number in the payload also for later recovery
data = socket.inet_aton(dstIP)+struct.pack('H',socket.htons(sessionNr))+struct.pack('H',socket.htons(counter))
# compose the total packet IP / icmp / payload
icmp.contains(ImpactPacket.Data(data))
ip.contains(icmp)
# Open a raw socket. Special permissions are usually required.
s = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
s.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
# and set it free
return s.sendto(ip.get_packet(), (dstIP, 0))
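# Payload layout sketch (illustrative): the 8-byte payload is
# inet_aton(dstIP) + htons(sessionNr) + htons(counter); PingReply recovers
# the last two via struct.unpack on data[4:6] and data[6:8]. For
# dstIP='10.0.0.1', sessionNr=7, counter=3 the payload bytes are
# '\x0a\x00\x00\x01\x00\x07\x00\x03'.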
def isValidIp(ip):
pattern = r"\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b"
if re.match(pattern, ip):
return True
else:
return False
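# Illustrative: isValidIp('10.0.0.1') returns True while
# isValidIp('999.1.1.1') returns False.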
def resolveIp(ip):
name = False
if isValidIp(ip):
try:
name = socket.gethostbyaddr(ip)[0]
except:
pass
if name: return name
return ip
|
gpl-2.0
|
seanli9jan/tensorflow
|
tensorflow/python/profiler/profiler_test.py
|
63
|
8452
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.profiler import option_builder
# pylint: disable=g-bad-import-order
from tensorflow.python.profiler import model_analyzer
from tensorflow.python.profiler.internal import model_analyzer_testlib as lib
builder = option_builder.ProfileOptionBuilder
class ProfilerTest(test.TestCase):
def testProfileBasic(self):
ops.reset_default_graph()
outfile = os.path.join(test.get_temp_dir(), 'dump')
opts = (builder(builder.trainable_variables_parameter())
.with_file_output(outfile)
.with_accounted_types(['.*'])
.select(['params', 'float_ops', 'micros', 'bytes',
'device', 'op_types', 'occurrence']).build())
# Test the output without run_meta.
sess = session.Session()
r = lib.BuildFullModel()
sess.run(variables.global_variables_initializer())
# Test the output with run_meta.
run_meta = config_pb2.RunMetadata()
_ = sess.run(r,
options=config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE),
run_metadata=run_meta)
profiler = model_analyzer.Profiler(sess.graph)
profiler.add_step(1, run_meta)
profiler.profile_graph(opts)
with gfile.Open(outfile, 'r') as f:
profiler_str = f.read()
model_analyzer.profile(
sess.graph, cmd='graph', run_meta=run_meta, options=opts)
with gfile.Open(outfile, 'r') as f:
pma_str = f.read()
self.assertEqual(pma_str, profiler_str)
profiler.profile_name_scope(opts)
with gfile.Open(outfile, 'r') as f:
profiler_str = f.read()
model_analyzer.profile(
sess.graph, cmd='scope', run_meta=run_meta, options=opts)
with gfile.Open(outfile, 'r') as f:
pma_str = f.read()
self.assertEqual(pma_str, profiler_str)
profiler.profile_python(opts)
with gfile.Open(outfile, 'r') as f:
profiler_str = f.read()
model_analyzer.profile(
sess.graph, cmd='code', run_meta=run_meta, options=opts)
with gfile.Open(outfile, 'r') as f:
pma_str = f.read()
self.assertEqual(pma_str, profiler_str)
profiler.profile_operations(opts)
with gfile.Open(outfile, 'r') as f:
profiler_str = f.read()
model_analyzer.profile(
sess.graph, cmd='op', run_meta=run_meta, options=opts)
with gfile.Open(outfile, 'r') as f:
pma_str = f.read()
self.assertEqual(pma_str, profiler_str)
model_analyzer.profile(
sess.graph, cmd='scope', run_meta=run_meta, options=opts)
with gfile.Open(outfile, 'r') as f:
pma_str = f.read()
self.assertNotEqual(pma_str, profiler_str)
def testMultiStepProfile(self):
ops.reset_default_graph()
opts = builder.time_and_memory(min_bytes=0)
with session.Session() as sess:
r1, r2, r3 = lib.BuildSplitableModel()
sess.run(variables.global_variables_initializer())
profiler = model_analyzer.Profiler(sess.graph)
pb0 = profiler.profile_name_scope(opts)
run_meta = config_pb2.RunMetadata()
_ = sess.run(r1,
options=config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE),
run_metadata=run_meta)
profiler.add_step(1, run_meta)
pb1 = profiler.profile_name_scope(opts)
self.assertNotEqual(lib.SearchTFProfNode(pb1, 'DW'), None)
self.assertEqual(lib.SearchTFProfNode(pb1, 'DW2'), None)
self.assertEqual(lib.SearchTFProfNode(pb1, 'add'), None)
run_meta2 = config_pb2.RunMetadata()
_ = sess.run(r2,
options=config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE),
run_metadata=run_meta2)
profiler.add_step(2, run_meta2)
pb2 = profiler.profile_name_scope(opts)
self.assertNotEqual(lib.SearchTFProfNode(pb2, 'DW'), None)
self.assertNotEqual(lib.SearchTFProfNode(pb2, 'DW2'), None)
self.assertEqual(lib.SearchTFProfNode(pb2, 'add'), None)
run_meta3 = config_pb2.RunMetadata()
_ = sess.run(r3,
options=config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE),
run_metadata=run_meta3)
profiler.add_step(3, run_meta3)
pb3 = profiler.profile_name_scope(opts)
self.assertNotEqual(lib.SearchTFProfNode(pb3, 'DW'), None)
self.assertNotEqual(lib.SearchTFProfNode(pb3, 'DW2'), None)
self.assertNotEqual(lib.SearchTFProfNode(pb3, 'add'), None)
self.assertEqual(lib.SearchTFProfNode(pb0, 'Conv2D'), None)
self.assertGreater(lib.SearchTFProfNode(pb1, 'Conv2D').exec_micros, 0)
self.assertEqual(lib.SearchTFProfNode(pb1, 'Conv2D_1'), None)
self.assertGreater(lib.SearchTFProfNode(pb2, 'Conv2D_1').exec_micros, 0)
self.assertEqual(lib.SearchTFProfNode(pb2, 'add'), None)
self.assertGreater(lib.SearchTFProfNode(pb3, 'add').exec_micros, 0)
advice_pb = profiler.advise(model_analyzer.ALL_ADVICE)
self.assertTrue('AcceleratorUtilizationChecker' in advice_pb.checkers)
self.assertTrue('ExpensiveOperationChecker' in advice_pb.checkers)
self.assertTrue('OperationChecker' in advice_pb.checkers)
checker = advice_pb.checkers['AcceleratorUtilizationChecker']
if test.is_gpu_available():
self.assertGreater(len(checker.reports), 0)
else:
self.assertEqual(len(checker.reports), 0)
checker = advice_pb.checkers['ExpensiveOperationChecker']
self.assertGreater(len(checker.reports), 0)
def testMultipleProfilePerStep(self):
ops.reset_default_graph()
opts = (builder(builder.trainable_variables_parameter())
.with_empty_output()
.with_accounted_types(['.*'])
.select(['micros', 'bytes', 'peak_bytes',
'residual_bytes', 'output_bytes']).build())
r = lib.BuildSmallModel()
sess = session.Session()
profiler = model_analyzer.Profiler(sess.graph)
init_var_run_meta = config_pb2.RunMetadata()
sess.run(variables.global_variables_initializer(),
options=config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE),
run_metadata=init_var_run_meta)
train_run_meta = config_pb2.RunMetadata()
sess.run(r,
options=config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE),
run_metadata=train_run_meta)
profiler.add_step(0, train_run_meta)
ret1 = profiler.profile_name_scope(opts)
n1 = lib.SearchTFProfNode(
ret1, 'DW/Initializer/random_normal/RandomStandardNormal')
# Without the var initialization run_meta, it doesn't have the
# information of var_initialization.
self.assertEqual(n1.exec_micros, 0)
self.assertEqual(n1.requested_bytes, 0)
self.assertEqual(n1.peak_bytes, 0)
self.assertEqual(n1.residual_bytes, 0)
profiler.add_step(0, init_var_run_meta)
ret2 = profiler.profile_name_scope(opts)
n2 = lib.SearchTFProfNode(
ret2, 'DW/Initializer/random_normal/RandomStandardNormal')
# After adding the var initialization run_meta.
self.assertGreater(n2.exec_micros, 0)
self.assertGreater(n2.requested_bytes, 0)
self.assertGreater(n2.peak_bytes, 0)
self.assertGreater(n2.residual_bytes, 0)
if __name__ == '__main__':
test.main()
|
apache-2.0
|
zcbenz/cefode-chromium
|
build/linux/rewrite_dirs.py
|
259
|
2013
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
import sys
import os
import optparse
REWRITE_PREFIX = ['-I',
'-idirafter',
'-imacros',
'-imultilib',
'-include',
'-iprefix',
'-iquote',
'-isystem',
'-L']
def RewritePath(path, opts):
"""Rewrites a path by stripping the prefix and prepending the sysroot."""
sysroot = opts.sysroot
prefix = opts.strip_prefix
if os.path.isabs(path) and not path.startswith(sysroot):
if path.startswith(prefix):
path = path[len(prefix):]
path = path.lstrip('/')
return os.path.join(sysroot, path)
else:
return path
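# Illustrative trace (hypothetical flags --sysroot=/build/sysroot
# --strip-prefix=/usr): RewritePath('/usr/include', opts) strips '/usr',
# drops the leading '/', and returns '/build/sysroot/include'.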
def RewriteLine(line, opts):
"""Rewrites all the paths in recognized options."""
args = line.split()
count = len(args)
i = 0
while i < count:
for prefix in REWRITE_PREFIX:
# The option can be either in the form "-I /path/to/dir" or
# "-I/path/to/dir" so handle both.
if args[i] == prefix:
i += 1
try:
args[i] = RewritePath(args[i], opts)
except IndexError:
sys.stderr.write('Missing argument following %s\n' % prefix)
break
elif args[i].startswith(prefix):
args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
i += 1
return ' '.join(args)
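# Illustrative: with the same hypothetical flags, the line
# "gcc -I/usr/include -L /usr/lib" rewrites to
# "gcc -I/build/sysroot/include -L /build/sysroot/lib".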
def main(argv):
parser = optparse.OptionParser()
parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
opts, args = parser.parse_args(argv[1:])
for line in sys.stdin.readlines():
line = RewriteLine(line.strip(), opts)
print line
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
bsd-3-clause
|
aforalee/keystone
|
keystone/trust/core.py
|
8
|
9651
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Main entry point into the Trust service."""
import abc
from oslo_config import cfg
from oslo_log import log
import six
from six.moves import zip
from keystone.common import dependency
from keystone.common import manager
from keystone import exception
from keystone.i18n import _
from keystone import notifications
CONF = cfg.CONF
LOG = log.getLogger(__name__)
@dependency.requires('identity_api')
@dependency.provider('trust_api')
class Manager(manager.Manager):
"""Default pivot point for the Trust backend.
See :mod:`keystone.common.manager.Manager` for more details on how this
dynamically calls the backend.
"""
driver_namespace = 'keystone.trust'
_TRUST = "OS-TRUST:trust"
def __init__(self):
super(Manager, self).__init__(CONF.trust.driver)
@staticmethod
def _validate_redelegation(redelegated_trust, trust):
# Validate against:
# 0 < redelegation_count <= max_redelegation_count
max_redelegation_count = CONF.trust.max_redelegation_count
redelegation_depth = redelegated_trust.get('redelegation_count', 0)
if not (0 < redelegation_depth <= max_redelegation_count):
raise exception.Forbidden(
_('Remaining redelegation depth of %(redelegation_depth)d'
' out of allowed range of [0..%(max_count)d]') %
{'redelegation_depth': redelegation_depth,
'max_count': max_redelegation_count})
        # remaining_uses must not be set when redelegating a trust
remaining_uses = trust.get('remaining_uses')
if remaining_uses is not None:
raise exception.Forbidden(
_('Field "remaining_uses" is set to %(value)s'
' while it must not be set in order to redelegate a trust'),
value=remaining_uses)
# expiry times
trust_expiry = trust.get('expires_at')
redelegated_expiry = redelegated_trust['expires_at']
if trust_expiry:
# redelegated trust is from backend and has no tzinfo
if redelegated_expiry < trust_expiry.replace(tzinfo=None):
raise exception.Forbidden(
_('Requested expiration time is more '
'than redelegated trust can provide'))
else:
trust['expires_at'] = redelegated_expiry
# trust roles is a subset of roles of the redelegated trust
parent_roles = set(role['id']
for role in redelegated_trust['roles'])
if not all(role['id'] in parent_roles for role in trust['roles']):
raise exception.Forbidden(
_('Some of requested roles are not in redelegated trust'))
def get_trust_pedigree(self, trust_id):
trust = self.driver.get_trust(trust_id)
trust_chain = [trust]
if trust and trust.get('redelegated_trust_id'):
trusts = self.driver.list_trusts_for_trustor(
trust['trustor_user_id'])
while trust_chain[-1].get('redelegated_trust_id'):
for t in trusts:
if t['id'] == trust_chain[-1]['redelegated_trust_id']:
trust_chain.append(t)
break
return trust_chain
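    # Illustrative chain shape (assumption: trust C was redelegated from
    # trust B, and B from trust A): get_trust_pedigree(C['id']) returns
    # [C, B, A].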
def get_trust(self, trust_id, deleted=False):
trust = self.driver.get_trust(trust_id, deleted)
if trust and trust.get('redelegated_trust_id') and not deleted:
trust_chain = self.get_trust_pedigree(trust_id)
for parent, child in zip(trust_chain[1:], trust_chain):
self._validate_redelegation(parent, child)
try:
self.identity_api.assert_user_enabled(
parent['trustee_user_id'])
except (AssertionError, exception.NotFound):
raise exception.Forbidden(
_('One of the trust agents is disabled or deleted'))
return trust
def create_trust(self, trust_id, trust, roles, redelegated_trust=None,
initiator=None):
"""Create a new trust.
:returns: a new trust
"""
# Default for initial trust in chain is max_redelegation_count
max_redelegation_count = CONF.trust.max_redelegation_count
requested_count = trust.get('redelegation_count')
redelegatable = (trust.pop('allow_redelegation', False)
and requested_count != 0)
if not redelegatable:
trust['redelegation_count'] = requested_count = 0
remaining_uses = trust.get('remaining_uses')
if remaining_uses is not None and remaining_uses <= 0:
msg = _('remaining_uses must be a positive integer or null.')
raise exception.ValidationError(msg)
else:
# Validate requested redelegation depth
if requested_count and requested_count > max_redelegation_count:
raise exception.Forbidden(
_('Requested redelegation depth of %(requested_count)d '
'is greater than allowed %(max_count)d') %
{'requested_count': requested_count,
'max_count': max_redelegation_count})
# Decline remaining_uses
if trust.get('remaining_uses') is not None:
raise exception.ValidationError(
_('remaining_uses must not be set if redelegation is '
'allowed'))
if redelegated_trust:
trust['redelegated_trust_id'] = redelegated_trust['id']
remaining_count = redelegated_trust['redelegation_count'] - 1
# Validate depth consistency
if (redelegatable and requested_count and
requested_count != remaining_count):
msg = _('Modifying "redelegation_count" upon redelegation is '
'forbidden. Omitting this parameter is advised.')
raise exception.Forbidden(msg)
trust.setdefault('redelegation_count', remaining_count)
# Check entire trust pedigree validity
pedigree = self.get_trust_pedigree(redelegated_trust['id'])
for t in pedigree:
self._validate_redelegation(t, trust)
trust.setdefault('redelegation_count', max_redelegation_count)
ref = self.driver.create_trust(trust_id, trust, roles)
notifications.Audit.created(self._TRUST, trust_id, initiator=initiator)
return ref
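    # Illustrative depth bookkeeping (assuming max_redelegation_count = 3):
    # the initial redelegatable trust gets redelegation_count = 3, and each
    # redelegation decrements it (3 -> 2 -> 1 -> 0); once a trust's count
    # reaches 0, _validate_redelegation rejects further redelegation.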
def delete_trust(self, trust_id, initiator=None):
"""Remove a trust.
:raises: keystone.exception.TrustNotFound
Recursively remove given and redelegated trusts
"""
trust = self.driver.get_trust(trust_id)
trusts = self.driver.list_trusts_for_trustor(
trust['trustor_user_id'])
for t in trusts:
if t.get('redelegated_trust_id') == trust_id:
# recursive call to make sure all notifications are sent
try:
self.delete_trust(t['id'])
except exception.TrustNotFound:
# if trust was deleted by concurrent process
# consistency must not suffer
pass
# end recursion
self.driver.delete_trust(trust_id)
notifications.Audit.deleted(self._TRUST, trust_id, initiator)
@six.add_metaclass(abc.ABCMeta)
class TrustDriverV8(object):
@abc.abstractmethod
def create_trust(self, trust_id, trust, roles):
"""Create a new trust.
:returns: a new trust
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_trust(self, trust_id, deleted=False):
"""Get a trust by the trust id.
:param trust_id: the trust identifier
:type trust_id: string
:param deleted: return the trust even if it is deleted, expired, or
has no consumptions left
:type deleted: bool
"""
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_trusts(self):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_trusts_for_trustee(self, trustee):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def list_trusts_for_trustor(self, trustor):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_trust(self, trust_id):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def consume_use(self, trust_id):
"""Consume one use when a trust was created with a limitation on its
uses, provided there are still uses available.
:raises: keystone.exception.TrustUseLimitReached,
keystone.exception.TrustNotFound
"""
raise exception.NotImplemented() # pragma: no cover
Driver = manager.create_legacy_driver(TrustDriverV8)
|
apache-2.0
|
jaggu303619/asylum
|
openerp/addons/hr_payroll/wizard/hr_payroll_contribution_register_report.py
|
52
|
2123
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from datetime import datetime
from dateutil import relativedelta
from openerp.osv import fields, osv
class payslip_lines_contribution_register(osv.osv_memory):
_name = 'payslip.lines.contribution.register'
_description = 'PaySlip Lines by Contribution Registers'
_columns = {
'date_from': fields.date('Date From', required=True),
'date_to': fields.date('Date To', required=True),
}
_defaults = {
'date_from': lambda *a: time.strftime('%Y-%m-01'),
'date_to': lambda *a: str(datetime.now() + relativedelta.relativedelta(months=+1, day=1, days=-1))[:10],
}
def print_report(self, cr, uid, ids, context=None):
datas = {
'ids': context.get('active_ids', []),
'model': 'hr.contribution.register',
'form': self.read(cr, uid, ids, [], context=context)[0]
}
return {
'type': 'ir.actions.report.xml',
'report_name': 'contribution.register.lines',
'datas': datas,
}
payslip_lines_contribution_register()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
melbit-kevinwessel/ansible-modules-extras
|
cloud/lxc/lxc_container.py
|
28
|
55183
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Kevin Carter <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: lxc_container
short_description: Manage LXC Containers
version_added: 1.8.0
description:
- Management of LXC containers
author: "Kevin Carter (@cloudnull)"
options:
name:
description:
- Name of a container.
required: true
backing_store:
choices:
- dir
- lvm
- loop
- btrfs
- overlayfs
description:
- Backend storage type for the container.
required: false
default: dir
template:
description:
- Name of the template to use within an LXC create.
required: false
default: ubuntu
template_options:
description:
- Template options when building the container.
required: false
config:
description:
- Path to the LXC configuration file.
required: false
default: /etc/lxc/default.conf
lv_name:
description:
- Name of the logical volume, defaults to the container name.
default: $CONTAINER_NAME
required: false
vg_name:
description:
- If Backend store is lvm, specify the name of the volume group.
default: lxc
required: false
thinpool:
description:
- Use LVM thin pool called TP.
required: false
fs_type:
description:
- Create fstype TYPE.
default: ext4
required: false
fs_size:
description:
- File system Size.
default: 5G
required: false
directory:
description:
- Place rootfs directory under DIR.
required: false
zfs_root:
description:
- Create zfs under given zfsroot.
required: false
container_command:
description:
- Run a command within a container.
required: false
lxc_path:
description:
- Place container under PATH
required: false
container_log:
choices:
- true
- false
description:
- Enable a container log for host actions to the container.
default: false
container_log_level:
choices:
- INFO
- ERROR
- DEBUG
description:
- Set the log level for a container where *container_log* was set.
required: false
default: INFO
clone_name:
version_added: "2.0"
description:
- Name of the new cloned server. This is only used when state is
clone.
required: false
default: false
clone_snapshot:
version_added: "2.0"
required: false
choices:
- true
- false
description:
      - Create a snapshot of a container when cloning. This is not supported
by all container storage backends. Enabling this may fail if the
backing store does not support snapshots.
default: false
archive:
choices:
- true
- false
description:
- Create an archive of a container. This will create a tarball of the
running container.
default: false
archive_path:
description:
      - Path used to save the archived container. If the path does not exist
the archive method will attempt to create it.
default: /tmp
archive_compression:
choices:
- gzip
- bzip2
- none
description:
- Type of compression to use when creating an archive of a running
container.
default: gzip
state:
choices:
- started
- stopped
- restarted
- absent
- frozen
description:
- Define the state of a container. If you clone a container using
        `clone_name` the newly cloned container will be created in a stopped state.
The running container will be stopped while the clone operation is
happening and upon completion of the clone the original container
state will be restored.
required: false
default: started
container_config:
description:
- list of 'key=value' options to use when configuring a container.
required: false
requirements:
- 'lxc >= 1.0 # OS package'
- 'python >= 2.6 # OS Package'
- 'lxc-python2 >= 0.1 # PIP Package from https://github.com/lxc/python2-lxc'
notes:
- Containers must have a unique name. If you attempt to create a container
    with a name that already exists in the user's namespace the module will
simply return as "unchanged".
- The "container_command" can be used with any state except "absent". If
used with state "stopped" the container will be "started", the command
executed, and then the container "stopped" again. Likewise if the state
is "stopped" and the container does not exist it will be first created,
"started", the command executed, and then "stopped". If you use a "|"
    in the variable you can use common script formatting within the variable
    itself. The "container_command" option will always execute as BASH.
When using "container_command" a log file is created in the /tmp/ directory
which contains both stdout and stderr of any command executed.
- If "archive" is **true** the system will attempt to create a compressed
tarball of the running container. The "archive" option supports LVM backed
containers and will create a snapshot of the running container when
creating the archive.
- If your distro does not have a package for "python2-lxc", which is a
requirement for this module, it can be installed from source at
"https://github.com/lxc/python2-lxc" or installed via pip using the package
name lxc-python2.
"""
EXAMPLES = """
- name: Create a started container
lxc_container:
name: test-container-started
container_log: true
template: ubuntu
state: started
template_options: --release trusty
- name: Create a stopped container
lxc_container:
name: test-container-stopped
container_log: true
template: ubuntu
state: stopped
template_options: --release trusty
- name: Create a frozen container
lxc_container:
name: test-container-frozen
container_log: true
template: ubuntu
state: frozen
template_options: --release trusty
container_command: |
echo 'hello world.' | tee /opt/started-frozen
# Create filesystem container, configure it, and archive it, and start it.
- name: Create filesystem container
lxc_container:
name: test-container-config
backing_store: dir
container_log: true
template: ubuntu
state: started
archive: true
archive_compression: none
container_config:
- "lxc.aa_profile=unconfined"
- "lxc.cgroup.devices.allow=a *:* rmw"
template_options: --release trusty
# Create an lvm container, run a complex command in it, add additional
# configuration to it, create an archive of it, and finally leave the container
# in a frozen state. The container archive will be compressed using bzip2
- name: Create a frozen lvm container
lxc_container:
name: test-container-lvm
container_log: true
template: ubuntu
state: frozen
backing_store: lvm
template_options: --release trusty
container_command: |
apt-get update
apt-get install -y vim lxc-dev
echo 'hello world.' | tee /opt/started
if [[ -f "/opt/started" ]]; then
echo 'hello world.' | tee /opt/found-started
fi
container_config:
- "lxc.aa_profile=unconfined"
- "lxc.cgroup.devices.allow=a *:* rmw"
archive: true
archive_compression: bzip2
register: lvm_container_info
- name: Debug info on container "test-container-lvm"
debug: var=lvm_container_info
- name: Run a command in a container and ensure it is in a "stopped" state.
lxc_container:
name: test-container-started
state: stopped
container_command: |
echo 'hello world.' | tee /opt/stopped
- name: Run a command in a container and ensure it is in a "frozen" state.
lxc_container:
name: test-container-stopped
state: frozen
container_command: |
echo 'hello world.' | tee /opt/frozen
- name: Start a container
lxc_container:
name: test-container-stopped
state: started
- name: Run a command in a container and then restart it
lxc_container:
name: test-container-started
state: restarted
container_command: |
echo 'hello world.' | tee /opt/restarted
- name: Run a complex command within a "running" container
lxc_container:
name: test-container-started
container_command: |
apt-get update
apt-get install -y curl wget vim apache2
echo 'hello world.' | tee /opt/started
if [[ -f "/opt/started" ]]; then
echo 'hello world.' | tee /opt/found-started
fi
# Create an archive of an existing container, save the archive to a defined
# path and then destroy it.
- name: Archive container
lxc_container:
name: test-container-started
state: absent
archive: true
archive_path: /opt/archives
# Create a container using overlayfs, create an archive of it, create a
# snapshot clone of the container and and finally leave the container
# in a frozen state. The container archive will be compressed using gzip.
- name: Create an overlayfs container archive and clone it
lxc_container:
name: test-container-overlayfs
container_log: true
template: ubuntu
state: started
backing_store: overlayfs
template_options: --release trusty
clone_snapshot: true
clone_name: test-container-overlayfs-clone-snapshot
archive: true
archive_compression: gzip
register: clone_container_info
- name: debug info on container "test-container"
debug: var=clone_container_info
- name: Clone a container using snapshot
lxc_container:
name: test-container-overlayfs-clone-snapshot
backing_store: overlayfs
clone_name: test-container-overlayfs-clone-snapshot2
clone_snapshot: true
- name: Create a new container and clone it
lxc_container:
name: test-container-new-archive
backing_store: dir
clone_name: test-container-new-archive-clone
- name: Archive and clone a container then destroy it
lxc_container:
name: test-container-new-archive
state: absent
clone_name: test-container-new-archive-destroyed-clone
archive: true
archive_compression: gzip
- name: Start a cloned container.
lxc_container:
name: test-container-new-archive-destroyed-clone
state: started
- name: Destroy a container
lxc_container:
name: "{{ item }}"
state: absent
with_items:
- test-container-stopped
- test-container-started
- test-container-frozen
- test-container-lvm
- test-container-config
- test-container-overlayfs
- test-container-overlayfs-clone
- test-container-overlayfs-clone-snapshot
- test-container-overlayfs-clone-snapshot2
- test-container-new-archive
- test-container-new-archive-clone
- test-container-new-archive-destroyed-clone
"""
try:
import lxc
except ImportError:
HAS_LXC = False
else:
HAS_LXC = True
# LXC_COMPRESSION_MAP is a map of available compression types when creating
# an archive of a container.
LXC_COMPRESSION_MAP = {
'gzip': {
'extension': 'tar.tgz',
'argument': '-czf'
},
'bzip2': {
'extension': 'tar.bz2',
'argument': '-cjf'
},
'none': {
'extension': 'tar',
'argument': '-cf'
}
}
# LXC_COMMAND_MAP is a map of variables that are available to a method based
# on the state the container is in.
LXC_COMMAND_MAP = {
'create': {
'variables': {
'config': '--config',
'template': '--template',
'backing_store': '--bdev',
'lxc_path': '--lxcpath',
'lv_name': '--lvname',
'vg_name': '--vgname',
'thinpool': '--thinpool',
'fs_type': '--fstype',
'fs_size': '--fssize',
'directory': '--dir',
'zfs_root': '--zfsroot'
}
},
'clone': {
'variables': {
'backing_store': '--backingstore',
'lxc_path': '--lxcpath',
'fs_size': '--fssize',
'name': '--orig',
'clone_name': '--new'
}
}
}
# LXC_BACKING_STORE is a map of available storage backends and options that
# are incompatible with the given storage backend.
LXC_BACKING_STORE = {
'dir': [
'lv_name', 'vg_name', 'fs_type', 'fs_size', 'thinpool'
],
'lvm': [
'zfs_root'
],
'btrfs': [
'lv_name', 'vg_name', 'thinpool', 'zfs_root'
],
'loop': [
'lv_name', 'vg_name', 'thinpool', 'zfs_root'
],
'overlayfs': [
'lv_name', 'vg_name', 'fs_type', 'fs_size', 'thinpool', 'zfs_root'
]
}
# LXC_LOGGING_LEVELS is a map of available log levels
LXC_LOGGING_LEVELS = {
'INFO': ['info', 'INFO', 'Info'],
'ERROR': ['error', 'ERROR', 'Error'],
'DEBUG': ['debug', 'DEBUG', 'Debug']
}
# LXC_ANSIBLE_STATES is a map of states that contain values of methods used
# when a particular state is evoked.
LXC_ANSIBLE_STATES = {
'started': '_started',
'stopped': '_stopped',
'restarted': '_restarted',
'absent': '_destroyed',
'frozen': '_frozen',
'clone': '_clone'
}
# This is used to attach to a running container and execute commands from
# within the container on the host. This will provide local access to a
# container without using SSH. The template will attempt to work within the
# home directory of the user that was attached to the container and source
# that users environment variables by default.
ATTACH_TEMPLATE = """#!/usr/bin/env bash
pushd "$(getent passwd $(whoami)|cut -f6 -d':')"
if [[ -f ".bashrc" ]];then
source .bashrc
fi
popd
# User defined command
%(container_command)s
"""
def create_script(command):
"""Write out a script onto a target.
This method should be backward compatible with Python 2.4+ when executing
from within the container.
:param command: command to run, this can be a script and can use spacing
with newlines as separation.
:type command: ``str``
"""
import os
import os.path as path
import subprocess
import tempfile
# Ensure that the directory /opt exists.
if not path.isdir('/opt'):
os.mkdir('/opt')
# Create the script.
script_file = path.join('/opt', '.lxc-attach-script')
f = open(script_file, 'wb')
try:
f.write(ATTACH_TEMPLATE % {'container_command': command})
f.flush()
finally:
f.close()
    # Ensure the script is executable (rwxr-xr-x).
    os.chmod(script_file, 0755)
# Get temporary directory.
tempdir = tempfile.gettempdir()
# Output log file.
stdout_file = open(path.join(tempdir, 'lxc-attach-script.log'), 'ab')
# Error log file.
stderr_file = open(path.join(tempdir, 'lxc-attach-script.err'), 'ab')
# Execute the script command.
try:
subprocess.Popen(
[script_file],
stdout=stdout_file,
stderr=stderr_file
).communicate()
finally:
# Close the log files.
stderr_file.close()
stdout_file.close()
# Remove the script file upon completion of execution.
os.remove(script_file)
class LxcContainerManagement(object):
def __init__(self, module):
"""Management of LXC containers via Ansible.
:param module: Processed Ansible Module.
:type module: ``object``
"""
self.module = module
self.state = self.module.params.get('state', None)
self.state_change = False
self.lxc_vg = None
self.container_name = self.module.params['name']
self.container = self.get_container_bind()
self.archive_info = None
self.clone_info = None
def get_container_bind(self):
return lxc.Container(name=self.container_name)
@staticmethod
def _roundup(num):
"""Return a rounded floating point number.
:param num: Number to round up.
:type: ``float``
:returns: Rounded up number.
:rtype: ``int``
"""
num, part = str(num).split('.')
num = int(num)
if int(part) != 0:
num += 1
return num
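    # Illustrative: _roundup(5.2) returns 6 and _roundup(5.0) returns 5.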
@staticmethod
def _container_exists(container_name):
"""Check if a container exists.
:param container_name: Name of the container.
:type: ``str``
        :returns: True if the container is found, otherwise False.
        :rtype: ``bool``
"""
if [i for i in lxc.list_containers() if i == container_name]:
return True
else:
return False
@staticmethod
def _add_variables(variables_dict, build_command):
"""Return a command list with all found options.
:param variables_dict: Pre-parsed optional variables used from a
seed command.
:type variables_dict: ``dict``
:param build_command: Command to run.
:type build_command: ``list``
:returns: list of command options.
:rtype: ``list``
"""
for key, value in variables_dict.items():
build_command.append(
'%s %s' % (key, value)
)
else:
return build_command
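    # Illustrative: _add_variables({'--template': 'ubuntu'}, ['lxc-create'])
    # returns ['lxc-create', '--template ubuntu'].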
def _get_vars(self, variables):
"""Return a dict of all variables as found within the module.
:param variables: Hash of all variables to find.
:type variables: ``dict``
"""
# Remove incompatible storage backend options.
variables = variables.copy()
for v in LXC_BACKING_STORE[self.module.params['backing_store']]:
variables.pop(v, None)
return_dict = dict()
for k, v in variables.items():
_var = self.module.params.get(k)
if not [i for i in [None, ''] + BOOLEANS_FALSE if i == _var]:
return_dict[v] = _var
else:
return return_dict
def _run_command(self, build_command, unsafe_shell=False, timeout=600):
"""Return information from running an Ansible Command.
This will squash the build command list into a string and then
execute the command via Ansible. The output is returned to the method.
This output is returned as `return_code`, `stdout`, `stderr`.
        Prior to running the command the method will look to see if the LXC
        lockfile is present. If the lockfile "/var/lock/subsys/lxc" exists,
        the method will wait up to 10 minutes for it to be gone, polling
        every second.
:param build_command: Used for the command and all options.
:type build_command: ``list``
        :param unsafe_shell: Enable or disable unsafe shell commands.
        :type unsafe_shell: ``bool``
        :param timeout: Time before the container create process quits.
:type timeout: ``int``
"""
lockfile = '/var/lock/subsys/lxc'
for _ in xrange(timeout):
if os.path.exists(lockfile):
time.sleep(1)
else:
return self.module.run_command(
' '.join(build_command),
use_unsafe_shell=unsafe_shell
)
else:
message = (
            'The LXC subsystem is locked and after 10 minutes it never'
' became unlocked. Lockfile [ %s ]' % lockfile
)
self.failure(
error='LXC subsystem locked',
rc=0,
msg=message
)
def _config(self):
"""Configure an LXC container.
Write new configuration values to the lxc config file. This will
stop the container if it's running write the new options and then
restart the container upon completion.
"""
_container_config = self.module.params.get('container_config')
if not _container_config:
return False
container_config_file = self.container.config_file_name
with open(container_config_file, 'rb') as f:
container_config = f.readlines()
        # Note: ast.literal_eval is used because AnsibleModule does not
        # provide adequate dictionary parsing.
# Issue: https://github.com/ansible/ansible/issues/7679
# TODO(cloudnull) adjust import when issue has been resolved.
import ast
options_dict = ast.literal_eval(_container_config)
parsed_options = [i.split('=', 1) for i in options_dict]
config_change = False
for key, value in parsed_options:
new_entry = '%s = %s\n' % (key, value)
for option_line in container_config:
# Look for key in config
if option_line.startswith(key):
_, _value = option_line.split('=', 1)
config_value = ' '.join(_value.split())
line_index = container_config.index(option_line)
# If the sanitized values don't match replace them
if value != config_value:
line_index += 1
if new_entry not in container_config:
config_change = True
container_config.insert(line_index, new_entry)
# Break the flow as values are written or not at this point
break
else:
config_change = True
container_config.append(new_entry)
# If the config changed restart the container.
if config_change:
container_state = self._get_state()
if container_state != 'stopped':
self.container.stop()
with open(container_config_file, 'wb') as f:
f.writelines(container_config)
self.state_change = True
if container_state == 'running':
self._container_startup()
elif container_state == 'frozen':
self._container_startup()
self.container.freeze()
def _container_create_clone(self):
"""Clone a new LXC container from an existing container.
This method will clone an existing container to a new container using
the `clone_name` variable as the new container name. The method will
create a container if the container `name` does not exist.
Note that cloning a container will ensure that the original container
is "stopped" before the clone can be done. Because this operation can
require a state change the method will return the original container
to its prior state upon completion of the clone.
Once the clone is complete the new container will be left in a stopped
state.
"""
# Ensure that the state of the original container is stopped
container_state = self._get_state()
if container_state != 'stopped':
self.state_change = True
self.container.stop()
build_command = [
self.module.get_bin_path('lxc-clone', True),
]
build_command = self._add_variables(
variables_dict=self._get_vars(
variables=LXC_COMMAND_MAP['clone']['variables']
),
build_command=build_command
)
        # Use a snapshot when cloning if requested.
if self.module.params.get('clone_snapshot') in BOOLEANS_TRUE:
build_command.append('--snapshot')
        # Check for backing_store == overlayfs; if so, force the use of
        # snapshot. If overlayfs is used and snapshot is unset the clone
        # command will fail with an unsupported type.
elif self.module.params.get('backing_store') == 'overlayfs':
build_command.append('--snapshot')
rc, return_data, err = self._run_command(build_command)
if rc != 0:
message = "Failed executing lxc-clone."
self.failure(
err=err, rc=rc, msg=message, command=' '.join(
build_command
)
)
else:
self.state_change = True
# Restore the original state of the origin container if it was
# not in a stopped state.
if container_state == 'running':
self.container.start()
elif container_state == 'frozen':
self.container.start()
self.container.freeze()
return True
def _create(self):
"""Create a new LXC container.
This method will build and execute a shell command to build the
container. It would have been nice to simply use the lxc python library
however at the time this was written the python library, in both py2
and py3 didn't support some of the more advanced container create
processes. These missing processes mainly revolve around backing
LXC containers with block devices.
"""
build_command = [
self.module.get_bin_path('lxc-create', True),
'--name %s' % self.container_name,
'--quiet'
]
build_command = self._add_variables(
variables_dict=self._get_vars(
variables=LXC_COMMAND_MAP['create']['variables']
),
build_command=build_command
)
# Load logging for the instance when creating it.
if self.module.params.get('container_log') in BOOLEANS_TRUE:
            # Set the logging path to /var/log/lxc if uid is root, else
            # set it to the home folder of the executing user.
try:
if os.getuid() != 0:
log_path = os.getenv('HOME')
else:
if not os.path.isdir('/var/log/lxc/'):
os.makedirs('/var/log/lxc/')
log_path = '/var/log/lxc/'
except OSError:
log_path = os.getenv('HOME')
build_command.extend([
'--logfile %s' % os.path.join(
log_path, 'lxc-%s.log' % self.container_name
),
'--logpriority %s' % self.module.params.get(
'container_log_level'
).upper()
])
# Add the template commands to the end of the command if there are any
template_options = self.module.params.get('template_options', None)
if template_options:
build_command.append('-- %s' % template_options)
rc, return_data, err = self._run_command(build_command)
if rc != 0:
message = "Failed executing lxc-create."
self.failure(
err=err, rc=rc, msg=message, command=' '.join(build_command)
)
else:
self.state_change = True
def _container_data(self):
"""Returns a dict of container information.
:returns: container data
:rtype: ``dict``
"""
return {
'interfaces': self.container.get_interfaces(),
'ips': self.container.get_ips(),
'state': self._get_state(),
'init_pid': int(self.container.init_pid)
}
def _unfreeze(self):
"""Unfreeze a container.
:returns: True or False based on if the container was unfrozen.
        :rtype: ``bool``
"""
unfreeze = self.container.unfreeze()
if unfreeze:
self.state_change = True
return unfreeze
def _get_state(self):
"""Return the state of a container.
If the container is not found the state returned is "absent"
:returns: state of a container as a lower case string.
:rtype: ``str``
"""
if self._container_exists(container_name=self.container_name):
return str(self.container.state).lower()
else:
return str('absent')
def _execute_command(self):
"""Execute a shell command."""
container_command = self.module.params.get('container_command')
if container_command:
container_state = self._get_state()
if container_state == 'frozen':
self._unfreeze()
elif container_state == 'stopped':
self._container_startup()
self.container.attach_wait(create_script, container_command)
self.state_change = True
def _container_startup(self, timeout=60):
"""Ensure a container is started.
        :param timeout: Time before the start operation is abandoned.
:type timeout: ``int``
"""
self.container = self.get_container_bind()
for _ in xrange(timeout):
if self._get_state() != 'running':
self.container.start()
self.state_change = True
# post startup sleep for 1 second.
time.sleep(1)
else:
return True
else:
self.failure(
lxc_container=self._container_data(),
error='Failed to start container'
' [ %s ]' % self.container_name,
rc=1,
                msg='The container [ %s ] failed to start. Check that lxc is'
' available and that the container is in a functional'
' state.' % self.container_name
)
def _check_archive(self):
"""Create a compressed archive of a container.
        This will store archive information as self.archive_info
"""
if self.module.params.get('archive') in BOOLEANS_TRUE:
self.archive_info = {
'archive': self._container_create_tar()
}
def _check_clone(self):
"""Create a compressed archive of a container.
This will store archive_info in as self.archive_info
"""
clone_name = self.module.params.get('clone_name')
if clone_name:
if not self._container_exists(container_name=clone_name):
self.clone_info = {
'cloned': self._container_create_clone()
}
else:
self.clone_info = {
'cloned': False
}
def _destroyed(self, timeout=60):
"""Ensure a container is destroyed.
:param timeout: Time before the destroy operation is abandoned.
:type timeout: ``int``
"""
for _ in xrange(timeout):
if not self._container_exists(container_name=self.container_name):
break
# Check if the container needs to have an archive created.
self._check_archive()
# Check if the container is to be cloned
self._check_clone()
if self._get_state() != 'stopped':
self.state_change = True
self.container.stop()
if self.container.destroy():
self.state_change = True
# post destroy attempt sleep for 1 second.
time.sleep(1)
else:
self.failure(
lxc_container=self._container_data(),
error='Failed to destroy container'
' [ %s ]' % self.container_name,
rc=1,
msg='The container [ %s ] failed to be destroyed. Check'
' that lxc is available and that the container is in a'
' functional state.' % self.container_name
)
def _frozen(self, count=0):
"""Ensure a container is frozen.
If the container does not exist the container will be created.
:param count: number of times this command has been called by itself.
:type count: ``int``
"""
self.check_count(count=count, method='frozen')
if self._container_exists(container_name=self.container_name):
self._execute_command()
# Perform any configuration updates
self._config()
container_state = self._get_state()
if container_state == 'frozen':
pass
elif container_state == 'running':
self.container.freeze()
self.state_change = True
else:
self._container_startup()
self.container.freeze()
self.state_change = True
# Check if the container needs to have an archive created.
self._check_archive()
# Check if the container is to be cloned
self._check_clone()
else:
self._create()
count += 1
self._frozen(count)
def _restarted(self, count=0):
"""Ensure a container is restarted.
If the container does not exist the container will be created.
:param count: number of times this command has been called by itself.
:type count: ``int``
"""
self.check_count(count=count, method='restart')
if self._container_exists(container_name=self.container_name):
self._execute_command()
# Perform any configuration updates
self._config()
if self._get_state() != 'stopped':
self.container.stop()
self.state_change = True
# Run container startup
self._container_startup()
# Check if the container needs to have an archive created.
self._check_archive()
# Check if the container is to be cloned
self._check_clone()
else:
self._create()
count += 1
self._restarted(count)
def _stopped(self, count=0):
"""Ensure a container is stopped.
If the container does not exist the container will be created.
:param count: number of times this command has been called by itself.
:type count: ``int``
"""
self.check_count(count=count, method='stop')
if self._container_exists(container_name=self.container_name):
self._execute_command()
# Perform any configuration updates
self._config()
if self._get_state() != 'stopped':
self.container.stop()
self.state_change = True
# Check if the container needs to have an archive created.
self._check_archive()
# Check if the container is to be cloned
self._check_clone()
else:
self._create()
count += 1
self._stopped(count)
def _started(self, count=0):
"""Ensure a container is started.
If the container does not exist the container will be created.
:param count: number of times this command has been called by itself.
:type count: ``int``
"""
self.check_count(count=count, method='start')
if self._container_exists(container_name=self.container_name):
container_state = self._get_state()
if container_state == 'running':
pass
elif container_state == 'frozen':
self._unfreeze()
elif not self._container_startup():
self.failure(
lxc_container=self._container_data(),
error='Failed to start container'
' [ %s ]' % self.container_name,
rc=1,
                    msg='The container [ %s ] failed to start. Check that lxc is'
' available and that the container is in a functional'
' state.' % self.container_name
)
# Return data
self._execute_command()
# Perform any configuration updates
self._config()
# Check if the container needs to have an archive created.
self._check_archive()
# Check if the container is to be cloned
self._check_clone()
else:
self._create()
count += 1
self._started(count)
def _get_lxc_vg(self):
"""Return the name of the Volume Group used in LXC."""
build_command = [
self.module.get_bin_path('lxc-config', True),
"lxc.bdev.lvm.vg"
]
rc, vg, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='Failed to read LVM VG from LXC config',
command=' '.join(build_command)
)
else:
return str(vg.strip())
def _lvm_lv_list(self):
"""Return a list of all lv in a current vg."""
vg = self._get_lxc_vg()
build_command = [
self.module.get_bin_path('lvs', True)
]
rc, stdout, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='Failed to get list of LVs',
command=' '.join(build_command)
)
all_lvms = [i.split() for i in stdout.splitlines()][1:]
return [lv_entry[0] for lv_entry in all_lvms if lv_entry[1] == vg]
def _get_vg_free_pe(self, vg_name):
"""Return the available size of a given VG.
        :param vg_name: Name of the volume group.
        :type vg_name: ``str``
        :returns: free size and unit of measure of the VG
:type: ``tuple``
"""
build_command = [
'vgdisplay',
vg_name,
'--units',
'g'
]
rc, stdout, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='failed to read vg %s' % vg_name,
command=' '.join(build_command)
)
vg_info = [i.strip() for i in stdout.splitlines()][1:]
free_pe = [i for i in vg_info if i.startswith('Free')]
_free_pe = free_pe[0].split()
return float(_free_pe[-2]), _free_pe[-1]
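# Illustrative parse of the 'Free' line (vgdisplay output format assumed):
#   'Free  PE / Size       256 / 10.00 g'  ->  (10.0, 'g')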
def _get_lv_size(self, lv_name):
"""Return the available size of a given LV.
:param lv_name: Name of the logical volume.
:type lv_name: ``str``
:returns: size and measurement of an LV
:type: ``tuple``
"""
vg = self._get_lxc_vg()
lv = os.path.join(vg, lv_name)
build_command = [
'lvdisplay',
lv,
'--units',
'g'
]
rc, stdout, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='failed to read lv %s' % lv,
command=' '.join(build_command)
)
lv_info = [i.strip() for i in stdout.splitlines()][1:]
_free_pe = [i for i in lv_info if i.startswith('LV Size')]
free_pe = _free_pe[0].split()
return self._roundup(float(free_pe[-2])), free_pe[-1]
def _lvm_snapshot_create(self, source_lv, snapshot_name,
snapshot_size_gb=5):
"""Create an LVM snapshot.
:param source_lv: Name of lv to snapshot
:type source_lv: ``str``
:param snapshot_name: Name of lv snapshot
:type snapshot_name: ``str``
:param snapshot_size_gb: Size of snapshot to create
:type snapshot_size_gb: ``int``
"""
vg = self._get_lxc_vg()
free_space, measurement = self._get_vg_free_pe(vg_name=vg)
if free_space < float(snapshot_size_gb):
message = (
'Snapshot size [ %s ] is greater than the free space [ %s ] in volume group'
' [ %s ]' % (snapshot_size_gb, free_space, vg)
)
self.failure(
error='Not enough space to create snapshot',
rc=2,
msg=message
)
# Create LVM Snapshot
build_command = [
self.module.get_bin_path('lvcreate', True),
"-n",
snapshot_name,
"-s",
os.path.join(vg, source_lv),
"-L%sg" % snapshot_size_gb
]
rc, stdout, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='Failed to Create LVM snapshot %s/%s --> %s'
% (vg, source_lv, snapshot_name)
)
def _lvm_lv_mount(self, lv_name, mount_point):
"""mount an lv.
:param lv_name: name of the logical volume to mount
:type lv_name: ``str``
:param mount_point: path on the file system that is mounted.
:type mount_point: ``str``
"""
vg = self._get_lxc_vg()
build_command = [
self.module.get_bin_path('mount', True),
"/dev/%s/%s" % (vg, lv_name),
mount_point,
]
rc, stdout, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='failed to mount LVM lv %s/%s to %s'
% (vg, lv_name, mount_point)
)
def _create_tar(self, source_dir):
"""Create an archive of a given ``source_dir`` to ``output_path``.
:param source_dir: Path to the directory to be archived.
:type source_dir: ``str``
"""
archive_path = self.module.params.get('archive_path')
if not os.path.isdir(archive_path):
os.makedirs(archive_path)
archive_compression = self.module.params.get('archive_compression')
compression_type = LXC_COMPRESSION_MAP[archive_compression]
# Build the archive file name: <archive_path>/<container_name>.<extension>
archive_name = '%s.%s' % (
os.path.join(
archive_path,
self.container_name
),
compression_type['extension']
)
build_command = [
self.module.get_bin_path('tar', True),
'--directory=%s' % os.path.realpath(
os.path.expanduser(source_dir)
),
compression_type['argument'],
archive_name,
'.'
]
rc, stdout, err = self._run_command(
build_command=build_command,
unsafe_shell=True
)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='failed to create tar archive',
command=' '.join(build_command)
)
return archive_name
def _lvm_lv_remove(self, lv_name):
"""Remove an LV.
:param lv_name: The name of the logical volume
:type lv_name: ``str``
"""
vg = self._get_lxc_vg()
build_command = [
self.module.get_bin_path('lvremove', True),
"-f",
"%s/%s" % (vg, lv_name),
]
rc, stdout, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='Failed to remove LVM LV %s/%s' % (vg, lv_name),
command=' '.join(build_command)
)
def _rsync_data(self, container_path, temp_dir):
"""Sync the container directory to the temp directory.
:param container_path: path to the container's root filesystem
:type container_path: ``str``
:param temp_dir: path to the temporary local working directory
:type temp_dir: ``str``
"""
# This loop is created to support overlayfs archives. This should
# squash all of the layers into a single archive.
fs_paths = container_path.split(':')
if 'overlayfs' in fs_paths:
fs_paths.pop(fs_paths.index('overlayfs'))
for fs_path in fs_paths:
# Set the path to the container data
fs_path = os.path.dirname(fs_path)
# Run the sync command
build_command = [
self.module.get_bin_path('rsync', True),
'-aHAX',
fs_path,
temp_dir
]
rc, stdout, err = self._run_command(
build_command,
unsafe_shell=True
)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='failed to perform archive',
command=' '.join(build_command)
)
def _unmount(self, mount_point):
"""Unmount a file system.
:param mount_point: path on the file system that is mounted.
:type mount_point: ``str``
"""
build_command = [
self.module.get_bin_path('umount', True),
mount_point,
]
rc, stdout, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='failed to unmount [ %s ]' % mount_point,
command=' '.join(build_command)
)
def _overlayfs_mount(self, lowerdir, upperdir, mount_point):
"""mount an lv.
:param lowerdir: name/path of the lower directory
:type lowerdir: ``str``
:param upperdir: name/path of the upper directory
:type upperdir: ``str``
:param mount_point: path on the file system that is mounted.
:type mount_point: ``str``
"""
build_command = [
self.module.get_bin_path('mount', True),
'-t overlayfs',
'-o lowerdir=%s,upperdir=%s' % (lowerdir, upperdir),
'overlayfs',
mount_point,
]
rc, stdout, err = self._run_command(build_command)
if rc != 0:
self.failure(
err=err,
rc=rc,
msg='failed to mount overlayfs:%s:%s to %s -- Command: %s'
% (lowerdir, upperdir, mount_point, build_command)
)
def _container_create_tar(self):
"""Create a tar archive from an LXC container.
The process is as follows:
* Stop or Freeze the container
* Create temporary dir
* Copy container and config to temporary directory
* If LVM backed:
* Create LVM snapshot of LV backing the container
* Mount the snapshot to tmpdir/rootfs
* Restore the state of the container
* Create tar of tmpdir
* Clean up
"""
# Create a temp dir
temp_dir = tempfile.mkdtemp()
# Set the name of the working dir, temp + container_name
work_dir = os.path.join(temp_dir, self.container_name)
# LXC container rootfs
lxc_rootfs = self.container.get_config_item('lxc.rootfs')
# Test if the containers rootfs is a block device
block_backed = lxc_rootfs.startswith(os.path.join(os.sep, 'dev'))
# Test if the container is using overlayfs
overlayfs_backed = lxc_rootfs.startswith('overlayfs')
mount_point = os.path.join(work_dir, 'rootfs')
# Set the snapshot name if needed
snapshot_name = '%s_lxc_snapshot' % self.container_name
container_state = self._get_state()
try:
# Ensure the original container is stopped or frozen
if container_state not in ['stopped', 'frozen']:
if container_state == 'running':
self.container.freeze()
else:
self.container.stop()
# Sync the container data from the container_path to work_dir
self._rsync_data(lxc_rootfs, temp_dir)
if block_backed:
if snapshot_name not in self._lvm_lv_list():
if not os.path.exists(mount_point):
os.makedirs(mount_point)
# Take snapshot
size, measurement = self._get_lv_size(
lv_name=self.container_name
)
self._lvm_snapshot_create(
source_lv=self.container_name,
snapshot_name=snapshot_name,
snapshot_size_gb=size
)
# Mount snapshot
self._lvm_lv_mount(
lv_name=snapshot_name,
mount_point=mount_point
)
else:
self.failure(
err='snapshot [ %s ] already exists' % snapshot_name,
rc=1,
msg='The snapshot [ %s ] already exists. Please clean'
' up old snapshot of containers before continuing.'
% snapshot_name
)
elif overlayfs_backed:
lowerdir, upperdir = lxc_rootfs.split(':')[1:]
self._overlayfs_mount(
lowerdir=lowerdir,
upperdir=upperdir,
mount_point=mount_point
)
# Set the state as changed and set a new fact
self.state_change = True
return self._create_tar(source_dir=work_dir)
finally:
if block_backed or overlayfs_backed:
# unmount snapshot
self._unmount(mount_point)
if block_backed:
# Remove snapshot
self._lvm_lv_remove(snapshot_name)
# Restore original state of container
if container_state == 'running':
if self._get_state() == 'frozen':
self.container.unfreeze()
else:
self.container.start()
# Remove tmpdir
shutil.rmtree(temp_dir)
def check_count(self, count, method):
if count > 1:
self.failure(
error='Failed to %s container' % method,
rc=1,
msg='The container [ %s ] failed to %s. Check that lxc is'
' available and that the container is in a functional'
' state.' % (self.container_name, method)
)
def failure(self, **kwargs):
"""Return a Failure when running an Ansible command.
:param error: ``str`` Error that occurred.
:param rc: ``int`` Return code while executing an Ansible command.
:param msg: ``str`` Message to report.
"""
self.module.fail_json(**kwargs)
def run(self):
"""Run the main method."""
action = getattr(self, LXC_ANSIBLE_STATES[self.state])
action()
outcome = self._container_data()
if self.archive_info:
outcome.update(self.archive_info)
if self.clone_info:
outcome.update(self.clone_info)
self.module.exit_json(
changed=self.state_change,
lxc_container=outcome
)
def main():
"""Ansible Main module."""
module = AnsibleModule(
argument_spec=dict(
name=dict(
type='str',
required=True
),
template=dict(
type='str',
default='ubuntu'
),
backing_store=dict(
type='str',
choices=LXC_BACKING_STORE.keys(),
default='dir'
),
template_options=dict(
type='str'
),
config=dict(
type='str',
default='/etc/lxc/default.conf'
),
vg_name=dict(
type='str',
default='lxc'
),
thinpool=dict(
type='str'
),
fs_type=dict(
type='str',
default='ext4'
),
fs_size=dict(
type='str',
default='5G'
),
directory=dict(
type='str'
),
zfs_root=dict(
type='str'
),
lv_name=dict(
type='str'
),
lxc_path=dict(
type='str'
),
state=dict(
choices=LXC_ANSIBLE_STATES.keys(),
default='started'
),
container_command=dict(
type='str'
),
container_config=dict(
type='str'
),
container_log=dict(
choices=BOOLEANS,
default='false'
),
container_log_level=dict(
choices=[n for i in LXC_LOGGING_LEVELS.values() for n in i],
default='INFO'
),
clone_name=dict(
type='str',
required=False
),
clone_snapshot=dict(
choices=BOOLEANS,
default='false'
),
archive=dict(
choices=BOOLEANS,
default='false'
),
archive_path=dict(
type='str',
default='/tmp'
),
archive_compression=dict(
choices=LXC_COMPRESSION_MAP.keys(),
default='gzip'
)
),
supports_check_mode=False,
)
if not HAS_LXC:
module.fail_json(
msg='The `lxc` module is not importable. Check the requirements.'
)
lv_name = module.params.get('lv_name')
if not lv_name:
module.params['lv_name'] = module.params.get('name')
lxc_manage = LxcContainerManagement(module=module)
lxc_manage.run()
# import module bits
from ansible.module_utils.basic import *
main()
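# A minimal, hypothetical playbook task for this module (values are
# illustrative, not taken from this file):
#
#   - name: Create and start a directory-backed container
#     lxc_container:
#       name: test-container
#       template: ubuntu
#       backing_store: dir
#       state: started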
|
gpl-3.0
|
adist/drunken-sansa
|
openerp/addons/warning/warning.py
|
13
|
14330
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields,osv
from openerp.tools.translate import _
WARNING_MESSAGE = [
('no-message','No Message'),
('warning','Warning'),
('block','Blocking Message')
]
WARNING_HELP = _('Selecting the "Warning" option will notify user with the message, Selecting "Blocking Message" will throw an exception with the message and block the flow. The Message has to be written in the next field.')
class res_partner(osv.osv):
_inherit = 'res.partner'
_columns = {
'sale_warn' : fields.selection(WARNING_MESSAGE, 'Sales Order', help=WARNING_HELP, required=True),
'sale_warn_msg' : fields.text('Message for Sales Order'),
'purchase_warn' : fields.selection(WARNING_MESSAGE, 'Purchase Order', help=WARNING_HELP, required=True),
'purchase_warn_msg' : fields.text('Message for Purchase Order'),
'picking_warn' : fields.selection(WARNING_MESSAGE, 'Stock Picking', help=WARNING_HELP, required=True),
'picking_warn_msg' : fields.text('Message for Stock Picking'),
'invoice_warn' : fields.selection(WARNING_MESSAGE, 'Invoice', help=WARNING_HELP, required=True),
'invoice_warn_msg' : fields.text('Message for Invoice'),
}
_defaults = {
'sale_warn' : 'no-message',
'purchase_warn' : 'no-message',
'picking_warn' : 'no-message',
'invoice_warn' : 'no-message',
}
res_partner()
class sale_order(osv.osv):
_inherit = 'sale.order'
def onchange_partner_id(self, cr, uid, ids, part, context=None):
if not part:
return {'value':{'partner_invoice_id': False, 'partner_shipping_id':False, 'payment_term' : False}}
warning = {}
title = False
message = False
partner = self.pool.get('res.partner').browse(cr, uid, part, context=context)
if partner.sale_warn != 'no-message':
if partner.sale_warn == 'block':
raise osv.except_osv(_('Alert for %s!') % (partner.name), partner.sale_warn_msg)
title = _("Warning for %s") % partner.name
message = partner.sale_warn_msg
warning = {
'title': title,
'message': message,
}
result = super(sale_order, self).onchange_partner_id(cr, uid, ids, part, context=context)
if result.get('warning',False):
warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
return {'value': result.get('value',{}), 'warning':warning}
sale_order()
class purchase_order(osv.osv):
_inherit = 'purchase.order'
def onchange_partner_id(self, cr, uid, ids, part):
if not part:
return {'value':{'partner_address_id': False}}
warning = {}
title = False
message = False
partner = self.pool.get('res.partner').browse(cr, uid, part)
if partner.purchase_warn != 'no-message':
if partner.purchase_warn == 'block':
raise osv.except_osv(_('Alert for %s!') % (partner.name), partner.purchase_warn_msg)
title = _("Warning for %s") % partner.name
message = partner.purchase_warn_msg
warning = {
'title': title,
'message': message
}
result = super(purchase_order, self).onchange_partner_id(cr, uid, ids, part)
if result.get('warning',False):
warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
return {'value': result.get('value',{}), 'warning':warning}
purchase_order()
class account_invoice(osv.osv):
_inherit = 'account.invoice'
def onchange_partner_id(self, cr, uid, ids, type, partner_id,
date_invoice=False, payment_term=False, partner_bank_id=False, company_id=False):
if not partner_id:
return {'value': {
'account_id': False,
'payment_term': False,
}
}
warning = {}
title = False
message = False
partner = self.pool.get('res.partner').browse(cr, uid, partner_id)
if partner.invoice_warn != 'no-message':
if partner.invoice_warn == 'block':
raise osv.except_osv(_('Alert for %s!') % (partner.name), partner.invoice_warn_msg)
title = _("Warning for %s") % partner.name
message = partner.invoice_warn_msg
warning = {
'title': title,
'message': message
}
result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
date_invoice=date_invoice, payment_term=payment_term,
partner_bank_id=partner_bank_id, company_id=company_id)
if result.get('warning',False):
warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
return {'value': result.get('value',{}), 'warning':warning}
account_invoice()
class stock_picking(osv.osv):
_inherit = 'stock.picking'
def onchange_partner_in(self, cr, uid, ids, partner_id=None, context=None):
if not partner_id:
return {}
partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
warning = {}
title = False
message = False
if partner.picking_warn != 'no-message':
if partner.picking_warn == 'block':
raise osv.except_osv(_('Alert for %s!') % (partner.name), partner.picking_warn_msg)
title = _("Warning for %s") % partner.name
message = partner.picking_warn_msg
warning = {
'title': title,
'message': message
}
result = super(stock_picking, self).onchange_partner_in(cr, uid, ids, partner_id, context)
if result.get('warning',False):
warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
return {'value': result.get('value',{}), 'warning':warning}
stock_picking()
# FIXME:(class stock_picking_in and stock_picking_out) this is a temporary workaround because of a framework bug (ref: lp:996816).
# It should be removed as soon as the bug is fixed
class stock_picking_in(osv.osv):
_inherit = 'stock.picking.in'
def onchange_partner_in(self, cr, uid, ids, partner_id=None, context=None):
if not partner_id:
return {}
partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
warning = {}
title = False
message = False
if partner.picking_warn != 'no-message':
if partner.picking_warn == 'block':
raise osv.except_osv(_('Alert for %s!') % (partner.name), partner.picking_warn_msg)
title = _("Warning for %s") % partner.name
message = partner.picking_warn_msg
warning = {
'title': title,
'message': message
}
result = super(stock_picking_in, self).onchange_partner_in(cr, uid, ids, partner_id, context)
if result.get('warning',False):
warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
return {'value': result.get('value',{}), 'warning':warning}
class stock_picking_out(osv.osv):
_inherit = 'stock.picking.out'
def onchange_partner_in(self, cr, uid, ids, partner_id=None, context=None):
if not partner_id:
return {}
partner = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
warning = {}
title = False
message = False
if partner.picking_warn != 'no-message':
if partner.picking_warn == 'block':
raise osv.except_osv(_('Alert for %s!') % (partner.name), partner.picking_warn_msg)
title = _("Warning for %s") % partner.name
message = partner.picking_warn_msg
warning = {
'title': title,
'message': message
}
result = super(stock_picking_out, self).onchange_partner_in(cr, uid, ids, partner_id, context)
if result.get('warning',False):
warning['title'] = title and title +' & '+ result['warning']['title'] or result['warning']['title']
warning['message'] = message and message + ' ' + result['warning']['message'] or result['warning']['message']
return {'value': result.get('value',{}), 'warning':warning}
class product_product(osv.osv):
_inherit = 'product.product'
_columns = {
'sale_line_warn' : fields.selection(WARNING_MESSAGE,'Sales Order Line', help=WARNING_HELP, required=True),
'sale_line_warn_msg' : fields.text('Message for Sales Order Line'),
'purchase_line_warn' : fields.selection(WARNING_MESSAGE,'Purchase Order Line', help=WARNING_HELP, required=True),
'purchase_line_warn_msg' : fields.text('Message for Purchase Order Line'),
}
_defaults = {
'sale_line_warn' : 'no-message',
'purchase_line_warn' : 'no-message',
}
product_product()
class sale_order_line(osv.osv):
_inherit = 'sale.order.line'
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False,
fiscal_position=False, flag=False, context=None):
warning = {}
if not product:
return {'value': {'th_weight' : 0, 'product_packaging': False,
'product_uos_qty': qty}, 'domain': {'product_uom': [],
'product_uos': []}}
product_obj = self.pool.get('product.product')
product_info = product_obj.browse(cr, uid, product)
title = False
message = False
if product_info.sale_line_warn != 'no-message':
if product_info.sale_line_warn == 'block':
raise osv.except_osv(_('Alert for %s!') % (product_info.name), product_info.sale_line_warn_msg)
title = _("Warning for %s") % product_info.name
message = product_info.sale_line_warn_msg
warning['title'] = title
warning['message'] = message
result = super(sale_order_line, self).product_id_change( cr, uid, ids, pricelist, product, qty,
uom, qty_uos, uos, name, partner_id,
lang, update_tax, date_order, packaging, fiscal_position, flag, context=context)
if result.get('warning',False):
warning['title'] = title and title +' & '+result['warning']['title'] or result['warning']['title']
warning['message'] = message and message +'\n\n'+result['warning']['message'] or result['warning']['message']
return {'value': result.get('value',{}), 'warning':warning}
sale_order_line()
class purchase_order_line(osv.osv):
_inherit = 'purchase.order.line'
def onchange_product_id(self,cr, uid, ids, pricelist, product, qty, uom,
partner_id, date_order=False, fiscal_position_id=False, date_planned=False,
name=False, price_unit=False, notes=False, context=None):
warning = {}
if not product:
return {'value': {'price_unit': 0.0, 'name':'','notes':'', 'product_uom' : False}, 'domain':{'product_uom':[]}}
product_obj = self.pool.get('product.product')
product_info = product_obj.browse(cr, uid, product)
title = False
message = False
if product_info.purchase_line_warn != 'no-message':
if product_info.purchase_line_warn == 'block':
raise osv.except_osv(_('Alert for %s!') % (product_info.name), product_info.purchase_line_warn_msg)
title = _("Warning for %s") % product_info.name
message = product_info.purchase_line_warn_msg
warning['title'] = title
warning['message'] = message
result = super(purchase_order_line, self).onchange_product_id(cr, uid, ids, pricelist, product, qty, uom,
partner_id, date_order, fiscal_position_id)
if result.get('warning',False):
warning['title'] = title and title +' & '+result['warning']['title'] or result['warning']['title']
warning['message'] = message and message +'\n\n'+result['warning']['message'] or result['warning']['message']
return {'value': result.get('value',{}), 'warning':warning}
purchase_order_line()
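# Shape of the merged onchange result returned by the overrides above
# (field values are illustrative):
#
#   {'value': {...},
#    'warning': {'title': 'Warning for Agrolait',
#                'message': 'Always check payment terms.'}}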
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
nitely/http-lazy-headers
|
http_lazy_headers/fields/content_range.py
|
1
|
7244
|
# -*- coding: utf-8 -*-
from ..shared.generic import cleaners
from ..shared.utils import constraints
from .. import exceptions
from ..settings import settings
from ..shared import bases
from ..shared.values import ranges
from ..shared.utils import assertions
def content_range_bytes(
start=None,
end=None,
length=None):
"""
Shorthand for creating a\
ContentRange value
:param start: start of the byte range
:param end: end of the byte range
:param length: total length of the representation
:return:
"""
return (
ranges.RangesOptions.bytes,
(start, end),
length,
None)
def content_range_none():
# todo: change to ranges.RangesOptions.none
return None, None, None, None
def content_range_bytes_unsatisfied(length=None):
return ranges.RangesOptions.bytes, None, length, None
def content_range_other(unit, chars=None):
return unit, None, None, chars
class ContentRange(bases.SingleHeaderBase):
"""
Sent by server only.
Be aware, the HTTP spec does not define\
a way to resume uploads (i.e., client side).
Creating values with the function helpers in\
this module, instead of passing an opaque\
tuple, is advised.
The "Content-Range" header field is sent\
in a single part 206 (Partial Content)\
response to indicate the partial range of\
the selected representation enclosed as the\
message payload, sent in each part of a\
multipart 206 response to indicate the range\
enclosed within each body part, and sent in\
416 (Range Not Satisfiable) responses to\
provide information about the selected\
representation.
Example::
ContentRange([
('bytes', (0, 100), 100, None)
])
# content-range: bytes 0-100/100
ContentRange([
content_range_bytes(
start=0,
end=100,
length=100)
])
# content-range: bytes 0-100/100
ContentRange([
content_range_bytes(
end=100,
length=100)
])
# content-range: bytes -100/100
ContentRange([
content_range_bytes(
length=100)
])
# content-range: bytes -/100
ContentRange([
content_range_bytes()
])
# content-range: bytes -/*
ContentRange([
('bytes', None, 100, None) # Unsatisfied
])
# content-range: bytes */100
ContentRange([
content_range_bytes_unsatisfied(
length=100)
])
# content-range: bytes */100
ContentRange([
('my-unit', None, None, '0-100-200-400')
])
# content-range: my-unit 0-100-200-400
ContentRange([
content_range_other(
unit='my-unit',
chars='0-100-200-400')
])
# content-range: my-unit 0-100-200-400
ContentRange([
(None, None, None, None)
])
# content-range: none
ContentRange([
content_range_none()
])
# content-range: none
`Ref. <http://httpwg.org/specs/rfc7233.html#header.content-range>`_
"""
name = 'content-range'
def check_one(self, value):
assertions.must_be_tuple_of(value, 4)
unit, unit_range, length, chars = value
if unit is None:
assertions.assertion(
all(not v
for v in value),
'"{}" received, a unit was '
'expected'.format(value))
return
if unit != ranges.RangesOptions.bytes:
assertions.must_be_token(unit)
if chars:
    assertions.must_be_ascii(chars)
assertions.assertion(
unit_range is None and
length is None,
'"{}" received, either unit '
'\'bytes\' or no range and '
'no length were expected'
.format(value))
return
# Bytes
start = None
end = None
if unit_range:
assertions.must_be_tuple_of(unit_range, 2)
start, end = unit_range
assertions.assertion(
start is None or
isinstance(start, int),
'Start range must be None or int')
assertions.assertion(
end is None or
isinstance(end, int),
'End range must be None or int')
assertions.assertion(
start is None or
end is None or
start <= end,
'"{}" range received, '
'start <= end was expected'
.format(value))
assertions.assertion(
length is None or
isinstance(length, int),
'Length must be None or int')
assertions.assertion(
start is None or
length is None or
start <= length,
'"{}" received, '
'start <= length was expected'
.format(value))
assertions.assertion(
end is None or
length is None or
end <= length,
'"{}" received, '
'end <= length was expected'
.format(value))
assertions.assertion(
not chars,
'"{}" received, '
'no chars was expected'
.format(value))
def to_str(self, values):
unit, range_, length, chars = values[0]
if not unit:
return 'none'
if unit != ranges.RangesOptions.bytes:
# A SP after other-unit is required
return '{unit} {chars}'.format(
unit=unit,
chars=chars or '')
if length is None:
length = '*'
if range_ is None:
return '{unit} */{length}'.format(
unit=unit,
length=length)
start, end = range_
if start is None:
start = ''
if end is None:
end = ''
return '{unit} {start}-{end}/{length}'.format(
unit=unit,
start=start,
end=end,
length=length)
def clean_one(self, raw_value):
try:
unit, chars = raw_value.split(' ', 1)
except ValueError:
unit = raw_value
chars = ''
constraints.must_be_token(unit)
unit = unit.lower()
if unit == ranges.RangesOptions.none:
return content_range_none()
if unit != ranges.RangesOptions.bytes:
return content_range_other(unit, chars)
try:
range_, length = chars.split('/', 1)
except ValueError:
raise exceptions.BadRequest(
'Expected "start-end/length"')
if length == '*':
length = None
if length is not None:
length = cleaners.clean_number(
length,
max_chars=settings.CONTENT_MAX_CHARS)
if range_ == '*':
return content_range_bytes_unsatisfied(length)
return content_range_bytes(
*cleaners.clean_bytes_range(range_),
length=length)
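# A minimal round-trip sketch (illustrative; assumes this package's
# relative imports resolve):
#
#   cr = ContentRange([content_range_bytes(start=0, end=99, length=200)])
#   cr.to_str([content_range_bytes(start=0, end=99, length=200)])
#   # -> 'bytes 0-99/200'
#   cr.clean_one('bytes 0-99/200')
#   # -> ('bytes', (0, 99), 200, None)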
|
mit
|
thombashi/DataProperty
|
test/test_function.py
|
1
|
4369
|
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
import pytest
from dataproperty import get_integer_digit, get_number_of_digit
nan = float("nan")
inf = float("inf")
class Test_get_integer_digit:
@pytest.mark.parametrize(
["value", "expected"],
[
[0, 1],
[-0, 1],
[0.99, 1],
[-0.99, 1],
[".99", 1],
["-.99", 1],
[1.01, 1],
[-1.01, 1],
[9.99, 1],
[-9.99, 1],
["9.99", 1],
["-9.99", 1],
["0", 1],
["-0", 1],
[10, 2],
[-10, 2],
[99.99, 2],
[-99.99, 2],
["10", 2],
["-10", 2],
["99.99", 2],
["-99.99", 2],
[100, 3],
[-100, 3],
[999.99, 3],
[-999.99, 3],
["100", 3],
["-100", 3],
["999.99", 3],
["-999.99", 3],
[10000000000000000000, 20],
[-10000000000000000000, 20],
# float does not have enough precision for these values
[10000000000000000000.99, 20],
[-10000000000000000000.99, 20],
["10000000000000000000", 20],
["-10000000000000000000", 20],
["99999999999999099999.99", 20],
["-99999999999999099999.99", 20],
],
)
def test_normal(self, value, expected):
assert get_integer_digit(value) == expected
@pytest.mark.parametrize(
["value", "expected"],
[
[999999999999999999999999999999.9999999999, 31],
[-999999999999999999999999999999.9999999999, 31],
["999999999999999999999999999999.9999999999", 30],
["-999999999999999999999999999999.9999999999", 30],
],
)
def test_abnormal(self, value, expected):
assert get_integer_digit(value) == expected
@pytest.mark.parametrize(
["value", "exception"],
[
[True, ValueError],
[False, ValueError],
[None, ValueError],
["test", ValueError],
["a", ValueError],
["0xff", ValueError],
[nan, ValueError],
[inf, ValueError],
],
)
def test_exception(self, value, exception):
with pytest.raises(exception):
get_integer_digit(value)
class Test_get_number_of_digit:
@pytest.mark.parametrize(
["value", "expected"],
[
[0, (1, 0)],
[-0, (1, 0)],
["0", (1, 0)],
["-0", (1, 0)],
[10, (2, 0)],
[-10, (2, 0)],
["10", (2, 0)],
["-10", (2, 0)],
[10.1, (2, 1)],
[-10.1, (2, 1)],
["10.1", (2, 1)],
["-10.1", (2, 1)],
[10.01, (2, 2)],
[-10.01, (2, 2)],
[10.001, (2, 2)],
[-10.001, (2, 2)],
[100.1, (3, 1)],
[-100.1, (3, 1)],
[100.01, (3, 1)],
[-100.01, (3, 1)],
[0.1, (1, 1)],
[-0.1, (1, 1)],
["0.1", (1, 1)],
["-0.1", (1, 1)],
[0.99, (1, 2)],
[-0.99, (1, 2)],
[".99", (1, 2)],
["-.99", (1, 2)],
[0.01, (1, 2)],
[-0.01, (1, 2)],
["0.01", (1, 2)],
["-0.01", (1, 2)],
[0.001, (1, 3)],
[-0.001, (1, 3)],
["0.001", (1, 3)],
["-0.001", (1, 3)],
[0.0001, (1, 4)],
[-0.0001, (1, 4)],
["0.0001", (1, 4)],
["-0.0001", (1, 4)],
[0.00001, (1, 4)],
[-0.00001, (1, 4)],
["0.00001", (1, 4)],
["-0.00001", (1, 4)],
[2e-05, (1, 4)],
[-2e-05, (1, 4)],
["2e-05", (1, 4)],
["-2e-05", (1, 4)],
],
)
def test_normal(self, value, expected):
assert get_number_of_digit(value) == expected
@pytest.mark.parametrize(
["value"], [[None], [True], [inf], [nan], ["0xff"], ["test"], ["いろは".encode()]]
)
def test_nan(self, value):
integer_digits, decimal_places = get_number_of_digit(value)
assert integer_digits is None
assert decimal_places is None
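# Spot-check of the functions under test (expected values are taken from
# the parametrize tables above):
#
#   get_integer_digit("-999.99")   # -> 3
#   get_number_of_digit("0.001")   # -> (1, 3)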
|
mit
|
UPDDI/mps-database-server
|
assays/migrations/0039.py
|
1
|
2368
|
# Generated by Django 2.2.10 on 2020-05-11 17:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cellsamples', '0010'),
('drugtrials', '0011'),
('assays', '0038'),
]
operations = [
migrations.CreateModel(
name='SpeciesParameters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body_mass', models.FloatField(blank=True, help_text='Body Mass', null=True, verbose_name='Body Mass (kg)')),
('total_organ_weight', models.FloatField(blank=True, help_text='Total Organ Weight', null=True, verbose_name='Total Organ Weight (g)')),
('organ_tissue', models.FloatField(blank=True, help_text='Organ Tissue', null=True, verbose_name='Organ Tissue (cells/g)')),
('plasma_volume', models.FloatField(blank=True, help_text='Plasma Volume', null=True, verbose_name='VP (L)')),
('vp', models.FloatField(blank=True, help_text='Plasma Volume', null=True, verbose_name='VP (L/kg)')),
('ve', models.FloatField(blank=True, help_text='Extracellular Volume', null=True, verbose_name='VE (L/kg)')),
('rei', models.FloatField(blank=True, help_text='Extravascular/Intravascular Ratio', null=True, verbose_name='RE/I')),
('vr', models.FloatField(blank=True, help_text='Volume of Drug Distribution Minus Extracellular Space', null=True, verbose_name='VR')),
('absorptive_surface_area', models.FloatField(blank=True, help_text='Absorptive Surface Area (m^2)', null=True, verbose_name='Absorptive Surface Area')),
('ki', models.FloatField(blank=True, help_text='Inverse of Small Intestine Transit Time', null=True, verbose_name='Ki (1/min)')),
('organ', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cellsamples.Organ', verbose_name='Organ')),
('reference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='assays.AssayReference', verbose_name='Reference')),
('species', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='drugtrials.Species', verbose_name='Species')),
],
),
]
|
mit
|
fisheess/modular_SSD_tensorflow
|
nets/lenet.py
|
1
|
3641
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains a variant of the LeNet model definition."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def lenet(images, num_classes=10, is_training=False,
dropout_keep_prob=0.5,
prediction_fn=slim.softmax,
scope='LeNet'):
"""Creates a variant of the LeNet model.
Note that since the output is a set of 'logits', the values fall in the
interval of (-infinity, infinity). Consequently, to convert the outputs to a
probability distribution over the characters, one will need to convert them
using the softmax function:
logits = lenet.lenet(images, is_training=False)
probabilities = tf.nn.softmax(logits)
predictions = tf.argmax(logits, 1)
Args:
images: A batch of `Tensors` of size [batch_size, height, width, channels].
num_classes: the number of classes in the dataset.
is_training: specifies whether or not we're currently training the model.
This variable will determine the behaviour of the dropout layer.
dropout_keep_prob: the percentage of activation values that are retained.
prediction_fn: a function to get predictions out of logits.
scope: Optional variable_scope.
Returns:
logits: the pre-softmax activations, a tensor of size
[batch_size, `num_classes`]
end_points: a dictionary from components of the network to the corresponding
activation.
"""
end_points = {}
with tf.variable_scope(scope, 'LeNet', [images, num_classes]):
net = slim.conv2d(images, 32, [5, 5], scope='conv1')
net = slim.max_pool2d(net, [2, 2], 2, scope='pool1')
net = slim.conv2d(net, 64, [5, 5], scope='conv2')
net = slim.max_pool2d(net, [2, 2], 2, scope='pool2')
net = slim.flatten(net)
end_points['Flatten'] = net
net = slim.fully_connected(net, 1024, scope='fc3')
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='dropout3')
logits = slim.fully_connected(net, num_classes, activation_fn=None,
scope='fc4')
end_points['Logits'] = logits
end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
return logits, end_points
lenet.default_image_size = 28
def lenet_arg_scope(weight_decay=0.0):
"""Defines the default lenet argument scope.
Args:
weight_decay: The weight decay to use for regularizing the model.
Returns:
An `arg_scope` to use for the inception v3 model.
"""
with slim.arg_scope(
[slim.conv2d, slim.fully_connected],
weights_regularizer=slim.l2_regularizer(weight_decay),
weights_initializer=tf.truncated_normal_initializer(stddev=0.1),
activation_fn=tf.nn.relu) as sc:
return sc
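# A minimal usage sketch (input shape and weight decay are illustrative):
#
#   images = tf.placeholder(tf.float32, [None, 28, 28, 1])
#   with slim.arg_scope(lenet_arg_scope(weight_decay=4e-5)):
#       logits, end_points = lenet(images, num_classes=10, is_training=True)
#   probabilities = tf.nn.softmax(logits)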
|
mit
|
horstjens/ThePythonGameBook
|
en/python/battleship/chat_server.py
|
1
|
3173
|
#!/usr/bin/env python3
"""Server for multithreaded (asynchronous) chat application."""
from socket import AF_INET, socket, SOCK_STREAM
from threading import Thread
def accept_incoming_connections():
"""Sets up handling for incoming clients."""
while True:
client, client_address = SERVER.accept() # client_address is (ip, port)
print("client {}:{} has connected with server".format(client_address[0], client_address[1]))
#client.send(bytes("Welcome to Battleships! Please type your name and press enter!", "utf8"))
addresses[client] = client_address
Thread(target=handle_client, args=(client,)).start()
def handle_client(client): # Takes client socket as argument.
"""Handles a single client connection."""
name = client.recv(BUFSIZ).decode("utf8")
#welcome = "Welcome {}! type 'quit' to exit".format(name)
if players[0] is None:
index = 0
client.send(bytes("welcome player1 ","utf8"))
print("welcome player1")
players[0] = name
elif players[1] is None:
index = 1
client.send(bytes("welcome player2 ","utf8"))
print("welcome player2")
players[1] = name
else:
    # Reject extra clients; otherwise `index` stays unbound and the
    # broadcast below raises a NameError.
    client.send(bytes("server full", "utf8"))
    client.close()
    return
broadcast("player{} ({}) has joined the chat!".format(index+1, name), "server:")
#broadcast(bytes(msg, "utf8"))
clients[client] = name
if players[0] is not None and players[1] is not None:
broadcast("may the game begin!", "server:")
while True:
msg = client.recv(BUFSIZ) # msg is in byte format
#create string:
message = "".join([chr(i) for i in msg])
#if msg != bytes("quit", "utf8"):
# broadcast(msg, "player{} ({}): ".format(index+1,name))#, "utf8")
#else:
if message == "quit":
client.send(bytes("quit", "utf8"))
client.close()
del clients[client]
broadcast("player{}({}) has left the chat".format(index+1, name), "server:") # , "utf8"))
break
if message.lower()=="a2" and Game.turn % 2 == index:
broadcast("mfires at A2", "player{}({})".format(index+1, name))
Game.turn += 1
broadcast("turn {}. It is your turn, player{}".format(Game.turn, index+1))
else:
broadcast(message, "player{} ({}):".format(index+1,name))
def broadcast(msg, prefix=""): # prefix tells who is sending the message.
"""Broadcasts a message to all the clients. converts msg to bytes if necessary"""
msg2 = msg if isinstance(msg, bytes) else bytes(msg, 'utf8')
for sock in clients:
#sock.send(bytes(prefix, "utf8") + msg)
#print("message:", msg, type(msg))
#print("prefix:", prefix)
sock.send(bytes(prefix, "utf8") + msg2)
class Game:
turn = 1
players = [None, None]
clients = {}
addresses = {}
HOST = ''
PORT = 33000
BUFSIZ = 1024
ADDR = (HOST, PORT)
SERVER = socket(AF_INET, SOCK_STREAM)
SERVER.bind(ADDR)
if __name__ == "__main__":
SERVER.listen(5)
print("Waiting for connection...")
ACCEPT_THREAD = Thread(target=accept_incoming_connections)
ACCEPT_THREAD.start()
ACCEPT_THREAD.join()
SERVER.close()
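# A minimal client-side sketch for manual testing (illustrative only):
#
#   from socket import socket, AF_INET, SOCK_STREAM
#   s = socket(AF_INET, SOCK_STREAM)
#   s.connect(('127.0.0.1', 33000))
#   s.send(bytes('alice', 'utf8'))       # the first message is read as the name
#   print(s.recv(1024).decode('utf8'))   # -> 'welcome player1 '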
|
gpl-3.0
|
mountain213/neo4jworkshop
|
REST_clients/Python_client/requests/packages/chardet/euckrprober.py
|
2931
|
1675
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
from .mbcssm import EUCKRSMModel
class EUCKRProber(MultiByteCharSetProber):
def __init__(self):
MultiByteCharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(EUCKRSMModel)
self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
self.reset()
def get_charset_name(self):
return "EUC-KR"
|
mit
|
greenoaktree/pfp
|
tests/test_fields.py
|
3
|
1741
|
#!/usr/bin/env python
# encoding: utf-8
import os
try:
from StringIO import StringIO
# StringIO does not exist in python3
except ImportError as e:
from io import StringIO
import struct
import sys
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import pfp
import pfp.errors
from pfp.fields import *
import pfp.utils
import utils
class TestNumericFields(unittest.TestCase, utils.UtilsMixin):
def setUp(self):
pass
def tearDown(self):
pass
def _do_parse(self, field, data):
field._pfp__parse(StringIO(data.decode("ISO-8859-1")))
def _do_endian_tests(self, field, format):
field.endian = pfp.fields.BIG_ENDIAN
self._do_parse(field, struct.pack(">" + format, 1))
self.assertEqual(field, 1)
field.endian = pfp.fields.LITTLE_ENDIAN
self._do_parse(field, struct.pack("<" + format, 1))
self.assertEqual(field, 1)
def test_char(self):
field = Char()
self._do_endian_tests(field, "b")
def test_uchar(self):
field = UChar()
self._do_endian_tests(field, "b")
def test_short(self):
field = Short()
self._do_endian_tests(field, "h")
def test_ushort(self):
field = UShort()
self._do_endian_tests(field, "H")
def test_int(self):
field = Int()
self._do_endian_tests(field, "i")
def test_uint(self):
field = UInt()
self._do_endian_tests(field, "I")
def test_int64(self):
field = Int64()
self._do_endian_tests(field, "q")
def test_uint64(self):
field = UInt64()
self._do_endian_tests(field, "Q")
def test_const_int64(self):
dom = self._test_parse_build(
"",
"""
const uint64 PNGMAGIC = 0x89504E470D0A1A0AL;
Printf("%d", PNGMAGIC);
""",
stdout="9894494448401390090"
)
if __name__ == "__main__":
unittest.main()
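# Equivalent by-hand form of the endian round trip exercised by
# _do_endian_tests (types and values are from this file):
#
#   field = Int()
#   field.endian = pfp.fields.BIG_ENDIAN
#   field._pfp__parse(StringIO(struct.pack(">i", 1).decode("ISO-8859-1")))
#   assert field == 1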
|
mit
|
tiagocoutinho/bliss
|
bliss/common/measurementgroup.py
|
1
|
6755
|
# -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2016 Beamline Control Unit, ESRF
# Distributed under the GNU LGPLv3. See LICENSE for more info.
import itertools
from bliss import setup_globals
from bliss.config import settings
from .session import get_current as _current_session
class _active_mg_proxy(object):
def __getattribute__(self, attr):
if attr == '__class__':
return MeasurementGroup
return getattr(get_active(), attr)
def __setattr__(self, name, value):
active = get_active()
return setattr(active, name, value)
def __repr__(self):
return repr(get_active())
ACTIVE_MG = _active_mg_proxy()
def get_all():
"""
Return all measurement groups found in the global environment
"""
return [x for x in setup_globals.__dict__.values() if x != ACTIVE_MG and isinstance(x, MeasurementGroup)]
def get_active():
"""
Return the current active MeasurementGroup
Get the last known active measurement group from redis,
or get the first found in global environment (and set it as active).
If nothing works, returns a measurement group called None,
which does not specify any counter.
"""
all_mg = get_all()
name = get_active_name()
try:
if name is None:
mg = all_mg[0]
set_active_name(mg.name)
return mg
else:
for mg in all_mg:
if name == mg.name:
return mg
raise IndexError
except IndexError:
set_active_name(None)
return MeasurementGroup(None, { "counters": [] })
def get_active_name():
session = _current_session()
session_name = session.name if session is not None else 'unnamed'
active_mg_name = settings.SimpleSetting('%s:active_measurementgroup' % session_name)
return active_mg_name.get()
def set_active_name(name):
session = _current_session()
session_name = session.name if session is not None else 'unnamed'
active_mg_name = settings.SimpleSetting('%s:active_measurementgroup' %
session_name)
if name is None:
active_mg_name.clear()
else:
active_mg_name.set(name)
class MeasurementGroup(object):
def __init__(self,name,config_tree):
"""MeasurementGroup is a helper to activate detectors
for the counting procedure.
name -- the measurement name
config_tree -- measurement configuration.
in this dictionary we need to have:
counters -- a name list of available counters
default -- if True set as default measurement
"""
counters_list = config_tree.get('counters')
if counters_list is None:
raise ValueError("MeasurementGroup: should have a counters list")
self.name = name
self._available_counters = list(counters_list)
self._current_config = settings.SimpleSetting('%s' % name,
default_value='default')
# disabled counters
self._counters_settings = settings.HashSetting('%s:%s' %
(name, self._current_config.get()))
@property
def state_names(self):
""" list of states for this measurement
"""
return list((x.split(':')[-1] for x in settings.scan(match='%s:*' % self.name)))
@property
def available(self):
"""available counters from the static config
"""
return self._available_counters
@property
def disable(self):
""" disabled counters name
"""
return [name for name in self.available if name in self._counters_settings]
@disable.setter
def disable(self,counters):
counter2disable = self.__counters2set(counters)
possible2disable = set(self._available_counters).intersection(counter2disable)
unpos2disable = counter2disable.difference(possible2disable)
if unpos2disable:
raise ValueError("MeasurementGroup: could not disable counters (%s)" %
(','.join(unpos2disable)))
self._counters_settings.update(dict((name,True) for name in counter2disable))
@property
def enable(self):
""" enabled counters name
"""
return [name for name in self.available if name not in self._counters_settings]
@enable.setter
def enable(self,counters):
counters = self.__counters2set(counters)
possible2enable = set(self._available_counters).intersection(counters)
unpos2enable = counters.difference(possible2enable)
if unpos2enable:
raise ValueError("MeasurementGroup: could not disable counters (%s)" %
(','.join(unpos2enable)))
self._counters_settings.remove(*counters)
@property
def active_state_name(self):
""" current configuration name for the measurement
"""
return self._current_config.get()
def switch_state(self,name):
self._current_config.set(name)
self._counters_settings = settings.HashSetting('%s:%s' %
(self.name,name))
def remove_states(self,*state_names):
"""
will remove one or several state(s) for this measurement
state_name -- the state name(s) you want to remove
"""
cnx = self._current_config._cnx()
names = ['%s:%s' % (self.name,name) for name in state_names]
cnx.delete(*names)
def copy_from_state(self,name):
"""
Copy the configuration of the named state into the current one.
"""
tmp_hash = settings.HashSetting('%s:%s' % (self.name,name))
self._counters_settings.clear()
for k,v in tmp_hash.iteritems():
self._counters_settings[k] = v
def __counters2set(self,counters):
if not isinstance(counters,(tuple,list,set)):
counters = list((counters,))
return set((x.name if hasattr(x,'name') else x for x in counters))
def __repr__(self):
s = 'MeasurementGroup: %s (%s)\n\n' % (self.name,self._current_config.get())
enabled = list(self.enable) + ['Enabled']
max_len = max((len(x) for x in enabled))
str_format = ' %-' + '%ds' % max_len + ' %s\n'
s += str_format % ('Enabled','Disabled')
s += str_format % ('-' * max_len,'-' * max_len)
for enable,disable in itertools.izip_longest(self.enable,
self.disable,fillvalue=''):
s += str_format % (enable,disable)
return s
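# A minimal usage sketch (counter names are illustrative assumptions):
#
#   mg = MeasurementGroup('mg1', {'counters': ['diode', 'ct1', 'ct2']})
#   mg.disable = ['ct2']          # persisted in the redis-backed settings
#   mg.enable                     # -> ['diode', 'ct1']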
|
lgpl-3.0
|
earthoutreach/pykml
|
src/utilities/test_gen_pykml.py
|
7
|
34140
|
from pykml.kml_gx.factory import KML_ElementMaker as KML
from pykml.kml_gx.factory import ATOM_ElementMaker as ATOM
from pykml.kml_gx.factory import GX_ElementMaker as GX
doc = KML.kml(
KML.Document(
KML.name("KML Samples"),
KML.open("1"),
KML.description("Unleash your creativity with the help of these examples!"),
KML.Style(
KML.IconStyle(
KML.Icon(
KML.href("http://maps.google.com/mapfiles/kml/pal4/icon28.png"),
),
),
id="downArrowIcon", ),
KML.Style(
KML.IconStyle(
KML.Icon(
KML.href("http://maps.google.com/mapfiles/kml/pal3/icon19.png"),
),
),
KML.LineStyle(
KML.width("2"),
),
id="globeIcon", ),
KML.Style(
KML.LineStyle(
KML.color("7fff00ff"),
KML.width("4"),
),
KML.PolyStyle(
KML.color("7f00ff00"),
),
id="transPurpleLineGreenPoly", ),
KML.Style(
KML.LineStyle(
KML.color("7f00ffff"),
KML.width("4"),
),
KML.PolyStyle(
KML.color("7f00ff00"),
),
id="yellowLineGreenPoly", ),
KML.Style(
KML.LineStyle(
KML.color("87000000"),
KML.width("10"),
),
id="thickBlackLine", ),
KML.Style(
KML.LineStyle(
KML.color("ff0000ff"),
),
KML.PolyStyle(
KML.color("ffff0000"),
),
id="redLineBluePoly", ),
KML.Style(
KML.LineStyle(
KML.color("ffff0000"),
),
KML.PolyStyle(
KML.color("ff0000ff"),
),
id="blueLineRedPoly", ),
KML.Style(
KML.LineStyle(
KML.width("1.5"),
),
KML.PolyStyle(
KML.color("7d0000ff"),
),
id="transRedPoly", ),
KML.Style(
KML.LineStyle(
KML.width("1.5"),
),
KML.PolyStyle(
KML.color("7dff0000"),
),
id="transBluePoly", ),
KML.Style(
KML.LineStyle(
KML.width("1.5"),
),
KML.PolyStyle(
KML.color("7d00ff00"),
),
id="transGreenPoly", ),
KML.Style(
KML.LineStyle(
KML.width("1.5"),
),
KML.PolyStyle(
KML.color("7d00ffff"),
),
id="transYellowPoly", ),
KML.Style(
KML.BalloonStyle(
KML.text("
<b>$[name]</b>
<br /><br />
$[description]
"),
),
id="noDrivingDirections", ),
KML.Folder(
KML.name("Placemarks"),
KML.description("These are just some of the different kinds of placemarks with
which you can mark your favorite places"),
KML.LookAt(
KML.longitude("-122.0839597145766"),
KML.latitude("37.42222904525232"),
KML.altitude("0"),
KML.heading("-148.4122922628044"),
KML.tilt("40.5575073395506"),
KML.range("500.6566641072245"),
),
KML.Placemark(
KML.name("Simple placemark"),
KML.description("Attached to the ground. Intelligently places itself at the
height of the underlying terrain."),
KML.Point(
KML.coordinates("-122.0822035425683,37.42228990140251,0"),
),
),
KML.Placemark(
KML.name("Floating placemark"),
KML.visibility("0"),
KML.description("Floats a defined distance above the ground."),
KML.LookAt(
KML.longitude("-122.0839597145766"),
KML.latitude("37.42222904525232"),
KML.altitude("0"),
KML.heading("-148.4122922628044"),
KML.tilt("40.5575073395506"),
KML.range("500.6566641072245"),
),
KML.styleUrl("#downArrowIcon"),
KML.Point(
KML.altitudeMode("relativeToGround"),
KML.coordinates("-122.084075,37.4220033612141,50"),
),
),
KML.Placemark(
KML.name("Extruded placemark"),
KML.visibility("0"),
KML.description("Tethered to the ground by a customizable
"tail""),
KML.LookAt(
KML.longitude("-122.0845787421525"),
KML.latitude("37.42215078737763"),
KML.altitude("0"),
KML.heading("-148.4126684946234"),
KML.tilt("40.55750733918048"),
KML.range("365.2646606980322"),
),
KML.styleUrl("#globeIcon"),
KML.Point(
KML.extrude("1"),
KML.altitudeMode("relativeToGround"),
KML.coordinates("-122.0857667006183,37.42156927867553,50"),
),
),
),
KML.Folder(
KML.name("Styles and Markup"),
KML.visibility("0"),
KML.description("With KML it is easy to create rich, descriptive markup to
annotate and enrich your placemarks"),
KML.LookAt(
KML.longitude("-122.0845787422371"),
KML.latitude("37.42215078726837"),
KML.altitude("0"),
KML.heading("-148.4126777488172"),
KML.tilt("40.55750733930874"),
KML.range("365.2646826292919"),
),
KML.styleUrl("#noDrivingDirections"),
KML.Document(
KML.name("Highlighted Icon"),
KML.visibility("0"),
KML.description("Place your mouse over the icon to see it display the new
icon"),
KML.LookAt(
KML.longitude("-122.0856552124024"),
KML.latitude("37.4224281311035"),
KML.altitude("0"),
KML.heading("0"),
KML.tilt("0"),
KML.range("265.8520424250024"),
),
KML.Style(
KML.IconStyle(
KML.Icon(
KML.href("http://maps.google.com/mapfiles/kml/paddle/red-stars.png"),
),
),
id="highlightPlacemark", ),
KML.Style(
KML.IconStyle(
KML.Icon(
KML.href("http://maps.google.com/mapfiles/kml/paddle/wht-blank.png"),
),
),
id="normalPlacemark", ),
KML.StyleMap(
KML.Pair(
KML.key("normal"),
KML.styleUrl("#normalPlacemark"),
),
KML.Pair(
KML.key("highlight"),
KML.styleUrl("#highlightPlacemark"),
),
id="exampleStyleMap", ),
KML.Placemark(
KML.name("Roll over this icon"),
KML.visibility("0"),
KML.styleUrl("#exampleStyleMap"),
KML.Point(
KML.coordinates("-122.0856545755255,37.42243077405461,0"),
),
),
),
KML.Placemark(
KML.name("Descriptive HTML"),
KML.visibility("0"),
KML.description("Click on the blue link!<br><br>
Placemark descriptions can be enriched by using many standard HTML tags.<br>
For example:
<hr>
Styles:<br>
<i>Italics</i>,
<b>Bold</b>,
<u>Underlined</u>,
<s>Strike Out</s>,
subscript<sub>subscript</sub>,
superscript<sup>superscript</sup>,
<big>Big</big>,
<small>Small</small>,
<tt>Typewriter</tt>,
<em>Emphasized</em>,
<strong>Strong</strong>,
<code>Code</code>
<hr>
Fonts:<br>
<font color="red">red by name</font>,
<font color="#408010">leaf green by hexadecimal RGB</font>
<br>
<font size=1>size 1</font>,
<font size=2>size 2</font>,
<font size=3>size 3</font>,
<font size=4>size 4</font>,
<font size=5>size 5</font>,
<font size=6>size 6</font>,
<font size=7>size 7</font>
<br>
<font face=times>Times</font>,
<font face=verdana>Verdana</font>,
<font face=arial>Arial</font><br>
<hr>
Links:
<br>
<a href="http://earth.google.com/">Google Earth!</a>
<br>
or: Check out our website at www.google.com
<hr>
Alignment:<br>
<p align=left>left</p>
<p align=center>center</p>
<p align=right>right</p>
<hr>
Ordered Lists:<br>
<ol><li>First</li><li>Second</li><li>Third</li></ol>
<ol type="a"><li>First</li><li>Second</li><li>Third</li></ol>
<ol type="A"><li>First</li><li>Second</li><li>Third</li></ol>
<hr>
Unordered Lists:<br>
<ul><li>A</li><li>B</li><li>C</li></ul>
<ul type="circle"><li>A</li><li>B</li><li>C</li></ul>
<ul type="square"><li>A</li><li>B</li><li>C</li></ul>
<hr>
Definitions:<br>
<dl>
<dt>Google:</dt><dd>The best thing since sliced bread</dd>
</dl>
<hr>
Centered:<br><center>
Time present and time past<br>
Are both perhaps present in time future,<br>
And time future contained in time past.<br>
If all time is eternally present<br>
All time is unredeemable.<br>
</center>
<hr>
Block Quote:
<br>
<blockquote>
We shall not cease from exploration<br>
And the end of all our exploring<br>
Will be to arrive where we started<br>
And know the place for the first time.<br>
<i>-- T.S. Eliot</i>
</blockquote>
<br>
<hr>
Headings:<br>
<h1>Header 1</h1>
<h2>Header 2</h2>
<h3>Header 3</h3>
<h4>Header 4</h4>
<h5>Header 5</h5>
<hr>
Images:<br>
<i>Remote image</i><br>
<img src="http://code.google.com/apis/kml/documentation/googleSample.png"><br>
<i>Scaled image</i><br>
<img src="http://code.google.com/apis/kml/documentation/googleSample.png" width=100><br>
<hr>
Simple Tables:<br>
<table border="1" padding="1">
<tr><td>1</td><td>2</td><td>3</td><td>4</td><td>5</td></tr>
<tr><td>a</td><td>b</td><td>c</td><td>d</td><td>e</td></tr>
</table>
<br>
[Did you notice that double-clicking on the placemark doesn't cause the viewer to take you anywhere? This is because it is possible to directly author a "placeless placemark". If you look at the code for this example, you will see that it has neither a point coordinate nor a LookAt element.]"""),
),
),
KML.Folder(
KML.name("Ground Overlays"),
KML.visibility("0"),
KML.description("Examples of ground overlays"),
KML.GroundOverlay(
KML.name("Large-scale overlay on terrain"),
KML.visibility("0"),
KML.description("Overlay shows Mount Etna erupting on July 13th, 2001."),
KML.LookAt(
KML.longitude("15.02468937557116"),
KML.latitude("37.67395167941667"),
KML.altitude("0"),
KML.heading("-16.5581842842829"),
KML.tilt("58.31228652890705"),
KML.range("30350.36838438907"),
),
KML.Icon(
KML.href("http://code.google.com/apis/kml/documentation/etna.jpg"),
),
KML.LatLonBox(
KML.north("37.91904192681665"),
KML.south("37.46543388598137"),
KML.east("15.35832653742206"),
KML.west("14.60128369746704"),
KML.rotation("-0.1556640799496235"),
),
),
),
KML.Folder(
KML.name("Screen Overlays"),
KML.visibility("0"),
KML.description("Screen overlays have to be authored directly in KML. These
examples illustrate absolute and dynamic positioning in screen space."),
KML.ScreenOverlay(
KML.name("Simple crosshairs"),
KML.visibility("0"),
KML.description("This screen overlay uses fractional positioning to put the
image in the exact center of the screen"),
KML.Icon(
KML.href("http://code.google.com/apis/kml/documentation/crosshairs.png"),
),
KML.overlayXY(x="0.5",y="0.5",xunits="fraction",yunits="fraction",),
KML.screenXY(x="0.5",y="0.5",xunits="fraction",yunits="fraction",),
KML.rotationXY(x="0.5",y="0.5",xunits="fraction",yunits="fraction",),
KML.size(x="0",y="0",xunits="pixels",yunits="pixels",),
),
KML.ScreenOverlay(
KML.name("Absolute Positioning: Top left"),
KML.visibility("0"),
KML.Icon(
KML.href("http://code.google.com/apis/kml/documentation/top_left.jpg"),
),
KML.overlayXY(x="0",y="1",xunits="fraction",yunits="fraction",),
KML.screenXY(x="0",y="1",xunits="fraction",yunits="fraction",),
KML.rotationXY(x="0",y="0",xunits="fraction",yunits="fraction",),
KML.size(x="0",y="0",xunits="fraction",yunits="fraction",),
),
KML.ScreenOverlay(
KML.name("Absolute Positioning: Top right"),
KML.visibility("0"),
KML.Icon(
KML.href("http://code.google.com/apis/kml/documentation/top_right.jpg"),
),
KML.overlayXY(x="1",y="1",xunits="fraction",yunits="fraction",),
KML.screenXY(x="1",y="1",xunits="fraction",yunits="fraction",),
KML.rotationXY(x="0",y="0",xunits="fraction",yunits="fraction",),
KML.size(x="0",y="0",xunits="fraction",yunits="fraction",),
),
KML.ScreenOverlay(
KML.name("Absolute Positioning: Bottom left"),
KML.visibility("0"),
KML.Icon(
KML.href("http://code.google.com/apis/kml/documentation/bottom_left.jpg"),
),
KML.overlayXY(x="0",y="-1",xunits="fraction",yunits="fraction",),
KML.screenXY(x="0",y="0",xunits="fraction",yunits="fraction",),
KML.rotationXY(x="0",y="0",xunits="fraction",yunits="fraction",),
KML.size(x="0",y="0",xunits="fraction",yunits="fraction",),
),
KML.ScreenOverlay(
KML.name("Absolute Positioning: Bottom right"),
KML.visibility("0"),
KML.Icon(
KML.href("http://code.google.com/apis/kml/documentation/bottom_right.jpg"),
),
KML.overlayXY(x="1",y="-1",xunits="fraction",yunits="fraction",),
KML.screenXY(x="1",y="0",xunits="fraction",yunits="fraction",),
KML.rotationXY(x="0",y="0",xunits="fraction",yunits="fraction",),
KML.size(x="0",y="0",xunits="fraction",yunits="fraction",),
),
KML.ScreenOverlay(
KML.name("Dynamic Positioning: Top of screen"),
KML.visibility("0"),
KML.Icon(
KML.href("http://code.google.com/apis/kml/documentation/dynamic_screenoverlay.jpg"),
),
KML.overlayXY(x="0",y="1",xunits="fraction",yunits="fraction",),
KML.screenXY(x="0",y="1",xunits="fraction",yunits="fraction",),
KML.rotationXY(x="0",y="0",xunits="fraction",yunits="fraction",),
KML.size(x="1",y="0.2",xunits="fraction",yunits="fraction",),
),
KML.ScreenOverlay(
KML.name("Dynamic Positioning: Right of screen"),
KML.visibility("0"),
KML.Icon(
KML.href("http://code.google.com/apis/kml/documentation/dynamic_right.jpg"),
),
KML.overlayXY(x="1",y="1",xunits="fraction",yunits="fraction",),
KML.screenXY(x="1",y="1",xunits="fraction",yunits="fraction",),
KML.rotationXY(x="0",y="0",xunits="fraction",yunits="fraction",),
KML.size(x="0",y="1",xunits="fraction",yunits="fraction",),
),
),
KML.Folder(
KML.name("Paths"),
KML.visibility("0"),
KML.description("Examples of paths. Note that the tessellate tag is by default
set to 0. If you want to create tessellated lines, they must be authored
(or edited) directly in KML."),
KML.Placemark(
KML.name("Tessellated"),
KML.visibility("0"),
KML.description("If the <tessellate> tag has a value of 1, the line will contour to the underlying terrain"),
KML.LookAt(
KML.longitude("-112.0822680013139"),
KML.latitude("36.09825589333556"),
KML.altitude("0"),
KML.heading("103.8120432044965"),
KML.tilt("62.04855796276328"),
KML.range("2889.145007690472"),
),
KML.LineString(
KML.tessellate("1"),
KML.coordinates(" -112.0814237830345,36.10677870477137,0
-112.0870267752693,36.0905099328766,0 "),
),
),
KML.Placemark(
KML.name("Untessellated"),
KML.visibility("0"),
KML.description("If the <tessellate> tag has a value of 0, the line follow a simple straight-line path from point to point"),
KML.LookAt(
KML.longitude("-112.0822680013139"),
KML.latitude("36.09825589333556"),
KML.altitude("0"),
KML.heading("103.8120432044965"),
KML.tilt("62.04855796276328"),
KML.range("2889.145007690472"),
),
KML.LineString(
KML.tessellate("0"),
KML.coordinates(" -112.080622229595,36.10673460007995,0
-112.085242575315,36.09049598612422,0 "),
),
),
KML.Placemark(
KML.name("Absolute"),
KML.visibility("0"),
KML.description("Transparent purple line"),
KML.LookAt(
KML.longitude("-112.2719329043177"),
KML.latitude("36.08890633450894"),
KML.altitude("0"),
KML.heading("-106.8161545998597"),
KML.tilt("44.60763714063257"),
KML.range("2569.386744398339"),
),
KML.styleUrl("#transPurpleLineGreenPoly"),
KML.LineString(
KML.tessellate("1"),
KML.altitudeMode("absolute"),
KML.coordinates(" -112.265654928602,36.09447672602546,2357
-112.2660384528238,36.09342608838671,2357
-112.2668139013453,36.09251058776881,2357
-112.2677826834445,36.09189827357996,2357
-112.2688557510952,36.0913137941187,2357
-112.2694810717219,36.0903677207521,2357
-112.2695268555611,36.08932171487285,2357
-112.2690144567276,36.08850916060472,2357
-112.2681528815339,36.08753813597956,2357
-112.2670588176031,36.08682685262568,2357
-112.2657374587321,36.08646312301303,2357 "),
),
),
KML.Placemark(
KML.name("Absolute Extruded"),
KML.visibility("0"),
KML.description("Transparent green wall with yellow outlines"),
KML.LookAt(
KML.longitude("-112.2643334742529"),
KML.latitude("36.08563154742419"),
KML.altitude("0"),
KML.heading("-125.7518698668815"),
KML.tilt("44.61038665812578"),
KML.range("4451.842204068102"),
),
KML.styleUrl("#yellowLineGreenPoly"),
KML.LineString(
KML.extrude("1"),
KML.tessellate("1"),
KML.altitudeMode("absolute"),
KML.coordinates(" -112.2550785337791,36.07954952145647,2357
-112.2549277039738,36.08117083492122,2357
-112.2552505069063,36.08260761307279,2357
-112.2564540158376,36.08395660588506,2357
-112.2580238976449,36.08511401044813,2357
-112.2595218489022,36.08584355239394,2357
-112.2608216347552,36.08612634548589,2357
-112.262073428656,36.08626019085147,2357
-112.2633204928495,36.08621519860091,2357
-112.2644963846444,36.08627897945274,2357
-112.2656969554589,36.08649599090644,2357 "),
),
),
KML.Placemark(
KML.name("Relative"),
KML.visibility("0"),
KML.description("Black line (10 pixels wide), height tracks terrain"),
KML.LookAt(
KML.longitude("-112.2580438551384"),
KML.latitude("36.1072674824385"),
KML.altitude("0"),
KML.heading("4.947421249553717"),
KML.tilt("44.61324882043339"),
KML.range("2927.61105910266"),
),
KML.styleUrl("#thickBlackLine"),
KML.LineString(
KML.tessellate("1"),
KML.altitudeMode("relativeToGround"),
KML.coordinates(" -112.2532845153347,36.09886943729116,645
-112.2540466121145,36.09919570465255,645
-112.254734666947,36.09984998366178,645
-112.255493345654,36.10051310621746,645
-112.2563157098468,36.10108441943419,645
-112.2568033076439,36.10159722088088,645
-112.257494011321,36.10204323542867,645
-112.2584106072308,36.10229131995655,645
-112.2596588987972,36.10240001286358,645
-112.2610581199487,36.10213176873407,645
-112.2626285262793,36.10157011437219,645 "),
),
),
KML.Placemark(
KML.name("Relative Extruded"),
KML.visibility("0"),
KML.description("Opaque blue walls with red outline, height tracks terrain"),
KML.LookAt(
KML.longitude("-112.2683594333433"),
KML.latitude("36.09884362144909"),
KML.altitude("0"),
KML.heading("-72.24271551768405"),
KML.tilt("44.60855445139561"),
KML.range("2184.193522571467"),
),
KML.styleUrl("#redLineBluePoly"),
KML.LineString(
KML.extrude("1"),
KML.tessellate("1"),
KML.altitudeMode("relativeToGround"),
KML.coordinates(" -112.2656634181359,36.09445214722695,630
-112.2652238941097,36.09520916122063,630
-112.2645079986395,36.09580763864907,630
-112.2638827428817,36.09628572284063,630
-112.2635746835406,36.09679275951239,630
-112.2635711822407,36.09740038871899,630
-112.2640296531825,36.09804913435539,630
-112.264327720538,36.09880337400301,630
-112.2642436562271,36.09963644790288,630
-112.2639148687042,36.10055381117246,630
-112.2626894973474,36.10149062823369,630 "),
),
),
),
KML.Folder(
KML.name("Polygons"),
KML.visibility("0"),
KML.description("Examples of polygon shapes"),
KML.Folder(
KML.name("Google Campus"),
KML.visibility("0"),
KML.description("A collection showing how easy it is to create 3-dimensional
buildings"),
KML.LookAt(
KML.longitude("-122.084120030116"),
KML.latitude("37.42174011925477"),
KML.altitude("0"),
KML.heading("-34.82469740081282"),
KML.tilt("53.454348562403"),
KML.range("276.7870053764046"),
),
KML.Placemark(
KML.name("Building 40"),
KML.visibility("0"),
KML.styleUrl("#transRedPoly"),
KML.Polygon(
KML.extrude("1"),
KML.altitudeMode("relativeToGround"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -122.0848938459612,37.42257124044786,17
-122.0849580979198,37.42211922626856,17
-122.0847469573047,37.42207183952619,17
-122.0845725380962,37.42209006729676,17
-122.0845954886723,37.42215932700895,17
-122.0838521118269,37.42227278564371,17
-122.083792243335,37.42203539112084,17
-122.0835076656616,37.42209006957106,17
-122.0834709464152,37.42200987395161,17
-122.0831221085748,37.4221046494946,17
-122.0829247374572,37.42226503990386,17
-122.0829339169385,37.42231242843094,17
-122.0833837359737,37.42225046087618,17
-122.0833607854248,37.42234159228745,17
-122.0834204551642,37.42237075460644,17
-122.083659133885,37.42251292011001,17
-122.0839758438952,37.42265873093781,17
-122.0842374743331,37.42265143972521,17
-122.0845036949503,37.4226514386435,17
-122.0848020460801,37.42261133916315,17
-122.0847882750515,37.42256395055121,17
-122.0848938459612,37.42257124044786,17 "),
),
),
),
),
KML.Placemark(
KML.name("Building 41"),
KML.visibility("0"),
KML.styleUrl("#transBluePoly"),
KML.Polygon(
KML.extrude("1"),
KML.altitudeMode("relativeToGround"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -122.0857412771483,37.42227033155257,17
-122.0858169768481,37.42231408832346,17
-122.085852582875,37.42230337469744,17
-122.0858799945639,37.42225686138789,17
-122.0858860101409,37.4222311076138,17
-122.0858069157288,37.42220250173855,17
-122.0858379542653,37.42214027058678,17
-122.0856732640519,37.42208690214408,17
-122.0856022926407,37.42214885429042,17
-122.0855902778436,37.422128290487,17
-122.0855841672237,37.42208171967246,17
-122.0854852065741,37.42210455874995,17
-122.0855067264352,37.42214267949824,17
-122.0854430712915,37.42212783846172,17
-122.0850990714904,37.42251282407603,17
-122.0856769818632,37.42281815323651,17
-122.0860162273783,37.42244918858722,17
-122.0857260327004,37.42229239604253,17
-122.0857412771483,37.42227033155257,17 "),
),
),
),
),
KML.Placemark(
KML.name("Building 42"),
KML.visibility("0"),
KML.styleUrl("#transGreenPoly"),
KML.Polygon(
KML.extrude("1"),
KML.altitudeMode("relativeToGround"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -122.0857862287242,37.42136208886969,25
-122.0857312990603,37.42136935989481,25
-122.0857312992918,37.42140934910903,25
-122.0856077073679,37.42138390166565,25
-122.0855802426516,37.42137299550869,25
-122.0852186221971,37.42137299504316,25
-122.0852277765639,37.42161656508265,25
-122.0852598189347,37.42160565894403,25
-122.0852598185499,37.42168200156,25
-122.0852369311478,37.42170017860346,25
-122.0852643957828,37.42176197982575,25
-122.0853239032746,37.42176198013907,25
-122.0853559454324,37.421852864452,25
-122.0854108752463,37.42188921823734,25
-122.0854795379357,37.42189285337048,25
-122.0855436229819,37.42188921797546,25
-122.0856260178042,37.42186013499926,25
-122.085937287963,37.42186013453605,25
-122.0859428718666,37.42160898590042,25
-122.0859655469861,37.42157992759144,25
-122.0858640462341,37.42147115002957,25
-122.0858548911215,37.42140571326184,25
-122.0858091162768,37.4214057134039,25
-122.0857862287242,37.42136208886969,25 "),
),
),
),
),
KML.Placemark(
KML.name("Building 43"),
KML.visibility("0"),
KML.styleUrl("#transYellowPoly"),
KML.Polygon(
KML.extrude("1"),
KML.altitudeMode("relativeToGround"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -122.0844371128284,37.42177253003091,19
-122.0845118855746,37.42191111542896,19
-122.0850470999805,37.42178755121535,19
-122.0850719913391,37.42143663023161,19
-122.084916406232,37.42137237822116,19
-122.0842193868167,37.42137237801626,19
-122.08421938659,37.42147617161496,19
-122.0838086419991,37.4214613409357,19
-122.0837899728564,37.42131306410796,19
-122.0832796534698,37.42129328840593,19
-122.0832609819207,37.42139213944298,19
-122.0829373621737,37.42137236399876,19
-122.0829062425667,37.42151569778871,19
-122.0828502269665,37.42176282576465,19
-122.0829435788635,37.42176776969635,19
-122.083217411188,37.42179248552686,19
-122.0835970430103,37.4217480074456,19
-122.0839455556771,37.42169364237603,19
-122.0840077894637,37.42176283815853,19
-122.084113587521,37.42174801104392,19
-122.0840762473784,37.42171341292375,19
-122.0841447047739,37.42167881534569,19
-122.084144704223,37.42181720660197,19
-122.0842503333074,37.4218170700446,19
-122.0844371128284,37.42177253003091,19 "),
),
),
),
),
),
KML.Folder(
KML.name("Extruded Polygon"),
KML.description("A simple way to model a building"),
KML.Placemark(
KML.name("The Pentagon"),
KML.LookAt(
KML.longitude("-77.05580139178142"),
KML.latitude("38.870832443487"),
KML.heading("59.88865561738225"),
KML.tilt("48.09646074797388"),
KML.range("742.0552506670548"),
),
KML.Polygon(
KML.extrude("1"),
KML.altitudeMode("relativeToGround"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -77.05788457660967,38.87253259892824,100
-77.05465973756702,38.87291016281703,100
-77.05315536854791,38.87053267794386,100
-77.05552622493516,38.868757801256,100
-77.05844056290393,38.86996206506943,100
-77.05788457660967,38.87253259892824,100 "),
),
),
KML.innerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -77.05668055019126,38.87154239798456,100
-77.05542625960818,38.87167890344077,100
-77.05485125901024,38.87076535397792,100
-77.05577677433152,38.87008686581446,100
-77.05691162017543,38.87054446963351,100
-77.05668055019126,38.87154239798456,100 "),
),
),
),
),
),
KML.Folder(
KML.name("Absolute and Relative"),
KML.visibility("0"),
KML.description("Four structures whose roofs meet exactly. Turn on/off
terrain to see the difference between relative and absolute
positioning."),
KML.LookAt(
KML.longitude("-112.3348969157552"),
KML.latitude("36.14845533214919"),
KML.altitude("0"),
KML.heading("-86.91235037566909"),
KML.tilt("49.30695423894192"),
KML.range("990.6761201087104"),
),
KML.Placemark(
KML.name("Absolute"),
KML.visibility("0"),
KML.styleUrl("#transBluePoly"),
KML.Polygon(
KML.tessellate("1"),
KML.altitudeMode("absolute"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -112.3372510731295,36.14888505105317,1784
-112.3356128688403,36.14781540589019,1784
-112.3368169371048,36.14658677734382,1784
-112.3384408457543,36.14762778914076,1784
-112.3372510731295,36.14888505105317,1784 "),
),
),
),
),
KML.Placemark(
KML.name("Absolute Extruded"),
KML.visibility("0"),
KML.styleUrl("#transRedPoly"),
KML.Polygon(
KML.extrude("1"),
KML.tessellate("1"),
KML.altitudeMode("absolute"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -112.3396586818843,36.14637618647505,1784
-112.3380597654315,36.14531751871353,1784
-112.3368254237788,36.14659596244607,1784
-112.3384555043203,36.14762621763982,1784
-112.3396586818843,36.14637618647505,1784 "),
),
),
),
),
KML.Placemark(
KML.name("Relative"),
KML.visibility("0"),
KML.LookAt(
KML.longitude("-112.3350152490417"),
KML.latitude("36.14943123077423"),
KML.altitude("0"),
KML.heading("-118.9214100848499"),
KML.tilt("37.92486261093203"),
KML.range("345.5169113679813"),
),
KML.styleUrl("#transGreenPoly"),
KML.Polygon(
KML.tessellate("1"),
KML.altitudeMode("relativeToGround"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -112.3349463145932,36.14988705767721,100
-112.3354019540677,36.14941108398372,100
-112.3344428289146,36.14878490381308,100
-112.3331289492913,36.14780840132443,100
-112.3317019516947,36.14680755678357,100
-112.331131440106,36.1474173426228,100
-112.332616324338,36.14845453364654,100
-112.3339876620524,36.14926570522069,100
-112.3349463145932,36.14988705767721,100 "),
),
),
),
),
KML.Placemark(
KML.name("Relative Extruded"),
KML.visibility("0"),
KML.LookAt(
KML.longitude("-112.3351587892382"),
KML.latitude("36.14979247129029"),
KML.altitude("0"),
KML.heading("-55.42811560891606"),
KML.tilt("56.10280503739589"),
KML.range("401.0997279712519"),
),
KML.styleUrl("#transYellowPoly"),
KML.Polygon(
KML.extrude("1"),
KML.tessellate("1"),
KML.altitudeMode("relativeToGround"),
KML.outerBoundaryIs(
KML.LinearRing(
KML.coordinates(" -112.3348783983763,36.1514008468736,100
-112.3372535345629,36.14888517553886,100
-112.3356068927954,36.14781612679284,100
-112.3350034807972,36.14846469024177,100
-112.3358353861232,36.1489624162954,100
-112.3345888301373,36.15026229372507,100
-112.3337937856278,36.14978096026463,100
-112.3331798208424,36.1504472788618,100
-112.3348783983763,36.1514008468736,100 "),
),
),
),
),
),
),
),
)
from lxml import etree
print etree.tostring(doc, pretty_print=True)
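# Note: the print statement above is Python 2 syntax. A hedged Python 3
# equivalent would be:
#     print(etree.tostring(doc, pretty_print=True).decode('utf-8'))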
|
bsd-3-clause
|
dcroc16/skunk_works
|
google_appengine/lib/django-1.4/django/contrib/formtools/tests/wizard/wizardtests/forms.py
|
313
|
2203
|
import os
import tempfile
from django import forms
from django.contrib.auth.models import User
from django.core.files.storage import FileSystemStorage
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory
from django.http import HttpResponse
from django.template import Template, Context
from django.contrib.formtools.wizard.views import WizardView
temp_storage_location = tempfile.mkdtemp(dir=os.environ.get('DJANGO_TEST_TEMP_DIR'))
temp_storage = FileSystemStorage(location=temp_storage_location)
class Page1(forms.Form):
name = forms.CharField(max_length=100)
user = forms.ModelChoiceField(queryset=User.objects.all())
thirsty = forms.NullBooleanField()
class Page2(forms.Form):
address1 = forms.CharField(max_length=100)
address2 = forms.CharField(max_length=100)
file1 = forms.FileField()
class Page3(forms.Form):
random_crap = forms.CharField(max_length=100)
Page4 = formset_factory(Page3, extra=2)
class ContactWizard(WizardView):
file_storage = temp_storage
def done(self, form_list, **kwargs):
c = Context({
'form_list': [x.cleaned_data for x in form_list],
'all_cleaned_data': self.get_all_cleaned_data(),
})
for form in self.form_list.keys():
c[form] = self.get_cleaned_data_for_step(form)
c['this_will_fail'] = self.get_cleaned_data_for_step('this_will_fail')
return HttpResponse(Template('').render(c))
def get_context_data(self, form, **kwargs):
context = super(ContactWizard, self).get_context_data(form, **kwargs)
if self.storage.current_step == 'form2':
context.update({'another_var': True})
return context
class UserForm(forms.ModelForm):
class Meta:
model = User
fields = ('username', 'email')
UserFormSet = modelformset_factory(User, form=UserForm)
class SessionContactWizard(ContactWizard):
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class CookieContactWizard(ContactWizard):
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
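# Illustrative only (not part of the original test module): a wizard like
# this is normally wired into a URLconf with the documented class method
#     SessionContactWizard.as_view([Page1, Page2, Page3, Page4])
# where the order of the form list defines the step order.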
|
mit
|
jcpowermac/ansible
|
lib/ansible/modules/network/nxos/nxos_snmp_contact.py
|
106
|
3931
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_snmp_contact
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages SNMP contact info.
description:
- Manages SNMP contact information.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- C(state=absent) removes the contact configuration if it is configured.
options:
contact:
description:
- Contact information.
required: true
state:
description:
- Manage the state of the resource.
required: true
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# ensure snmp contact is configured
- nxos_snmp_contact:
contact: Test
state: present
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["snmp-server contact New_Test"]
'''
import re
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def execute_show_command(command, module):
command = {
'command': command,
'output': 'text',
}
return run_commands(module, command)
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def get_snmp_contact(module):
contact = {}
contact_regex = r'^\s*snmp-server\scontact\s(?P<contact>.+)$'
body = execute_show_command('show run snmp', module)[0]
match_contact = re.search(contact_regex, body, re.M)
if match_contact:
contact['contact'] = match_contact.group("contact")
return contact
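# Illustrative only: given running-config output containing the line
# 'snmp-server contact Test', get_snmp_contact() returns {'contact': 'Test'}.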
def main():
argument_spec = dict(
contact=dict(required=True, type='str'),
state=dict(choices=['absent', 'present'], default='present'),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
warnings = list()
check_args(module, warnings)
results = {'changed': False, 'commands': [], 'warnings': warnings}
contact = module.params['contact']
state = module.params['state']
existing = get_snmp_contact(module)
commands = []
if state == 'absent':
if existing and existing['contact'] == contact:
commands.append('no snmp-server contact')
elif state == 'present':
if not existing or existing['contact'] != contact:
commands.append('snmp-server contact {0}'.format(contact))
cmds = flatten_list(commands)
if cmds:
results['changed'] = True
if not module.check_mode:
load_config(module, cmds)
if 'configure' in cmds:
cmds.pop(0)
results['commands'] = cmds
module.exit_json(**results)
if __name__ == '__main__':
main()
|
gpl-3.0
|
timheap/wagtail
|
wagtail/wagtailimages/utils/feature_detection.py
|
3
|
2819
|
import os
from django.conf import settings
# only try to import OpenCV if WAGTAILIMAGES_FEATURE_DETECTION_ENABLED is True -
# avoids spurious "libdc1394 error: Failed to initialize libdc1394" errors on sites that
# don't even use OpenCV
if getattr(settings, 'WAGTAILIMAGES_FEATURE_DETECTION_ENABLED', False):
try:
import cv
opencv_available = True
except ImportError:
try:
import cv2.cv as cv
opencv_available = True
except ImportError:
opencv_available = False
else:
opencv_available = False
from wagtail.wagtailimages.utils.focal_point import FocalPoint, combine_focal_points
class FeatureDetector(object):
def __init__(self, image_size, image_mode, image_data):
self.image_size = image_size
self.image_mode = image_mode
self.image_data = image_data
def opencv_grey_image(self):
image = cv.CreateImageHeader(self.image_size, cv.IPL_DEPTH_8U, 3)
cv.SetData(image, self.image_data)
gray_image = cv.CreateImage(self.image_size, 8, 1)
convert_mode = getattr(cv, 'CV_%s2GRAY' % self.image_mode)
cv.CvtColor(image, gray_image, convert_mode)
return gray_image
def detect_features(self):
if opencv_available:
image = self.opencv_grey_image()
rows = self.image_size[0]
cols = self.image_size[1]
eig_image = cv.CreateMat(rows, cols, cv.CV_32FC1)
temp_image = cv.CreateMat(rows, cols, cv.CV_32FC1)
points = cv.GoodFeaturesToTrack(image, eig_image, temp_image, 20, 0.04, 1.0, useHarris=False)
if points:
return [FocalPoint(x, y, 1) for x, y in points]
return []
def detect_faces(self):
if opencv_available:
cascade_filename = os.path.join(os.path.dirname(__file__), 'face_detection', 'haarcascade_frontalface_alt2.xml')
cascade = cv.Load(cascade_filename)
image = self.opencv_grey_image()
cv.EqualizeHist(image, image)
min_size = (40, 40)
haar_scale = 1.1
min_neighbors = 3
haar_flags = 0
faces = cv.HaarDetectObjects(
image, cascade, cv.CreateMemStorage(0),
haar_scale, min_neighbors, haar_flags, min_size
)
if faces:
return [FocalPoint.from_square(face[0][0], face[0][1], face[0][2], face[0][3]) for face in faces]
return []
def get_focal_point(self):
# Face detection
faces = self.detect_faces()
if faces:
return combine_focal_points(faces)
# Feature detection
features = self.detect_features()
if features:
return combine_focal_points(features)
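# Hedged usage sketch (illustrative, assuming the old PIL API this module
# was written against; 'photo.jpg' is a placeholder):
#     import PIL.Image
#     image = PIL.Image.open('photo.jpg').convert('RGB')
#     detector = FeatureDetector(image.size, 'RGB', image.tostring())
#     focal_point = detector.get_focal_point()  # None if nothing detected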
|
bsd-3-clause
|
ARMmbed/mbedtls
|
scripts/mbedtls_dev/build_tree.py
|
1
|
1411
|
"""Mbed TLS build tree information and manipulation.
"""
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
def looks_like_mbedtls_root(path: str) -> bool:
"""Whether the given directory looks like the root of the Mbed TLS source tree."""
return all(os.path.isdir(os.path.join(path, subdir))
for subdir in ['include', 'library', 'programs', 'tests'])
def chdir_to_root() -> None:
"""Detect the root of the Mbed TLS source tree and change to it.
The current directory must be up to two levels deep inside an Mbed TLS
source tree.
"""
for d in [os.path.curdir,
os.path.pardir,
os.path.join(os.path.pardir, os.path.pardir)]:
if looks_like_mbedtls_root(d):
os.chdir(d)
return
raise Exception('Mbed TLS source tree not found')
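# Hedged usage sketch (not part of this module): a maintenance script run
# from inside the source tree can normalize its working directory with
#     import build_tree
#     build_tree.chdir_to_root()  # raises Exception if no root is found
#     assert build_tree.looks_like_mbedtls_root(os.curdir)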
|
apache-2.0
|
Inspq/ansible
|
test/units/utils/test_shlex.py
|
174
|
1291
|
# (c) 2015, Marius Gedminas <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import unittest
from ansible.utils.shlex import shlex_split
class TestSplit(unittest.TestCase):
def test_trivial(self):
self.assertEqual(shlex_split("a b c"), ["a", "b", "c"])
def test_unicode(self):
self.assertEqual(shlex_split(u"a b \u010D"), [u"a", u"b", u"\u010D"])
def test_quoted(self):
self.assertEqual(shlex_split('"a b" c'), ["a b", "c"])
def test_comments(self):
self.assertEqual(shlex_split('"a b" c # d', comments=True), ["a b", "c"])
def test_error(self):
self.assertRaises(ValueError, shlex_split, 'a "b')
|
gpl-3.0
|
CeltonMcGrath/TACTIC
|
src/tactic/ui/examples/fx_anim_examples_wdg.py
|
6
|
3464
|
###########################################################
#
# Copyright (c) 2009, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ["SptFxSliderExampleWdg"]
from pyasm.web import DivWdg, HtmlElement, SpanWdg, Table, WebContainer
from pyasm.widget import SelectWdg, WidgetConfig, IconWdg
from tactic.ui.activator import ButtonForDropdownMenuWdg, AttachContextMenuWdg
from base_example_wdg import BaseExampleWdg
class SptFxSliderExampleWdg(BaseExampleWdg):
def get_example_title(my):
return "SPT Fx Slider Example"
def get_example_description(my):
return "Example of using an SPT Fx Slider (vs. a MooTools Fx Slider). The SPT Fx Slider can adapt to " \
"dynamically changing dimensions of its content, where the MooTools Fx Slider cannot. Currently still " \
"some clunkiness in the SPT Fx Slider -- needs some refinement to be able to be used more extensively."
def get_example_display(my):
div = DivWdg()
# --- Example of new spt.fx animation slider --------------------------------------------------------------
slide_div = DivWdg()
slide_div.set_id( "ui_play_sliding_thing" )
slide_div.set_style( "background: #9f9f9f; color: #0f0f0f; border: 1px solid black;" )
slide_div.add( "For a moment after Mr. and Mrs. Darling left the house the night-lights by the beds of the three children continued to burn clearly. They were awfully nice little night-lights, and one cannot help wishing that they could have kept awake to see Peter; but Wendy's light blinked and gave such a yawn that the other two yawned also, and before they could close their mouths all the three went out. There was another light in the room now, a thousand times brighter than the night-lights, and in the time we have taken to say this, it had been in all the drawers in the nursery, looking for Peter's shadow, rummaged the wardrobe and turned every pocket inside out. It was not really a light; it made this light by flashing about so quickly, but when it came to rest for a second you saw it was a fairy, no longer than your hand, but still growing. It was a girl called Tinker Bell exquisitely gowned in a skeleton leaf, cut low and square, through which her figure could be seen to the best advantage. She was slightly inclined to embonpoint." )
div.add( slide_div )
div.add( '<br/>' )
click_slide = DivWdg()
click_slide.add( "Click Me to Slide!" )
click_slide.set_style( "background: #0f0f0f; color: #9f9f9f; border: 1px solid black; width: 100px; " \
"cursor: pointer;" )
click_slide.add_behavior( { 'type': 'click_up',
'dst_el': 'ui_play_sliding_thing',
'cbfn_action': 'spt.fx.slide_anim_cbk',
'options': { 'direction': 'vertical',
'duration': 500, # time in milliseconds
'frame_rate': 15 # frames per second
}
} )
div.add( click_slide )
return div
|
epl-1.0
|
mancoast/CPythonPyc_test
|
cpython/277_test_startfile.py
|
120
|
1421
|
# Ridiculously simple test of the os.startfile function for Windows.
#
# empty.vbs is an empty file (except for a comment), which does
# nothing when run with cscript or wscript.
#
# A possible improvement would be to have empty.vbs do something that
# we can detect here, to make sure that not only the os.startfile()
# call succeeded, but also that the script actually has run.
import unittest
from test import test_support
import os
from os import path
from time import sleep
startfile = test_support.get_attribute(os, 'startfile')
class TestCase(unittest.TestCase):
def test_nonexisting(self):
self.assertRaises(OSError, startfile, "nonexisting.vbs")
def test_nonexisting_u(self):
self.assertRaises(OSError, startfile, u"nonexisting.vbs")
def test_empty(self):
empty = path.join(path.dirname(__file__), "empty.vbs")
startfile(empty)
startfile(empty, "open")
# Give the child process some time to exit before we finish.
# Otherwise the cleanup code will not be able to delete the cwd,
# because it is still in use.
sleep(0.1)
def test_empty_u(self):
empty = path.join(path.dirname(__file__), "empty.vbs")
startfile(unicode(empty, "mbcs"))
startfile(unicode(empty, "mbcs"), "open")
sleep(0.1)
def test_main():
test_support.run_unittest(TestCase)
if __name__=="__main__":
test_main()
|
gpl-3.0
|
brion/operations-debs-ffmpeg2theorawmf
|
frontend/Simple Theora Encoder.py
|
3
|
8467
|
#!/usr/bin/env python
# vi:si:et:sw=2:sts=2:ts=2
# -*- coding: utf-8 -*-
# Written 2007 by [email protected]
#
# see LICENSE.txt for license information
#
__version__ = "1.0"
import os
from os.path import join, dirname, basename, abspath
import sys
import time
import thread
import xmlrpclib
import wx
try:
from xml.etree.ElementTree import Element, SubElement, ElementTree, parse
except ImportError:
from elementtree.ElementTree import Element, SubElement, ElementTree, parse
from theoraenc.addVideoDialog import addVideoDialog
from theoraenc import theoraenc
#overwrite location of resources in submodules
if os.name != 'nt':
theoraenc.resourcePath = abspath(dirname(__file__))
class SimpleTheoraEncoder(wx.Frame):
queuedata = {}
_qd_key = {}
encodingQueueInitialized = False
inputFile = False
encoding = False
quit = False
def initMainInterface(self):
# TODO: add menu
self.encodingQueue = wx.ListCtrl(self, -1, style=wx.LC_REPORT)
self.encodingQueue.SetPosition(wx.Point(10,50))
self.encodingQueue.SetSize(wx.Size(440, 165))
self.encodingQueue.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnItemSelected)
buttonSize = wx.Size(80,-1)
self.addItem = wx.Button(self, wx.ID_ANY, "Add...", wx.Point(460, 70), buttonSize)
self.Bind(wx.EVT_BUTTON, self.OnClickAdd, self.addItem)
self.removeItem = wx.Button(self, wx.ID_ANY, "Remove", wx.Point(460, 100), buttonSize)
self.Bind(wx.EVT_BUTTON, self.OnClickRemove, self.removeItem)
self.removeItem.Disable()
self.buttonEncode = wx.Button(self, wx.ID_ANY, "Encode", wx.Point(460, 190), buttonSize)
self.Bind(wx.EVT_BUTTON, self.OnEncode, self.buttonEncode)
self.buttonEncode.Disable()
self.Bind(wx.EVT_CLOSE, self.OnClose)
#Title
titleFont = wx.Font(14, wx.DEFAULT, wx.NORMAL, wx.NORMAL, False, u'Sans')
self.title = wx.StaticText(self, -1, "Simple Theora Encoder", wx.Point(10, 10))
self.title.SetFont(titleFont)
def __init__(self, parent, id, title, inputFile=None):
wx.Frame.__init__(self, parent, id, title, size=(550,230))
self.inputFile = inputFile
self.initMainInterface()
self.Show(True)
if self.addItem.IsEnabled():
self.OnClickAdd(None)
def initializeUploadQueue(self, selectItem = 0):
q = self.encodingQueue
q.ClearAll()
q.InsertColumn(0, "Name")
q.InsertColumn(1, "Stats")
q.SetColumnWidth(0, 200)
q.SetColumnWidth(1, 240)
q.itemDataMap = self.queuedata
items = self.queuedata.items()
for x in range(len(items)):
key, item = items[x]
self.queuedata[key]['itemID'] = x
self._qd_key[item['name']] = key
q.InsertStringItem(x, item['path'])
q.SetStringItem(x, 0, item['display_path'])
q.SetStringItem(x, 1, item['status'])
q.SetItemData(x, key)
# show how to select an item
self.currentItem = selectItem
if items:
q.SetItemState(self.currentItem, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)
else:
self.removeItem.Disable()
self.encodingQueueInitialized = True
def setItemStatus(self, itemID, value):
key = self.encodingQueue.GetItemData(itemID)
self.queuedata[key]['status'] = value
self.encodingQueue.SetStringItem(itemID, 1, value)
def updateItemStatus(self, name, status):
try:
item = self.queuedata[self._qd_key[name]]
except KeyError:
return
itemID = item['itemID']
if item['status'] != status:
item['status'] = status
#self.title.SetLabel(os.path.basename(item['path']) +': '+ status)
self.encodingQueue.SetStringItem(itemID, 1, status)
def getSettings(self, options):
settings = []
for key in ('width', 'height'):
if key in options and options[key]:
settings.append('--%s' % key)
settings.append("%s" % int(options[key]))
for key in ('videoquality', 'audioquality'):
if key in options and options[key]:
settings.append('--%s' % key)
settings.append("%s" % float(options[key]))
if 'subtitles' in options and options['subtitles']:
for s in options['subtitles']:
settings.append('--subtitles')
settings.append('%s' % s['file'])
settings.append('--subtitles-language')
settings.append('%s' % s['language'])
settings.append('--subtitles-category')
settings.append('%s' % s['category'])
settings.append('--subtitles-encoding')
settings.append('%s' % s['encoding'])
return settings
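# Illustrative example (not in the original source): options such as
# {'width': 640, 'height': 360, 'videoquality': 5.0} yield the settings list
# ['--width', '640', '--height', '360', '--videoquality', '5.0'].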
def encodeItem(self, item):
item['encoding'] = True
if self.currentItem == item['itemID']:
self.removeItem.SetLabel('Cancel')
self.setItemStatus(item['itemID'], 'encoding')
result = item['enc'].encode()
if not result:
self.setItemStatus(item['itemID'], 'encoding failed.')
else:
self.setItemStatus(item['itemID'], 'encoding done.')
item['encoded'] = True
item['encoding'] = False
return result
def encodeQueue(self, foo):
def nextItem():
items = self.queuedata.items()
for x in range(len(items)):
key, item = items[x]
if not item['encoded']:
return item
return None
next = nextItem()
while next and not self.quit:
self.encodeItem(next)
next = nextItem()
self.encoding = False
def addItemToQueue(self, videoFile, options):
name = os.path.basename(videoFile)
display_path = videoFile
if len(display_path) > 25:
display_path = "..." + display_path[-24:]
item = dict(
path = videoFile,
options = options,
display_path = display_path,
status = 'waiting... ',
listID = 0,
name = name,
)
item['encoding'] = False
item['encoded'] = False
item['enc'] = theoraenc.TheoraEnc(videoFile, None, lambda x: self.updateItemStatus(name, x))
item['enc'].settings = self.getSettings(options)
if self.encodingQueueInitialized:
x = self.encodingQueue.GetItemCount()
if self.queuedata:
key = max(self.queuedata.keys()) + 1
else:
key = 1
item['itemID'] = x
self.queuedata[key] = item
self.encodingQueue.InsertStringItem(x, item['path'])
self.encodingQueue.SetStringItem(x, 0, item['display_path'])
self.encodingQueue.SetStringItem(x, 1, item['status'])
self.encodingQueue.SetItemData(x, key)
else:
key = 1
self.queuedata[key] = item
self.initializeUploadQueue()
self._qd_key[name] = key
def OnItemSelected(self, event):
self.currentItem = event.m_itemIndex
key = self.encodingQueue.GetItemData(self.currentItem)
item = self.queuedata[key]
if item['encoding']:
self.removeItem.SetLabel('Cancel')
else:
self.removeItem.SetLabel('Remove')
self.removeItem.Enable()
def OnClickAdd(self, event):
result = addVideoDialog(self, theoraenc.hasKate, theoraenc.hasIconv)
time.sleep(0.5)
if result['ok']:
self.addItemToQueue(result['videoFile'], result)
if not self.encoding:
self.buttonEncode.Enable()
def OnClickRemove(self, event):
key = self.encodingQueue.GetItemData(self.currentItem)
if 'enc' in self.queuedata[key]:
self.queuedata[key]['enc'].cancel()
del self.queuedata[key]
self.initializeUploadQueue(self.currentItem)
def OnEncode(self, event):
if not self.encoding:
self.encoding = True
thread.start_new_thread(self.encodeQueue, ("foo", ))
self.buttonEncode.Disable()
def OnClose(self, event):
close = True
if self.encoding:
dlg = wx.MessageDialog(self,
"Videos are still encoded.\nDo you really want to close Simple Theora Encoder?",
"Confirm Exit", wx.OK|wx.CANCEL|wx.ICON_QUESTION)
result = dlg.ShowModal()
dlg.Destroy()
if result != wx.ID_OK:
close = False
if close:
self.quit = True
for key in self.queuedata:
if 'enc' in self.queuedata[key]:
try:
self.queuedata[key]['enc'].cancel()
except Exception:
pass
self.Destroy()
def gui(inputFile = None):
app = wx.PySimpleApp()
frame=SimpleTheoraEncoder(None, wx.ID_ANY, 'Simple Theora Encoder', inputFile = inputFile)
app.MainLoop()
if __name__ == '__main__':
inputFile = None
if len(sys.argv) > 1 and not sys.argv[1].startswith('-'):
inputFile = sys.argv[1]
gui(inputFile)
|
gpl-2.0
|
rghe/ansible
|
lib/ansible/modules/system/service.py
|
26
|
61239
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Michael DeHaan <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: service
author:
- Ansible Core Team
- Michael DeHaan
version_added: "0.1"
short_description: Manage services
description:
- Controls services on remote hosts. Supported init systems include BSD init,
OpenRC, SysV, Solaris SMF, systemd, upstart.
- For Windows targets, use the M(win_service) module instead.
options:
name:
description:
- Name of the service.
required: true
state:
description:
- C(started)/C(stopped) are idempotent actions that will not run
commands unless necessary. C(restarted) will always bounce the
service. C(reloaded) will always reload. B(At least one of state
and enabled are required.) Note that reloaded will start the
service if it is not already started, even if your chosen init
system wouldn't normally.
choices: [ reloaded, restarted, started, stopped ]
sleep:
description:
- If the service is being C(restarted) then sleep this many seconds
between the stop and start command. This helps to workaround badly
behaving init scripts that exit immediately after signaling a process
to stop.
version_added: "1.3"
pattern:
description:
- If the service does not respond to the status command, name a
substring to look for as would be found in the output of the I(ps)
command as a stand-in for a status result. If the string is found,
the service will be assumed to be started.
version_added: "0.7"
enabled:
description:
- Whether the service should start on boot. B(At least one of state and
enabled are required.)
type: bool
runlevel:
description:
- "For OpenRC init scripts (ex: Gentoo) only. The runlevel that this service belongs to."
default: default
arguments:
description:
- Additional arguments provided on the command line
aliases: [ args ]
use:
description:
- The service module actually uses system-specific modules, normally through auto detection; this setting can force a specific module.
- Normally it uses the value of the 'ansible_service_mgr' fact and falls back to the old 'service' module when none matching is found.
default: auto
version_added: 2.2
notes:
- For Windows targets, use the M(win_service) module instead.
'''
EXAMPLES = '''
- name: Start service httpd, if not started
service:
name: httpd
state: started
- name: Stop service httpd, if started
service:
name: httpd
state: stopped
- name: Restart service httpd, in all cases
service:
name: httpd
state: restarted
- name: Reload service httpd, in all cases
service:
name: httpd
state: reloaded
- name: Enable service httpd, and not touch the state
service:
name: httpd
enabled: yes
- name: Start service foo, based on running process /usr/bin/foo
service:
name: foo
pattern: /usr/bin/foo
state: started
- name: Restart network service for interface eth0
service:
name: network
state: restarted
args: eth0
'''
import glob
import json
import os
import platform
import re
import select
import shlex
import string
import subprocess
import tempfile
import time
# The distutils module is not shipped with SUNWPython on Solaris.
# It's in the SUNWPython-devel package which also contains development files
# that don't belong on production boxes. Since our Solaris code doesn't
# depend on LooseVersion, do not import it on Solaris.
if platform.system() != 'SunOS':
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule, load_platform_subclass
from ansible.module_utils.service import fail_if_missing
from ansible.module_utils.six import PY2, b
from ansible.module_utils._text import to_bytes, to_text
class Service(object):
"""
This is the generic Service manipulation class that is subclassed
based on platform.
A subclass should override the following action methods:-
- get_service_tools
- service_enable
- get_service_status
- service_control
All subclasses MUST define platform and distribution (which may be None).
"""
platform = 'Generic'
distribution = None
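# Illustrative shape of a platform subclass (hypothetical example, not part
# of this excerpt):
#     class FreeBsdService(Service):
#         platform = 'FreeBSD'
#         def get_service_status(self):
#             ...
# __new__ below uses load_platform_subclass() to pick the matching subclass
# for the current host at instantiation time.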
def __new__(cls, *args, **kwargs):
return load_platform_subclass(Service, args, kwargs)
def __init__(self, module):
self.module = module
self.name = module.params['name']
self.state = module.params['state']
self.sleep = module.params['sleep']
self.pattern = module.params['pattern']
self.enable = module.params['enabled']
self.runlevel = module.params['runlevel']
self.changed = False
self.running = None
self.crashed = None
self.action = None
self.svc_cmd = None
self.svc_initscript = None
self.svc_initctl = None
self.enable_cmd = None
self.arguments = module.params.get('arguments', '')
self.rcconf_file = None
self.rcconf_key = None
self.rcconf_value = None
self.svc_change = False
# ===========================================
# Platform specific methods (must be replaced by subclass).
def get_service_tools(self):
self.module.fail_json(msg="get_service_tools not implemented on target platform")
def service_enable(self):
self.module.fail_json(msg="service_enable not implemented on target platform")
def get_service_status(self):
self.module.fail_json(msg="get_service_status not implemented on target platform")
def service_control(self):
self.module.fail_json(msg="service_control not implemented on target platform")
# ===========================================
# Generic methods that should be used on all platforms.
def execute_command(self, cmd, daemonize=False):
# Most things don't need to be daemonized
if not daemonize:
# chkconfig localizes messages and we're screen scraping so make
# sure we use the C locale
lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
return self.module.run_command(cmd, environ_update=lang_env)
# This is complex because daemonization is hard for people.
# What we do is daemonize a part of this module, the daemon runs the
# command, picks up the return code and output, and returns it to the
# main process.
pipe = os.pipe()
pid = os.fork()
if pid == 0:
os.close(pipe[0])
# Set stdin/stdout/stderr to /dev/null
fd = os.open(os.devnull, os.O_RDWR)
if fd != 0:
os.dup2(fd, 0)
if fd != 1:
os.dup2(fd, 1)
if fd != 2:
os.dup2(fd, 2)
if fd not in (0, 1, 2):
os.close(fd)
# Make us a daemon. Yes, that's all it takes.
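# Classic double-fork: the first fork lets the parent exit so the child is
# reparented to init, setsid() detaches from the controlling terminal, and
# the second fork ensures the daemon can never reacquire a terminal.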
pid = os.fork()
if pid > 0:
os._exit(0)
os.setsid()
os.chdir("/")
pid = os.fork()
if pid > 0:
os._exit(0)
# Start the command
if PY2:
# Python 2.6's shlex.split can't handle text strings correctly
cmd = to_bytes(cmd, errors='surrogate_or_strict')
cmd = shlex.split(cmd)
else:
# Python 3.x shlex.split works on text strings.
cmd = to_text(cmd, errors='surrogate_or_strict')
cmd = [to_bytes(c, errors='surrogate_or_strict') for c in shlex.split(cmd)]
# In either of the above cases, pass a list of byte strings to Popen
# chkconfig localizes messages and we're screen scraping so make
# sure we use the C locale
lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=lang_env, preexec_fn=lambda: os.close(pipe[1]))
stdout = b("")
stderr = b("")
fds = [p.stdout, p.stderr]
# Wait for all output, or until the main process is dead and its output is done.
while fds:
rfd, wfd, efd = select.select(fds, [], fds, 1)
if not (rfd + wfd + efd) and p.poll() is not None:
break
if p.stdout in rfd:
dat = os.read(p.stdout.fileno(), 4096)
if not dat:
fds.remove(p.stdout)
stdout += dat
if p.stderr in rfd:
dat = os.read(p.stderr.fileno(), 4096)
if not dat:
fds.remove(p.stderr)
stderr += dat
p.wait()
# Return a JSON blob to parent
blob = json.dumps([p.returncode, to_text(stdout), to_text(stderr)])
os.write(pipe[1], to_bytes(blob, errors='surrogate_or_strict'))
os.close(pipe[1])
os._exit(0)
elif pid == -1:
self.module.fail_json(msg="unable to fork")
else:
os.close(pipe[1])
os.waitpid(pid, 0)
# Wait for data from daemon process and process it.
data = b("")
while True:
rfd, wfd, efd = select.select([pipe[0]], [], [pipe[0]])
if pipe[0] in rfd:
dat = os.read(pipe[0], 4096)
if not dat:
break
data += dat
return json.loads(to_text(data, errors='surrogate_or_strict'))
def check_ps(self):
# Set ps flags
if platform.system() == 'SunOS':
psflags = '-ef'
else:
psflags = 'auxww'
# Find ps binary
psbin = self.module.get_bin_path('ps', True)
(rc, psout, pserr) = self.execute_command('%s %s' % (psbin, psflags))
# If rc is 0, set running as appropriate
if rc == 0:
self.running = False
lines = psout.split("\n")
for line in lines:
if self.pattern in line and "pattern=" not in line:
# so as to not confuse ./hacking/test-module
self.running = True
break
def check_service_changed(self):
if self.state and self.running is None:
self.module.fail_json(msg="failed determining service state, possible typo of service name?")
# Find out if state has changed
if not self.running and self.state in ["reloaded", "started"]:
self.svc_change = True
elif self.running and self.state in ["reloaded", "stopped"]:
self.svc_change = True
elif self.state == "restarted":
self.svc_change = True
if self.module.check_mode and self.svc_change:
self.module.exit_json(changed=True, msg='service state changed')
def modify_service_state(self):
# Only do something if state will change
if self.svc_change:
# Control service
if self.state in ['started']:
self.action = "start"
elif not self.running and self.state == 'reloaded':
self.action = "start"
elif self.state == 'stopped':
self.action = "stop"
elif self.state == 'reloaded':
self.action = "reload"
elif self.state == 'restarted':
self.action = "restart"
if self.module.check_mode:
self.module.exit_json(changed=True, msg='changing service state')
return self.service_control()
else:
# If nothing needs to change just say all is well
rc = 0
err = ''
out = ''
return rc, out, err
def service_enable_rcconf(self):
if self.rcconf_file is None or self.rcconf_key is None or self.rcconf_value is None:
self.module.fail_json(msg="service_enable_rcconf() requires rcconf_file, rcconf_key and rcconf_value")
self.changed = None
entry = '%s="%s"\n' % (self.rcconf_key, self.rcconf_value)
RCFILE = open(self.rcconf_file, "r")
new_rc_conf = []
# Build a list containing the possibly modified file.
for rcline in RCFILE:
# Parse line removing whitespaces, quotes, etc.
rcarray = shlex.split(rcline, comments=True)
if len(rcarray) >= 1 and '=' in rcarray[0]:
(key, value) = rcarray[0].split("=", 1)
if key == self.rcconf_key:
if value.upper() == self.rcconf_value:
# Since the proper entry already exists we can stop iterating.
self.changed = False
break
else:
# We found the key but the value is wrong, replace with new entry.
rcline = entry
self.changed = True
# Add line to the list.
new_rc_conf.append(rcline.strip() + '\n')
# We are done with reading the current rc.conf, close it.
RCFILE.close()
# If we did not see any trace of our entry we need to add it.
if self.changed is None:
new_rc_conf.append(entry)
self.changed = True
if self.changed is True:
if self.module.check_mode:
self.module.exit_json(changed=True, msg="changing service enablement")
# Create a temporary file next to the current rc.conf (so we stay on the same filesystem).
# This way the replacement operation is atomic.
rcconf_dir = os.path.dirname(self.rcconf_file)
rcconf_base = os.path.basename(self.rcconf_file)
(TMP_RCCONF, tmp_rcconf_file) = tempfile.mkstemp(dir=rcconf_dir, prefix="%s-" % rcconf_base)
# Write out the contents of the list into our temporary file.
for rcline in new_rc_conf:
os.write(TMP_RCCONF, rcline)
# Close temporary file.
os.close(TMP_RCCONF)
# Replace previous rc.conf.
self.module.atomic_move(tmp_rcconf_file, self.rcconf_file)
class LinuxService(Service):
"""
This is the Linux Service manipulation class - it is currently supporting
a mixture of binaries and init scripts for controlling services started at
boot, as well as for controlling the current state.
"""
platform = 'Linux'
distribution = None
def get_service_tools(self):
paths = ['/sbin', '/usr/sbin', '/bin', '/usr/bin']
binaries = ['service', 'chkconfig', 'update-rc.d', 'rc-service', 'rc-update', 'initctl', 'systemctl', 'start', 'stop', 'restart', 'insserv']
initpaths = ['/etc/init.d']
location = dict()
for binary in binaries:
location[binary] = self.module.get_bin_path(binary, opt_dirs=paths)
for initdir in initpaths:
initscript = "%s/%s" % (initdir, self.name)
if os.path.isfile(initscript):
self.svc_initscript = initscript
def check_systemd():
# tools must be installed
if location.get('systemctl', False):
# this should show if systemd is the boot init system
# these mirror systemd's own sd_boot test http://www.freedesktop.org/software/systemd/man/sd_booted.html
for canary in ["/run/systemd/system/", "/dev/.run/systemd/", "/dev/.systemd/"]:
if os.path.exists(canary):
return True
# If all else fails, check if init is the systemd command, using comm as cmdline could be symlink
try:
f = open('/proc/1/comm', 'r')
except IOError:
# If comm doesn't exist, old kernel, no systemd
return False
for line in f:
if 'systemd' in line:
return True
return False
# Locate a tool to enable/disable a service
if check_systemd():
# service is managed by systemd
self.__systemd_unit = self.name
self.svc_cmd = location['systemctl']
self.enable_cmd = location['systemctl']
elif location.get('initctl', False) and os.path.exists("/etc/init/%s.conf" % self.name):
# service is managed by upstart
self.enable_cmd = location['initctl']
# set the upstart version based on the output of 'initctl version'
self.upstart_version = LooseVersion('0.0.0')
try:
version_re = re.compile(r'\(upstart (.*)\)')
rc, stdout, stderr = self.module.run_command('%s version' % location['initctl'])
if rc == 0:
res = version_re.search(stdout)
if res:
self.upstart_version = LooseVersion(res.groups()[0])
except:
pass # we'll use the default of 0.0.0
self.svc_cmd = location['initctl']
elif location.get('rc-service', False):
# service is managed by OpenRC
self.svc_cmd = location['rc-service']
self.enable_cmd = location['rc-update']
return # already have service start/stop tool too!
elif self.svc_initscript:
# service is managed by with SysV init scripts
if location.get('update-rc.d', False):
# and uses update-rc.d
self.enable_cmd = location['update-rc.d']
elif location.get('insserv', None):
# and uses insserv
self.enable_cmd = location['insserv']
elif location.get('chkconfig', False):
# and uses chkconfig
self.enable_cmd = location['chkconfig']
if self.enable_cmd is None:
fail_if_missing(self.module, False, self.name, msg='host')
# If no service control tool selected yet, try to see if 'service' is available
if self.svc_cmd is None and location.get('service', False):
self.svc_cmd = location['service']
# couldn't find anything yet
if self.svc_cmd is None and not self.svc_initscript:
self.module.fail_json(msg='cannot find \'service\' binary or init script for service, possible typo in service name?, aborting')
if location.get('initctl', False):
self.svc_initctl = location['initctl']
def get_systemd_service_enabled(self):
def sysv_exists(name):
script = '/etc/init.d/' + name
return os.access(script, os.X_OK)
def sysv_is_enabled(name):
return bool(glob.glob('/etc/rc?.d/S??' + name))
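# e.g. a symlink '/etc/rc3.d/S20ssh' matches the 'S??' + name glob above,
# meaning the SysV script is enabled (started) for runlevel 3.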
service_name = self.__systemd_unit
(rc, out, err) = self.execute_command("%s is-enabled %s" % (self.enable_cmd, service_name,))
if rc == 0:
return True
elif out.startswith('disabled'):
return False
elif sysv_exists(service_name):
return sysv_is_enabled(service_name)
else:
return False
def get_systemd_status_dict(self):
# Check status first as show will not fail if service does not exist
(rc, out, err) = self.execute_command("%s show '%s'" % (self.enable_cmd, self.__systemd_unit,))
if rc != 0:
self.module.fail_json(msg='failure %d running systemctl show for %r: %s' % (rc, self.__systemd_unit, err))
elif 'LoadState=not-found' in out:
self.module.fail_json(msg='systemd could not find the requested service "%r": %s' % (self.__systemd_unit, err))
key = None
value_buffer = []
status_dict = {}
for line in out.splitlines():
if '=' in line:
if not key:
key, value = line.split('=', 1)
# systemd fields that are shell commands can be multi-line
# We take a value that begins with a "{" as the start of
# a shell command and a line that ends with "}" as the end of
# the command
if value.lstrip().startswith('{'):
if value.rstrip().endswith('}'):
status_dict[key] = value
key = None
else:
value_buffer.append(value)
else:
status_dict[key] = value
key = None
else:
if line.rstrip().endswith('}'):
status_dict[key] = '\n'.join(value_buffer)
key = None
else:
value_buffer.append(line)
else:
value_buffer.append(line)
return status_dict
def get_systemd_service_status(self):
d = self.get_systemd_status_dict()
if d.get('ActiveState') == 'active':
# run-once services (for which a single successful exit indicates
# that they are running as designed) should not be restarted here.
# Thus, we are not checking d['SubState'].
self.running = True
self.crashed = False
elif d.get('ActiveState') == 'failed':
self.running = False
self.crashed = True
elif d.get('ActiveState') is None:
self.module.fail_json(msg='No ActiveState value in systemctl show output for %r' % (self.__systemd_unit,))
else:
self.running = False
self.crashed = False
return self.running
def get_service_status(self):
if self.svc_cmd and self.svc_cmd.endswith('systemctl'):
return self.get_systemd_service_status()
self.action = "status"
rc, status_stdout, status_stderr = self.service_control()
# if we have decided the service is managed by upstart, we check for some additional output...
if self.svc_initctl and self.running is None:
# check the job status by upstart response
initctl_rc, initctl_status_stdout, initctl_status_stderr = self.execute_command("%s status %s %s" % (self.svc_initctl, self.name, self.arguments))
if "stop/waiting" in initctl_status_stdout:
self.running = False
elif "start/running" in initctl_status_stdout:
self.running = True
if self.svc_cmd and self.svc_cmd.endswith("rc-service") and self.running is None:
openrc_rc, openrc_status_stdout, openrc_status_stderr = self.execute_command("%s %s status" % (self.svc_cmd, self.name))
self.running = "started" in openrc_status_stdout
self.crashed = "crashed" in openrc_status_stderr
# Prefer a non-zero return code. For reference, see:
# http://refspecs.linuxbase.org/LSB_4.1.0/LSB-Core-generic/LSB-Core-generic/iniscrptact.html
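# 1-4 are LSB status codes for a stopped/dead service; 69 matches
# EX_UNAVAILABLE from BSD sysexits.h, which some scripts return.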
if self.running is None and rc in [1, 2, 3, 4, 69]:
self.running = False
# if the job status is still not known check it by status output keywords
# Only check keywords if there's only one line of output (some init
# scripts will output verbosely in case of error and those can emit
# keywords that are picked up as false positives)
if self.running is None and status_stdout.count('\n') <= 1:
# first transform the status output that could irritate keyword matching
cleanout = status_stdout.lower().replace(self.name.lower(), '')
if "stop" in cleanout:
self.running = False
elif "run" in cleanout:
self.running = not ("not " in cleanout)
elif "start" in cleanout and "not " not in cleanout:
self.running = True
elif 'could not access pid file' in cleanout:
self.running = False
elif 'is dead and pid file exists' in cleanout:
self.running = False
elif 'dead but subsys locked' in cleanout:
self.running = False
elif 'dead but pid file exists' in cleanout:
self.running = False
# if the job status is still not known and we got a zero for the
# return code, assume here that the service is running
if self.running is None and rc == 0:
self.running = True
# if the job status is still not known check it by special conditions
if self.running is None:
if self.name == 'iptables' and "ACCEPT" in status_stdout:
# iptables status command output is lame
# TODO: lookup if we can use a return code for this instead?
self.running = True
return self.running
def service_enable(self):
if self.enable_cmd is None:
self.module.fail_json(msg='cannot detect command to enable service %s, typo or init system potentially unknown' % self.name)
self.changed = True
action = None
#
# Upstart's initctl
#
if self.enable_cmd.endswith("initctl"):
def write_to_override_file(file_name, file_contents):
override_file = open(file_name, 'w')
override_file.write(file_contents)
override_file.close()
initpath = '/etc/init'
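# Upstart 0.6.7 introduced the 'manual' stanza; older versions used a
# 'start on manual' line to keep a job from starting automatically.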
if self.upstart_version >= LooseVersion('0.6.7'):
manreg = re.compile(r'^manual\s*$', re.M | re.I)
config_line = 'manual\n'
else:
manreg = re.compile(r'^start on manual\s*$', re.M | re.I)
config_line = 'start on manual\n'
conf_file_name = "%s/%s.conf" % (initpath, self.name)
override_file_name = "%s/%s.override" % (initpath, self.name)
# Check to see if files contain the manual line in .conf and fail if True
if manreg.search(open(conf_file_name).read()):
self.module.fail_json(msg="manual stanza not supported in a .conf file")
self.changed = False
if os.path.exists(override_file_name):
override_file_contents = open(override_file_name).read()
# Remove manual stanza if present and service enabled
if self.enable and manreg.search(override_file_contents):
self.changed = True
override_state = manreg.sub('', override_file_contents)
# Add manual stanza if not present and service disabled
elif not (self.enable) and not (manreg.search(override_file_contents)):
self.changed = True
override_state = '\n'.join((override_file_contents, config_line))
# service already in desired state
else:
pass
# Add file with manual stanza if service disabled
elif not (self.enable):
self.changed = True
override_state = config_line
else:
# service already in desired state
pass
if self.module.check_mode:
self.module.exit_json(changed=self.changed)
# The initctl method of enabling and disabling services is much
# different than for the other service methods. So actually
# committing the change is done in this conditional and then we
# skip the boilerplate at the bottom of the method
if self.changed:
try:
write_to_override_file(override_file_name, override_state)
except:
self.module.fail_json(msg='Could not modify override file')
return
#
# SysV's chkconfig
#
if self.enable_cmd.endswith("chkconfig"):
if self.enable:
action = 'on'
else:
action = 'off'
(rc, out, err) = self.execute_command("%s --list %s" % (self.enable_cmd, self.name))
if 'chkconfig --add %s' % self.name in err:
self.execute_command("%s --add %s" % (self.enable_cmd, self.name))
(rc, out, err) = self.execute_command("%s --list %s" % (self.enable_cmd, self.name))
if self.name not in out:
self.module.fail_json(msg="service %s does not support chkconfig" % self.name)
# TODO: look back on why this is here
# state = out.split()[-1]
# Check if we're already in the correct state
if "3:%s" % action in out and "5:%s" % action in out:
self.changed = False
return
#
# Systemd's systemctl
#
if self.enable_cmd.endswith("systemctl"):
if self.enable:
action = 'enable'
else:
action = 'disable'
# Check if we're already in the correct state
service_enabled = self.get_systemd_service_enabled()
# self.changed should already be true
if self.enable == service_enabled:
self.changed = False
return
#
# OpenRC's rc-update
#
if self.enable_cmd.endswith("rc-update"):
if self.enable:
action = 'add'
else:
action = 'delete'
(rc, out, err) = self.execute_command("%s show" % self.enable_cmd)
for line in out.splitlines():
service_name, runlevels = line.split('|')
service_name = service_name.strip()
if service_name != self.name:
continue
runlevels = re.split(r'\s+', runlevels)
# service already enabled for the runlevel
if self.enable and self.runlevel in runlevels:
self.changed = False
# service already disabled for the runlevel
elif not self.enable and self.runlevel not in runlevels:
self.changed = False
break
else:
# service already disabled altogether
if not self.enable:
self.changed = False
if not self.changed:
return
#
# update-rc.d style
#
if self.enable_cmd.endswith("update-rc.d"):
enabled = False
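# SysV start links are named S<NN><name>, e.g. /etc/rc3.d/S20<name>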
slinks = glob.glob('/etc/rc?.d/S??' + self.name)
if slinks:
enabled = True
if self.enable != enabled:
self.changed = True
if self.enable:
action = 'enable'
klinks = glob.glob('/etc/rc?.d/K??' + self.name)
if not klinks:
if not self.module.check_mode:
(rc, out, err) = self.execute_command("%s %s defaults" % (self.enable_cmd, self.name))
if rc != 0:
if err:
self.module.fail_json(msg=err)
else:
self.module.fail_json(msg=out)
else:
action = 'disable'
if not self.module.check_mode:
(rc, out, err) = self.execute_command("%s %s %s" % (self.enable_cmd, self.name, action))
if rc != 0:
if err:
self.module.fail_json(msg=err)
else:
self.module.fail_json(msg=out)
else:
self.changed = False
return
#
# insserv (Debian <=7, SLES, others)
#
if self.enable_cmd.endswith("insserv"):
if self.enable:
(rc, out, err) = self.execute_command("%s -n -v %s" % (self.enable_cmd, self.name))
else:
(rc, out, err) = self.execute_command("%s -n -r -v %s" % (self.enable_cmd, self.name))
self.changed = False
for line in err.splitlines():
if self.enable and line.find('enable service') != -1:
self.changed = True
break
if not self.enable and line.find('remove service') != -1:
self.changed = True
break
if self.module.check_mode:
self.module.exit_json(changed=self.changed)
if not self.changed:
return
if self.enable:
(rc, out, err) = self.execute_command("%s %s" % (self.enable_cmd, self.name))
if (rc != 0) or (err != ''):
self.module.fail_json(msg=("Failed to install service. rc: %s, out: %s, err: %s" % (rc, out, err)))
return (rc, out, err)
else:
(rc, out, err) = self.execute_command("%s -r %s" % (self.enable_cmd, self.name))
if (rc != 0) or (err != ''):
self.module.fail_json(msg=("Failed to remove service. rc: %s, out: %s, err: %s" % (rc, out, err)))
return (rc, out, err)
#
# If we've gotten to the end, the service needs to be updated
#
self.changed = True
# we change argument order depending on real binary used:
# rc-update and systemctl need the argument order reversed
if self.enable_cmd.endswith("rc-update"):
args = (self.enable_cmd, action, self.name + " " + self.runlevel)
elif self.enable_cmd.endswith("systemctl"):
args = (self.enable_cmd, action, self.__systemd_unit)
else:
args = (self.enable_cmd, self.name, action)
if self.module.check_mode:
self.module.exit_json(changed=self.changed)
(rc, out, err) = self.execute_command("%s %s %s" % args)
if rc != 0:
if err:
self.module.fail_json(msg="Error when trying to %s %s: rc=%s %s" % (action, self.name, rc, err))
else:
self.module.fail_json(msg="Failure for %s %s: rc=%s %s" % (action, self.name, rc, out))
return (rc, out, err)
def service_control(self):
# Decide what command to run
svc_cmd = ''
arguments = self.arguments
if self.svc_cmd:
if not self.svc_cmd.endswith("systemctl"):
if self.svc_cmd.endswith("initctl"):
# initctl commands take the form <cmd> <action> <name>
svc_cmd = self.svc_cmd
arguments = "%s %s" % (self.name, arguments)
else:
# SysV and OpenRC take the form <cmd> <name> <action>
svc_cmd = "%s %s" % (self.svc_cmd, self.name)
else:
# systemd commands take the form <cmd> <action> <name>
svc_cmd = self.svc_cmd
arguments = "%s %s" % (self.__systemd_unit, arguments)
elif self.svc_cmd is None and self.svc_initscript:
# upstart
svc_cmd = "%s" % self.svc_initscript
# In OpenRC, if a service crashed, we need to reset its status to
# stopped with the zap command, before we can start it back.
if self.svc_cmd and self.svc_cmd.endswith('rc-service') and self.action == 'start' and self.crashed:
self.execute_command("%s zap" % svc_cmd, daemonize=True)
if self.action != "restart":
if svc_cmd != '':
# upstart or systemd or OpenRC
rc_state, stdout, stderr = self.execute_command("%s %s %s" % (svc_cmd, self.action, arguments), daemonize=True)
else:
# SysV
rc_state, stdout, stderr = self.execute_command("%s %s %s" % (self.action, self.name, arguments), daemonize=True)
elif self.svc_cmd and self.svc_cmd.endswith('rc-service'):
# All services in OpenRC support restart.
rc_state, stdout, stderr = self.execute_command("%s %s %s" % (svc_cmd, self.action, arguments), daemonize=True)
else:
# In other systems, not all services support restart. Do it the hard way.
if svc_cmd != '':
# upstart or systemd
rc1, stdout1, stderr1 = self.execute_command("%s %s %s" % (svc_cmd, 'stop', arguments), daemonize=True)
else:
# SysV
rc1, stdout1, stderr1 = self.execute_command("%s %s %s" % ('stop', self.name, arguments), daemonize=True)
if self.sleep:
time.sleep(self.sleep)
if svc_cmd != '':
# upstart or systemd
rc2, stdout2, stderr2 = self.execute_command("%s %s %s" % (svc_cmd, 'start', arguments), daemonize=True)
else:
# SysV
rc2, stdout2, stderr2 = self.execute_command("%s %s %s" % ('start', self.name, arguments), daemonize=True)
# merge return information
if rc1 != 0 and rc2 == 0:
rc_state = rc2
stdout = stdout2
stderr = stderr2
else:
rc_state = rc1 + rc2
stdout = stdout1 + stdout2
stderr = stderr1 + stderr2
return (rc_state, stdout, stderr)
class FreeBsdService(Service):
"""
This is the FreeBSD Service manipulation class - it uses the /etc/rc.conf
file for controlling services started at boot and the 'service' binary to
check status and perform direct service manipulation.
"""
platform = 'FreeBSD'
distribution = None
def get_service_tools(self):
self.svc_cmd = self.module.get_bin_path('service', True)
if not self.svc_cmd:
self.module.fail_json(msg='unable to find service binary')
self.sysrc_cmd = self.module.get_bin_path('sysrc')
def get_service_status(self):
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, 'onestatus', self.arguments))
if self.name == "pf":
self.running = "Enabled" in stdout
else:
if rc == 1:
self.running = False
elif rc == 0:
self.running = True
def service_enable(self):
if self.enable:
self.rcconf_value = "YES"
else:
self.rcconf_value = "NO"
rcfiles = ['/etc/rc.conf', '/etc/rc.conf.local', '/usr/local/etc/rc.conf']
for rcfile in rcfiles:
if os.path.isfile(rcfile):
self.rcconf_file = rcfile
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, 'rcvar', self.arguments))
rcvars = None
try:
rcvars = shlex.split(stdout, comments=True)
except Exception:
# TODO: add a warning to the output with the failure
pass
if not rcvars:
self.module.fail_json(msg="unable to determine rcvar", stdout=stdout, stderr=stderr)
# In rare cases, e.g. sendmail, rcvar can return several key=value pairs.
# Usually there is just one, however. In other rare cases, e.g. uwsgi,
# rcvar can return extra uncommented data that is not at all related to
# the rcvar. We will just take the first key=value pair we come across
# and hope for the best.
for rcvar in rcvars:
if '=' in rcvar:
self.rcconf_key, default_rcconf_value = rcvar.split('=', 1)
break
if self.rcconf_key is None:
self.module.fail_json(msg="unable to determine rcvar", stdout=stdout, stderr=stderr)
if self.sysrc_cmd: # FreeBSD >= 9.2
rc, current_rcconf_value, stderr = self.execute_command("%s -n %s" % (self.sysrc_cmd, self.rcconf_key))
# it can happen that rcvar is not set (case of a system coming from the ports collection)
# so we will fallback on the default
if rc != 0:
current_rcconf_value = default_rcconf_value
if current_rcconf_value.strip().upper() != self.rcconf_value:
self.changed = True
if self.module.check_mode:
self.module.exit_json(changed=True, msg="changing service enablement")
rc, change_stdout, change_stderr = self.execute_command("%s %s=\"%s\"" % (self.sysrc_cmd, self.rcconf_key, self.rcconf_value))
if rc != 0:
self.module.fail_json(msg="unable to set rcvar using sysrc", stdout=change_stdout, stderr=change_stderr)
# sysrc does not exit with code 1 on permission error => validate successful change using service(8)
rc, check_stdout, check_stderr = self.execute_command("%s %s %s" % (self.svc_cmd, self.name, "enabled"))
if self.enable != (rc == 0): # rc = 0 indicates enabled service, rc = 1 indicates disabled service
self.module.fail_json(msg="unable to set rcvar: sysrc did not change value", stdout=change_stdout, stderr=change_stderr)
else:
self.changed = False
else: # Legacy (FreeBSD < 9.2)
try:
return self.service_enable_rcconf()
except Exception:
self.module.fail_json(msg='unable to set rcvar')
def service_control(self):
if self.action == "start":
self.action = "onestart"
if self.action == "stop":
self.action = "onestop"
if self.action == "reload":
self.action = "onereload"
ret = self.execute_command("%s %s %s %s" % (self.svc_cmd, self.name, self.action, self.arguments))
if self.sleep:
time.sleep(self.sleep)
return ret
class DragonFlyBsdService(FreeBsdService):
"""
This is the DragonFly BSD Service manipulation class - it uses the /etc/rc.conf
file for controlling services started at boot and the 'service' binary to
check status and perform direct service manipulation.
"""
platform = 'DragonFly'
distribution = None
def service_enable(self):
if self.enable:
self.rcconf_value = "YES"
else:
self.rcconf_value = "NO"
rcfiles = ['/etc/rc.conf'] # Overkill?
for rcfile in rcfiles:
if os.path.isfile(rcfile):
self.rcconf_file = rcfile
self.rcconf_key = "%s" % string.replace(self.name, "-", "_")
return self.service_enable_rcconf()
class OpenBsdService(Service):
"""
This is the OpenBSD Service manipulation class - it uses rcctl(8) or
/etc/rc.d scripts for service control. Enabling a service is
only supported if rcctl is present.
"""
platform = 'OpenBSD'
distribution = None
def get_service_tools(self):
self.enable_cmd = self.module.get_bin_path('rcctl')
if self.enable_cmd:
self.svc_cmd = self.enable_cmd
else:
rcdir = '/etc/rc.d'
rc_script = "%s/%s" % (rcdir, self.name)
if os.path.isfile(rc_script):
self.svc_cmd = rc_script
if not self.svc_cmd:
self.module.fail_json(msg='unable to find svc_cmd')
def get_service_status(self):
if self.enable_cmd:
rc, stdout, stderr = self.execute_command("%s %s %s" % (self.svc_cmd, 'check', self.name))
else:
rc, stdout, stderr = self.execute_command("%s %s" % (self.svc_cmd, 'check'))
if stderr:
self.module.fail_json(msg=stderr)
if rc == 1:
self.running = False
elif rc == 0:
self.running = True
def service_control(self):
if self.enable_cmd:
return self.execute_command("%s -f %s %s" % (self.svc_cmd, self.action, self.name))
else:
return self.execute_command("%s -f %s" % (self.svc_cmd, self.action))
def service_enable(self):
if not self.enable_cmd:
return super(OpenBsdService, self).service_enable()
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'getdef', self.name, 'flags'))
if stderr:
self.module.fail_json(msg=stderr)
getdef_string = stdout.rstrip()
# Depending on the service the string returned from 'getdef' may be
# either a set of flags or the boolean YES/NO
if getdef_string == "YES" or getdef_string == "NO":
default_flags = ''
else:
default_flags = getdef_string
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'get', self.name, 'flags'))
if stderr:
self.module.fail_json(msg=stderr)
get_string = stdout.rstrip()
# Depending on the service the string returned from 'get' may be
# either a set of flags or the boolean YES/NO
if get_string == "YES" or get_string == "NO":
current_flags = ''
else:
current_flags = get_string
# If there are arguments from the user we use these as flags unless
# they are already set.
if self.arguments and self.arguments != current_flags:
changed_flags = self.arguments
# If the user has not supplied any arguments and the current flags
# differ from the default we reset them.
elif not self.arguments and current_flags != default_flags:
changed_flags = ' '
# Otherwise there is no need to modify flags.
else:
changed_flags = ''
rc, stdout, stderr = self.execute_command("%s %s %s %s" % (self.enable_cmd, 'get', self.name, 'status'))
if self.enable:
if rc == 0 and not changed_flags:
return
if rc != 0:
status_action = "set %s status on" % (self.name)
else:
status_action = ''
if changed_flags:
flags_action = "set %s flags %s" % (self.name, changed_flags)
else:
flags_action = ''
else:
if rc == 1:
return
status_action = "set %s status off" % self.name
flags_action = ''
# Verify state assumption
if not status_action and not flags_action:
self.module.fail_json(msg="neither status_action or status_flags is set, this should never happen")
if self.module.check_mode:
self.module.exit_json(changed=True, msg="changing service enablement")
status_modified = 0
if status_action:
rc, stdout, stderr = self.execute_command("%s %s" % (self.enable_cmd, status_action))
if rc != 0:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg="rcctl failed to modify service status")
status_modified = 1
if flags_action:
rc, stdout, stderr = self.execute_command("%s %s" % (self.enable_cmd, flags_action))
if rc != 0:
if stderr:
if status_modified:
error_message = "rcctl modified service status but failed to set flags: " + stderr
else:
error_message = stderr
else:
if status_modified:
error_message = "rcctl modified service status but failed to set flags"
else:
error_message = "rcctl failed to modify service flags"
self.module.fail_json(msg=error_message)
self.changed = True
class NetBsdService(Service):
"""
This is the NetBSD Service manipulation class - it uses the /etc/rc.conf
file for controlling services started at boot, check status and perform
direct service manipulation. Init scripts in /etc/rcd are used for
controlling services (start/stop) as well as for controlling the current
state.
"""
platform = 'NetBSD'
distribution = None
def get_service_tools(self):
initpaths = ['/etc/rc.d'] # better: $rc_directories - how to get in here? Run: sh -c '. /etc/rc.conf ; echo $rc_directories'
for initdir in initpaths:
initscript = "%s/%s" % (initdir, self.name)
if os.path.isfile(initscript):
self.svc_initscript = initscript
if not self.svc_initscript:
self.module.fail_json(msg='unable to find rc.d script')
def service_enable(self):
if self.enable:
self.rcconf_value = "YES"
else:
self.rcconf_value = "NO"
rcfiles = ['/etc/rc.conf'] # Overkill?
for rcfile in rcfiles:
if os.path.isfile(rcfile):
self.rcconf_file = rcfile
self.rcconf_key = "%s" % string.replace(self.name, "-", "_")
return self.service_enable_rcconf()
def get_service_status(self):
self.svc_cmd = "%s" % self.svc_initscript
rc, stdout, stderr = self.execute_command("%s %s" % (self.svc_cmd, 'onestatus'))
if rc == 1:
self.running = False
elif rc == 0:
self.running = True
def service_control(self):
if self.action == "start":
self.action = "onestart"
if self.action == "stop":
self.action = "onestop"
self.svc_cmd = "%s" % self.svc_initscript
return self.execute_command("%s %s" % (self.svc_cmd, self.action), daemonize=True)
class SunOSService(Service):
"""
This is the SunOS Service manipulation class - it uses the svcadm
command for controlling services, and svcs command for checking status.
It also tries to be smart about taking the service out of maintenance
state if necessary.
"""
platform = 'SunOS'
distribution = None
def get_service_tools(self):
self.svcs_cmd = self.module.get_bin_path('svcs', True)
if not self.svcs_cmd:
self.module.fail_json(msg='unable to find svcs binary')
self.svcadm_cmd = self.module.get_bin_path('svcadm', True)
if not self.svcadm_cmd:
self.module.fail_json(msg='unable to find svcadm binary')
if self.svcadm_supports_sync():
self.svcadm_sync = '-s'
else:
self.svcadm_sync = ''
def svcadm_supports_sync(self):
# Support for synchronous restart/refresh is only supported on
# Oracle Solaris >= 11.2
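# /etc/release contains a line such as "  Oracle Solaris 11.2 SPARC"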
for line in open('/etc/release', 'r').readlines():
m = re.match(r'\s+Oracle Solaris (\d+\.\d+).*', line.rstrip())
if m and LooseVersion(m.groups()[0]) >= LooseVersion('11.2'):
return True
def get_service_status(self):
status = self.get_sunos_svcs_status()
# Only 'online' is considered properly running. Everything else is off
# or has some sort of problem.
if status == 'online':
self.running = True
else:
self.running = False
def get_sunos_svcs_status(self):
rc, stdout, stderr = self.execute_command("%s %s" % (self.svcs_cmd, self.name))
if rc == 1:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg=stdout)
lines = stdout.rstrip("\n").split("\n")
status = lines[-1].split(" ")[0]
# status is one of: online, offline, degraded, disabled, maintenance, uninitialized
# see man svcs(1)
return status
def service_enable(self):
# Get current service enablement status
rc, stdout, stderr = self.execute_command("%s -l %s" % (self.svcs_cmd, self.name))
if rc != 0:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg=stdout)
enabled = False
temporary = False
# look for enabled line, which could be one of:
# enabled true (temporary)
# enabled false (temporary)
# enabled true
# enabled false
for line in stdout.split("\n"):
if line.startswith("enabled"):
if "true" in line:
enabled = True
if "temporary" in line:
temporary = True
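# A temporary override inverts the persistent setting: "enabled true
# (temporary)" means the service is disabled at boot, and vice versa.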
startup_enabled = (enabled and not temporary) or (not enabled and temporary)
if self.enable and startup_enabled:
return
elif (not self.enable) and (not startup_enabled):
return
if not self.module.check_mode:
# Mark service as started or stopped (this will have the side effect of
# actually stopping or starting the service)
if self.enable:
subcmd = "enable -rs"
else:
subcmd = "disable -s"
rc, stdout, stderr = self.execute_command("%s %s %s" % (self.svcadm_cmd, subcmd, self.name))
if rc != 0:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg=stdout)
self.changed = True
def service_control(self):
status = self.get_sunos_svcs_status()
# if starting or reloading, clear maintenance states
if self.action in ['start', 'reload', 'restart'] and status in ['maintenance', 'degraded']:
rc, stdout, stderr = self.execute_command("%s clear %s" % (self.svcadm_cmd, self.name))
if rc != 0:
return rc, stdout, stderr
status = self.get_sunos_svcs_status()
if status in ['maintenance', 'degraded']:
self.module.fail_json(msg="Failed to bring service out of %s status." % status)
if self.action == 'start':
subcmd = "enable -rst"
elif self.action == 'stop':
subcmd = "disable -st"
elif self.action == 'reload':
subcmd = "refresh %s" % (self.svcadm_sync)
elif self.action == 'restart' and status == 'online':
subcmd = "restart %s" % (self.svcadm_sync)
elif self.action == 'restart' and status != 'online':
subcmd = "enable -rst"
return self.execute_command("%s %s %s" % (self.svcadm_cmd, subcmd, self.name))
class AIX(Service):
"""
This is the AIX Service (SRC) manipulation class - it uses lssrc, startsrc, stopsrc
and refresh for service control. Enabling a service is currently not supported.
Would require to add an entry in the /etc/inittab file (mkitab, chitab and rmitab
commands)
"""
platform = 'AIX'
distribution = None
def get_service_tools(self):
self.lssrc_cmd = self.module.get_bin_path('lssrc', True)
if not self.lssrc_cmd:
self.module.fail_json(msg='unable to find lssrc binary')
self.startsrc_cmd = self.module.get_bin_path('startsrc', True)
if not self.startsrc_cmd:
self.module.fail_json(msg='unable to find startsrc binary')
self.stopsrc_cmd = self.module.get_bin_path('stopsrc', True)
if not self.stopsrc_cmd:
self.module.fail_json(msg='unable to find stopsrc binary')
self.refresh_cmd = self.module.get_bin_path('refresh', True)
if not self.refresh_cmd:
self.module.fail_json(msg='unable to find refresh binary')
def get_service_status(self):
status = self.get_aix_src_status()
# Only 'active' is considered properly running. Everything else is off
# or has some sort of problem.
if status == 'active':
self.running = True
else:
self.running = False
def get_aix_src_status(self):
rc, stdout, stderr = self.execute_command("%s -s %s" % (self.lssrc_cmd, self.name))
if rc == 1:
if stderr:
self.module.fail_json(msg=stderr)
else:
self.module.fail_json(msg=stdout)
lines = stdout.rstrip("\n").split("\n")
status = lines[-1].split(" ")[-1]
# status is one of: active, inoperative
return status
def service_control(self):
if self.action == 'start':
srccmd = self.startsrc_cmd
elif self.action == 'stop':
srccmd = self.stopsrc_cmd
elif self.action == 'reload':
srccmd = self.refresh_cmd
elif self.action == 'restart':
self.execute_command("%s -s %s" % (self.stopsrc_cmd, self.name))
srccmd = self.startsrc_cmd
if self.arguments and self.action == 'start':
return self.execute_command("%s -a \"%s\" -s %s" % (srccmd, self.arguments, self.name))
else:
return self.execute_command("%s -s %s" % (srccmd, self.name))
# ===========================================
# Main control flow
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(type='str', required=True),
state=dict(type='str', choices=['started', 'stopped', 'reloaded', 'restarted']),
sleep=dict(type='int'),
pattern=dict(type='str'),
enabled=dict(type='bool'),
runlevel=dict(type='str', default='default'),
arguments=dict(type='str', default='', aliases=['args']),
),
supports_check_mode=True,
required_one_of=[['state', 'enabled']],
)
service = Service(module)
module.debug('Service instantiated - platform %s' % service.platform)
if service.distribution:
module.debug('Service instantiated - distribution %s' % service.distribution)
rc = 0
out = ''
err = ''
result = {}
result['name'] = service.name
# Find service management tools
service.get_service_tools()
# Enable/disable service startup at boot if requested
if service.module.params['enabled'] is not None:
# FIXME: ideally this should detect if we need to toggle the enablement state, though
# it's unlikely the changed handler would need to fire in this case so it's a minor thing.
service.service_enable()
result['enabled'] = service.enable
if module.params['state'] is None:
# Not changing the running state, so bail out now.
result['changed'] = service.changed
module.exit_json(**result)
result['state'] = service.state
# Collect service status
if service.pattern:
service.check_ps()
else:
service.get_service_status()
# Calculate if request will change service state
service.check_service_changed()
# Modify service state if necessary
(rc, out, err) = service.modify_service_state()
if rc != 0:
if err and "Job is already running" in err:
# upstart got confused, one such possibility is MySQL on Ubuntu 12.04
# where status may report it has no start/stop links and we could
# not get accurate status
pass
else:
if err:
module.fail_json(msg=err)
else:
module.fail_json(msg=out)
result['changed'] = service.changed | service.svc_change
if service.module.params['enabled'] is not None:
result['enabled'] = service.module.params['enabled']
if not service.module.params['state']:
status = service.get_service_status()
if status is None:
result['state'] = 'absent'
elif status is False:
result['state'] = 'started'
else:
result['state'] = 'stopped'
else:
# as we may have just bounced the service, the service command may not
# report accurate state at this moment so just show what we ran
if service.module.params['state'] in ['reloaded', 'restarted', 'started']:
result['state'] = 'started'
else:
result['state'] = 'stopped'
module.exit_json(**result)
if __name__ == '__main__':
main()
|
gpl-3.0
|
alsrgv/tensorflow
|
tensorflow/python/util/memory.py
|
47
|
1845
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions related to Python memory management."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(b/115366440): Delete this function when a custom OrderedDict is added
def dismantle_ordered_dict(ordered_dict):
"""Remove reference cycle in OrderedDict `ordered_dict`.
Helpful for making sure the garbage collector doesn't need to run after
using an OrderedDict.
Args:
ordered_dict: An `OrderedDict` object to destroy. This object is unusable
after this function runs.
"""
# OrderedDict makes a simple reference loop
# and hides it in an __attribute in some Python versions. We don't need to
# throw an error if we can't find it, but if we do find it we can break the
# loop to avoid creating work for the garbage collector.
problematic_cycle = ordered_dict.__dict__.get("_OrderedDict__root", None) # pylint: disable=protected-access
if problematic_cycle:
try:
del problematic_cycle[0][:]
except TypeError:
# This is probably not one of the problematic Python versions. Continue
# with the rest of our cleanup.
pass
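# Illustrative usage sketch (names here are hypothetical):
#   import collections
#   d = collections.OrderedDict(a=1)
#   dismantle_ordered_dict(d)  # `d` must not be used after this call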
|
apache-2.0
|
sarvesh-ranjan/swift
|
test/unit/container/test_reconciler.py
|
14
|
79314
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import contextlib
import mock
import operator
import time
import unittest
import urllib
import socket
import os
import errno
import itertools
import random
from collections import defaultdict
from datetime import datetime
from swift.container import reconciler
from swift.container.server import gen_resp_headers
from swift.common.direct_client import ClientException
from swift.common import swob
from swift.common.utils import split_path, Timestamp
from test.unit import debug_logger, FakeRing, fake_http_connect
from test.unit.common.middleware.helpers import FakeSwift
def timestamp_to_last_modified(timestamp):
return datetime.fromtimestamp(
float(Timestamp(timestamp))).strftime('%Y-%m-%dT%H:%M:%S.%f')
def container_resp_headers(**kwargs):
return swob.HeaderKeyDict(gen_resp_headers(kwargs))
class FakeStoragePolicySwift(object):
def __init__(self):
self.storage_policy = defaultdict(FakeSwift)
self._mock_oldest_spi_map = {}
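# Any attribute not found on this object falls through to the default
# (None) policy's FakeSwift, so this can stand in for a plain FakeSwift.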
def __getattribute__(self, name):
try:
return object.__getattribute__(self, name)
except AttributeError:
return getattr(self.storage_policy[None], name)
def __call__(self, env, start_response):
method = env['REQUEST_METHOD']
path = env['PATH_INFO']
_, acc, cont, obj = split_path(env['PATH_INFO'], 0, 4,
rest_with_last=True)
if not obj:
policy_index = None
else:
policy_index = self._mock_oldest_spi_map.get(cont, 0)
# allow backend policy override
if 'HTTP_X_BACKEND_STORAGE_POLICY_INDEX' in env:
policy_index = int(env['HTTP_X_BACKEND_STORAGE_POLICY_INDEX'])
try:
return self.storage_policy[policy_index].__call__(
env, start_response)
except KeyError:
pass
if method == 'PUT':
resp_class = swob.HTTPCreated
else:
resp_class = swob.HTTPNotFound
self.storage_policy[policy_index].register(
method, path, resp_class, {}, '')
return self.storage_policy[policy_index].__call__(
env, start_response)
class FakeInternalClient(reconciler.InternalClient):
def __init__(self, listings):
self.app = FakeStoragePolicySwift()
self.user_agent = 'fake-internal-client'
self.request_tries = 1
self.parse(listings)
def parse(self, listings):
self.accounts = defaultdict(lambda: defaultdict(list))
for item, timestamp in listings.items():
# XXX this interface is stupid
if isinstance(timestamp, tuple):
timestamp, content_type = timestamp
else:
timestamp, content_type = timestamp, 'application/x-put'
storage_policy_index, path = item
account, container_name, obj_name = split_path(
path.encode('utf-8'), 0, 3, rest_with_last=True)
self.accounts[account][container_name].append(
(obj_name, storage_policy_index, timestamp, content_type))
for account_name, containers in self.accounts.items():
for con in containers:
self.accounts[account_name][con].sort(key=lambda t: t[0])
for account, containers in self.accounts.items():
account_listing_data = []
account_path = '/v1/%s' % account
for container, objects in containers.items():
container_path = account_path + '/' + container
container_listing_data = []
for entry in objects:
(obj_name, storage_policy_index,
timestamp, content_type) = entry
if storage_policy_index is None and not obj_name:
# empty container
continue
obj_path = container_path + '/' + obj_name
ts = Timestamp(timestamp)
headers = {'X-Timestamp': ts.normal,
'X-Backend-Timestamp': ts.internal}
# register object response
self.app.storage_policy[storage_policy_index].register(
'GET', obj_path, swob.HTTPOk, headers)
self.app.storage_policy[storage_policy_index].register(
'DELETE', obj_path, swob.HTTPNoContent, {})
# container listing entry
last_modified = timestamp_to_last_modified(timestamp)
obj_data = {
'bytes': 0,
# listing data is unicode
'name': obj_name.decode('utf-8'),
'last_modified': last_modified,
'hash': timestamp,
'content_type': content_type,
}
container_listing_data.append(obj_data)
container_listing_data.sort(key=operator.itemgetter('name'))
# register container listing response
container_headers = {}
container_qry_string = '?format=json&marker=&end_marker='
self.app.register('GET', container_path + container_qry_string,
swob.HTTPOk, container_headers,
json.dumps(container_listing_data))
if container_listing_data:
obj_name = container_listing_data[-1]['name']
# client should quote and encode marker
end_qry_string = '?format=json&marker=%s&end_marker=' % (
urllib.quote(obj_name.encode('utf-8')))
self.app.register('GET', container_path + end_qry_string,
swob.HTTPOk, container_headers,
json.dumps([]))
self.app.register('DELETE', container_path,
swob.HTTPConflict, {}, '')
# simple account listing entry
container_data = {'name': container}
account_listing_data.append(container_data)
# register account response
account_listing_data.sort(key=operator.itemgetter('name'))
account_headers = {}
account_qry_string = '?format=json&marker=&end_marker='
self.app.register('GET', account_path + account_qry_string,
swob.HTTPOk, account_headers,
json.dumps(account_listing_data))
end_qry_string = '?format=json&marker=%s&end_marker=' % (
urllib.quote(account_listing_data[-1]['name']))
self.app.register('GET', account_path + end_qry_string,
swob.HTTPOk, account_headers,
json.dumps([]))
class TestReconcilerUtils(unittest.TestCase):
def setUp(self):
self.fake_ring = FakeRing()
reconciler.direct_get_container_policy_index.reset()
def test_parse_raw_obj(self):
got = reconciler.parse_raw_obj({
'name': "2:/AUTH_bob/con/obj",
'hash': Timestamp(2017551.49350).internal,
'last_modified': timestamp_to_last_modified(2017551.49352),
'content_type': 'application/x-delete',
})
self.assertEqual(got['q_policy_index'], 2)
self.assertEqual(got['account'], 'AUTH_bob')
self.assertEqual(got['container'], 'con')
self.assertEqual(got['obj'], 'obj')
self.assertEqual(got['q_ts'], 2017551.49350)
self.assertEqual(got['q_record'], 2017551.49352)
self.assertEqual(got['q_op'], 'DELETE')
got = reconciler.parse_raw_obj({
'name': "1:/AUTH_bob/con/obj",
'hash': Timestamp(1234.20190).internal,
'last_modified': timestamp_to_last_modified(1234.20192),
'content_type': 'application/x-put',
})
self.assertEqual(got['q_policy_index'], 1)
self.assertEqual(got['account'], 'AUTH_bob')
self.assertEqual(got['container'], 'con')
self.assertEqual(got['obj'], 'obj')
self.assertEqual(got['q_ts'], 0000001234.20190)
self.assertEqual(got['q_record'], 0000001234.20192)
self.assertEqual(got['q_op'], 'PUT')
# negative test
obj_info = {
'name': "1:/AUTH_bob/con/obj",
'hash': Timestamp(1234.20190).internal,
'last_modified': timestamp_to_last_modified(1234.20192),
}
self.assertRaises(ValueError, reconciler.parse_raw_obj, obj_info)
obj_info['content_type'] = 'foo'
self.assertRaises(ValueError, reconciler.parse_raw_obj, obj_info)
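# an unsupported content type must also be rejected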
obj_info['content_type'] = 'appliation/x-post'
self.assertRaises(ValueError, reconciler.parse_raw_obj, obj_info)
self.assertRaises(ValueError, reconciler.parse_raw_obj,
{'name': 'bogus'})
self.assertRaises(ValueError, reconciler.parse_raw_obj,
{'name': '-1:/AUTH_test/container'})
self.assertRaises(ValueError, reconciler.parse_raw_obj,
{'name': 'asdf:/AUTH_test/c/obj'})
self.assertRaises(KeyError, reconciler.parse_raw_obj,
{'name': '0:/AUTH_test/c/obj',
'content_type': 'application/x-put'})
def test_get_container_policy_index(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0,
),
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=1,
),
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0,
),
]
for permutation in itertools.permutations((0, 1, 2)):
reconciler.direct_get_container_policy_index.reset()
resp_headers = [stub_resp_headers[i] for i in permutation]
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
test_values = [(info['x-storage-policy-index'],
info['x-backend-status-changed-at']) for
info in resp_headers]
self.assertEqual(oldest_spi, 0,
"oldest policy index wrong "
"for permutation %r" % test_values)
def test_get_container_policy_index_with_error(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=ts.next(),
storage_policy_index=2,
),
container_resp_headers(
status_changed_at=ts.next(),
storage_policy_index=1,
),
# old timestamp, but 500 should be ignored...
ClientException(
'Container Server blew up',
http_status=500, http_reason='Server Error',
http_headers=container_resp_headers(
status_changed_at=Timestamp(0).internal,
storage_policy_index=0,
),
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 2)
def test_get_container_policy_index_with_socket_error(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=1,
),
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0,
),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 1)
def test_get_container_policy_index_with_too_many_errors(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0,
),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
ClientException(
'Container Server blew up',
http_status=500, http_reason='Server Error',
http_headers=container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=1,
),
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, None)
def test_get_container_policy_index_for_deleted(self):
mock_path = 'swift.container.reconciler.direct_head_container'
headers = container_resp_headers(
status_changed_at=Timestamp(time.time()).internal,
storage_policy_index=1,
)
stub_resp_headers = [
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=headers,
),
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=headers,
),
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=headers,
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 1)
def test_get_container_policy_index_for_recently_deleted(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=container_resp_headers(
put_timestamp=ts.next(),
delete_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=0,
),
),
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=container_resp_headers(
put_timestamp=ts.next(),
delete_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=1,
),
),
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=container_resp_headers(
put_timestamp=ts.next(),
delete_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=2,
),
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 2)
def test_get_container_policy_index_for_recently_recreated(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
# old put, no recreate
container_resp_headers(
delete_timestamp=0,
put_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=0,
),
# recently deleted
ClientException(
'Container Not Found',
http_status=404, http_reason='Not Found',
http_headers=container_resp_headers(
put_timestamp=ts.next(),
delete_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=1,
),
),
# recently recreated
container_resp_headers(
delete_timestamp=ts.next(),
put_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=2,
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 2)
def test_get_container_policy_index_for_recently_split_brain(self):
ts = itertools.count(int(time.time()))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
# oldest put
container_resp_headers(
delete_timestamp=0,
put_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=0,
),
# old recreate
container_resp_headers(
delete_timestamp=ts.next(),
put_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=1,
),
# recently put
container_resp_headers(
delete_timestamp=0,
put_timestamp=ts.next(),
status_changed_at=ts.next(),
storage_policy_index=2,
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 1)
def test_get_container_policy_index_cache(self):
now = time.time()
ts = itertools.count(int(now))
mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0,
),
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=1,
),
container_resp_headers(
status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0,
),
]
random.shuffle(stub_resp_headers)
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
self.assertEqual(oldest_spi, 0)
# re-mock with errors
stub_resp_headers = [
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
]
with mock.patch('time.time', new=lambda: now):
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
# still cached
self.assertEqual(oldest_spi, 0)
# propel time forward
the_future = now + 31
with mock.patch('time.time', new=lambda: the_future):
with mock.patch(mock_path) as direct_head:
direct_head.side_effect = stub_resp_headers
oldest_spi = reconciler.direct_get_container_policy_index(
self.fake_ring, 'a', 'con')
# expired
self.assertEqual(oldest_spi, None)
def test_direct_delete_container_entry(self):
mock_path = 'swift.common.direct_client.http_connect'
connect_args = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
connect_args.append({
'ipaddr': ipaddr, 'port': port, 'device': device,
'partition': partition, 'method': method, 'path': path,
'headers': headers, 'query_string': query_string})
x_timestamp = Timestamp(time.time())
headers = {'x-timestamp': x_timestamp.internal}
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
with mock.patch(mock_path, fake_hc):
reconciler.direct_delete_container_entry(
self.fake_ring, 'a', 'c', 'o', headers=headers)
self.assertEqual(len(connect_args), 3)
for args in connect_args:
self.assertEqual(args['method'], 'DELETE')
self.assertEqual(args['path'], '/a/c/o')
self.assertEqual(args['headers'].get('x-timestamp'),
headers['x-timestamp'])
def test_direct_delete_container_entry_with_errors(self):
# setup mock direct_delete
mock_path = \
'swift.container.reconciler.direct_delete_container_object'
stub_resp = [
None,
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
ClientException(
'Container Server blew up',
'10.0.0.12', 6001, 'sdj', 404, 'Not Found'
),
]
mock_direct_delete = mock.MagicMock()
mock_direct_delete.side_effect = stub_resp
with contextlib.nested(
mock.patch(mock_path, mock_direct_delete),
mock.patch('eventlet.greenpool.DEBUG', False),
):
rv = reconciler.direct_delete_container_entry(
self.fake_ring, 'a', 'c', 'o')
self.assertEqual(rv, None)
self.assertEqual(len(mock_direct_delete.mock_calls), 3)
def test_add_to_reconciler_queue(self):
mock_path = 'swift.common.direct_client.http_connect'
connect_args = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
connect_args.append({
'ipaddr': ipaddr, 'port': port, 'device': device,
'partition': partition, 'method': method, 'path': path,
'headers': headers, 'query_string': query_string})
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
with mock.patch(mock_path, fake_hc):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'DELETE')
self.assertTrue(ret)
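# queue containers are hourly buckets: 5948918.63946 // 3600 * 3600 == 5947200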
self.assertEqual(ret, str(int(5948918.63946 // 3600 * 3600)))
self.assertEqual(len(connect_args), 3)
connect_args.sort(key=lambda a: (a['ipaddr'], a['port']))
required_headers = ('x-content-type', 'x-etag')
for args in connect_args:
self.assertEqual(args['headers']['X-Timestamp'], '5948918.63946')
self.assertEqual(args['path'],
'/.misplaced_objects/5947200/17:/a/c/o')
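# entry names encode the misplaced policy and path: <policy_index>:/<acct>/<cont>/<obj>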
self.assertEqual(args['headers']['X-Content-Type'],
'application/x-delete')
for header in required_headers:
self.assert_(header in args['headers'],
'%r was missing request headers %r' % (
header, args['headers']))
def test_add_to_reconciler_queue_force(self):
mock_path = 'swift.common.direct_client.http_connect'
connect_args = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
connect_args.append({
'ipaddr': ipaddr, 'port': port, 'device': device,
'partition': partition, 'method': method, 'path': path,
'headers': headers, 'query_string': query_string})
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
now = time.time()
with contextlib.nested(
mock.patch(mock_path, fake_hc),
mock.patch('swift.container.reconciler.time.time',
lambda: now),
):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'PUT',
force=True)
self.assertTrue(ret)
self.assertEqual(ret, str(int(5948918.63946 // 3600 * 3600)))
self.assertEqual(len(connect_args), 3)
connect_args.sort(key=lambda a: (a['ipaddr'], a['port']))
required_headers = ('x-size', 'x-content-type')
for args in connect_args:
self.assertEqual(args['headers']['X-Timestamp'],
Timestamp(now).internal)
self.assertEqual(args['headers']['X-Etag'], '5948918.63946')
self.assertEqual(args['path'],
'/.misplaced_objects/5947200/17:/a/c/o')
for header in required_headers:
self.assert_(header in args['headers'],
'%r was missing request headers %r' % (
header, args['headers']))
def test_add_to_reconciler_queue_fails(self):
mock_path = 'swift.common.direct_client.http_connect'
fake_connects = [fake_http_connect(200),
fake_http_connect(200, raise_timeout_exc=True),
fake_http_connect(507)]
def fake_hc(*a, **kw):
return fake_connects.pop()(*a, **kw)
with mock.patch(mock_path, fake_hc):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'PUT')
self.assertFalse(ret)
def test_add_to_reconciler_queue_socket_error(self):
mock_path = 'swift.common.direct_client.http_connect'
exc = socket.error(errno.ECONNREFUSED,
os.strerror(errno.ECONNREFUSED))
fake_connects = [fake_http_connect(200),
fake_http_connect(200, raise_timeout_exc=True),
fake_http_connect(500, raise_exc=exc)]
def fake_hc(*a, **kw):
return fake_connects.pop()(*a, **kw)
with mock.patch(mock_path, fake_hc):
ret = reconciler.add_to_reconciler_queue(
self.fake_ring, 'a', 'c', 'o', 17, 5948918.63946, 'DELETE')
self.assertFalse(ret)
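# query string the internal client uses when walking a listing page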
def listing_qs(marker):
return "?format=json&marker=%s&end_marker=" % \
urllib.quote(marker.encode('utf-8'))
class TestReconciler(unittest.TestCase):
maxDiff = None
def setUp(self):
self.logger = debug_logger()
conf = {}
with mock.patch('swift.container.reconciler.InternalClient'):
self.reconciler = reconciler.ContainerReconciler(conf)
self.reconciler.logger = self.logger
self.start_interval = int(time.time() // 3600 * 3600)
self.current_container_path = '/v1/.misplaced_objects/%d' % (
self.start_interval) + listing_qs('')
def _mock_listing(self, objects):
self.reconciler.swift = FakeInternalClient(objects)
self.fake_swift = self.reconciler.swift.app
def _mock_oldest_spi(self, container_oldest_spi_map):
self.fake_swift._mock_oldest_spi_map = container_oldest_spi_map
def _run_once(self):
"""
Helper method to run the reconciler once with appropriate direct-client
mocks in place.
Returns the list of direct-deleted container entries in the format
[(acc1, con1, obj1), ...]
"""
def mock_oldest_spi(ring, account, container_name):
return self.fake_swift._mock_oldest_spi_map.get(container_name, 0)
items = {
'direct_get_container_policy_index': mock_oldest_spi,
'direct_delete_container_entry': mock.DEFAULT,
}
mock_time_iter = itertools.count(self.start_interval)
with mock.patch.multiple(reconciler, **items) as mocks:
self.mock_delete_container_entry = \
mocks['direct_delete_container_entry']
with mock.patch('time.time', mock_time_iter.next):
self.reconciler.run_once()
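# each mock call is (name, args, kwargs); args[1:4] is (account, container, obj)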
return [c[1][1:4] for c in
mocks['direct_delete_container_entry'].mock_calls]
def test_invalid_queue_name(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/bogus"): 3618.84187,
})
deleted_container_entries = self._run_once()
# we try to find something useful
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('bogus'))])
# but only get the bogus record
self.assertEqual(self.reconciler.stats['invalid_record'], 1)
# and just leave it on the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertFalse(deleted_container_entries)
def test_invalid_queue_name_marches_onward(self):
# there's something useful there on the queue
self._mock_listing({
(None, "/.misplaced_objects/3600/00000bogus"): 3600.0000,
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 1}) # already in the right spot!
deleted_container_entries = self._run_once()
# we get all the queue entries we can
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# and one is garbage
self.assertEqual(self.reconciler.stats['invalid_record'], 1)
# but the other is workable
self.assertEqual(self.reconciler.stats['noop_object'], 1)
# so pop the queue for that one
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_queue_name_with_policy_index_delimiter_in_name(self):
q_path = '.misplaced_objects/3600'
obj_path = "AUTH_bob/c:sneaky/o1:sneaky"
# there's something useful there on the queue
self._mock_listing({
(None, "/%s/1:/%s" % (q_path, obj_path)): 3618.84187,
(1, '/%s' % obj_path): 3618.84187,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# we find the misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/%s' % obj_path))])
# move it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/%s' % obj_path),
('DELETE', '/v1/%s' % obj_path)])
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/%s' % obj_path),
('PUT', '/v1/%s' % obj_path)])
# clean up the source
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=1).internal)
# and pop the queue for that one
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, [(
'.misplaced_objects', '3600', '1:/%s' % obj_path)])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_unable_to_direct_get_oldest_storage_policy(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
})
# the reconciler gets "None" if we can't quorum the container
self._mock_oldest_spi({'c': None})
deleted_container_entries = self._run_once()
# we look for misplaced objects
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# but can't really say where to go looking
self.assertEqual(self.reconciler.stats['unavailable_container'], 1)
# we don't clean up anything
self.assertEqual(self.reconciler.stats['cleanup_object'], 0)
# and we definitely should not pop_queue
self.assertFalse(deleted_container_entries)
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# moves it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'),
('DELETE', '/v1/AUTH_bob/c/o1')])
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'),
('PUT', '/v1/AUTH_bob/c/o1')])
put_headers = self.fake_swift.storage_policy[0].headers[1]
# we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=2))
# cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=1))
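        # (illustrative aside, not part of the original test: the offsets
        # let the reconciler act at the object's own timestamp while still
        # superseding the original row - the DELETE at offset 1 beats the
        # client write, the PUT at offset 2 beats both, and retries stay
        # idempotent)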
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_the_other_direction(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/0:/AUTH_bob/c/o1"): 3618.84187,
(0, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 1})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('0:/AUTH_bob/c/o1'))])
# moves it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('GET', '/v1/AUTH_bob/c/o1'), # 2
('DELETE', '/v1/AUTH_bob/c/o1')]) # 4
delete_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
put_headers = self.fake_swift.storage_policy[1].headers[1]
# we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=2).internal)
# cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=1).internal)
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '0:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_with_unicode_and_spaces(self):
# the "name" in listings and the unicode string passed to all
# functions where we call them with (account, container, obj)
obj_name = u"AUTH_bob/c \u062a/o1 \u062a"
# anytime we talk about a call made to swift for a path
obj_path = obj_name.encode('utf-8')
# this mock expects unquoted unicode because it handles container
# listings as well as paths
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/%s" % obj_name): 3618.84187,
(1, "/%s" % obj_name): 3618.84187,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
# listing_qs encodes and quotes - so give it name
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/%s' % obj_name))])
# moves it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
# these calls are to the real path
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/%s' % obj_path), # 2
('DELETE', '/v1/%s' % obj_path)]) # 4
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/%s' % obj_path), # 1
('PUT', '/v1/%s' % obj_path)]) # 3
put_headers = self.fake_swift.storage_policy[0].headers[1]
# we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=2).internal)
# cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3618.84187, offset=1).internal)
self.assertEqual(
delete_headers.get('X-Backend-Storage-Policy-Index'), '1')
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
        # this mock received the name; it's encoded down in buffered_http
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/%s' % obj_name)])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_delete(self):
q_ts = time.time()
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): (
Timestamp(q_ts).internal, 'application/x-delete'),
# object exists in "correct" storage policy - slightly older
(0, "/AUTH_bob/c/o1"): Timestamp(q_ts - 1).internal,
})
self._mock_oldest_spi({'c': 0})
# the tombstone exists in the enqueued storage policy
self.fake_swift.storage_policy[1].register(
'GET', '/v1/AUTH_bob/c/o1', swob.HTTPNotFound,
{'X-Backend-Timestamp': Timestamp(q_ts).internal})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# delete it
self.assertEqual(self.reconciler.stats['delete_attempt'], 1)
self.assertEqual(self.reconciler.stats['delete_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'),
('DELETE', '/v1/AUTH_bob/c/o1')])
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'),
('DELETE', '/v1/AUTH_bob/c/o1')])
reconcile_headers = self.fake_swift.storage_policy[0].headers[1]
# we DELETE the object in the right place with q_ts + offset 2
self.assertEqual(reconcile_headers.get('X-Timestamp'),
Timestamp(q_ts, offset=2).internal)
# cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(q_ts, offset=1))
# and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_enqueued_for_the_correct_dest_noop(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3618.84187,
(1, "/AUTH_bob/c/o1"): 3618.84187,
})
self._mock_oldest_spi({'c': 1}) # already in the right spot!
deleted_container_entries = self._run_once()
# nothing to see here
self.assertEqual(self.reconciler.stats['noop_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# so we just pop the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_src_object_newer_than_queue_entry(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3600.123456,
(1, '/AUTH_bob/c/o1'): 3600.234567, # slightly newer
})
self._mock_oldest_spi({'c': 0}) # destination
# turn the crank
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# proceed with the move
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'), # 2
('DELETE', '/v1/AUTH_bob/c/o1')]) # 4
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
# .. with source timestamp + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3600.234567, offset=2))
# src object is cleaned up
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# ... with q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3600.123456, offset=1))
# and queue is popped
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_src_object_older_than_queue_entry(self):
# should be some sort of retry case
q_ts = time.time()
container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container
self._mock_listing({
(None, "/%s/1:/AUTH_bob/c/o1" % q_path): q_ts,
(1, '/AUTH_bob/c/o1'): q_ts - 0.00001, # slightly older
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', '/v1/%s' % q_path + listing_qs('')),
('GET', '/v1/%s' % q_path +
listing_qs('1:/AUTH_bob/c/o1')),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# but no object copy is attempted
self.assertEqual(self.reconciler.stats['unavailable_source'], 1)
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')])
# src object is un-modified
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# queue is un-changed, we'll have to retry
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_src_object_unavailable_with_slightly_newer_tombstone(self):
# should be some sort of retry case
q_ts = float(Timestamp(time.time()))
container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container
self._mock_listing({
(None, "/%s/1:/AUTH_bob/c/o1" % q_path): q_ts,
})
self._mock_oldest_spi({'c': 0})
self.fake_swift.storage_policy[1].register(
'GET', '/v1/AUTH_bob/c/o1', swob.HTTPNotFound,
{'X-Backend-Timestamp': Timestamp(q_ts, offset=2).internal})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', '/v1/%s' % q_path + listing_qs('')),
('GET', '/v1/%s' % q_path +
listing_qs('1:/AUTH_bob/c/o1')),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# but no object copy is attempted
self.assertEqual(self.reconciler.stats['unavailable_source'], 1)
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')])
# src object is un-modified
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# queue is un-changed, we'll have to retry
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_src_object_unavailable_server_error(self):
# should be some sort of retry case
q_ts = float(Timestamp(time.time()))
container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container
self._mock_listing({
(None, "/%s/1:/AUTH_bob/c/o1" % q_path): q_ts,
})
self._mock_oldest_spi({'c': 0})
self.fake_swift.storage_policy[1].register(
'GET', '/v1/AUTH_bob/c/o1', swob.HTTPServiceUnavailable, {})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', '/v1/%s' % q_path + listing_qs('')),
('GET', '/v1/%s' % q_path +
listing_qs('1:/AUTH_bob/c/o1')),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# but no object copy is attempted
self.assertEqual(self.reconciler.stats['unavailable_source'], 1)
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')])
# src object is un-modified
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# queue is un-changed, we'll have to retry
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_fails_cleanup(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3600.123456,
(1, '/AUTH_bob/c/o1'): 3600.123457, # slightly newer
})
self._mock_oldest_spi({'c': 0}) # destination
# make the DELETE blow up
self.fake_swift.storage_policy[1].register(
'DELETE', '/v1/AUTH_bob/c/o1', swob.HTTPServiceUnavailable, {})
# turn the crank
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# proceed with the move
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'), # 2
('DELETE', '/v1/AUTH_bob/c/o1')]) # 4
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
# .. with source timestamp + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(3600.123457, offset=2))
# we try to cleanup
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
# ... with q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3600.12346, offset=1))
# but cleanup fails!
self.assertEqual(self.reconciler.stats['cleanup_failed'], 1)
# so the queue is not popped
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
# and we'll have to retry
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_src_object_is_forever_gone(self):
# oh boy, hate to be here - this is an oldy
q_ts = self.start_interval - self.reconciler.reclaim_age - 1
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): q_ts,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# but it's gone :\
self.assertEqual(self.reconciler.stats['lost_source'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')])
# gah, look, even if it was out there somewhere - we've been at this
# two weeks and haven't found it. We can't just keep looking forever,
# so... we're done
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
# dunno if this is helpful, but FWIW we don't throw tombstones?
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
self.assertEqual(self.reconciler.stats['success'], 1) # lol
def test_object_move_dest_already_moved(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3679.2019,
(1, "/AUTH_bob/c/o1"): 3679.2019,
(0, "/AUTH_bob/c/o1"): 3679.2019,
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# we look for misplaced objects
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# but we found it already in the right place!
self.assertEqual(self.reconciler.stats['found_object'], 1)
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
# so no attempt to read the source is made, but we do cleanup
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('DELETE', '/v1/AUTH_bob/c/o1')])
delete_headers = self.fake_swift.storage_policy[1].headers[0]
# rather we just clean up the dark matter
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3679.2019, offset=1))
# and wipe our hands of it
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_dest_object_newer_than_queue_entry(self):
self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3679.2019,
(1, "/AUTH_bob/c/o1"): 3679.2019,
(0, "/AUTH_bob/c/o1"): 3679.2019 + 0.00001, # slightly newer
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# we look for misplaced objects...
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('3600')),
('GET', '/v1/.misplaced_objects/3600' + listing_qs('')),
('GET', '/v1/.misplaced_objects/3600' +
listing_qs('1:/AUTH_bob/c/o1'))])
# but we found it already in the right place!
self.assertEqual(self.reconciler.stats['found_object'], 1)
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1')])
        # so no attempt to read the source is made, but we do cleanup
self.assertEqual(self.reconciler.stats['copy_attempt'], 0)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('DELETE', '/v1/AUTH_bob/c/o1')])
delete_headers = self.fake_swift.storage_policy[1].headers[0]
# rather we just clean up the dark matter
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(3679.2019, offset=1))
        # and since we cleaned up the old object, this counts as done
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '3600', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_dest_object_older_than_queue_entry(self):
self._mock_listing({
(None, "/.misplaced_objects/36000/1:/AUTH_bob/c/o1"): 36123.38393,
(1, "/AUTH_bob/c/o1"): 36123.38393,
(0, "/AUTH_bob/c/o1"): 36123.38393 - 0.00001, # slightly older
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
# we found a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('36000')),
('GET', '/v1/.misplaced_objects/36000' + listing_qs('')),
('GET', '/v1/.misplaced_objects/36000' +
listing_qs('1:/AUTH_bob/c/o1'))])
# and since our version is *newer*, we overwrite
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(self.reconciler.stats['copy_success'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1'), # 2
('DELETE', '/v1/AUTH_bob/c/o1')]) # 4
delete_headers = self.fake_swift.storage_policy[1].headers[1]
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
# ... with a q_ts + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(36123.38393, offset=2))
# then clean the dark matter
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# ... with a q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'),
Timestamp(36123.38393, offset=1))
# and pop the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries,
[('.misplaced_objects', '36000', '1:/AUTH_bob/c/o1')])
self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_move_put_fails(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/36000/1:/AUTH_bob/c/o1"): 36123.383925,
(1, "/AUTH_bob/c/o1"): 36123.383925,
})
self._mock_oldest_spi({'c': 0})
# make the put to dest fail!
self.fake_swift.storage_policy[0].register(
'PUT', '/v1/AUTH_bob/c/o1', swob.HTTPServiceUnavailable, {})
# turn the crank
deleted_container_entries = self._run_once()
# we find a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('36000')),
('GET', '/v1/.misplaced_objects/36000' + listing_qs('')),
('GET', '/v1/.misplaced_objects/36000' +
listing_qs('1:/AUTH_bob/c/o1'))])
# and try to move it, but it fails
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')]) # 2
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
put_headers = self.fake_swift.storage_policy[0].headers[1]
# ...with q_ts + offset 2 (20-microseconds)
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(36123.383925, offset=2))
# but it failed
self.assertEqual(self.reconciler.stats['copy_success'], 0)
self.assertEqual(self.reconciler.stats['copy_failed'], 1)
# ... so we don't clean up the source
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# and we don't pop the queue
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['unhandled_errors'], 0)
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_put_blows_up_crazy_town(self):
# setup the cluster
self._mock_listing({
(None, "/.misplaced_objects/36000/1:/AUTH_bob/c/o1"): 36123.383925,
(1, "/AUTH_bob/c/o1"): 36123.383925,
})
self._mock_oldest_spi({'c': 0})
# make the put to dest blow up crazy town
def blow_up(*args, **kwargs):
raise Exception('kaboom!')
self.fake_swift.storage_policy[0].register(
'PUT', '/v1/AUTH_bob/c/o1', blow_up, {})
# turn the crank
deleted_container_entries = self._run_once()
# we find a misplaced object
self.assertEqual(self.reconciler.stats['misplaced_object'], 1)
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs('36000')),
('GET', '/v1/.misplaced_objects/36000' + listing_qs('')),
('GET', '/v1/.misplaced_objects/36000' +
listing_qs('1:/AUTH_bob/c/o1'))])
# and attempt to move it
self.assertEqual(self.reconciler.stats['copy_attempt'], 1)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_bob/c/o1')]) # 2
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_bob/c/o1'), # 1
('PUT', '/v1/AUTH_bob/c/o1')]) # 3
put_headers = self.fake_swift.storage_policy[0].headers[1]
# ...with q_ts + offset 2 (20-microseconds)
self.assertEqual(put_headers.get('X-Timestamp'),
Timestamp(36123.383925, offset=2))
# but it blows up hard
self.assertEqual(self.reconciler.stats['unhandled_error'], 1)
# so we don't cleanup
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 0)
# and we don't pop the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 0)
self.assertEqual(deleted_container_entries, [])
self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_no_such_object_no_tombstone_recent(self):
q_ts = float(Timestamp(time.time()))
container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container
self._mock_listing({
(None, "/%s/1:/AUTH_jeb/c/o1" % q_path): q_ts
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
self.assertEqual(
self.fake_swift.calls,
[('GET', '/v1/.misplaced_objects/%s' % container + listing_qs('')),
('GET', '/v1/.misplaced_objects/%s' % container +
listing_qs('1:/AUTH_jeb/c/o1')),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_jeb/c/o1')],
)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_jeb/c/o1')],
)
# the queue entry is recent enough that there could easily be
# tombstones on offline nodes or something, so we'll just leave it
# here and try again later
self.assertEqual(deleted_container_entries, [])
def test_object_move_no_such_object_no_tombstone_ancient(self):
queue_ts = float(Timestamp(time.time())) - \
self.reconciler.reclaim_age * 1.1
container = str(int(queue_ts // 3600 * 3600))
self._mock_listing({
(
None, "/.misplaced_objects/%s/1:/AUTH_jeb/c/o1" % container
): queue_ts
})
self._mock_oldest_spi({'c': 0})
deleted_container_entries = self._run_once()
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container)),
('GET', '/v1/.misplaced_objects/%s' % container + listing_qs('')),
('GET', '/v1/.misplaced_objects/%s' % container +
listing_qs('1:/AUTH_jeb/c/o1'))])
self.assertEqual(
self.fake_swift.storage_policy[0].calls,
[('HEAD', '/v1/AUTH_jeb/c/o1')],
)
self.assertEqual(
self.fake_swift.storage_policy[1].calls,
[('GET', '/v1/AUTH_jeb/c/o1')],
)
# the queue entry is old enough that the tombstones, if any, have
# probably been reaped, so we'll just give up
self.assertEqual(
deleted_container_entries,
[('.misplaced_objects', container, '1:/AUTH_jeb/c/o1')])
def test_delete_old_empty_queue_containers(self):
ts = time.time() - self.reconciler.reclaim_age * 1.1
container = str(int(ts // 3600 * 3600))
older_ts = ts - 3600
older_container = str(int(older_ts // 3600 * 3600))
self._mock_listing({
(None, "/.misplaced_objects/%s/" % container): 0,
(None, "/.misplaced_objects/%s/something" % older_container): 0,
})
deleted_container_entries = self._run_once()
self.assertEqual(deleted_container_entries, [])
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(container)),
('GET', '/v1/.misplaced_objects/%s' % container + listing_qs('')),
('DELETE', '/v1/.misplaced_objects/%s' % container),
('GET', '/v1/.misplaced_objects/%s' % older_container +
listing_qs('')),
('GET', '/v1/.misplaced_objects/%s' % older_container +
listing_qs('something'))])
self.assertEqual(self.reconciler.stats['invalid_record'], 1)
def test_iter_over_old_containers_in_reverse(self):
step = reconciler.MISPLACED_OBJECTS_CONTAINER_DIVISOR
now = self.start_interval
containers = []
for i in range(10):
container_ts = int(now - step * i)
container_name = str(container_ts // 3600 * 3600)
containers.append(container_name)
# add some old containers too
now -= self.reconciler.reclaim_age
old_containers = []
for i in range(10):
container_ts = int(now - step * i)
container_name = str(container_ts // 3600 * 3600)
old_containers.append(container_name)
containers.sort()
old_containers.sort()
all_containers = old_containers + containers
self._mock_listing(dict((
(None, "/.misplaced_objects/%s/" % container), 0
) for container in all_containers))
deleted_container_entries = self._run_once()
self.assertEqual(deleted_container_entries, [])
last_container = all_containers[-1]
account_listing_calls = [
('GET', '/v1/.misplaced_objects' + listing_qs('')),
('GET', '/v1/.misplaced_objects' + listing_qs(last_container)),
]
new_container_calls = [
('GET', '/v1/.misplaced_objects/%s' % container +
listing_qs('')) for container in reversed(containers)
        ][1:]  # current_container gets skipped the second time around...
old_container_listings = [
('GET', '/v1/.misplaced_objects/%s' % container +
listing_qs('')) for container in reversed(old_containers)
]
old_container_deletes = [
('DELETE', '/v1/.misplaced_objects/%s' % container)
for container in reversed(old_containers)
]
old_container_calls = list(itertools.chain(*zip(
old_container_listings, old_container_deletes)))
self.assertEqual(self.fake_swift.calls,
[('GET', self.current_container_path)] +
account_listing_calls + new_container_calls +
old_container_calls)
def test_error_in_iter_containers(self):
self._mock_listing({})
# make the listing return an error
self.fake_swift.storage_policy[None].register(
'GET', '/v1/.misplaced_objects' + listing_qs(''),
swob.HTTPServiceUnavailable, {})
self._run_once()
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs(''))])
self.assertEqual(self.reconciler.stats, {})
errors = self.reconciler.logger.get_lines_for_level('error')
self.assertEqual(errors, [
'Error listing containers in account '
'.misplaced_objects (Unexpected response: '
'503 Service Unavailable)'])
def test_unhandled_exception_in_reconcile(self):
self._mock_listing({})
# make the listing blow up
def blow_up(*args, **kwargs):
raise Exception('kaboom!')
self.fake_swift.storage_policy[None].register(
'GET', '/v1/.misplaced_objects' + listing_qs(''),
blow_up, {})
self._run_once()
self.assertEqual(
self.fake_swift.calls,
[('GET', self.current_container_path),
('GET', '/v1/.misplaced_objects' + listing_qs(''))])
self.assertEqual(self.reconciler.stats, {})
errors = self.reconciler.logger.get_lines_for_level('error')
self.assertEqual(errors,
['Unhandled Exception trying to reconcile: '])
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
XiaosongWei/crosswalk-test-suite
|
apptools/apptools-linux-tests/apptools/build_path.py
|
18
|
2180
|
#!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Yun, Liu<[email protected]>
import unittest
import os
import comm
import shutil
class TestCrosswalkApptoolsFunctions(unittest.TestCase):
def test_build_path_normal(self):
comm.setUp()
comm.create(self)
if os.path.exists("pkg"):
shutil.rmtree("pkg")
os.mkdir("pkg")
os.chdir('pkg')
buildcmd = "crosswalk-app build " + comm.TEMP_DATA_PATH + comm.TEST_PROJECT_COMM
comm.build(self, buildcmd)
comm.run(self)
os.chdir('../')
shutil.rmtree("pkg")
comm.cleanTempData(comm.TEST_PROJECT_COMM)
comm.delete()
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
bgxavier/nova
|
nova/tests/unit/virt/libvirt/test_designer.py
|
97
|
1241
|
# Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import test
from nova.virt.libvirt import designer
class DesignerTestCase(test.NoDBTestCase):
def test_set_vif_bandwidth_config_no_extra_specs(self):
        # Test that set_vif_bandwidth_config does not raise a KeyError when
        # its second parameter has no 'extra_specs' field.
try:
            # The conf will never be used, so we can pass 'None'.
            # An empty dictionary is fine: all that matters is that there
            # is no 'extra_specs' field.
designer.set_vif_bandwidth_config(None, {})
except KeyError as e:
self.fail('KeyError: %s' % e)
|
apache-2.0
|
joopert/home-assistant
|
homeassistant/components/emulated_hue/upnp.py
|
4
|
5016
|
"""Support UPNP discovery method that mimics Hue hubs."""
import threading
import socket
import logging
import select
from aiohttp import web
from homeassistant import core
from homeassistant.components.http import HomeAssistantView
_LOGGER = logging.getLogger(__name__)
class DescriptionXmlView(HomeAssistantView):
"""Handles requests for the description.xml file."""
url = "/description.xml"
name = "description:xml"
requires_auth = False
def __init__(self, config):
"""Initialize the instance of the view."""
self.config = config
@core.callback
def get(self, request):
"""Handle a GET request."""
xml_template = """<?xml version="1.0" encoding="UTF-8" ?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
<specVersion>
<major>1</major>
<minor>0</minor>
</specVersion>
<URLBase>http://{0}:{1}/</URLBase>
<device>
<deviceType>urn:schemas-upnp-org:device:Basic:1</deviceType>
<friendlyName>HASS Bridge ({0})</friendlyName>
<manufacturer>Royal Philips Electronics</manufacturer>
<manufacturerURL>http://www.philips.com</manufacturerURL>
<modelDescription>Philips hue Personal Wireless Lighting</modelDescription>
<modelName>Philips hue bridge 2015</modelName>
<modelNumber>BSB002</modelNumber>
<modelURL>http://www.meethue.com</modelURL>
<serialNumber>1234</serialNumber>
<UDN>uuid:2f402f80-da50-11e1-9b23-001788255acc</UDN>
</device>
</root>
"""
resp_text = xml_template.format(
self.config.advertise_ip, self.config.advertise_port
)
return web.Response(text=resp_text, content_type="text/xml")
class UPNPResponderThread(threading.Thread):
"""Handle responding to UPNP/SSDP discovery requests."""
_interrupted = False
def __init__(
self,
host_ip_addr,
listen_port,
upnp_bind_multicast,
advertise_ip,
advertise_port,
):
"""Initialize the class."""
threading.Thread.__init__(self)
self.host_ip_addr = host_ip_addr
self.listen_port = listen_port
self.upnp_bind_multicast = upnp_bind_multicast
# Note that the double newline at the end of
# this string is required per the SSDP spec
resp_template = """HTTP/1.1 200 OK
CACHE-CONTROL: max-age=60
EXT:
LOCATION: http://{0}:{1}/description.xml
SERVER: FreeRTOS/6.0.5, UPnP/1.0, IpBridge/0.1
hue-bridgeid: 1234
ST: urn:schemas-upnp-org:device:basic:1
USN: uuid:Socket-1_0-221438K0100073::urn:schemas-upnp-org:device:basic:1

"""
self.upnp_response = (
resp_template.format(advertise_ip, advertise_port)
.replace("\n", "\r\n")
.encode("utf-8")
)
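        # (clarifying note: SSDP replies are HTTP-over-UDP, so each header
        # line must be CRLF-terminated - hence the replace() above)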
def run(self):
"""Run the server."""
# Listen for UDP port 1900 packets sent to SSDP multicast address
ssdp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ssdp_socket.setblocking(False)
# Required for receiving multicast
ssdp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
ssdp_socket.setsockopt(
socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.host_ip_addr)
)
ssdp_socket.setsockopt(
socket.SOL_IP,
socket.IP_ADD_MEMBERSHIP,
socket.inet_aton("239.255.255.250") + socket.inet_aton(self.host_ip_addr),
)
if self.upnp_bind_multicast:
ssdp_socket.bind(("", 1900))
else:
ssdp_socket.bind((self.host_ip_addr, 1900))
while True:
if self._interrupted:
clean_socket_close(ssdp_socket)
return
try:
read, _, _ = select.select([ssdp_socket], [], [ssdp_socket], 2)
if ssdp_socket in read:
data, addr = ssdp_socket.recvfrom(1024)
else:
# most likely the timeout, so check for interrupt
continue
except socket.error as ex:
if self._interrupted:
clean_socket_close(ssdp_socket)
return
                _LOGGER.error(
                    "UPNP Responder socket exception occurred: %s", ex
                )
# without the following continue, a second exception occurs
# because the data object has not been initialized
continue
if "M-SEARCH" in data.decode("utf-8", errors="ignore"):
# SSDP M-SEARCH method received, respond to it with our info
resp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
resp_socket.sendto(self.upnp_response, addr)
resp_socket.close()
def stop(self):
"""Stop the server."""
# Request for server
self._interrupted = True
self.join()
def clean_socket_close(sock):
"""Close a socket connection and logs its closure."""
_LOGGER.info("UPNP responder shutting down.")
sock.close()
|
apache-2.0
|
Pflanzgurke/glimpse_client
|
3rdparty/breakpad/src/tools/gyp/test/win/gyptest-command-quote.py
|
296
|
1282
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure the program in a command can be a called batch file, or an
application in the path. Specifically, this means not quoting something like
"call x.bat", lest the shell look for a program named "call x.bat", rather
than calling "x.bat".
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'command-quote'
test.run_gyp('command-quote.gyp', chdir=CHDIR)
test.build('command-quote.gyp', 'test_batch', chdir=CHDIR)
test.build('command-quote.gyp', 'test_call_separate', chdir=CHDIR)
test.build('command-quote.gyp', 'test_with_double_quotes', chdir=CHDIR)
test.build('command-quote.gyp', 'test_with_single_quotes', chdir=CHDIR)
# We confirm that this fails because other generators don't handle spaces in
# inputs so it's preferable to not have it work here.
test.build('command-quote.gyp', 'test_with_spaces', chdir=CHDIR, status=1)
CHDIR = 'command-quote/subdir/and/another'
test.run_gyp('in-subdir.gyp', chdir=CHDIR)
test.build('in-subdir.gyp', 'test_batch_depth', chdir=CHDIR)
test.pass_test()
|
bsd-3-clause
|
ESS-LLP/erpnext
|
erpnext/education/doctype/course_schedule/course_schedule.py
|
24
|
1722
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
class CourseSchedule(Document):
def validate(self):
self.instructor_name = frappe.db.get_value("Instructor", self.instructor, "instructor_name")
self.set_title()
self.validate_course()
self.validate_date()
self.validate_overlap()
def set_title(self):
"""Set document Title"""
self.title = self.course + " by " + (self.instructor_name if self.instructor_name else self.instructor)
def validate_course(self):
group_based_on, course = frappe.db.get_value("Student Group", self.student_group, ["group_based_on", "course"])
if group_based_on == "Course":
self.course = course
def validate_date(self):
"""Validates if from_time is greater than to_time"""
if self.from_time > self.to_time:
frappe.throw(_("From Time cannot be greater than To Time."))
def validate_overlap(self):
"""Validates overlap for Student Group, Instructor, Room"""
from erpnext.education.utils import validate_overlap_for
		# Validate overlapping course schedules.
if self.student_group:
validate_overlap_for(self, "Course Schedule", "student_group")
validate_overlap_for(self, "Course Schedule", "instructor")
validate_overlap_for(self, "Course Schedule", "room")
		# Validate overlapping assessment schedules.
if self.student_group:
validate_overlap_for(self, "Assessment Plan", "student_group")
validate_overlap_for(self, "Assessment Plan", "room")
validate_overlap_for(self, "Assessment Plan", "supervisor", self.instructor)
|
gpl-3.0
|
hzc1126/dell_gallo_kernel_ics
|
tools/perf/scripts/python/sctop.py
|
895
|
1936
|
# system call top
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
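# Example invocations (illustrative, matching the argument handling below):
#
#   perf trace -s sctop.py               # all comms, 3 second refresh
#   perf trace -s sctop.py 5             # all comms, 5 second refresh
#   perf trace -s sctop.py firefox 5     # only syscalls made by "firefox"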
import thread
import time
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf trace -s syscall-counts.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40d %10d\n" % (id, val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
|
gpl-2.0
|
andreaskern/bitcoin
|
qa/rpc-tests/test_framework/coverage.py
|
20
|
2931
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
This module contains utilities for doing coverage analysis on the RPC
interface.
It provides a way to track which RPC commands are exercised during
testing.
"""
import os
REFERENCE_FILENAME = 'rpc_interface.txt'
class AuthServiceProxyWrapper(object):
"""
An object that wraps AuthServiceProxy to record specific RPC calls.
"""
def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
"""
Kwargs:
auth_service_proxy_instance (AuthServiceProxy): the instance
being wrapped.
coverage_logfile (str): if specified, write each service_name
out to a file when called.
"""
self.auth_service_proxy_instance = auth_service_proxy_instance
self.coverage_logfile = coverage_logfile
def __getattr__(self, *args, **kwargs):
return_val = self.auth_service_proxy_instance.__getattr__(
*args, **kwargs)
return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
def __call__(self, *args, **kwargs):
"""
Delegates to AuthServiceProxy, then writes the particular RPC method
called to a file.
"""
return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
rpc_method = self.auth_service_proxy_instance._service_name
if self.coverage_logfile:
with open(self.coverage_logfile, 'a+') as f:
f.write("%s\n" % rpc_method)
return return_val
@property
def url(self):
return self.auth_service_proxy_instance.url
def get_filename(dirname, n_node):
"""
Get a filename unique to the test process ID and node.
This file will contain a list of RPC commands covered.
"""
pid = str(os.getpid())
return os.path.join(
dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node)))
def write_all_rpc_commands(dirname, node):
"""
Write out a list of all RPC functions available in `bitcoin-cli` for
coverage comparison. This will only happen once per coverage
directory.
Args:
dirname (str): temporary test dir
node (AuthServiceProxy): client
Returns:
        bool: True if the RPC interface file was written.
"""
filename = os.path.join(dirname, REFERENCE_FILENAME)
if os.path.isfile(filename):
return False
help_output = node.help().split('\n')
commands = set()
for line in help_output:
line = line.strip()
# Ignore blanks and headers
if line and not line.startswith('='):
commands.add("%s\n" % line.split()[0])
with open(filename, 'w') as f:
f.writelines(list(commands))
return True
|
mit
|
dpwrussell/openmicroscopy
|
components/tools/OmeroPy/test/unit/tablestest/test_hdfstorage.py
|
11
|
10650
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test of the HDF storage for the Tables API.
Copyright 2009-2014 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import time
import pytest
import omero.columns
import omero.tables
import logging
import tables
import threading
import Ice
from omero_ext.mox import Mox
from omero.rtypes import rint, rstring
from library import TestCase
from path import path
logging.basicConfig(level=logging.CRITICAL)
class MockAdapter(object):
def __init__(self, ic):
self.ic = ic
def getCommunicator(self):
return self.ic
class TestHdfStorage(TestCase):
def setup_method(self, method):
TestCase.setup_method(self, method)
self.ic = Ice.initialize()
self.current = Ice.Current()
self.current.adapter = MockAdapter(self.ic)
self.lock = threading.RLock()
for of in omero.columns.ObjectFactories.values():
of.register(self.ic)
def cols(self):
a = omero.columns.LongColumnI('a', 'first', None)
b = omero.columns.LongColumnI('b', 'first', None)
c = omero.columns.LongColumnI('c', 'first', None)
return [a, b, c]
def init(self, hdf, meta=False):
if meta:
m = {"analysisA": 1, "analysisB": "param", "analysisC": 4.1}
else:
m = None
hdf.initialize(self.cols(), m)
def append(self, hdf, map):
cols = self.cols()
for col in cols:
try:
col.values = [map[col.name]]
except KeyError:
col.values = []
hdf.append(cols)
def hdfpath(self):
tmpdir = self.tmpdir()
return path(tmpdir) / "test.h5"
def testInvalidFile(self):
pytest.raises(
omero.ApiUsageException, omero.tables.HdfStorage, None, None)
pytest.raises(
omero.ApiUsageException, omero.tables.HdfStorage, '', self.lock)
bad = path(self.tmpdir()) / "doesntexist" / "test.h5"
pytest.raises(
omero.ApiUsageException, omero.tables.HdfStorage, bad, self.lock)
def testValidFile(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
hdf.cleanup()
def testLocking(self):
tmp = str(self.hdfpath())
hdf1 = omero.tables.HdfStorage(tmp, self.lock)
with pytest.raises(omero.LockTimeout) as exc_info:
omero.tables.HdfStorage(tmp, self.lock)
assert exc_info.value.message.startswith('Path already in HdfList: ')
hdf1.cleanup()
hdf3 = omero.tables.HdfStorage(tmp, self.lock)
hdf3.cleanup()
def testSimpleCreation(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
self.init(hdf, False)
hdf.cleanup()
def testCreationWithMetadata(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
self.init(hdf, True)
hdf.cleanup()
def testAddSingleRow(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
self.init(hdf, True)
self.append(hdf, {"a": 1, "b": 2, "c": 3})
hdf.cleanup()
def testModifyRow(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
self.init(hdf, True)
self.append(hdf, {"a": 1, "b": 2, "c": 3})
self.append(hdf, {"a": 5, "b": 6, "c": 7})
data = hdf.readCoordinates(hdf._stamp, [0, 1], self.current)
data.columns[0].values[0] = 100
data.columns[0].values[1] = 200
data.columns[1].values[0] = 300
data.columns[1].values[1] = 400
hdf.update(hdf._stamp, data)
hdf.readCoordinates(hdf._stamp, [0, 1], self.current)
hdf.cleanup()
def testReadTicket1951(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
self.init(hdf, True)
self.append(hdf, {"a": 1, "b": 2, "c": 3})
hdf.readCoordinates(hdf._stamp, [0], self.current)
hdf.read(hdf._stamp, [0, 1, 2], 0, 1, self.current)
hdf.cleanup()
def testSorting(self): # Probably shouldn't work
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
self.init(hdf, True)
self.append(hdf, {"a": 0, "b": 2, "c": 3})
self.append(hdf, {"a": 4, "b": 4, "c": 4})
self.append(hdf, {"a": 0, "b": 1, "c": 0})
self.append(hdf, {"a": 0, "b": 0, "c": 0})
self.append(hdf, {"a": 0, "b": 4, "c": 0})
self.append(hdf, {"a": 0, "b": 0, "c": 0})
hdf.getWhereList(time.time(), '(a==0)', None, 'b', None, None, None)
# Doesn't work yet.
hdf.cleanup()
    def testInitializeInvalidColumnNames(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
with pytest.raises(omero.ApiUsageException) as exc:
hdf.initialize([omero.columns.LongColumnI('')], None)
assert exc.value.message.startswith('Column unnamed:')
with pytest.raises(omero.ApiUsageException) as exc:
hdf.initialize([omero.columns.LongColumnI('__a')], None)
assert exc.value.message == 'Reserved column name: __a'
hdf.initialize([omero.columns.LongColumnI('a')], None)
hdf.cleanup()
def testInitializationOnInitializedFileFails(self):
p = self.hdfpath()
hdf = omero.tables.HdfStorage(p, self.lock)
self.init(hdf, True)
hdf.cleanup()
hdf = omero.tables.HdfStorage(p, self.lock)
try:
self.init(hdf, True)
assert False
except omero.ApiUsageException:
pass
hdf.cleanup()
"""
Hard fails disabled. See #2067
def testAddColumn(self):
assert False, "NYI"
def testMergeFiles(self):
assert False, "NYI"
def testVersion(self):
assert False, "NYI"
"""
def testHandlesExistingDirectory(self):
t = path(self.tmpdir())
h = t / "test.h5"
assert t.exists()
hdf = omero.tables.HdfStorage(h, self.lock)
hdf.cleanup()
def testGetSetMetaMap(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
self.init(hdf, False)
hdf.add_meta_map({'a': rint(1)})
m1 = hdf.get_meta_map()
assert len(m1) == 3
assert m1['__initialized'].val > 0
assert m1['__version'] == rstring('2')
assert m1['a'] == rint(1)
with pytest.raises(omero.ApiUsageException) as exc:
hdf.add_meta_map({'b': rint(1), '__c': rint(2)})
assert exc.value.message == 'Reserved attribute name: __c'
assert hdf.get_meta_map() == m1
with pytest.raises(omero.ValidationException) as exc:
hdf.add_meta_map({'d': rint(None)})
assert exc.value.serverStackTrace.startswith('Unsupported type:')
assert hdf.get_meta_map() == m1
hdf.add_meta_map({}, replace=True)
m2 = hdf.get_meta_map()
assert len(m2) == 2
assert m2 == {
'__initialized': m1['__initialized'], '__version': rstring('2')}
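# With init=True the reserved '__' prefix check is apparently bypassed,
# so internal bookkeeping keys can be written directly: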
hdf.add_meta_map({'__test': 1}, replace=True, init=True)
m3 = hdf.get_meta_map()
assert m3 == {'__test': rint(1)}
hdf.cleanup()
def testStringCol(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
cols = [omero.columns.StringColumnI("name", "description", 16, None)]
hdf.initialize(cols)
cols[0].settable(hdf._HdfStorage__mea) # Needed for size
cols[0].values = ["foo"]
hdf.append(cols)
rows = hdf.getWhereList(time.time(), '(name=="foo")', None, 'b', None,
None, None)
assert 1 == len(rows)
assert 16 == hdf.readCoordinates(time.time(), [0],
self.current).columns[0].size
# Doesn't work yet.
hdf.cleanup()
#
# ROIs
#
def testMaskColumn(self):
hdf = omero.tables.HdfStorage(self.hdfpath(), self.lock)
mask = omero.columns.MaskColumnI('mask', 'desc', None)
hdf.initialize([mask], None)
mask.imageId = [1, 2]
mask.theZ = [2, 2]
mask.theT = [3, 3]
mask.x = [4, 4]
mask.y = [5, 5]
mask.w = [6, 6]
mask.h = [7, 7]
mask.bytes = [[0], [0, 1, 2, 3, 4]]
hdf.append([mask])
data = hdf.readCoordinates(hdf._stamp, [0, 1], self.current)
test = data.columns[0]
assert 1 == test.imageId[0]
assert 2 == test.theZ[0]
assert 3 == test.theT[0]
assert 4 == test.x[0]
assert 5 == test.y[0]
assert 6 == test.w[0]
assert 7 == test.h[0]
assert [0] == test.bytes[0]
assert 2 == test.imageId[1]
assert 2 == test.theZ[1]
assert 3 == test.theT[1]
assert 4 == test.x[1]
assert 5 == test.y[1]
assert 6 == test.w[1]
assert 7 == test.h[1]
assert [0, 1, 2, 3, 4] == test.bytes[1]
hdf.cleanup()
class TestHdfList(TestCase):
def setup_method(self, method):
TestCase.setup_method(self, method)
self.mox = Mox()
def hdfpath(self):
tmpdir = self.tmpdir()
return path(tmpdir) / "test.h5"
def testLocking(self, monkeypatch):
lock1 = threading.RLock()
hdflist2 = omero.tables.HdfList()
lock2 = threading.RLock()
tmp = str(self.hdfpath())
# Using omero.tables.HDFLIST
hdf1 = omero.tables.HdfStorage(tmp, lock1)
# There are multiple guards against opening the same HDF5 file
# PyTables includes a check
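# Both guards are exercised here: the in-process HdfList registry
# raises ValueError for a path that is already open, while the
# portalocker-based file lock surfaces as omero.LockTimeout.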
monkeypatch.setattr(omero.tables, 'HDFLIST', hdflist2)
with pytest.raises(ValueError) as exc_info:
omero.tables.HdfStorage(tmp, lock2)
assert exc_info.value.message.startswith(
"The file '%s' is already opened. " % tmp)
monkeypatch.undo()
# HdfList uses portalocker, test by mocking tables.openFile
self.mox.StubOutWithMock(tables, 'openFile')
tables.openFile(tmp, mode='w', title='OMERO HDF Measurement Storage',
rootUEP='/').AndReturn(open(tmp))
self.mox.ReplayAll()
monkeypatch.setattr(omero.tables, 'HDFLIST', hdflist2)
with pytest.raises(omero.LockTimeout) as exc_info:
omero.tables.HdfStorage(tmp, lock2)
print exc_info.value
assert (exc_info.value.message ==
'Cannot acquire exclusive lock on: %s' % tmp)
monkeypatch.undo()
hdf1.cleanup()
self.mox.UnsetStubs()
self.mox.VerifyAll()
|
gpl-2.0
|
poulpito/Flexget
|
flexget/tests/test_proper_movies.py
|
13
|
1887
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import pytest
from jinja2 import Template
class TestProperMovies(object):
_config = """
templates:
global:
seen_movies: strict
accept_all: yes
proper_movies: yes
parsing:
series: {{parser}}
movie: {{parser}}
tasks:
test1:
mock:
- {title: 'Movie.Name.2011.720p-FlexGet', imdb_id: 'tt12345678'}
test2:
mock:
- {title: 'Movie.Name.2011.720p-FooBar', imdb_id: 'tt12345678'}
test3:
mock:
- {title: 'Movie.Name.2011.PROPER.DVDRip-AsdfAsdf', imdb_id: 'tt12345678'}
test4:
mock:
- {title: 'Movie.Name.2011.PROPER.720p-FlexGet', imdb_id: 'tt12345678'}
"""
@pytest.fixture(scope='class', params=['internal', 'guessit'], ids=['internal', 'guessit'])
def config(self, request):
"""Override and parametrize default config fixture."""
return Template(self._config).render({'parser': request.param})
def test_proper_movies(self, execute_task):
# first occurrence
task = execute_task('test1')
assert task.find_entry('accepted', title='Movie.Name.2011.720p-FlexGet')
# duplicate movie
task = execute_task('test2')
assert task.find_entry('rejected', title='Movie.Name.2011.720p-FooBar')
# proper with wrong quality
task = execute_task('test3')
assert task.find_entry('rejected', title='Movie.Name.2011.PROPER.DVDRip-AsdfAsdf')
# proper version of same quality
task = execute_task('test4')
assert task.find_entry('accepted', title='Movie.Name.2011.PROPER.720p-FlexGet')
|
mit
|
Krossom/python-for-android
|
python-modules/twisted/twisted/conch/test/test_conch.py
|
60
|
13236
|
# -*- test-case-name: twisted.conch.test.test_conch -*-
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
import os, sys, socket
from twisted.cred import portal
from twisted.internet import reactor, defer, protocol
from twisted.internet.error import ProcessExitedAlready
from twisted.python import log, runtime
from twisted.trial import unittest
from twisted.conch.error import ConchError
try:
from twisted.conch.scripts.conch import SSHSession as StdioInteractingSession
except ImportError, e:
StdioInteractingSession = None
_reason = str(e)
del e
from twisted.conch.test.test_ssh import ConchTestRealm
from twisted.python.procutils import which
from twisted.conch.test.keydata import publicRSA_openssh, privateRSA_openssh
from twisted.conch.test.keydata import publicDSA_openssh, privateDSA_openssh
from twisted.conch.test.test_ssh import Crypto, pyasn1
try:
from twisted.conch.test.test_ssh import ConchTestServerFactory, \
ConchTestPublicKeyChecker
except ImportError:
pass
class StdioInteractingSessionTests(unittest.TestCase):
"""
Tests for L{twisted.conch.scripts.conch.SSHSession}.
"""
if StdioInteractingSession is None:
skip = _reason
def test_eofReceived(self):
"""
L{twisted.conch.scripts.conch.SSHSession.eofReceived} loses the
write half of its stdio connection.
"""
class FakeStdio:
writeConnLost = False
def loseWriteConnection(self):
self.writeConnLost = True
stdio = FakeStdio()
channel = StdioInteractingSession()
channel.stdio = stdio
channel.eofReceived()
self.assertTrue(stdio.writeConnLost)
class Echo(protocol.Protocol):
def connectionMade(self):
log.msg('ECHO CONNECTION MADE')
def connectionLost(self, reason):
log.msg('ECHO CONNECTION DONE')
def dataReceived(self, data):
self.transport.write(data)
if '\n' in data:
self.transport.loseConnection()
class EchoFactory(protocol.Factory):
protocol = Echo
class ConchTestOpenSSHProcess(protocol.ProcessProtocol):
"""
Test protocol for launching an OpenSSH client process.
@ivar deferred: Set by whatever uses this object. Accessed using
L{_getDeferred}, which destroys the value so the Deferred is not
fired twice. Fires when the process is terminated.
"""
deferred = None
buf = ''
def _getDeferred(self):
d, self.deferred = self.deferred, None
return d
def outReceived(self, data):
self.buf += data
def processEnded(self, reason):
"""
Called when the process has ended.
@param reason: a Failure giving the reason for the process' end.
"""
if reason.value.exitCode != 0:
self._getDeferred().errback(
ConchError("exit code was not 0: %s" %
reason.value.exitCode))
else:
buf = self.buf.replace('\r\n', '\n')
self._getDeferred().callback(buf)
class ConchTestForwardingProcess(protocol.ProcessProtocol):
"""
Manages a third-party process which launches a server.
Uses L{ConchTestForwardingPort} to connect to the third-party server.
Once L{ConchTestForwardingPort} has disconnected, kill the process and fire
a Deferred with the data received by the L{ConchTestForwardingPort}.
@ivar deferred: Set by whatever uses this object. Accessed using
L{_getDeferred}, which destroys the value so the Deferred is not
fired twice. Fires when the process is terminated.
"""
deferred = None
def __init__(self, port, data):
"""
@type port: C{int}
@param port: The port on which the third-party server is listening.
(it is assumed that the server is running on localhost).
@type data: C{str}
@param data: This is sent to the third-party server. Must end with '\n'
in order to trigger a disconnect.
"""
self.port = port
self.buffer = None
self.data = data
def _getDeferred(self):
d, self.deferred = self.deferred, None
return d
def connectionMade(self):
self._connect()
def _connect(self):
"""
Connect to the server, which is often a third-party process.
Tries to reconnect if it fails because we have no way of determining
exactly when the port becomes available for listening -- we can only
know when the process starts.
"""
cc = protocol.ClientCreator(reactor, ConchTestForwardingPort, self,
self.data)
d = cc.connectTCP('127.0.0.1', self.port)
d.addErrback(self._ebConnect)
return d
def _ebConnect(self, f):
reactor.callLater(.1, self._connect)
def forwardingPortDisconnected(self, buffer):
"""
The network connection has died; save the buffer of output
from the network and attempt to quit the process gracefully,
and then (after the reactor has spun) send it a KILL signal.
"""
self.buffer = buffer
self.transport.write('\x03')
self.transport.loseConnection()
reactor.callLater(0, self._reallyDie)
def _reallyDie(self):
try:
self.transport.signalProcess('KILL')
except ProcessExitedAlready:
pass
def processEnded(self, reason):
"""
Fire the Deferred at self.deferred with the data collected
from the L{ConchTestForwardingPort} connection, if any.
"""
self._getDeferred().callback(self.buffer)
class ConchTestForwardingPort(protocol.Protocol):
"""
Connects to a server launched by a third-party process (managed by
L{ConchTestForwardingProcess}), sends data, then reports whatever it
received back to the L{ConchTestForwardingProcess} once the connection
is ended.
"""
def __init__(self, protocol, data):
"""
@type protocol: L{ConchTestForwardingProcess}
@param protocol: The L{ProcessProtocol} which made this connection.
@type data: str
@param data: The data to be sent to the third-party server.
"""
self.protocol = protocol
self.data = data
def connectionMade(self):
self.buffer = ''
self.transport.write(self.data)
def dataReceived(self, data):
self.buffer += data
def connectionLost(self, reason):
self.protocol.forwardingPortDisconnected(self.buffer)
def _makeArgs(args, mod="conch"):
start = [sys.executable, '-c'
"""
### Twisted Preamble
import sys, os
path = os.path.abspath(sys.argv[0])
while os.path.dirname(path) != path:
if os.path.basename(path).startswith('Twisted'):
sys.path.insert(0, path)
break
path = os.path.dirname(path)
from twisted.conch.scripts.%s import run
run()""" % mod]
return start + list(args)
class ForwardingTestBase:
"""
Template class for tests of the Conch server's ability to forward arbitrary
protocols over SSH.
These tests are integration tests, not unit tests. They launch a Conch
server, a custom TCP server (just an L{EchoProtocol}) and then call
L{execute}.
L{execute} is implemented by subclasses of L{ForwardingTestBase}. It should
cause an SSH client to connect to the Conch server, asking it to forward
data to the custom TCP server.
"""
if not Crypto:
skip = "can't run w/o PyCrypto"
if not pyasn1:
skip = "can't run w/o PyASN1"
def _createFiles(self):
for f in ['rsa_test','rsa_test.pub','dsa_test','dsa_test.pub',
'kh_test']:
if os.path.exists(f):
os.remove(f)
open('rsa_test','w').write(privateRSA_openssh)
open('rsa_test.pub','w').write(publicRSA_openssh)
open('dsa_test.pub','w').write(publicDSA_openssh)
open('dsa_test','w').write(privateDSA_openssh)
os.chmod('dsa_test', 33152)
os.chmod('rsa_test', 33152)
open('kh_test','w').write('127.0.0.1 '+publicRSA_openssh)
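# Binding to port 0 lets the OS pick a free ephemeral port. Note this is
# slightly racy: the port is released again before the caller uses it.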
def _getFreePort(self):
s = socket.socket()
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
return port
def _makeConchFactory(self):
"""
Make a L{ConchTestServerFactory}, which allows us to start a
L{ConchTestServer} -- i.e. an actually listening conch.
"""
realm = ConchTestRealm()
p = portal.Portal(realm)
p.registerChecker(ConchTestPublicKeyChecker())
factory = ConchTestServerFactory()
factory.portal = p
return factory
def setUp(self):
self._createFiles()
self.conchFactory = self._makeConchFactory()
self.conchFactory.expectedLoseConnection = 1
self.conchServer = reactor.listenTCP(0, self.conchFactory,
interface="127.0.0.1")
self.echoServer = reactor.listenTCP(0, EchoFactory())
self.echoPort = self.echoServer.getHost().port
def tearDown(self):
try:
self.conchFactory.proto.done = 1
except AttributeError:
pass
else:
self.conchFactory.proto.transport.loseConnection()
return defer.gatherResults([
defer.maybeDeferred(self.conchServer.stopListening),
defer.maybeDeferred(self.echoServer.stopListening)])
def test_exec(self):
"""
Test that we can use whatever client to send the command "echo goodbye"
to the Conch server. Make sure we receive "goodbye" back from the
server.
"""
d = self.execute('echo goodbye', ConchTestOpenSSHProcess())
return d.addCallback(self.assertEquals, 'goodbye\n')
def test_localToRemoteForwarding(self):
"""
Test that we can use whatever client to forward a local port to a
specified port on the server.
"""
localPort = self._getFreePort()
process = ConchTestForwardingProcess(localPort, 'test\n')
d = self.execute('', process,
sshArgs='-N -L%i:127.0.0.1:%i'
% (localPort, self.echoPort))
d.addCallback(self.assertEqual, 'test\n')
return d
def test_remoteToLocalForwarding(self):
"""
Test that we can use whatever client to forward a port from the server
to a port locally.
"""
localPort = self._getFreePort()
process = ConchTestForwardingProcess(localPort, 'test\n')
d = self.execute('', process,
sshArgs='-N -R %i:127.0.0.1:%i'
% (localPort, self.echoPort))
d.addCallback(self.assertEqual, 'test\n')
return d
class OpenSSHClientTestCase(ForwardingTestBase, unittest.TestCase):
if not which('ssh'):
skip = "no ssh command-line client available"
def execute(self, remoteCommand, process, sshArgs=''):
"""
Connects to the SSH server started in L{ForwardingTestBase.setUp} by
running the 'ssh' command line tool.
@type remoteCommand: str
@param remoteCommand: The command (with arguments) to run on the
remote end.
@type process: L{ConchTestOpenSSHProcess}
@type sshArgs: str
@param sshArgs: Arguments to pass to the 'ssh' process.
@return: L{defer.Deferred}
"""
process.deferred = defer.Deferred()
cmdline = ('ssh -2 -l testuser -p %i '
'-oUserKnownHostsFile=kh_test '
'-oPasswordAuthentication=no '
# Always use the RSA key, since that's the one in kh_test.
'-oHostKeyAlgorithms=ssh-rsa '
'-a '
'-i dsa_test ') + sshArgs + \
' 127.0.0.1 ' + remoteCommand
port = self.conchServer.getHost().port
cmds = (cmdline % port).split()
reactor.spawnProcess(process, "ssh", cmds)
return process.deferred
class CmdLineClientTestCase(ForwardingTestBase, unittest.TestCase):
if runtime.platformType == 'win32':
skip = "can't run cmdline client on win32"
def execute(self, remoteCommand, process, sshArgs=''):
"""
As for L{OpenSSHClientTestCase.execute}, except it runs the 'conch'
command line tool, not 'ssh'.
"""
process.deferred = defer.Deferred()
port = self.conchServer.getHost().port
cmd = ('-p %i -l testuser '
'--known-hosts kh_test '
'--user-authentications publickey '
'--host-key-algorithms ssh-rsa '
'-a '
'-i dsa_test '
'-v ') % port + sshArgs + \
' 127.0.0.1 ' + remoteCommand
cmds = _makeArgs(cmd.split())
log.msg(str(cmds))
env = os.environ.copy()
env['PYTHONPATH'] = os.pathsep.join(sys.path)
reactor.spawnProcess(process, sys.executable, cmds, env=env)
return process.deferred
|
apache-2.0
|
windedge/odoo
|
addons/l10n_es/__openerp__.py
|
314
|
2772
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2008-2010 Zikzakmedia S.L. (http://zikzakmedia.com) All Rights Reserved.
# Jordi Esteve <[email protected]>
# Copyright (c) 2012-2013, Grupo OPENTIA (<http://opentia.com>) Registered EU Trademark.
# Dpto. Consultoría <[email protected]>
# Copyright (c) 2013 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro Manuel Baeza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Spanish Charts of Accounts (PGCE 2008)",
"version" : "4.0",
"author" : "Spanish Localization Team",
'website' : 'https://launchpad.net/openerp-spain',
"category" : "Localization/Account Charts",
"description": """
Spanish charts of accounts (PGCE 2008).
========================================
* Defines the following chart of account templates:
* Spanish general chart of accounts 2008
* Spanish general chart of accounts 2008 for small and medium companies
* Spanish general chart of accounts 2008 for associations
* Defines templates for sale and purchase VAT
* Defines tax code templates
* Defines fiscal positions for spanish fiscal legislation
""",
"license" : "AGPL-3",
"depends" : ["account", "base_vat", "base_iban"],
"data" : [
"account_type.xml",
"account_chart_template.xml",
"account_account_common.xml",
"account_account_full.xml",
"account_account_pymes.xml",
"account_account_assoc.xml",
"tax_codes_common.xml",
"taxes_common.xml",
"fiscal_templates_common.xml",
"account_chart_template_post.xml",
"l10n_es_wizard.xml",
],
"demo" : [],
'auto_install': False,
"installable": True,
'images': ['images/config_chart_l10n_es.png', 'images/l10n_es_chart.png'],
}
|
agpl-3.0
|
hbrunn/OpenUpgrade
|
addons/hr_attendance/wizard/__init__.py
|
375
|
1073
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_attendance_error
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
TEAM-Gummy/platform_external_chromium_org
|
third_party/closure_linter/closure_linter/ecmametadatapass.py
|
155
|
18062
|
#!/usr/bin/env python
#
# Copyright 2010 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Metadata pass for annotating tokens in EcmaScript files."""
__author__ = ('[email protected] (Robert Walker)')
from closure_linter import javascripttokens
from closure_linter import tokenutil
TokenType = javascripttokens.JavaScriptTokenType
class ParseError(Exception):
"""Exception indicating a parse error at the given token.
Attributes:
token: The token where the parse error occurred.
"""
def __init__(self, token, message=None):
"""Initialize a parse error at the given token with an optional message.
Args:
token: The token where the parse error occurred.
message: A message describing the parse error.
"""
Exception.__init__(self, message)
self.token = token
class EcmaContext(object):
"""Context object for EcmaScript languages.
Attributes:
type: The context type.
start_token: The token where this context starts.
end_token: The token where this context ends.
parent: The parent context.
"""
# The root context.
ROOT = 'root'
# A block of code.
BLOCK = 'block'
# A pseudo-block of code for a given case or default section.
CASE_BLOCK = 'case_block'
# Block of statements in a for loop's parentheses.
FOR_GROUP_BLOCK = 'for_block'
# An implied block of code for one-line if, while, and for statements.
IMPLIED_BLOCK = 'implied_block'
# An index in to an array or object.
INDEX = 'index'
# An array literal in [].
ARRAY_LITERAL = 'array_literal'
# An object literal in {}.
OBJECT_LITERAL = 'object_literal'
# An individual element in an array or object literal.
LITERAL_ELEMENT = 'literal_element'
# The portion of a ternary statement between ? and :
TERNARY_TRUE = 'ternary_true'
# The portion of a ternary statement after :
TERNARY_FALSE = 'ternary_false'
# The entire switch statement. This will contain a GROUP with the variable
# and a BLOCK with the code.
# Since that BLOCK is not a normal block, it cannot contain statements except
# for case and default.
SWITCH = 'switch'
# A normal comment.
COMMENT = 'comment'
# A JsDoc comment.
DOC = 'doc'
# An individual statement.
STATEMENT = 'statement'
# Code within parentheses.
GROUP = 'group'
# Parameter names in a function declaration.
PARAMETERS = 'parameters'
# A set of variable declarations appearing after the 'var' keyword.
VAR = 'var'
# Context types that are blocks.
BLOCK_TYPES = frozenset([
ROOT, BLOCK, CASE_BLOCK, FOR_GROUP_BLOCK, IMPLIED_BLOCK])
def __init__(self, type, start_token, parent):
"""Initializes the context object.
Args:
type: The context type.
start_token: The token where this context starts.
parent: The parent context.
"""
self.type = type
self.start_token = start_token
self.end_token = None
self.parent = parent
def __repr__(self):
"""Returns a string representation of the context object."""
stack = []
context = self
while context:
stack.append(context.type)
context = context.parent
return 'Context(%s)' % ' > '.join(stack)
class EcmaMetaData(object):
"""Token metadata for EcmaScript languages.
Attributes:
last_code: The last code token to appear before this one.
context: The context this token appears in.
operator_type: The operator type, will be one of the *_OPERATOR constants
defined below.
"""
UNARY_OPERATOR = 'unary'
UNARY_POST_OPERATOR = 'unary_post'
BINARY_OPERATOR = 'binary'
TERNARY_OPERATOR = 'ternary'
def __init__(self):
"""Initializes a token metadata object."""
self.last_code = None
self.context = None
self.operator_type = None
self.is_implied_semicolon = False
self.is_implied_block = False
self.is_implied_block_close = False
def __repr__(self):
"""Returns a string representation of the context object."""
parts = ['%r' % self.context]
if self.operator_type:
parts.append('optype: %r' % self.operator_type)
if self.is_implied_semicolon:
parts.append('implied;')
return 'MetaData(%s)' % ', '.join(parts)
def IsUnaryOperator(self):
return self.operator_type in (EcmaMetaData.UNARY_OPERATOR,
EcmaMetaData.UNARY_POST_OPERATOR)
def IsUnaryPostOperator(self):
return self.operator_type == EcmaMetaData.UNARY_POST_OPERATOR
class EcmaMetaDataPass(object):
"""A pass that iterates over all tokens and builds metadata about them."""
def __init__(self):
"""Initialize the meta data pass object."""
self.Reset()
def Reset(self):
"""Resets the metadata pass to prepare for the next file."""
self._token = None
self._context = None
self._AddContext(EcmaContext.ROOT)
self._last_code = None
def _CreateContext(self, type):
"""Overridable by subclasses to create the appropriate context type."""
return EcmaContext(type, self._token, self._context)
def _CreateMetaData(self):
"""Overridable by subclasses to create the appropriate metadata type."""
return EcmaMetaData()
def _AddContext(self, type):
"""Adds a context of the given type to the context stack.
Args:
type: The type of context to create
"""
self._context = self._CreateContext(type)
def _PopContext(self):
"""Moves up one level in the context stack.
Returns:
The former context.
Raises:
ParseError: If the root context is popped.
"""
top_context = self._context
top_context.end_token = self._token
self._context = top_context.parent
if self._context:
return top_context
else:
raise ParseError(self._token)
def _PopContextType(self, *stop_types):
"""Pops the context stack until a context of the given type is popped.
Args:
stop_types: The types of context to pop to - stops at the first match.
Returns:
The context object of the given type that was popped.
"""
last = None
while not last or last.type not in stop_types:
last = self._PopContext()
return last
def _EndStatement(self):
"""Process the end of a statement."""
self._PopContextType(EcmaContext.STATEMENT)
if self._context.type == EcmaContext.IMPLIED_BLOCK:
self._token.metadata.is_implied_block_close = True
self._PopContext()
def _ProcessContext(self):
"""Process the context at the current token.
Returns:
The context that should be assigned to the current token, or None if
the current context after this method should be used.
Raises:
ParseError: When the token appears in an invalid context.
"""
token = self._token
token_type = token.type
if self._context.type in EcmaContext.BLOCK_TYPES:
# Whenever we're in a block, we add a statement context. We make an
# exception for switch statements since they can only contain case: and
# default: and therefore don't directly contain statements.
# The block we add here may be immediately removed in some cases, but
# that causes no harm.
parent = self._context.parent
if not parent or parent.type != EcmaContext.SWITCH:
self._AddContext(EcmaContext.STATEMENT)
elif self._context.type == EcmaContext.ARRAY_LITERAL:
self._AddContext(EcmaContext.LITERAL_ELEMENT)
if token_type == TokenType.START_PAREN:
if self._last_code and self._last_code.IsKeyword('for'):
# for loops contain multiple statements in the group unlike while,
# switch, if, etc.
self._AddContext(EcmaContext.FOR_GROUP_BLOCK)
else:
self._AddContext(EcmaContext.GROUP)
elif token_type == TokenType.END_PAREN:
result = self._PopContextType(EcmaContext.GROUP,
EcmaContext.FOR_GROUP_BLOCK)
keyword_token = result.start_token.metadata.last_code
# keyword_token will not exist if the open paren is the first line of the
# file, for example if all code is wrapped in an immediately executed
# anonymous function.
if keyword_token and keyword_token.string in ('if', 'for', 'while'):
next_code = tokenutil.SearchExcept(token, TokenType.NON_CODE_TYPES)
if next_code.type != TokenType.START_BLOCK:
# Check for do-while.
is_do_while = False
pre_keyword_token = keyword_token.metadata.last_code
if (pre_keyword_token and
pre_keyword_token.type == TokenType.END_BLOCK):
start_block_token = pre_keyword_token.metadata.context.start_token
is_do_while = start_block_token.metadata.last_code.string == 'do'
# If it's not do-while, it's an implied block.
if not is_do_while:
self._AddContext(EcmaContext.IMPLIED_BLOCK)
token.metadata.is_implied_block = True
return result
# else (not else if) with no open brace after it should be considered the
# start of an implied block, similar to the case with if, for, and while
# above.
elif (token_type == TokenType.KEYWORD and
token.string == 'else'):
next_code = tokenutil.SearchExcept(token, TokenType.NON_CODE_TYPES)
if (next_code.type != TokenType.START_BLOCK and
(next_code.type != TokenType.KEYWORD or next_code.string != 'if')):
self._AddContext(EcmaContext.IMPLIED_BLOCK)
token.metadata.is_implied_block = True
elif token_type == TokenType.START_PARAMETERS:
self._AddContext(EcmaContext.PARAMETERS)
elif token_type == TokenType.END_PARAMETERS:
return self._PopContextType(EcmaContext.PARAMETERS)
elif token_type == TokenType.START_BRACKET:
if (self._last_code and
self._last_code.type in TokenType.EXPRESSION_ENDER_TYPES):
self._AddContext(EcmaContext.INDEX)
else:
self._AddContext(EcmaContext.ARRAY_LITERAL)
elif token_type == TokenType.END_BRACKET:
return self._PopContextType(EcmaContext.INDEX, EcmaContext.ARRAY_LITERAL)
elif token_type == TokenType.START_BLOCK:
if (self._last_code.type in (TokenType.END_PAREN,
TokenType.END_PARAMETERS) or
self._last_code.IsKeyword('else') or
self._last_code.IsKeyword('do') or
self._last_code.IsKeyword('try') or
self._last_code.IsKeyword('finally') or
(self._last_code.IsOperator(':') and
self._last_code.metadata.context.type == EcmaContext.CASE_BLOCK)):
# else, do, try, and finally all might have no () before {.
# Also, handle the bizarre syntax case 10: {...}.
self._AddContext(EcmaContext.BLOCK)
else:
self._AddContext(EcmaContext.OBJECT_LITERAL)
elif token_type == TokenType.END_BLOCK:
context = self._PopContextType(EcmaContext.BLOCK,
EcmaContext.OBJECT_LITERAL)
if self._context.type == EcmaContext.SWITCH:
# The end of the block also means the end of the switch statement it
# applies to.
return self._PopContext()
return context
elif token.IsKeyword('switch'):
self._AddContext(EcmaContext.SWITCH)
elif (token_type == TokenType.KEYWORD and
token.string in ('case', 'default')):
# Pop up to but not including the switch block.
while self._context.parent.type != EcmaContext.SWITCH:
self._PopContext()
elif token.IsOperator('?'):
self._AddContext(EcmaContext.TERNARY_TRUE)
elif token.IsOperator(':'):
if self._context.type == EcmaContext.OBJECT_LITERAL:
self._AddContext(EcmaContext.LITERAL_ELEMENT)
elif self._context.type == EcmaContext.TERNARY_TRUE:
self._PopContext()
self._AddContext(EcmaContext.TERNARY_FALSE)
# Handle nested ternary statements like:
# foo = bar ? baz ? 1 : 2 : 3
# When we encounter the second ":" the context is
# ternary_false > ternary_true > statement > root
elif (self._context.type == EcmaContext.TERNARY_FALSE and
self._context.parent.type == EcmaContext.TERNARY_TRUE):
self._PopContext() # Leave current ternary false context.
self._PopContext() # Leave current parent ternary true
self._AddContext(EcmaContext.TERNARY_FALSE)
elif self._context.parent.type == EcmaContext.SWITCH:
self._AddContext(EcmaContext.CASE_BLOCK)
elif token.IsKeyword('var'):
self._AddContext(EcmaContext.VAR)
elif token.IsOperator(','):
while self._context.type not in (EcmaContext.VAR,
EcmaContext.ARRAY_LITERAL,
EcmaContext.OBJECT_LITERAL,
EcmaContext.STATEMENT,
EcmaContext.PARAMETERS,
EcmaContext.GROUP):
self._PopContext()
elif token_type == TokenType.SEMICOLON:
self._EndStatement()
def Process(self, first_token):
"""Processes the token stream starting with the given token."""
self._token = first_token
while self._token:
self._ProcessToken()
if self._token.IsCode():
self._last_code = self._token
self._token = self._token.next
try:
self._PopContextType(EcmaContext.ROOT)
except ParseError:
# Ignore the "popped to root" error.
pass
def _ProcessToken(self):
"""Process the given token."""
token = self._token
token.metadata = self._CreateMetaData()
context = (self._ProcessContext() or self._context)
token.metadata.context = context
token.metadata.last_code = self._last_code
# Determine the operator type of the token, if applicable.
if token.type == TokenType.OPERATOR:
token.metadata.operator_type = self._GetOperatorType(token)
# Determine if there is an implied semicolon after the token.
if token.type != TokenType.SEMICOLON:
next_code = tokenutil.SearchExcept(token, TokenType.NON_CODE_TYPES)
# A statement like if (x) does not need a semicolon after it
is_implied_block = self._context.type == EcmaContext.IMPLIED_BLOCK
is_last_code_in_line = token.IsCode() and (
not next_code or next_code.line_number != token.line_number)
is_continued_identifier = (token.type == TokenType.IDENTIFIER and
token.string.endswith('.'))
is_continued_operator = (token.type == TokenType.OPERATOR and
not token.metadata.IsUnaryPostOperator())
is_continued_dot = token.string == '.'
next_code_is_operator = next_code and next_code.type == TokenType.OPERATOR
next_code_is_dot = next_code and next_code.string == '.'
is_end_of_block = (token.type == TokenType.END_BLOCK and
token.metadata.context.type != EcmaContext.OBJECT_LITERAL)
is_multiline_string = token.type == TokenType.STRING_TEXT
next_code_is_block = next_code and next_code.type == TokenType.START_BLOCK
if (is_last_code_in_line and
self._StatementCouldEndInContext() and
not is_multiline_string and
not is_end_of_block and
not is_continued_identifier and
not is_continued_operator and
not is_continued_dot and
not next_code_is_dot and
not next_code_is_operator and
not is_implied_block and
not next_code_is_block):
token.metadata.is_implied_semicolon = True
self._EndStatement()
def _StatementCouldEndInContext(self):
"""Returns whether the current statement (if any) may end in this context."""
# In the basic statement or variable declaration context, a statement can
# always end in this context.
if self._context.type in (EcmaContext.STATEMENT, EcmaContext.VAR):
return True
# End of a ternary false branch inside a statement can also be the
# end of the statement, for example:
# var x = foo ? foo.bar() : null
# In this case the statement ends after the null, when the context stack
# looks like ternary_false > var > statement > root.
if (self._context.type == EcmaContext.TERNARY_FALSE and
self._context.parent.type in (EcmaContext.STATEMENT, EcmaContext.VAR)):
return True
# In all other contexts like object and array literals, ternary true, etc.
# the statement can't yet end.
return False
def _GetOperatorType(self, token):
"""Returns the operator type of the given operator token.
Args:
token: The token to get arity for.
Returns:
The type of the operator. One of the *_OPERATOR constants defined in
EcmaMetaData.
"""
if token.string == '?':
return EcmaMetaData.TERNARY_OPERATOR
if token.string in TokenType.UNARY_OPERATORS:
return EcmaMetaData.UNARY_OPERATOR
last_code = token.metadata.last_code
if not last_code or last_code.type == TokenType.END_BLOCK:
return EcmaMetaData.UNARY_OPERATOR
if (token.string in TokenType.UNARY_POST_OPERATORS and
last_code.type in TokenType.EXPRESSION_ENDER_TYPES):
return EcmaMetaData.UNARY_POST_OPERATOR
if (token.string in TokenType.UNARY_OK_OPERATORS and
last_code.type not in TokenType.EXPRESSION_ENDER_TYPES and
last_code.string not in TokenType.UNARY_POST_OPERATORS):
return EcmaMetaData.UNARY_OPERATOR
return EcmaMetaData.BINARY_OPERATOR
|
bsd-3-clause
|
resmo/cloudstack
|
test/integration/plugins/nuagevsp/test_nuage_vpc_network.py
|
2
|
5478
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Component tests for basic VPC Network functionality with
Nuage VSP SDN plugin
"""
# Import Local Modules
from nuageTestCase import nuageTestCase
from marvin.lib.base import Account
# Import System Modules
from nose.plugins.attrib import attr
class TestNuageVpcNetwork(nuageTestCase):
""" Test basic VPC Network functionality with Nuage VSP SDN plugin
"""
@classmethod
def setUpClass(cls, zone=None):
super(TestNuageVpcNetwork, cls).setUpClass()
return
def setUp(self):
# Create an account
self.account = Account.create(self.api_client,
self.test_data["account"],
admin=True,
domainid=self.domain.id
)
self.cleanup = [self.account]
return
@attr(tags=["advanced", "nuagevsp"], required_hardware="false")
def test_nuage_vpc_network(self):
""" Test basic VPC Network functionality with Nuage VSP SDN plugin
"""
# 1. Create Nuage VSP VPC offering, check if it is successfully
# created and enabled.
# 2. Create a VPC with Nuage VSP VPC offering, check if it is
# successfully created and enabled.
# 3. Create Nuage VSP VPC Network offering, check if it is successfully
# created and enabled.
# 4. Create an ACL list in the created VPC, and add an ACL item to it.
# 5. Create a VPC Network with Nuage VSP VPC Network offering and the
# created ACL list, check if it is successfully created, is in the
# "Implemented" state, and is added to the VPC VR.
# 6. Deploy a VM in the created VPC network, check if the VM is
# successfully deployed and is in the "Running" state.
# 7. Verify that the created ACL item is successfully implemented in
# Nuage VSP.
# 8. Delete all the created objects (cleanup).
# Creating a VPC offering
self.debug("Creating Nuage VSP VPC offering...")
vpc_offering = self.create_VpcOffering(
self.test_data["nuagevsp"]["vpc_offering"])
self.validate_VpcOffering(vpc_offering, state="Enabled")
# Creating a VPC
self.debug("Creating a VPC with Nuage VSP VPC offering...")
vpc = self.create_Vpc(vpc_offering, cidr='10.1.0.0/16')
self.validate_Vpc(vpc, state="Enabled")
# Creating a network offering
self.debug("Creating Nuage VSP VPC Network offering...")
network_offering = self.create_NetworkOffering(
self.test_data["nuagevsp"]["vpc_network_offering"])
self.validate_NetworkOffering(network_offering, state="Enabled")
# Creating an ACL list
acl_list = self.create_NetworkAclList(
name="acl", description="acl", vpc=vpc)
# Creating an ACL item
acl_item = self.create_NetworkAclRule(
self.test_data["ingress_rule"], acl_list=acl_list)
# Creating a VPC network in the VPC
self.debug("Creating a VPC network with Nuage VSP VPC Network "
"offering...")
vpc_network = self.create_Network(
network_offering, vpc=vpc, acl_list=acl_list)
self.validate_Network(vpc_network, state="Implemented")
vr = self.get_Router(vpc_network)
self.check_Router_state(vr, state="Running")
# Deploying a VM in the VPC network
vm = self.create_VM(vpc_network)
self.check_VM_state(vm, state="Running")
# VSD verification
self.verify_vsd_network(self.domain.id, vpc_network, vpc)
self.verify_vsd_router(vr)
self.verify_vsd_vm(vm)
# VSD verification for ACL item
self.verify_vsd_firewall_rule(acl_item)
@attr(
tags=["advanced", "nuagevsp", "multizone"], required_hardware="false")
def test_nuage_vpc_network_multizone(self):
""" Test basic VPC Network functionality with Nuage VSP SDN plugin on
multiple zones
"""
# Repeat the tests in the above testcase "test_nuage_vpc_network" on
# multiple zones
self.debug("Testing basic VPC Network functionality with Nuage VSP "
"SDN plugin on multiple zones...")
if len(self.zones) == 1:
self.skipTest("There is only one Zone configured: skipping test")
for zone in self.zones:
self.debug("Zone - %s" % zone.name)
# Get Zone details
self.getZoneDetails(zone=zone)
# Configure VSD sessions
self.configureVSDSessions()
self.test_nuage_vpc_network()
|
apache-2.0
|
denismakogon/trove-guestagent
|
trove_guestagent/openstack/common/utils.py
|
6
|
1583
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import logging
LOG = logging.getLogger(__name__)
def int_from_bool_as_string(subject):
"""
Interpret a string as a boolean and return either 1 or 0.
Any string value in:
('True', 'true', 'On', 'on', '1')
is interpreted as a boolean True.
Useful for JSON-decoded stuff and config file parsing
"""
return bool_from_string(subject) and 1 or 0
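# The 'and 1 or 0' form is the pre-2.5 conditional idiom, equivalent
# here to: 1 if bool_from_string(subject) else 0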
def bool_from_string(subject):
"""
Interpret a string as a boolean.
Any string value in:
('True', 'true', 'On', 'on', 'Yes', 'yes', '1')
is interpreted as a boolean True.
Useful for JSON-decoded stuff and config file parsing
"""
if isinstance(subject, bool):
return subject
if isinstance(subject, basestring):
if subject.strip().lower() in ('true', 'on', 'yes', '1'):
return True
return False
|
apache-2.0
|
azureplus/hue
|
desktop/core/ext-py/tablib-0.10.0/tablib/packages/odf3/draw.py
|
56
|
5590
|
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from .namespaces import DRAWNS, STYLENS, PRESENTATIONNS
from .element import Element
def StyleRefElement(stylename=None, classnames=None, **args):
qattrs = {}
if stylename is not None:
f = stylename.getAttrNS(STYLENS, 'family')
if f == 'graphic':
qattrs[(DRAWNS,'style-name')]= stylename
elif f == 'presentation':
qattrs[(PRESENTATIONNS,'style-name')]= stylename
else:
raise ValueError("Style's family must be either 'graphic' or 'presentation'")
if classnames is not None:
f = classnames[0].getAttrNS(STYLENS, 'family')
if f == 'graphic':
qattrs[(DRAWNS,'class-names')]= classnames
elif f == 'presentation':
qattrs[(PRESENTATIONNS,'class-names')]= classnames
else:
raise ValueError("Style's family must be either 'graphic' or 'presentation'")
return Element(qattributes=qattrs, **args)
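# Illustrative use: for a style whose family is 'graphic', e.g.
# Rect(stylename=style) attaches it as the draw:style-name attribute,
# while a 'presentation' family style lands in the presentation namespace.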
def DrawElement(name=None, **args):
e = Element(name=name, **args)
if 'displayname' not in args:
e.setAttrNS(DRAWNS,'display-name', name)
return e
# Autogenerated
def A(**args):
return Element(qname = (DRAWNS,'a'), **args)
def Applet(**args):
return Element(qname = (DRAWNS,'applet'), **args)
def AreaCircle(**args):
return Element(qname = (DRAWNS,'area-circle'), **args)
def AreaPolygon(**args):
return Element(qname = (DRAWNS,'area-polygon'), **args)
def AreaRectangle(**args):
return Element(qname = (DRAWNS,'area-rectangle'), **args)
def Caption(**args):
return StyleRefElement(qname = (DRAWNS,'caption'), **args)
def Circle(**args):
return StyleRefElement(qname = (DRAWNS,'circle'), **args)
def Connector(**args):
return StyleRefElement(qname = (DRAWNS,'connector'), **args)
def ContourPath(**args):
return Element(qname = (DRAWNS,'contour-path'), **args)
def ContourPolygon(**args):
return Element(qname = (DRAWNS,'contour-polygon'), **args)
def Control(**args):
return StyleRefElement(qname = (DRAWNS,'control'), **args)
def CustomShape(**args):
return StyleRefElement(qname = (DRAWNS,'custom-shape'), **args)
def Ellipse(**args):
return StyleRefElement(qname = (DRAWNS,'ellipse'), **args)
def EnhancedGeometry(**args):
return Element(qname = (DRAWNS,'enhanced-geometry'), **args)
def Equation(**args):
return Element(qname = (DRAWNS,'equation'), **args)
def FillImage(**args):
return DrawElement(qname = (DRAWNS,'fill-image'), **args)
def FloatingFrame(**args):
return Element(qname = (DRAWNS,'floating-frame'), **args)
def Frame(**args):
return StyleRefElement(qname = (DRAWNS,'frame'), **args)
def G(**args):
return StyleRefElement(qname = (DRAWNS,'g'), **args)
def GluePoint(**args):
return Element(qname = (DRAWNS,'glue-point'), **args)
def Gradient(**args):
return DrawElement(qname = (DRAWNS,'gradient'), **args)
def Handle(**args):
return Element(qname = (DRAWNS,'handle'), **args)
def Hatch(**args):
return DrawElement(qname = (DRAWNS,'hatch'), **args)
def Image(**args):
return Element(qname = (DRAWNS,'image'), **args)
def ImageMap(**args):
return Element(qname = (DRAWNS,'image-map'), **args)
def Layer(**args):
return Element(qname = (DRAWNS,'layer'), **args)
def LayerSet(**args):
return Element(qname = (DRAWNS,'layer-set'), **args)
def Line(**args):
return StyleRefElement(qname = (DRAWNS,'line'), **args)
def Marker(**args):
return DrawElement(qname = (DRAWNS,'marker'), **args)
def Measure(**args):
return StyleRefElement(qname = (DRAWNS,'measure'), **args)
def Object(**args):
return Element(qname = (DRAWNS,'object'), **args)
def ObjectOle(**args):
return Element(qname = (DRAWNS,'object-ole'), **args)
def Opacity(**args):
return DrawElement(qname = (DRAWNS,'opacity'), **args)
def Page(**args):
return Element(qname = (DRAWNS,'page'), **args)
def PageThumbnail(**args):
return StyleRefElement(qname = (DRAWNS,'page-thumbnail'), **args)
def Param(**args):
return Element(qname = (DRAWNS,'param'), **args)
def Path(**args):
return StyleRefElement(qname = (DRAWNS,'path'), **args)
def Plugin(**args):
return Element(qname = (DRAWNS,'plugin'), **args)
def Polygon(**args):
return StyleRefElement(qname = (DRAWNS,'polygon'), **args)
def Polyline(**args):
return StyleRefElement(qname = (DRAWNS,'polyline'), **args)
def Rect(**args):
return StyleRefElement(qname = (DRAWNS,'rect'), **args)
def RegularPolygon(**args):
return StyleRefElement(qname = (DRAWNS,'regular-polygon'), **args)
def StrokeDash(**args):
return DrawElement(qname = (DRAWNS,'stroke-dash'), **args)
def TextBox(**args):
return Element(qname = (DRAWNS,'text-box'), **args)
|
apache-2.0
|
sestrella/ansible
|
lib/ansible/modules/network/f5/bigip_remote_role.py
|
38
|
17947
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_remote_role
short_description: Manage remote roles on a BIG-IP
description:
- Manages remote roles on a BIG-IP. Remote roles are used in situations where
user authentication is handled off-box. Local access control to the BIG-IP
is controlled by the defined remote role, whereas authentication (and, by
extension, assignment to the role) is handled off-box.
version_added: 2.7
options:
name:
description:
- Specifies the name of the remote role.
type: str
required: True
line_order:
description:
- Specifies the order of the line in the file C(/config/bigip/auth/remoterole).
- The LDAP and Active Directory servers read this file line by line.
- The order of the information is important; therefore, F5 recommends that
you set the first line at 1000. This allows you, in the future, to insert
lines before the first line.
- When creating a new remote role, this parameter is required.
type: int
attribute_string:
description:
- Specifies the user account attributes saved in the group, in the format
C(cn=, ou=, dc=).
- When creating a new remote role, this parameter is required.
type: str
remote_access:
description:
- Enables or disables remote access for the specified group of remotely
authenticated users.
- When creating a new remote role, if this parameter is not specified, the default
is C(yes).
type: bool
assigned_role:
description:
- Specifies the authorization (level of access) for the account.
- When creating a new remote role, if this parameter is not provided, the
default is C(none).
- The C(partition_access) parameter controls which partitions the account can
access.
- The chosen role may affect the partitions that one is allowed to specify.
Specifically, roles such as C(administrator), C(auditor) and C(resource-administrator)
require a C(partition_access) of C(all).
- A set of pre-existing roles ship with the system. They are C(none), C(guest),
C(operator), C(application-editor), C(manager), C(certificate-manager),
C(irule-manager), C(user-manager), C(resource-administrator), C(auditor),
C(administrator), C(firewall-manager).
type: str
partition_access:
description:
- Specifies the accessible partitions for the account.
- This parameter supports the reserved names C(all) and C(Common), as well as
specific partitions a user may access.
- Users who have access to a partition can operate on objects in that partition,
as determined by the permissions conferred by the user's C(assigned_role).
- When creating a new remote role, if this parameter is not specified, the default
is C(all).
type: str
terminal_access:
description:
- Specifies terminal-based accessibility for remote accounts not already
explicitly assigned a user role.
- Common values for this include C(tmsh) and C(none), however custom values
may also be specified.
- When creating a new remote role, if this parameter is not specified, the default
is C(none).
type: str
state:
description:
- When C(present), guarantees that the remote role exists.
- When C(absent), removes the remote role from the system.
type: str
choices:
- absent
- present
default: present
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create a remote role
bigip_remote_role:
name: foo
group_name: ldap_group
line_order: 1
attribute_string: memberOf=cn=ldap_group,cn=ldap.group,ou=ldap
remote_access: enabled
assigned_role: administrator
partition_access: all
terminal_access: none
state: present
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
attribute_string:
description: The new attribute string of the resource.
returned: changed
type: str
sample: "memberOf=cn=ldap_group,cn=ldap.group,ou=ldap"
terminal_access:
description: The terminal setting of the remote role.
returned: changed
type: str
sample: tmsh
line_order:
description: Order of the remote role for LDAP and Active Directory servers.
returned: changed
type: int
sample: 1000
assigned_role:
description: System role that this remote role is associated with.
returned: changed
type: str
sample: administrator
partition_access:
description: Partition that the role has access to.
returned: changed
type: str
sample: all
remote_access:
description: Whether remote access is allowed or not.
returned: changed
type: bool
sample: no
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.common import transform_name
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.common import transform_name
class Parameters(AnsibleF5Parameters):
api_map = {
'attribute': 'attribute_string',
'console': 'terminal_access',
'lineOrder': 'line_order',
'role': 'assigned_role',
'userPartition': 'partition_access',
'deny': 'remote_access'
}
api_attributes = [
'attribute',
'console',
'lineOrder',
'role',
'deny',
'userPartition',
]
returnables = [
'attribute_string',
'terminal_access',
'line_order',
'assigned_role',
'partition_access',
'remote_access',
]
updatables = [
'attribute_string',
'terminal_access',
'line_order',
'assigned_role',
'partition_access',
'remote_access',
]
role_map = {
'application-editor': 'applicationeditor',
'none': 'noaccess',
'certificate-manager': 'certificatemanager',
'irule-manager': 'irulemanager',
'user-manager': 'usermanager',
'resource-administrator': 'resourceadmin',
'firewall-manager': 'firewallmanager'
}
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def partition(self):
return 'Common'
@property
def assigned_role(self):
if self._values['assigned_role'] is None:
return None
return self.role_map.get(self._values['assigned_role'], self._values['assigned_role'])
@property
def terminal_access(self):
if self._values['terminal_access'] in [None, 'tmsh']:
return self._values['terminal_access']
elif self._values['terminal_access'] == 'none':
return 'disable'
return self._values['terminal_access']
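# Note the vocabulary shift: the module's 'none' becomes 'disable' on the
# API side, and ReportableChanges below maps the API's 'disabled' spelling
# back to 'none' for display.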
@property
def partition_access(self):
if self._values['partition_access'] is None:
return None
if self._values['partition_access'] == 'all':
return 'All'
return self._values['partition_access']
@property
def remote_access(self):
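# remote_access maps to the API's 'deny' attribute (see api_map), so the
# boolean is inverted: yes -> deny disabled, no -> deny enabled.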
result = flatten_boolean(self._values['remote_access'])
if result == 'yes':
return 'disabled'
elif result == 'no':
return 'enabled'
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
@property
def assigned_role(self):
if self._values['assigned_role'] is None:
return None
rmap = dict((v, k) for k, v in iteritems(self.role_map))
return rmap.get(self._values['assigned_role'], self._values['assigned_role'])
@property
def terminal_access(self):
if self._values['terminal_access'] in [None, 'tmsh']:
return self._values['terminal_access']
elif self._values['terminal_access'] == 'disabled':
return 'none'
return self._values['terminal_access']
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
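# __default is the generic field comparison: report the wanted value when
# it differs from the device's value, or when the device has none.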
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/auth/remote-role/role-info/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
if self.want.partition_access is None:
self.want.update({'partition_access': 'all'})
if self.want.remote_access is None:
self.want.update({'remote_access': True})
if self.want.assigned_role is None:
self.want.update({'assigned_role': 'none'})
if self.want.terminal_access is None:
self.want.update({'terminal_access': 'none'})
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
uri = "https://{0}:{1}/mgmt/tm/auth/remote-role/role-info/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/auth/remote-role/role-info/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
if 'Once configured [All] partition, remote user group cannot' in response['message']:
raise F5ModuleError(
"The specified 'attribute_string' is already used in the 'all' partition."
)
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/auth/remote-role/role-info/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
        resp = self.client.api.delete(uri)
        if resp.status == 200:
            return True
        # The error body is JSON, so decode it before inspecting 'code'.
        try:
            response = resp.json()
        except ValueError:
            raise F5ModuleError(resp.content)
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
        raise F5ModuleError(resp.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/auth/remote-role/role-info/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
line_order=dict(type='int'),
attribute_string=dict(),
remote_access=dict(type='bool'),
assigned_role=dict(),
partition_access=dict(),
terminal_access=dict(),
state=dict(
default='present',
choices=['present', 'absent']
),
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
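# Editor's sketch of a playbook task for this module (hedged: the module
# name and all values below are illustrative assumptions, not taken from
# this file):
#
#   - name: Map an LDAP group to a role on the BIG-IP
#     bigip_remote_role:
#       name: ldap-guests
#       attribute_string: memberOf=cn=guests,dc=example,dc=com
#       terminal_access: tmsh
#       partition_access: all
#       state: present
#       provider: "{{ f5_provider }}"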
|
gpl-3.0
|
chengjf/database-interface-doc-management
|
flask-demo/flask/Lib/site-packages/pip/_vendor/requests/packages/urllib3/connectionpool.py
|
477
|
30319
|
import errno
import logging
import sys
import warnings
from socket import error as SocketError, timeout as SocketTimeout
import socket
try: # Python 3
from queue import LifoQueue, Empty, Full
except ImportError:
from Queue import LifoQueue, Empty, Full
    import Queue as _unused_module_Queue  # Platform-specific: Windows
from .exceptions import (
ClosedPoolError,
ProtocolError,
EmptyPoolError,
HostChangedError,
LocationValueError,
MaxRetryError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
InsecureRequestWarning,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .connection import (
port_by_scheme,
DummyConnection,
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
HTTPException, BaseSSLError, ConnectionError
)
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host
xrange = six.moves.xrange
log = logging.getLogger(__name__)
_Default = object()
## Pool objects
class ConnectionPool(object):
"""
Base class for all connection pools, such as
:class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
"""
scheme = None
QueueCls = LifoQueue
def __init__(self, host, port=None):
if not host:
raise LocationValueError("No host specified.")
# httplib doesn't like it when we include brackets in ipv6 addresses
self.host = host.strip('[]')
self.port = port
def __str__(self):
return '%s(host=%r, port=%r)' % (type(self).__name__,
self.host, self.port)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
# Return False to re-raise any potential exceptions
return False
    def close(self):
"""
Close all pooled connections and disable the pool.
"""
pass
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Thread-safe connection pool for one host.
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
:class:`httplib.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
into :class:`httplib.HTTPConnection`.
:param strict:
Causes BadStatusLine to be raised if the status line can't be parsed
as a valid HTTP/1.0 or 1.1 status line, passed into
:class:`httplib.HTTPConnection`.
.. note::
Only works in Python 2. This parameter is ignored in Python 3.
:param timeout:
Socket timeout in seconds for each individual connection. This can
be a float or integer, which sets the timeout for the HTTP request,
or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        run, this is always a :class:`urllib3.util.Timeout` object.
:param maxsize:
Number of connections to save that can be reused. More than 1 is useful
in multithreaded situations. If ``block`` is set to false, more
connections will be created but they will not be saved once they've
been used.
:param block:
If set to True, no more than ``maxsize`` connections will be used at
a time. When no free connections are available, the call will block
until a connection has been released. This is a useful side effect for
particular multithreaded situations where one does not want to use more
than maxsize connections per host to prevent flooding.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param retries:
Retry configuration to use by default with requests in this pool.
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.connectionpool.ProxyManager`
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.connectionpool.ProxyManager`
:param \**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
:class:`urllib3.connection.HTTPSConnection` instances.
"""
scheme = 'http'
ConnectionCls = HTTPConnection
def __init__(self, host, port=None, strict=False,
timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
headers=None, retries=None,
_proxy=None, _proxy_headers=None,
**conn_kw):
ConnectionPool.__init__(self, host, port)
RequestMethods.__init__(self, headers)
self.strict = strict
if not isinstance(timeout, Timeout):
timeout = Timeout.from_float(timeout)
if retries is None:
retries = Retry.DEFAULT
self.timeout = timeout
self.retries = retries
self.pool = self.QueueCls(maxsize)
self.block = block
self.proxy = _proxy
self.proxy_headers = _proxy_headers or {}
# Fill the queue up so that doing get() on it will block properly
for _ in xrange(maxsize):
self.pool.put(None)
# These are mostly for testing and debugging purposes.
self.num_connections = 0
self.num_requests = 0
self.conn_kw = conn_kw
if self.proxy:
# Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
# We cannot know if the user has added default socket options, so we cannot replace the
# list.
self.conn_kw.setdefault('socket_options', [])
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
"""
self.num_connections += 1
log.info("Starting new HTTP connection (%d): %s" %
(self.num_connections, self.host))
conn = self.ConnectionCls(host=self.host, port=self.port,
timeout=self.timeout.connect_timeout,
strict=self.strict, **self.conn_kw)
return conn
def _get_conn(self, timeout=None):
"""
Get a connection. Will return a pooled connection if one is available.
If no connections are available and :prop:`.block` is ``False``, then a
fresh connection is returned.
:param timeout:
Seconds to wait before giving up and raising
:class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
:prop:`.block` is ``True``.
"""
conn = None
try:
conn = self.pool.get(block=self.block, timeout=timeout)
except AttributeError: # self.pool is None
raise ClosedPoolError(self, "Pool is closed.")
except Empty:
if self.block:
raise EmptyPoolError(self,
"Pool reached maximum size and no more "
"connections are allowed.")
pass # Oh well, we'll create a new connection then
# If this is a persistent connection, check if it got disconnected
if conn and is_connection_dropped(conn):
log.info("Resetting dropped connection: %s" % self.host)
conn.close()
if getattr(conn, 'auto_open', 1) == 0:
# This is a proxied connection that has been mutated by
# httplib._tunnel() and cannot be reused (since it would
# attempt to bypass the proxy)
conn = None
return conn or self._new_conn()
def _put_conn(self, conn):
"""
Put a connection back into the pool.
:param conn:
Connection object for the current host and port as returned by
:meth:`._new_conn` or :meth:`._get_conn`.
If the pool is already full, the connection is closed and discarded
because we exceeded maxsize. If connections are discarded frequently,
then maxsize should be increased.
If the pool is closed, then the connection will be closed and discarded.
"""
try:
self.pool.put(conn, block=False)
return # Everything is dandy, done.
except AttributeError:
# self.pool is None.
pass
except Full:
# This should never happen if self.block == True
log.warning(
"Connection pool is full, discarding connection: %s" %
self.host)
# Connection never got put back into the pool, close it.
if conn:
conn.close()
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
pass
def _prepare_proxy(self, conn):
# Nothing to do for HTTP connections.
pass
def _get_timeout(self, timeout):
""" Helper that always returns a :class:`urllib3.util.Timeout` """
if timeout is _Default:
return self.timeout.clone()
if isinstance(timeout, Timeout):
return timeout.clone()
else:
# User passed us an int/float. This is for backwards compatibility,
# can be removed later
return Timeout.from_float(timeout)
def _raise_timeout(self, err, url, timeout_value):
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
if isinstance(err, SocketTimeout):
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
# See the above comment about EAGAIN in Python 3. In Python 2 we have
# to specifically catch it and throw the timeout error
if hasattr(err, 'errno') and err.errno in _blocking_errnos:
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
# Catch possible read timeouts thrown as SSL errors. If not the
# case, rethrow the original. We need to do this because of:
# http://bugs.python.org/issue10272
if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
def _make_request(self, conn, method, url, timeout=_Default,
**httplib_request_kw):
"""
Perform a request on a given urllib connection object taken from our
pool.
:param conn:
a connection from one of our connection pools
:param timeout:
Socket timeout in seconds for the request. This can be a
float or integer, which will set the same timeout value for
the socket connect and the socket read, or an instance of
:class:`urllib3.util.Timeout`, which gives you more fine-grained
control over your timeouts.
"""
self.num_requests += 1
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
conn.timeout = timeout_obj.connect_timeout
# Trigger any extra validation we need to do.
try:
self._validate_conn(conn)
except (SocketTimeout, BaseSSLError) as e:
# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
# conn.request() calls httplib.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
conn.request(method, url, **httplib_request_kw)
# Reset the timeout for the recv() on the socket
read_timeout = timeout_obj.read_timeout
# App Engine doesn't have a sock attr
if getattr(conn, 'sock', None):
# In Python 3 socket.py will catch EAGAIN and return None when you
# try and read into the file pointer created by http.client, which
# instead raises a BadStatusLine exception. Instead of catching
# the exception and assuming all BadStatusLine exceptions are read
# timeouts, check for a zero timeout before making the request.
if read_timeout == 0:
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % read_timeout)
if read_timeout is Timeout.DEFAULT_TIMEOUT:
conn.sock.settimeout(socket.getdefaulttimeout())
else: # None or a value
conn.sock.settimeout(read_timeout)
# Receive the response from the server
try:
try: # Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
except TypeError: # Python 2.6 and older
httplib_response = conn.getresponse()
except (SocketTimeout, BaseSSLError, SocketError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
# AppEngine doesn't have a version attr.
http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
log.debug("\"%s %s %s\" %s %s" % (method, url, http_version,
httplib_response.status,
httplib_response.length))
return httplib_response
def close(self):
"""
Close all pooled connections and disable the pool.
"""
# Disable access to the pool
old_pool, self.pool = self.pool, None
try:
while True:
conn = old_pool.get(block=False)
if conn:
conn.close()
except Empty:
pass # Done.
def is_same_host(self, url):
"""
Check if the given ``url`` is a member of the same host as this
connection pool.
"""
if url.startswith('/'):
return True
# TODO: Add optional support for socket.gethostbyname checking.
scheme, host, port = get_host(url)
# Use explicit default port for comparison when none is given
if self.port and not port:
port = port_by_scheme.get(scheme)
elif not self.port and port == port_by_scheme.get(scheme):
port = None
return (scheme, host, port) == (self.scheme, self.host, self.port)
def urlopen(self, method, url, body=None, headers=None, retries=None,
redirect=True, assert_same_host=True, timeout=_Default,
pool_timeout=None, release_conn=None, **response_kw):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param body:
Data to send in the request body (useful for creating
POST requests, see HTTPConnectionPool.post_url for
more convenience).
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When False, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param \**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get('preload_content', True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
conn = None
# Merge the proxy headers. Only do this in HTTP. We have to copy the
# headers dict so we can safely change it without those changes being
# reflected in anyone else's copy.
if self.scheme == 'http':
headers = headers.copy()
headers.update(self.proxy_headers)
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
if is_new_proxy_conn:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
httplib_response = self._make_request(conn, method, url,
timeout=timeout_obj,
body=body, headers=headers)
# If we're going to release the connection in ``finally:``, then
# the request doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = not release_conn and conn
# Import httplib's response into our own wrapper object
response = HTTPResponse.from_httplib(httplib_response,
pool=self,
connection=response_conn,
**response_kw)
# else:
# The connection will be put back into the pool when
# ``response.release_conn()`` is called (implicitly by
# ``response.read()``)
except Empty:
# Timed out by queue.
raise EmptyPoolError(self, "No pool connections are available.")
except (BaseSSLError, CertificateError) as e:
# Close the connection. If a connection is reused on which there
# was a Certificate error, the next request will certainly raise
# another Certificate error.
if conn:
conn.close()
conn = None
raise SSLError(e)
except SSLError:
# Treat SSLError separately from BaseSSLError to preserve
# traceback.
if conn:
conn.close()
conn = None
raise
except (TimeoutError, HTTPException, SocketError, ConnectionError) as e:
if conn:
# Discard the connection for these exceptions. It will be
# be replaced during the next _get_conn() call.
conn.close()
conn = None
if isinstance(e, SocketError) and self.proxy:
e = ProxyError('Cannot connect to proxy.', e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError('Connection aborted.', e)
retries = retries.increment(method, url, error=e, _pool=self,
_stacktrace=sys.exc_info()[2])
retries.sleep()
# Keep track of the error for the retry warning.
err = e
finally:
if release_conn:
# Put the connection back to be reused. If the connection is
# expired then it will be None, which will get replaced with a
# fresh connection during _get_conn.
self._put_conn(conn)
if not conn:
# Try again
log.warning("Retrying (%r) after connection "
"broken by '%r': %s" % (retries, err, url))
return self.urlopen(method, url, body, headers, retries,
redirect, assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
if response.status == 303:
method = 'GET'
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
raise
return response
log.info("Redirecting %s -> %s" % (url, redirect_location))
return self.urlopen(method, redirect_location, body, headers,
retries=retries, redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
# Check if we should retry the HTTP response.
if retries.is_forced_retry(method, status_code=response.status):
retries = retries.increment(method, url, response=response, _pool=self)
retries.sleep()
log.info("Forced retry: %s" % url)
return self.urlopen(method, url, body, headers,
retries=retries, redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout, pool_timeout=pool_timeout,
release_conn=release_conn, **response_kw)
return response
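def _example_http_pool_usage():  # editor-added sketch, not part of urllib3
    """Minimal usage sketch for :class:`HTTPConnectionPool`.

    Assumes a reachable plain-HTTP host; ``request()`` comes from the
    inherited :class:`.RequestMethods` mixin, and ``Timeout``/``Retry`` are
    the helpers imported at the top of this module.
    """
    pool = HTTPConnectionPool('example.com', maxsize=2, block=True,
                              timeout=Timeout(connect=2.0, read=5.0),
                              retries=Retry(3, redirect=2))
    response = pool.request('GET', '/')
    return response.status, len(response.data)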
class HTTPSConnectionPool(HTTPConnectionPool):
"""
Same as :class:`.HTTPConnectionPool`, but HTTPS.
When Python is compiled with the :mod:`ssl` module, then
:class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
instead of :class:`.HTTPSConnection`.
:class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
``assert_hostname`` and ``host`` in this order to verify connections.
If ``assert_hostname`` is False, no verification is done.
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs`` and
``ssl_version`` are only used if :mod:`ssl` is available and are fed into
:meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket
into an SSL socket.
"""
scheme = 'https'
ConnectionCls = HTTPSConnection
def __init__(self, host, port=None,
strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
block=False, headers=None, retries=None,
_proxy=None, _proxy_headers=None,
key_file=None, cert_file=None, cert_reqs=None,
ca_certs=None, ssl_version=None,
assert_hostname=None, assert_fingerprint=None,
**conn_kw):
HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
block, headers, retries, _proxy, _proxy_headers,
**conn_kw)
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
self.ca_certs = ca_certs
self.ssl_version = ssl_version
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
def _prepare_conn(self, conn):
"""
Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
and establish the tunnel if proxy is used.
"""
if isinstance(conn, VerifiedHTTPSConnection):
conn.set_cert(key_file=self.key_file,
cert_file=self.cert_file,
cert_reqs=self.cert_reqs,
ca_certs=self.ca_certs,
assert_hostname=self.assert_hostname,
assert_fingerprint=self.assert_fingerprint)
conn.ssl_version = self.ssl_version
return conn
def _prepare_proxy(self, conn):
"""
Establish tunnel connection early, because otherwise httplib
would improperly set Host: header to proxy's IP:port.
"""
# Python 2.7+
try:
set_tunnel = conn.set_tunnel
except AttributeError: # Platform-specific: Python 2.6
set_tunnel = conn._set_tunnel
if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
set_tunnel(self.host, self.port)
else:
set_tunnel(self.host, self.port, self.proxy_headers)
conn.connect()
def _new_conn(self):
"""
Return a fresh :class:`httplib.HTTPSConnection`.
"""
self.num_connections += 1
log.info("Starting new HTTPS connection (%d): %s"
% (self.num_connections, self.host))
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
raise SSLError("Can't connect to HTTPS URL because the SSL "
"module is not available.")
actual_host = self.host
actual_port = self.port
if self.proxy is not None:
actual_host = self.proxy.host
actual_port = self.proxy.port
conn = self.ConnectionCls(host=actual_host, port=actual_port,
timeout=self.timeout.connect_timeout,
strict=self.strict, **self.conn_kw)
return self._prepare_conn(conn)
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
super(HTTPSConnectionPool, self)._validate_conn(conn)
# Force connect early to allow us to validate the connection.
if not getattr(conn, 'sock', None): # AppEngine might not have `.sock`
conn.connect()
if not conn.is_verified:
warnings.warn((
'Unverified HTTPS request is being made. '
'Adding certificate verification is strongly advised. See: '
'https://urllib3.readthedocs.org/en/latest/security.html'),
InsecureRequestWarning)
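def _example_https_pool_usage():  # editor-added sketch, not part of urllib3
    """Minimal usage sketch for :class:`HTTPSConnectionPool`.

    The ``ca_certs`` path is hypothetical; with ``cert_reqs='CERT_REQUIRED'``
    the underlying :class:`.VerifiedHTTPSConnection` verifies the peer
    certificate, so no :class:`InsecureRequestWarning` is emitted.
    """
    pool = HTTPSConnectionPool('example.com', port=443,
                               cert_reqs='CERT_REQUIRED',
                               ca_certs='/etc/ssl/certs/ca-certificates.crt')
    return pool.request('GET', '/').status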
def connection_from_url(url, **kw):
"""
Given a url, return an :class:`.ConnectionPool` instance of its host.
This is a shortcut for not having to parse out the scheme, host, and port
of the url before creating an :class:`.ConnectionPool` instance.
:param url:
Absolute URL string that must include the scheme. Port is optional.
:param \**kw:
Passes additional parameters to the constructor of the appropriate
:class:`.ConnectionPool`. Useful for specifying things like
timeout, maxsize, headers, etc.
Example::
>>> conn = connection_from_url('http://google.com/')
>>> r = conn.request('GET', '/')
"""
scheme, host, port = get_host(url)
if scheme == 'https':
return HTTPSConnectionPool(host, port=port, **kw)
else:
return HTTPConnectionPool(host, port=port, **kw)
|
apache-2.0
|
taedori81/wagtail
|
wagtail/wagtailembeds/embeds.py
|
3
|
4575
|
from datetime import datetime
import json
# Needs to be imported like this to allow @patch to work in tests
from six.moves.urllib import request as urllib_request
from six.moves.urllib.request import Request
from six.moves.urllib.error import URLError
from six.moves.urllib.parse import urlencode
from django.utils.module_loading import import_string
from django.conf import settings
from wagtail.wagtailembeds.oembed_providers import get_oembed_provider
from wagtail.wagtailembeds.models import Embed
class EmbedNotFoundException(Exception):
pass
class EmbedlyException(Exception):
pass
class AccessDeniedEmbedlyException(EmbedlyException):
pass
def embedly(url, max_width=None, key=None):
from embedly import Embedly
# Get embedly key
if key is None:
key = settings.EMBEDLY_KEY
# Get embedly client
client = Embedly(key=key)
# Call embedly
if max_width is not None:
oembed = client.oembed(url, maxwidth=max_width, better=False)
else:
oembed = client.oembed(url, better=False)
# Check for error
if oembed.get('error'):
if oembed['error_code'] in [401, 403]:
raise AccessDeniedEmbedlyException
elif oembed['error_code'] == 404:
raise EmbedNotFoundException
else:
raise EmbedlyException
# Convert photos into HTML
if oembed['type'] == 'photo':
html = '<img src="%s" />' % (oembed['url'], )
else:
html = oembed.get('html')
# Return embed as a dict
return {
        'title': oembed.get('title', ''),
        'author_name': oembed.get('author_name', ''),
        'provider_name': oembed.get('provider_name', ''),
'type': oembed['type'],
'thumbnail_url': oembed.get('thumbnail_url'),
'width': oembed.get('width'),
'height': oembed.get('height'),
'html': html,
}
def oembed(url, max_width=None):
# Find provider
provider = get_oembed_provider(url)
if provider is None:
raise EmbedNotFoundException
# Work out params
params = {'url': url, 'format': 'json'}
if max_width:
params['maxwidth'] = max_width
# Perform request
request = Request(provider + '?' + urlencode(params))
request.add_header('User-agent', 'Mozilla/5.0')
try:
r = urllib_request.urlopen(request)
except URLError:
raise EmbedNotFoundException
oembed = json.loads(r.read().decode('utf-8'))
# Convert photos into HTML
if oembed['type'] == 'photo':
html = '<img src="%s" />' % (oembed['url'], )
else:
html = oembed.get('html')
# Return embed as a dict
return {
        'title': oembed.get('title', ''),
        'author_name': oembed.get('author_name', ''),
        'provider_name': oembed.get('provider_name', ''),
'type': oembed['type'],
'thumbnail_url': oembed.get('thumbnail_url'),
'width': oembed.get('width'),
'height': oembed.get('height'),
'html': html,
}
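def _example_oembed_lookup():  # editor-added sketch, not part of wagtail
    """Illustrative call of the oembed finder above.

    The URL is hypothetical; a real call needs network access and a matching
    entry in the oembed provider registry, and raises EmbedNotFoundException
    otherwise.
    """
    data = oembed('https://www.youtube.com/watch?v=9bZkp7q19f0', max_width=640)
    return data['type'], data['html']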
def get_default_finder():
# Check if the user has set the embed finder manually
if hasattr(settings, 'WAGTAILEMBEDS_EMBED_FINDER'):
return import_string(settings.WAGTAILEMBEDS_EMBED_FINDER)
# Use embedly if the embedly key is set
if hasattr(settings, 'EMBEDLY_KEY'):
return embedly
# Fall back to oembed
return oembed
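# Editor's note on finder selection (setting values are hypothetical): either
# Django setting below changes what get_default_finder() returns.
#
#     WAGTAILEMBEDS_EMBED_FINDER = 'myapp.embeds.custom_finder'
#     EMBEDLY_KEY = 'abc123'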
def get_embed(url, max_width=None, finder=None):
# Check database
try:
return Embed.objects.get(url=url, max_width=max_width)
except Embed.DoesNotExist:
pass
# Get/Call finder
if not finder:
finder = get_default_finder()
embed_dict = finder(url, max_width)
# Make sure width and height are valid integers before inserting into database
try:
embed_dict['width'] = int(embed_dict['width'])
except (TypeError, ValueError):
embed_dict['width'] = None
try:
embed_dict['height'] = int(embed_dict['height'])
except (TypeError, ValueError):
embed_dict['height'] = None
# Make sure html field is valid
if 'html' not in embed_dict or not embed_dict['html']:
embed_dict['html'] = ''
# Create database record
embed, created = Embed.objects.get_or_create(
url=url,
max_width=max_width,
defaults=embed_dict,
)
# Save
embed.last_updated = datetime.now()
embed.save()
return embed
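# Editor's usage sketch for the caching wrapper above (URL is hypothetical):
# the first call invokes the finder and stores an Embed row; the second is
# served straight from the database.
#
#     embed = get_embed('https://vimeo.com/76979871', max_width=720)
#     cached = get_embed('https://vimeo.com/76979871', max_width=720)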
|
bsd-3-clause
|
mkieszek/odoo
|
addons/sale/wizard/sale_line_invoice.py
|
3
|
5219
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import osv, fields
from openerp.tools.translate import _
from openerp import workflow
from openerp.exceptions import UserError
class sale_order_line_make_invoice(osv.osv_memory):
_name = "sale.order.line.make.invoice"
_description = "Sale OrderLine Make_invoice"
def _prepare_invoice(self, cr, uid, order, lines, context=None):
        account_id = order.partner_id.property_account_receivable_id.id
if order.partner_id and order.partner_id.property_payment_term_id.id:
pay_term = order.partner_id.property_payment_term_id.id
else:
pay_term = False
return {
'name': order.client_order_ref or '',
'origin': order.name,
'type': 'out_invoice',
'reference': "P%dSO%d" % (order.partner_id.id, order.id),
            'account_id': account_id,
'partner_id': order.partner_invoice_id.id,
'invoice_line_ids': [(6, 0, lines)],
'currency_id' : order.pricelist_id.currency_id.id,
'comment': order.note,
'payment_term_id': pay_term,
'fiscal_position_id': order.fiscal_position_id.id or order.partner_id.property_account_position_id.id,
'user_id': order.user_id and order.user_id.id or False,
'company_id': order.company_id and order.company_id.id or False,
'date_invoice': fields.date.today(),
'team_id': order.team_id.id,
}
def make_invoices(self, cr, uid, ids, context=None):
"""
        Create invoices for the sale order lines selected in the context.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: the ID or list of IDs of this wizard
        @param context: A standard dictionary
        @return: An action dictionary (invoice view or window close)
"""
if context is None: context = {}
res = False
invoices = {}
#TODO: merge with sale.py/make_invoice
def make_invoice(order, lines):
"""
            Create one invoice for a sale order.
            @param order: browse record of the sale order
            @param lines: list of invoice line IDs to attach
            @return: ID of the created account.invoice
"""
inv = self._prepare_invoice(cr, uid, order, lines)
inv_id = self.pool.get('account.invoice').create(cr, uid, inv)
return inv_id
sales_order_line_obj = self.pool.get('sale.order.line')
sales_order_obj = self.pool.get('sale.order')
for line in sales_order_line_obj.browse(cr, uid, context.get('active_ids', []), context=context):
if (not line.invoiced) and (line.state not in ('draft', 'cancel')):
if not line.order_id in invoices:
invoices[line.order_id] = []
line_id = sales_order_line_obj.invoice_line_create(cr, uid, [line.id])
for lid in line_id:
invoices[line.order_id].append(lid)
for order, il in invoices.items():
res = make_invoice(order, il)
cr.execute('INSERT INTO sale_order_invoice_rel \
(order_id,invoice_id) values (%s,%s)', (order.id, res))
sales_order_obj.invalidate_cache(cr, uid, ['invoice_ids'], [order.id], context=context)
flag = True
sales_order_obj.message_post(cr, uid, [order.id], body=_("Invoice created"), context=context)
data_sale = sales_order_obj.browse(cr, uid, order.id, context=context)
for line in data_sale.order_line:
if not line.invoiced and line.state != 'cancel':
flag = False
break
if flag:
line.order_id.write({'state': 'progress'})
workflow.trg_validate(uid, 'sale.order', order.id, 'all_lines', cr)
if not invoices:
            raise UserError(_('Invoice cannot be created for this Sales Order Line due to one of the following reasons:\n1. The state of this sales order line is either "draft" or "cancel"!\n2. The Sales Order Line is already invoiced!'))
if context.get('open_invoices', False):
return self.open_invoices(cr, uid, ids, res, context=context)
return {'type': 'ir.actions.act_window_close'}
def open_invoices(self, cr, uid, ids, invoice_ids, context=None):
""" open a view on one of the given invoice_ids """
ir_model_data = self.pool.get('ir.model.data')
form_res = ir_model_data.get_object_reference(cr, uid, 'account', 'invoice_form')
form_id = form_res and form_res[1] or False
tree_res = ir_model_data.get_object_reference(cr, uid, 'account', 'invoice_tree')
tree_id = tree_res and tree_res[1] or False
return {
'name': _('Invoice'),
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'account.invoice',
'res_id': invoice_ids,
'view_id': False,
'views': [(form_id, 'form'), (tree_id, 'tree')],
'context': {'type': 'out_invoice'},
'type': 'ir.actions.act_window',
}
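# Editor's invocation sketch (IDs are hypothetical): the wizard reads the
# selected line IDs from the context, so a typical call looks like:
#
#     ctx = dict(context, active_ids=[line_id], open_invoices=True)
#     wizard_obj = self.pool.get('sale.order.line.make.invoice')
#     wiz_id = wizard_obj.create(cr, uid, {}, context=ctx)
#     wizard_obj.make_invoices(cr, uid, [wiz_id], context=ctx)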
|
agpl-3.0
|
DanielSBrown/osf.io
|
api/addons/views.py
|
4
|
2693
|
from rest_framework.exceptions import NotFound
from rest_framework import generics, permissions as drf_permissions
from framework.auth.oauth_scopes import CoreScopes
from api.addons.serializers import AddonSerializer
from api.base.permissions import TokenHasScope
from api.base.settings import ADDONS_OAUTH
from api.base.views import JSONAPIBaseView
from website import settings as osf_settings
class AddonSettingsMixin(object):
"""Mixin with convenience method for retrieving the current <Addon><Node|User>Settings based on the
current URL. By default, fetches the settings based on the user or node available in self context.
"""
def get_addon_settings(self, provider=None, fail_if_absent=True):
owner = None
if hasattr(self, 'get_user'):
owner = self.get_user()
elif hasattr(self, 'get_node'):
owner = self.get_node()
provider = provider or self.kwargs['provider']
if not owner or provider not in ADDONS_OAUTH:
raise NotFound('Requested addon unavailable')
addon_settings = owner.get_addon(provider)
if not addon_settings and fail_if_absent:
raise NotFound('Requested addon not enabled')
if not addon_settings or addon_settings.deleted:
return None
return addon_settings
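# Editor's sketch (hypothetical view, not part of this module): any view that
# exposes get_user() or get_node() can resolve addon settings from the URL.
#
#     class UserAddonDetail(JSONAPIBaseView, generics.RetrieveAPIView,
#                           AddonSettingsMixin):
#         serializer_class = AddonSerializer
#
#         def get_user(self):
#             return self.request.user
#
#         def get_object(self):
#             return self.get_addon_settings()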
class AddonList(JSONAPIBaseView, generics.ListAPIView):
"""List of addons configurable with the OSF *Read-only*.
Paginated list of addons associated with third-party services
##Permissions
No restrictions.
## <Addon> Attributes
OSF <Addon\> entities have the "addons" `type`, and their `id` indicates the
`short_name` of the associated service provider (eg. `box`, `googledrive`, etc).
    name           type    description
    ======================================================================================================
    url            string  URL of this third-party service
    name           string  `full_name` of third-party service provider
    description    string  Description of this addon
    categories     list    List of categories this addon belongs to
#This Request/Response
"""
permission_classes = (
drf_permissions.AllowAny,
drf_permissions.IsAuthenticatedOrReadOnly,
TokenHasScope, )
required_read_scopes = [CoreScopes.ALWAYS_PUBLIC]
required_write_scopes = [CoreScopes.NULL]
serializer_class = AddonSerializer
view_category = 'addons'
view_name = 'addon-list'
def get_queryset(self):
return [conf for conf in osf_settings.ADDONS_AVAILABLE_DICT.itervalues() if 'accounts' in conf.configs]
|
apache-2.0
|