repo_name (string) | path (string) | copies (string) | size (string) | content (string) | license (string) | hash (int64) | line_mean (float64) | line_max (int64) | alpha_frac (float64) | autogenerated (bool)
---|---|---|---|---|---|---|---|---|---|---|
dotKom/onlineweb4 | apps/contact/forms.py | 1 | 1535 | from captcha.fields import ReCaptchaField
from django import forms
class ContactForm(forms.Form):
mail_choices = (("[email protected]", "Hovedstyret"),
("[email protected]", "Drifts- og utviklingskomiteen")
)
contact_receiver = forms.ChoiceField(required=True, label="Hvem ønsker du å kontakte?", choices=mail_choices)
contact_checkbox = forms.BooleanField(required=False)
contact_name = forms.CharField(required=False, widget=forms.TextInput({"placeholder": "Navn",
"required": True}))
contact_email = forms.EmailField(required=False, widget=forms.EmailInput({"placeholder": "Epostadresse",
"required": True}))
content = forms.CharField(required=True, widget=forms.Textarea({"placeholder": "Din melding"}))
captcha = ReCaptchaField(error_messages={'required': ('Du klarte ikke captchaen! Er du en bot?'),
'invalid': ('Du klarte ikke captchaen! Er du en bot?')})
def clean(self):
name = self.cleaned_data.get('contact_name')
is_anon = self.cleaned_data.get('contact_checkbox')
email = self.cleaned_data.get('contact_email')
if not (name and email) and not is_anon:
error_msg = "Must be filled"
self.add_error('contact_name', error_msg)
self.add_error('contact_email', error_msg)
| mit | -5,438,166,202,593,801,000 | 55.777778 | 113 | 0.58578 | false |
dmlc/web-data | keras/models/s2s_translate/lstm_seq2seq.py | 1 | 9176 | '''Sequence to sequence example in Keras (character-level).
This script demonstrates how to implement a basic character-level
sequence-to-sequence model. We apply it to translating
short English sentences into short French sentences,
character-by-character. Note that it is fairly unusual to
do character-level machine translation, as word-level
models are more common in this domain.
# Summary of the algorithm
- We start with input sequences from a domain (e.g. English sentences)
and corresponding target sequences from another domain
(e.g. French sentences).
- An encoder LSTM turns input sequences to 2 state vectors
(we keep the last LSTM state and discard the outputs).
- A decoder LSTM is trained to turn the target sequences into
the same sequence but offset by one timestep in the future,
a training process called "teacher forcing" in this context.
It uses as initial state the state vectors from the encoder.
Effectively, the decoder learns to generate `targets[t+1...]`
given `targets[...t]`, conditioned on the input sequence.
- In inference mode, when we want to decode unknown input sequences, we:
- Encode the input sequence into state vectors
- Start with a target sequence of size 1
(just the start-of-sequence character)
- Feed the state vectors and 1-char target sequence
to the decoder to produce predictions for the next character
- Sample the next character using these predictions
(we simply use argmax).
- Append the sampled character to the target sequence
- Repeat until we generate the end-of-sequence character or we
hit the character limit.
# Data download
English to French sentence pairs.
http://www.manythings.org/anki/fra-eng.zip
Lots of neat sentence pairs datasets can be found at:
http://www.manythings.org/anki/
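For example, the archive can be fetched and unpacked with (assuming wget and
unzip are available; the extracted fra.txt matches the default data_path used
below):
    wget http://www.manythings.org/anki/fra-eng.zip
    unzip fra-eng.zip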
# References
- Sequence to Sequence Learning with Neural Networks
https://arxiv.org/abs/1409.3215
- Learning Phrase Representations using
RNN Encoder-Decoder for Statistical Machine Translation
https://arxiv.org/abs/1406.1078
'''
from __future__ import print_function
from keras.models import Model
from keras.layers import Input, LSTM, Dense
import numpy as np
batch_size = 64 # Batch size for training.
epochs = 200 # Number of epochs to train for.
latent_dim = 256 # Latent dimensionality of the encoding space.
num_samples = 10000 # Number of samples to train on.
# Path to the data txt file on disk.
data_path = 'fra.txt'
model_save_file_name='s2s_translate.h5'
# Vectorize the data.
input_texts = []
target_texts = []
input_characters = set()
target_characters = set()
with open(data_path, 'r', encoding='utf-8') as f:
lines = f.read().split('\n')
for line in lines[: min(num_samples, len(lines) - 1)]:
input_text, target_text = line.split('\t')
# We use "tab" as the "start sequence" character
# for the targets, and "\n" as "end sequence" character.
target_text = '\t' + target_text + '\n'
input_texts.append(input_text)
target_texts.append(target_text)
for char in input_text:
if char not in input_characters:
input_characters.add(char)
for char in target_text:
if char not in target_characters:
target_characters.add(char)
input_characters = sorted(list(input_characters))
target_characters = sorted(list(target_characters))
num_encoder_tokens = len(input_characters)
num_decoder_tokens = len(target_characters)
max_encoder_seq_length = max([len(txt) for txt in input_texts])
max_decoder_seq_length = max([len(txt) for txt in target_texts])
print('Number of samples:', len(input_texts))
print('Number of unique input tokens:', num_encoder_tokens)
print('Number of unique output tokens:', num_decoder_tokens)
print('Max sequence length for inputs:', max_encoder_seq_length)
print('Max sequence length for outputs:', max_decoder_seq_length)
input_token_index = dict(
[(char, i) for i, char in enumerate(input_characters)])
target_token_index = dict(
[(char, i) for i, char in enumerate(target_characters)])
encoder_input_data = np.zeros(
(len(input_texts), max_encoder_seq_length, num_encoder_tokens),
dtype='float32')
decoder_input_data = np.zeros(
(len(input_texts), max_decoder_seq_length, num_decoder_tokens),
dtype='float32')
decoder_target_data = np.zeros(
(len(input_texts), max_decoder_seq_length, num_decoder_tokens),
dtype='float32')
for i, (input_text, target_text) in enumerate(zip(input_texts, target_texts)):
for t, char in enumerate(input_text):
encoder_input_data[i, t, input_token_index[char]] = 1.
for t, char in enumerate(target_text):
# decoder_target_data is ahead of decoder_input_data by one timestep
decoder_input_data[i, t, target_token_index[char]] = 1.
if t > 0:
# decoder_target_data will be ahead by one timestep
# and will not include the start character.
decoder_target_data[i, t - 1, target_token_index[char]] = 1.
# Define an input sequence and process it.
encoder_inputs = Input(shape=(max_encoder_seq_length, num_encoder_tokens))
encoder = LSTM(latent_dim, return_state=True)
encoder_outputs, state_h, state_c = encoder(encoder_inputs)
# We discard `encoder_outputs` and only keep the states.
encoder_states = [state_h, state_c]
# Set up the decoder, using `encoder_states` as initial state.
decoder_inputs = Input(shape=(None, num_decoder_tokens))
# We set up our decoder to return full output sequences,
# and to return internal states as well. We don't use the
# return states in the training model, but we will use them in inference.
decoder_lstm = LSTM(latent_dim, return_sequences=True, return_state=True)
decoder_outputs, _, _ = decoder_lstm(decoder_inputs,
initial_state=encoder_states)
decoder_dense = Dense(num_decoder_tokens, activation='softmax')
decoder_outputs = decoder_dense(decoder_outputs)
# Define the model that will turn
# `encoder_input_data` & `decoder_input_data` into `decoder_target_data`
model = Model([encoder_inputs, decoder_inputs], decoder_outputs)
# Run training
model.compile(optimizer='rmsprop', loss='categorical_crossentropy')
model.fit([encoder_input_data, decoder_input_data], decoder_target_data,
batch_size=batch_size,
epochs=epochs,
validation_split=0.2)
# Save model
model.save(model_save_file_name)
# Next: inference mode (sampling).
# Here's the drill:
# 1) encode input and retrieve initial decoder state
# 2) run one step of decoder with this initial state
# and a "start of sequence" token as target.
# Output will be the next target token
# 3) Repeat with the current target token and current states
# Define sampling models
encoder_model = Model(encoder_inputs, encoder_states)
decoder_state_input_h = Input(shape=(latent_dim,))
decoder_state_input_c = Input(shape=(latent_dim,))
decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]
decoder_outputs, state_h, state_c = decoder_lstm(
decoder_inputs, initial_state=decoder_states_inputs)
decoder_states = [state_h, state_c]
decoder_outputs = decoder_dense(decoder_outputs)
decoder_model = Model(
[decoder_inputs] + decoder_states_inputs,
[decoder_outputs] + decoder_states)
# Reverse-lookup token index to decode sequences back to
# something readable.
reverse_input_char_index = dict(
(i, char) for char, i in input_token_index.items())
reverse_target_char_index = dict(
(i, char) for char, i in target_token_index.items())
def decode_sequence(input_seq):
# Encode the input as state vectors.
states_value = encoder_model.predict(input_seq)
# Generate empty target sequence of length 1.
target_seq = np.zeros((1, 1, num_decoder_tokens))
# Populate the first character of target sequence with the start character.
target_seq[0, 0, target_token_index['\t']] = 1.
# Sampling loop for a batch of sequences
# (to simplify, here we assume a batch of size 1).
stop_condition = False
decoded_sentence = ''
while not stop_condition:
output_tokens, h, c = decoder_model.predict(
[target_seq] + states_value)
# Sample a token
sampled_token_index = np.argmax(output_tokens[0, -1, :])
sampled_char = reverse_target_char_index[sampled_token_index]
decoded_sentence += sampled_char
# Exit condition: either hit max length
# or find stop character.
if (sampled_char == '\n' or
len(decoded_sentence) > max_decoder_seq_length):
stop_condition = True
# Update the target sequence (of length 1).
target_seq = np.zeros((1, 1, num_decoder_tokens))
target_seq[0, 0, sampled_token_index] = 1.
# Update states
states_value = [h, c]
return decoded_sentence
for seq_index in range(100):
# Take one sequence (part of the training set)
# for trying out decoding.
input_seq = encoder_input_data[seq_index: seq_index + 1]
decoded_sentence = decode_sequence(input_seq)
print("")
print('Input sentence:', input_texts[seq_index])
print('Decoded sentence:', decoded_sentence)
| apache-2.0 | -8,431,868,718,273,166,000 | 38.722944 | 79 | 0.705209 | false |
dims/oslo.messaging | oslo_messaging/tests/notify/test_dispatcher.py | 6 | 10093 |
# Copyright 2013 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from oslo_utils import timeutils
import testscenarios
import oslo_messaging
from oslo_messaging.notify import dispatcher as notify_dispatcher
from oslo_messaging.tests import utils as test_utils
from six.moves import mock
load_tests = testscenarios.load_tests_apply_scenarios
notification_msg = dict(
publisher_id="publisher_id",
event_type="compute.start",
payload={"info": "fuu"},
message_id="uuid",
timestamp=str(timeutils.utcnow())
)
class TestDispatcher(test_utils.BaseTestCase):
scenarios = [
('no_endpoints',
dict(endpoints=[],
endpoints_expect_calls=[],
priority='info',
ex=None,
return_value=oslo_messaging.NotificationResult.HANDLED)),
('one_endpoints',
dict(endpoints=[['warn']],
endpoints_expect_calls=['warn'],
priority='warn',
ex=None,
return_value=oslo_messaging.NotificationResult.HANDLED)),
('two_endpoints_only_one_match',
dict(endpoints=[['warn'], ['info']],
endpoints_expect_calls=[None, 'info'],
priority='info',
ex=None,
return_value=oslo_messaging.NotificationResult.HANDLED)),
('two_endpoints_both_match',
dict(endpoints=[['debug', 'info'], ['info', 'debug']],
endpoints_expect_calls=['debug', 'debug'],
priority='debug',
ex=None,
return_value=oslo_messaging.NotificationResult.HANDLED)),
('no_return_value',
dict(endpoints=[['warn']],
endpoints_expect_calls=['warn'],
priority='warn',
ex=None, return_value=None)),
('requeue',
dict(endpoints=[['debug', 'warn']],
endpoints_expect_calls=['debug'],
priority='debug', msg=notification_msg,
ex=None,
return_value=oslo_messaging.NotificationResult.REQUEUE)),
('exception',
dict(endpoints=[['debug', 'warn']],
endpoints_expect_calls=['debug'],
priority='debug', msg=notification_msg,
ex=Exception,
return_value=oslo_messaging.NotificationResult.HANDLED)),
]
def test_dispatcher(self):
endpoints = []
for endpoint_methods in self.endpoints:
e = mock.Mock(spec=endpoint_methods)
endpoints.append(e)
for m in endpoint_methods:
method = getattr(e, m)
if self.ex:
method.side_effect = self.ex()
else:
method.return_value = self.return_value
msg = notification_msg.copy()
msg['priority'] = self.priority
targets = [oslo_messaging.Target(topic='notifications')]
dispatcher = notify_dispatcher.NotificationDispatcher(
targets, endpoints, None, allow_requeue=True, pool=None)
# check it listen on wanted topics
self.assertEqual(sorted(set((targets[0], prio)
for prio in itertools.chain.from_iterable(
self.endpoints))),
sorted(dispatcher._targets_priorities))
incoming = mock.Mock(ctxt={}, message=msg)
callback = dispatcher(incoming)
callback.run()
callback.done()
# check endpoint callbacks are called or not
for i, endpoint_methods in enumerate(self.endpoints):
for m in endpoint_methods:
if m == self.endpoints_expect_calls[i]:
method = getattr(endpoints[i], m)
method.assert_called_once_with(
{},
msg['publisher_id'],
msg['event_type'],
msg['payload'], {
'timestamp': mock.ANY,
'message_id': mock.ANY
})
else:
self.assertEqual(0, endpoints[i].call_count)
if self.ex:
self.assertEqual(1, incoming.acknowledge.call_count)
self.assertEqual(0, incoming.requeue.call_count)
elif self.return_value == oslo_messaging.NotificationResult.HANDLED \
or self.return_value is None:
self.assertEqual(1, incoming.acknowledge.call_count)
self.assertEqual(0, incoming.requeue.call_count)
elif self.return_value == oslo_messaging.NotificationResult.REQUEUE:
self.assertEqual(0, incoming.acknowledge.call_count)
self.assertEqual(1, incoming.requeue.call_count)
@mock.patch('oslo_messaging.notify.dispatcher.LOG')
def test_dispatcher_unknown_prio(self, mylog):
msg = notification_msg.copy()
msg['priority'] = 'what???'
dispatcher = notify_dispatcher.NotificationDispatcher(
[mock.Mock()], [mock.Mock()], None, allow_requeue=True, pool=None)
callback = dispatcher(mock.Mock(ctxt={}, message=msg))
callback.run()
callback.done()
mylog.warning.assert_called_once_with('Unknown priority "%s"',
'what???')
class TestDispatcherFilter(test_utils.BaseTestCase):
scenarios = [
('publisher_id_match',
dict(filter_rule=dict(publisher_id='^compute.*'),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={},
match=True)),
('publisher_id_nomatch',
dict(filter_rule=dict(publisher_id='^compute.*'),
publisher_id='network01.manager',
event_type='instance.create.start',
context={},
match=False)),
('event_type_match',
dict(filter_rule=dict(event_type='^instance\.create'),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={},
match=True)),
('event_type_nomatch',
dict(filter_rule=dict(event_type='^instance\.delete'),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={},
match=False)),
('context_match',
dict(filter_rule=dict(context={'user': '^adm'}),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={'user': 'admin'},
match=True)),
('context_key_missing',
dict(filter_rule=dict(context={'user': '^adm'}),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={'project': 'admin'},
metadata={},
match=False)),
('metadata_match',
dict(filter_rule=dict(metadata={'message_id': '^99'}),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={},
match=True)),
('metadata_key_missing',
dict(filter_rule=dict(metadata={'user': '^adm'}),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={},
match=False)),
('payload_match',
dict(filter_rule=dict(payload={'state': '^active$'}),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={},
match=True)),
('payload_no_match',
dict(filter_rule=dict(payload={'state': '^deleted$'}),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={},
match=False)),
('payload_key_missing',
dict(filter_rule=dict(payload={'user': '^adm'}),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={},
match=False)),
('mix_match',
dict(filter_rule=dict(event_type='^instance\.create',
publisher_id='^compute',
context={'user': '^adm'}),
publisher_id='compute01.manager',
event_type='instance.create.start',
context={'user': 'admin'},
match=True)),
]
def test_filters(self):
notification_filter = oslo_messaging.NotificationFilter(
**self.filter_rule)
endpoint = mock.Mock(spec=['info'], filter_rule=notification_filter)
targets = [oslo_messaging.Target(topic='notifications')]
dispatcher = notify_dispatcher.NotificationDispatcher(
targets, [endpoint], serializer=None, allow_requeue=True)
message = {'payload': {'state': 'active'},
'priority': 'info',
'publisher_id': self.publisher_id,
'event_type': self.event_type,
'timestamp': '2014-03-03 18:21:04.369234',
'message_id': '99863dda-97f0-443a-a0c1-6ed317b7fd45'}
incoming = mock.Mock(ctxt=self.context, message=message)
callback = dispatcher(incoming)
callback.run()
callback.done()
if self.match:
self.assertEqual(1, endpoint.info.call_count)
else:
self.assertEqual(0, endpoint.info.call_count)
| apache-2.0 | 5,007,948,928,413,643,000 | 38.425781 | 78 | 0.551174 | false |
digistam/recon-ng | modules/recon/domains-hosts/bing_domain_api.py | 1 | 2500 | import module
# unique to module
from urlparse import urlparse
import re
class Module(module.Module):
def __init__(self, params):
module.Module.__init__(self, params, query='SELECT DISTINCT domain FROM domains WHERE domain IS NOT NULL ORDER BY domain')
self.register_option('limit', 0, True, 'limit total number of api requests (0 = unlimited)')
self.info = {
'Name': 'Bing API Hostname Enumerator',
'Author': 'Marcus Watson (@BranMacMuffin)',
'Description': 'Leverages the Bing API and "domain:" advanced search operator to harvest hosts. Updates the \'hosts\' table with the results.'
}
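# Illustrative note (not part of the original module): module_run() below
# composes Bing queries of the form
# 'domain:example.com -domain:www.example.com -domain:mail.example.com'
# excluding each already-found host so later API pages surface new hostnames;
# example.com and its subdomains are placeholder values.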
def module_run(self, domains):
limit = self.options['limit']
requests = 0
cnt = 0
new = 0
for domain in domains:
self.heading(domain, level=0)
hosts = []
results = []
pages = 1
base_query = '\'domain:%s' % (domain)
while not limit or requests < limit:
query = base_query
# build query string based on api limitations
for host in hosts:
omit_domain = ' -domain:%s' % (host)
if len(query) + len(omit_domain) < 1425:
query += omit_domain
else:
break
query += '\''
# make api requests
if limit and requests + pages > limit:
pages = limit - requests
last_len = len(results)
results = self.search_bing_api(query, pages)
requests += pages
# iterate through results and add new hosts; track whether this query
# round produced anything with a separate flag so the numeric 'new'
# counter passed to summarize() is not clobbered
found_new = False
for result in results:
host = urlparse(result['Url']).netloc
if not host in hosts and host != domain:
hosts.append(host)
self.output(host)
new += self.add_hosts(host)
found_new = True
if not found_new and last_len == len(results):
break
elif not found_new and last_len != len(results):
pages += 1
self.verbose('No new hosts found for the current query. Increasing depth to \'%d\' pages.' % (pages))
cnt += len(hosts)
self.summarize(new, cnt)
| gpl-3.0 | 3,351,566,759,329,915,400 | 41.372881 | 163 | 0.4876 | false |
irinabov/debian-qpid-cpp-1.35.0 | management/python/lib/qmf/console.py | 2 | 137167 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
""" Console API for Qpid Management Framework """
from __future__ import print_function
import os
import platform
import qpid
import struct
import socket
import re
import sys
from qpid.datatypes import UUID
from qpid.datatypes import timestamp
from qpid.datatypes import datetime
from qpid.exceptions import Closed
from qpid.session import SessionDetached
from qpid.connection import Connection, ConnectionFailed, Timeout
from qpid.datatypes import Message, RangedSet, UUID
from qpid.util import connect, ssl, URL
from qpid.codec010 import StringCodec as Codec
from threading import Lock, Condition, Thread, Semaphore
from Queue import Queue, Empty
from time import time, strftime, gmtime, sleep
from cStringIO import StringIO
#import qpid.log
#qpid.log.enable(name="qpid.io.cmd", level=qpid.log.DEBUG)
#===================================================================================================
# CONSOLE
#===================================================================================================
class Console:
""" To access the asynchronous operations, a class must be derived from
Console with overrides of any combination of the available methods. """
def brokerConnected(self, broker):
""" Invoked when a connection is established to a broker """
pass
def brokerConnectionFailed(self, broker):
""" Invoked when a connection to a broker fails """
pass
def brokerDisconnected(self, broker):
""" Invoked when the connection to a broker is lost """
pass
def newPackage(self, name):
""" Invoked when a QMF package is discovered. """
pass
def newClass(self, kind, classKey):
""" Invoked when a new class is discovered. Session.getSchema can be
used to obtain details about the class."""
pass
def newAgent(self, agent):
""" Invoked when a QMF agent is discovered. """
pass
def delAgent(self, agent):
""" Invoked when a QMF agent disconects. """
pass
def objectProps(self, broker, record):
""" Invoked when an object is updated. """
pass
def objectStats(self, broker, record):
""" Invoked when an object is updated. """
pass
def event(self, broker, event):
""" Invoked when an event is raised. """
pass
def heartbeat(self, agent, timestamp):
""" Invoked when an agent heartbeat is received. """
pass
def brokerInfo(self, broker):
""" Invoked when the connection sequence reaches the point where broker information is available. """
pass
def methodResponse(self, broker, seq, response):
""" Invoked when a method response from an asynchronous method call is received. """
pass
#===================================================================================================
# BrokerURL
#===================================================================================================
class BrokerURL(URL):
def __init__(self, *args, **kwargs):
URL.__init__(self, *args, **kwargs)
if self.port is None:
if self.scheme == URL.AMQPS:
self.port = 5671
else:
self.port = 5672
self.authName = None
self.authPass = None
if self.user:
self.authName = str(self.user)
if self.password:
self.authPass = str(self.password)
def name(self):
return str(self)
def match(self, host, port):
return socket.getaddrinfo(self.host, self.port)[0][4] == socket.getaddrinfo(host, port)[0][4]
#===================================================================================================
# Object
#===================================================================================================
class Object(object):
"""
This class defines a 'proxy' object representing a real managed object on an agent.
Actions taken on this proxy are remotely affected on the real managed object.
"""
def __init__(self, agent, schema, codec=None, prop=None, stat=None, v2Map=None, agentName=None, kwargs={}):
self._agent = agent
self._session = None
self._broker = None
if agent:
self._session = agent.session
self._broker = agent.broker
self._schema = schema
self._properties = []
self._statistics = []
self._currentTime = None
self._createTime = None
self._deleteTime = 0
self._objectId = None
if v2Map:
self.v2Init(v2Map, agentName)
return
if self._agent:
self._currentTime = codec.read_uint64()
self._createTime = codec.read_uint64()
self._deleteTime = codec.read_uint64()
self._objectId = ObjectId(codec)
if codec:
if prop:
notPresent = self._parsePresenceMasks(codec, schema)
for property in schema.getProperties():
if property.name in notPresent:
self._properties.append((property, None))
else:
self._properties.append((property, self._session._decodeValue(codec, property.type, self._broker)))
if stat:
for statistic in schema.getStatistics():
self._statistics.append((statistic, self._session._decodeValue(codec, statistic.type, self._broker)))
else:
for property in schema.getProperties():
if property.optional:
self._properties.append((property, None))
else:
self._properties.append((property, self._session._defaultValue(property, self._broker, kwargs)))
for statistic in schema.getStatistics():
self._statistics.append((statistic, self._session._defaultValue(statistic, self._broker, kwargs)))
def v2Init(self, omap, agentName):
if omap.__class__ != dict:
raise Exception("QMFv2 object data must be a map/dict")
if '_values' not in omap:
raise Exception("QMFv2 object must have '_values' element")
values = omap['_values']
for prop in self._schema.getProperties():
if prop.name in values:
if prop.type == 10: # Reference
self._properties.append((prop, ObjectId(values[prop.name], agentName=agentName)))
else:
self._properties.append((prop, values[prop.name]))
for stat in self._schema.getStatistics():
if stat.name in values:
self._statistics.append((stat, values[stat.name]))
if '_subtypes' in omap:
self._subtypes = omap['_subtypes']
if '_object_id' in omap:
self._objectId = ObjectId(omap['_object_id'], agentName=agentName)
else:
self._objectId = None
self._currentTime = omap.get("_update_ts", 0)
self._createTime = omap.get("_create_ts", 0)
self._deleteTime = omap.get("_delete_ts", 0)
def getAgent(self):
""" Return the agent from which this object was sent """
return self._agent
def getBroker(self):
""" Return the broker from which this object was sent """
return self._broker
def getV2RoutingKey(self):
""" Get the QMFv2 routing key to address this object """
return self._agent.getV2RoutingKey()
def getObjectId(self):
""" Return the object identifier for this object """
return self._objectId
def getClassKey(self):
""" Return the class-key that references the schema describing this object. """
return self._schema.getKey()
def getSchema(self):
""" Return the schema that describes this object. """
return self._schema
def getMethods(self):
""" Return a list of methods available for this object. """
return self._schema.getMethods()
def getTimestamps(self):
""" Return the current, creation, and deletion times for this object. """
return self._currentTime, self._createTime, self._deleteTime
def isDeleted(self):
""" Return True iff this object has been deleted. """
return self._deleteTime != 0
def isManaged(self):
""" Return True iff this object is a proxy for a managed object on an agent. """
return self._objectId and self._agent
def getIndex(self):
""" Return a string describing this object's primary key. """
if self._objectId.isV2:
return self._objectId.getObject()
result = u""
for prop, value in self._properties:
if prop.index:
if result != u"":
result += u":"
try:
valstr = unicode(self._session._displayValue(value, prop.type))
except Exception:
valstr = u"<undecodable>"
result += valstr
return result
def getProperties(self):
""" Return a list of object properties """
return self._properties
def getStatistics(self):
""" Return a list of object statistics """
return self._statistics
def mergeUpdate(self, newer):
""" Replace properties and/or statistics with a newly received update """
if not self.isManaged():
raise Exception("Object is not managed")
if self._objectId != newer._objectId:
raise Exception("Objects with different object-ids")
if len(newer.getProperties()) > 0:
self._properties = newer.getProperties()
if len(newer.getStatistics()) > 0:
self._statistics = newer.getStatistics()
self._currentTime = newer._currentTime
self._deleteTime = newer._deleteTime
def update(self):
""" Contact the agent and retrieve the lastest property and statistic values for this object. """
if not self.isManaged():
raise Exception("Object is not managed")
obj = self._agent.getObjects(_objectId=self._objectId)
if obj:
self.mergeUpdate(obj[0])
else:
raise Exception("Underlying object no longer exists")
def __repr__(self):
if self.isManaged():
id = self.getObjectId().__repr__()
else:
id = "unmanaged"
key = self.getClassKey()
return key.getPackageName() + ":" + key.getClassName() +\
"[" + id + "] " + self.getIndex().encode("utf8")
def __getattr__(self, name):
for method in self._schema.getMethods():
if name == method.name:
return lambda *args, **kwargs : self._invoke(name, args, kwargs)
for prop, value in self._properties:
if name == prop.name:
return value
if name == "_" + prop.name + "_" and prop.type == 10: # Dereference references
deref = self._agent.getObjects(_objectId=value)
if len(deref) != 1:
return None
else:
return deref[0]
for stat, value in self._statistics:
if name == stat.name:
return value
#
# Check to see if the name is in the schema. If so, return None (i.e. this is a not-present attribute)
#
for prop in self._schema.getProperties():
if name == prop.name:
return None
for stat in self._schema.getStatistics():
if name == stat.name:
return None
raise Exception("Type Object has no attribute '%s'" % name)
def __setattr__(self, name, value):
if name[0] == '_':
super.__setattr__(self, name, value)
return
for prop, unusedValue in self._properties:
if name == prop.name:
newprop = (prop, value)
newlist = []
for old, val in self._properties:
if name == old.name:
newlist.append(newprop)
else:
newlist.append((old, val))
self._properties = newlist
return
super.__setattr__(self, name, value)
def _parseDefault(self, typ, val):
try:
if typ in (2, 3, 4): # 16, 32, 64 bit numbers
val = int(val, 0)
elif typ == 11: # bool
val = val.lower() in ("t", "true", "1", "yes", "y")
elif typ == 15: # map
val = eval(val)
except:
pass
return val
def _handleDefaultArguments(self, method, args, kwargs):
count = len([x for x in method.arguments if x.dir.find("I") != -1])
for kwarg in kwargs.keys():
if not [x for x in method.arguments if x.dir.find("I") != -1 and \
x.name == kwarg]:
del kwargs[kwarg]
# If there were not enough args supplied, add any defaulted arguments
# from the schema (starting at the end) until we either get enough
# arguments or run out of defaults
while count > len(args) + len(kwargs):
for arg in reversed(method.arguments):
if arg.dir.find("I") != -1 and getattr(arg, "default") is not None and \
arg.name not in kwargs:
# add missing defaulted value to the kwargs dict
kwargs[arg.name] = self._parseDefault(arg.type, arg.default)
break
else:
# no suitable defaulted args found, end the while loop
break
return count
def _sendMethodRequest(self, name, args, kwargs, synchronous=False, timeWait=None):
for method in self._schema.getMethods():
if name == method.name:
aIdx = 0
sendCodec = Codec()
seq = self._session.seqMgr._reserve((method, synchronous))
count = self._handleDefaultArguments(method, args, kwargs)
if count != len(args) + len(kwargs):
raise Exception("Incorrect number of arguments: expected %d, got %d" % (count, len(args) + len(kwargs)))
if self._agent.isV2:
#
# Compose and send a QMFv2 method request
#
call = {}
call['_object_id'] = self._objectId.asMap()
call['_method_name'] = name
argMap = {}
for arg in method.arguments:
if arg.dir.find("I") != -1:
# If any kwargs match this schema arg, insert them in the proper place
if arg.name in kwargs:
argMap[arg.name] = kwargs[arg.name]
elif aIdx < len(args):
argMap[arg.name] = args[aIdx]
aIdx += 1
call['_arguments'] = argMap
dp = self._broker.amqpSession.delivery_properties()
dp.routing_key = self.getV2RoutingKey()
mp = self._broker.amqpSession.message_properties()
mp.content_type = "amqp/map"
if self._broker.saslUser:
mp.user_id = self._broker.saslUser
mp.correlation_id = str(seq)
mp.app_id = "qmf2"
mp.reply_to = self._broker.amqpSession.reply_to("qmf.default.direct", self._broker.v2_direct_queue)
mp.application_headers = {'qmf.opcode':'_method_request'}
sendCodec.write_map(call)
smsg = Message(dp, mp, sendCodec.encoded)
exchange = "qmf.default.direct"
else:
#
# Associate this sequence with the agent hosting the object so we can correctly
# route the method-response
#
agent = self._broker.getAgent(self._broker.getBrokerBank(), self._objectId.getAgentBank())
self._broker._setSequence(seq, agent)
#
# Compose and send a QMFv1 method request
#
self._broker._setHeader(sendCodec, 'M', seq)
self._objectId.encode(sendCodec)
self._schema.getKey().encode(sendCodec)
sendCodec.write_str8(name)
for arg in method.arguments:
if arg.dir.find("I") != -1:
self._session._encodeValue(sendCodec, args[aIdx], arg.type)
aIdx += 1
smsg = self._broker._message(sendCodec.encoded, "agent.%d.%s" %
(self._objectId.getBrokerBank(), self._objectId.getAgentBank()))
exchange = "qpid.management"
if synchronous:
try:
self._broker.cv.acquire()
self._broker.syncInFlight = True
finally:
self._broker.cv.release()
self._broker._send(smsg, exchange)
return seq
return None
def _invoke(self, name, args, kwargs):
if not self.isManaged():
raise Exception("Object is not managed")
if "_timeout" in kwargs:
timeout = kwargs["_timeout"]
else:
timeout = self._broker.SYNC_TIME
if "_async" in kwargs and kwargs["_async"]:
sync = False
if "_timeout" not in kwargs:
timeout = None
else:
sync = True
# Remove special "meta" kwargs before handing to _sendMethodRequest() to process
if "_timeout" in kwargs: del kwargs["_timeout"]
if "_async" in kwargs: del kwargs["_async"]
seq = self._sendMethodRequest(name, args, kwargs, sync, timeout)
if seq:
if not sync:
return seq
self._broker.cv.acquire()
try:
starttime = time()
while self._broker.syncInFlight and self._broker.error == None:
self._broker.cv.wait(timeout)
if time() - starttime > timeout:
raise RuntimeError("Timed out waiting for method to respond")
finally:
self._session.seqMgr._release(seq)
self._broker.cv.release()
if self._broker.error != None:
errorText = self._broker.error
self._broker.error = None
raise Exception(errorText)
return self._broker.syncResult
raise Exception("Invalid Method (software defect) [%s]" % name)
def _encodeUnmanaged(self, codec):
codec.write_uint8(20)
codec.write_str8(self._schema.getKey().getPackageName())
codec.write_str8(self._schema.getKey().getClassName())
codec.write_bin128(self._schema.getKey().getHash())
# emit presence masks for optional properties
mask = 0
bit = 0
for prop, value in self._properties:
if prop.optional:
if bit == 0:
bit = 1
if value:
mask |= bit
bit = bit << 1
if bit == 256:
bit = 0
codec.write_uint8(mask)
mask = 0
if bit != 0:
codec.write_uint8(mask)
# encode properties
for prop, value in self._properties:
if value != None:
self._session._encodeValue(codec, value, prop.type)
# encode statistics
for stat, value in self._statistics:
self._session._encodeValue(codec, value, stat.type)
def _parsePresenceMasks(self, codec, schema):
excludeList = []
bit = 0
for property in schema.getProperties():
if property.optional:
if bit == 0:
mask = codec.read_uint8()
bit = 1
if (mask & bit) == 0:
excludeList.append(property.name)
bit *= 2
if bit == 256:
bit = 0
return excludeList
#===================================================================================================
# Session
#===================================================================================================
class Session:
"""
An instance of the Session class represents a console session running
against one or more QMF brokers. A single instance of Session is needed
to interact with the management framework as a console.
"""
_CONTEXT_SYNC = 1
_CONTEXT_STARTUP = 2
_CONTEXT_MULTIGET = 3
DEFAULT_GET_WAIT_TIME = 60
ENCODINGS = {
str: 7,
timestamp: 8,
datetime: 8,
int: 9,
long: 9,
float: 13,
UUID: 14,
Object: 20,
list: 21
}
def __init__(self, console=None, rcvObjects=True, rcvEvents=True, rcvHeartbeats=True,
manageConnections=False, userBindings=False):
"""
Initialize a session. If the console argument is provided, the
more advanced asynchronous features are available. If console is
defaulted, the session will operate in a simpler, synchronous manner.
The rcvObjects, rcvEvents, and rcvHeartbeats arguments are meaningful only if 'console'
is provided. They control whether object updates, events, and agent-heartbeats are
subscribed to. If the console is not interested in receiving one or more of the above,
setting the argument to False will reduce the bandwidth used by the API.
If manageConnections is set to True, the Session object will manage connections to
the brokers. This means that if a broker is unreachable, it will retry until a connection
can be established. If a connection is lost, the Session will attempt to reconnect.
If manageConnections is set to False, the user is responsible for handling failures. In
this case, an unreachable broker will cause addBroker to raise an exception.
If userBindings is set to False (the default) and rcvObjects is True, the console will
receive data for all object classes. If userBindings is set to True, the user must select
which classes the console shall receive by invoking the bindPackage or bindClass methods.
This allows the console to be configured to receive only information that is relevant to
a particular application. If rcvObjects is False, userBindings has no meaning.
"""
self.console = console
self.brokers = []
self.schemaCache = SchemaCache()
self.seqMgr = SequenceManager()
self.cv = Condition()
self.syncSequenceList = []
self.getResult = []
self.getSelect = []
self.error = None
self.rcvObjects = rcvObjects
self.rcvEvents = rcvEvents
self.rcvHeartbeats = rcvHeartbeats
self.userBindings = userBindings
if self.console == None:
self.rcvObjects = False
self.rcvEvents = False
self.rcvHeartbeats = False
self.v1BindingKeyList, self.v2BindingKeyList = self._bindingKeys()
self.manageConnections = manageConnections
# callback filters:
self.agent_filter = [] # (vendor, product, instance) || v1-agent-label-str
self.class_filter = [] # (pkg, class)
self.event_filter = [] # (pkg, event)
self.agent_heartbeat_min = 10 # minimum agent heartbeat timeout interval
self.agent_heartbeat_miss = 3 # # of heartbeats to miss before deleting agent
if self.userBindings and not self.console:
raise Exception("userBindings can't be set unless a console is provided.")
def close(self):
""" Releases all resources held by the session. Must be called by the
application when it is done with the Session object.
"""
self.cv.acquire()
try:
while len(self.brokers):
b = self.brokers.pop()
try:
b._shutdown()
except:
pass
finally:
self.cv.release()
def _getBrokerForAgentAddr(self, agent_addr):
try:
self.cv.acquire()
key = (1, agent_addr)
for b in self.brokers:
if key in b.agents:
return b
finally:
self.cv.release()
return None
def _getAgentForAgentAddr(self, agent_addr):
try:
self.cv.acquire()
key = agent_addr
for b in self.brokers:
if key in b.agents:
return b.agents[key]
finally:
self.cv.release()
return None
def __repr__(self):
return "QMF Console Session Manager (brokers: %d)" % len(self.brokers)
def addBroker(self, target="localhost", timeout=None, mechanisms=None, sessTimeout=None, **connectArgs):
""" Connect to a Qpid broker. Returns an object of type Broker.
Will raise an exception if the session is not managing the connection and
the connection setup to the broker fails.
"""
if isinstance(target, BrokerURL):
url = target
else:
url = BrokerURL(target)
broker = Broker(self, url.host, url.port, mechanisms, url.authName, url.authPass,
ssl = url.scheme == URL.AMQPS, connTimeout=timeout, sessTimeout=sessTimeout, **connectArgs)
self.brokers.append(broker)
return broker
def delBroker(self, broker):
""" Disconnect from a broker, and deallocate the broker proxy object. The
'broker' argument is the object returned from the addBroker call. Errors
are ignored.
"""
broker._shutdown()
self.brokers.remove(broker)
del broker
def getPackages(self):
""" Get the list of known QMF packages """
for broker in self.brokers:
broker._waitForStable()
return self.schemaCache.getPackages()
def getClasses(self, packageName):
""" Get the list of known classes within a QMF package """
for broker in self.brokers:
broker._waitForStable()
return self.schemaCache.getClasses(packageName)
def getSchema(self, classKey):
""" Get the schema for a QMF class """
for broker in self.brokers:
broker._waitForStable()
return self.schemaCache.getSchema(classKey)
def bindPackage(self, packageName):
""" Filter object and event callbacks to only those elements of the
specified package. Also filters newPackage and newClass callbacks to the
given package. Only valid if userBindings is True.
"""
if not self.userBindings:
raise Exception("userBindings option must be set for this Session.")
if not self.rcvObjects and not self.rcvEvents:
raise Exception("Session needs to be configured to receive events or objects.")
v1keys = ["console.obj.*.*.%s.#" % packageName, "console.event.*.*.%s.#" % packageName]
v2keys = ["agent.ind.data.%s.#" % packageName.replace(".", "_"),
"agent.ind.event.%s.#" % packageName.replace(".", "_"),]
if (packageName, None) not in self.class_filter:
self.class_filter.append((packageName, None))
if (packageName, None) not in self.event_filter:
self.event_filter.append((packageName, None))
self.v1BindingKeyList.extend(v1keys)
self.v2BindingKeyList.extend(v2keys)
for broker in self.brokers:
if broker.isConnected():
for v1key in v1keys:
broker.amqpSession.exchange_bind(exchange="qpid.management", queue=broker.topicName, binding_key=v1key)
if broker.brokerSupportsV2:
for v2key in v2keys:
# data indications should arrive on the unsolicited indication queue
broker.amqpSession.exchange_bind(exchange="qmf.default.topic", queue=broker.v2_topic_queue_ui, binding_key=v2key)
def bindClass(self, pname, cname=None):
""" Filter object callbacks to only those objects of the specified package
and optional class. Will also filter newPackage/newClass callbacks to the
specified package and class. Only valid if userBindings is True and
rcvObjects is True.
"""
if not self.userBindings:
raise Exception("userBindings option must be set for this Session.")
if not self.rcvObjects:
raise Exception("Session needs to be configured with rcvObjects=True.")
if cname is not None:
v1key = "console.obj.*.*.%s.%s.#" % (pname, cname)
v2key = "agent.ind.data.%s.%s.#" % (pname.replace(".", "_"), cname.replace(".", "_"))
else:
v1key = "console.obj.*.*.%s.#" % pname
v2key = "agent.ind.data.%s.#" % pname.replace(".", "_")
self.v1BindingKeyList.append(v1key)
self.v2BindingKeyList.append(v2key)
if (pname, cname) not in self.class_filter:
self.class_filter.append((pname, cname))
for broker in self.brokers:
if broker.isConnected():
broker.amqpSession.exchange_bind(exchange="qpid.management", queue=broker.topicName, binding_key=v1key)
if broker.brokerSupportsV2:
# data indications should arrive on the unsolicited indication queue
broker.amqpSession.exchange_bind(exchange="qmf.default.topic", queue=broker.v2_topic_queue_ui, binding_key=v2key)
def bindClassKey(self, classKey):
""" Filter object callbacks to only those objects of the specified
class. Will also filter newPackage/newClass callbacks to the specified
package and class. Only valid if userBindings is True and rcvObjects is
True.
"""
pname = classKey.getPackageName()
cname = classKey.getClassName()
self.bindClass(pname, cname)
def bindEvent(self, pname, ename=None):
""" Filter event callbacks only from a particular class by package and
event name, or all events in a package if ename=None. Will also filter
newPackage/newClass callbacks to the specified package and class. Only
valid if userBindings is True and rcvEvents is True.
"""
if not self.userBindings:
raise Exception("userBindings option must be set for this Session.")
if not self.rcvEvents:
raise Exception("Session needs to be configured with rcvEvents=True.")
if ename is not None:
v1key = "console.event.*.*.%s.%s.#" % (pname, ename)
v2key = "agent.ind.event.%s.%s.#" % (pname.replace(".", "_"), ename.replace(".", "_"))
else:
v1key = "console.event.*.*.%s.#" % pname
v2key = "agent.ind.event.%s.#" % pname.replace(".", "_")
self.v1BindingKeyList.append(v1key)
self.v2BindingKeyList.append(v2key)
if (pname, ename) not in self.event_filter:
self.event_filter.append((pname, ename))
for broker in self.brokers:
if broker.isConnected():
broker.amqpSession.exchange_bind(exchange="qpid.management", queue=broker.topicName, binding_key=v1key)
if broker.brokerSupportsV2:
# event indications should arrive on the unsolicited indication queue
broker.amqpSession.exchange_bind(exchange="qmf.default.topic", queue=broker.v2_topic_queue_ui, binding_key=v2key)
def bindEventKey(self, eventKey):
""" Filter event callbacks only from a particular class key. Will also
filter newPackage/newClass callbacks to the specified package and
class. Only valid if userBindings is True and rcvEvents is True.
"""
pname = eventKey.getPackageName()
ename = eventKey.getClassName()
self.bindEvent(pname, ename)
def bindAgent(self, vendor=None, product=None, instance=None, label=None):
""" Receive heartbeats, newAgent and delAgent callbacks only for those
agent(s) that match the passed identification criteria:
V2 agents: vendor, optionally product and instance strings
V1 agents: the label string.
Only valid if userBindings is True.
"""
if not self.userBindings:
raise Exception("Session not configured for binding specific agents.")
if vendor is None and label is None:
raise Exception("Must specify at least a vendor (V2 agents)"
" or label (V1 agents).")
if vendor: # V2 agent identification
if product is not None:
v2key = "agent.ind.heartbeat.%s.%s.#" % (vendor.replace(".", "_"), product.replace(".", "_"))
else:
v2key = "agent.ind.heartbeat.%s.#" % vendor.replace(".", "_")
self.v2BindingKeyList.append(v2key)
# allow wildcards - only add filter if a non-wildcarded component is given
if vendor == "*":
vendor = None
if product == "*":
product = None
if instance == "*":
instance = None
if vendor or product or instance:
if (vendor, product, instance) not in self.agent_filter:
self.agent_filter.append((vendor, product, instance))
for broker in self.brokers:
if broker.isConnected():
if broker.brokerSupportsV2:
# heartbeats should arrive on the heartbeat queue
broker.amqpSession.exchange_bind(exchange="qmf.default.topic",
queue=broker.v2_topic_queue_hb,
binding_key=v2key)
elif label != "*": # non-wildcard V1 agent label
# V1 format heartbeats do not have any agent identifier in the routing
# key, so we cannot filter them by bindings.
if label not in self.agent_filter:
self.agent_filter.append(label)
def getAgents(self, broker=None):
""" Get a list of currently known agents """
brokerList = []
if broker == None:
for b in self.brokers:
brokerList.append(b)
else:
brokerList.append(broker)
for b in brokerList:
b._waitForStable()
agentList = []
for b in brokerList:
for a in b.getAgents():
agentList.append(a)
return agentList
def makeObject(self, classKey, **kwargs):
""" Create a new, unmanaged object of the schema indicated by classKey """
schema = self.getSchema(classKey)
if schema == None:
raise Exception("Schema not found for classKey")
return Object(None, schema, None, True, True, kwargs)
def getObjects(self, **kwargs):
""" Get a list of objects from QMF agents.
All arguments are passed by name(keyword).
The class for queried objects may be specified in one of the following ways:
_schema = <schema> - supply a schema object returned from getSchema.
_key = <key> - supply a classKey from the list returned by getClasses.
_class = <name> - supply a class name as a string. If the class name exists
in multiple packages, a _package argument may also be supplied.
_objectId = <id> - get the object referenced by the object-id
If objects should be obtained from only one agent, use the following argument.
Otherwise, the query will go to all agents.
_agent = <agent> - supply an agent from the list returned by getAgents.
If the get query is to be restricted to one broker (as opposed to all connected brokers),
add the following argument:
_broker = <broker> - supply a broker as returned by addBroker.
The default timeout for this synchronous operation is 60 seconds. To change the timeout,
use the following argument:
_timeout = <time in seconds>
If additional arguments are supplied, they are used as property selectors. For example,
if the argument name="test" is supplied, only objects whose "name" property is "test"
will be returned in the result.
"""
if "_broker" in kwargs:
brokerList = []
brokerList.append(kwargs["_broker"])
else:
brokerList = self.brokers
for broker in brokerList:
broker._waitForStable()
if broker.isConnected():
if "_package" not in kwargs or "_class" not in kwargs or \
kwargs["_package"] != "org.apache.qpid.broker" or \
kwargs["_class"] != "agent":
self.getObjects(_package = "org.apache.qpid.broker", _class = "agent",
_agent = broker.getAgent(1,0))
agentList = []
if "_agent" in kwargs:
agent = kwargs["_agent"]
if agent.broker not in brokerList:
raise Exception("Supplied agent is not accessible through the supplied broker")
if agent.broker.isConnected():
agentList.append(agent)
else:
if "_objectId" in kwargs:
oid = kwargs["_objectId"]
for broker in brokerList:
for agent in broker.getAgents():
if agent.getBrokerBank() == oid.getBrokerBank() and agent.getAgentBank() == oid.getAgentBank():
agentList.append(agent)
else:
for broker in brokerList:
for agent in broker.getAgents():
if agent.broker.isConnected():
agentList.append(agent)
if len(agentList) == 0:
return []
#
# We now have a list of agents to query, start the queries and gather the results.
#
request = SessionGetRequest(len(agentList))
for agent in agentList:
agent.getObjects(request, **kwargs)
timeout = 60
if '_timeout' in kwargs:
timeout = kwargs['_timeout']
request.wait(timeout)
return request.result
def addEventFilter(self, **kwargs):
"""Filter unsolicited events based on package and event name.
QMF v2 also can filter on vendor, product, and severity values.
By default, a console receives unsolicited events by binding to:
qpid.management/console.event.# (v1)
qmf.default.topic/agent.ind.event.# (v2)
A V1 event filter binding uses the pattern:
qpid.management/console.event.*.*[.<package>[.<event>]].#
A V2 event filter binding uses the pattern:
qmf.default.topic/agent.ind.event.<Vendor|*>.<Product|*>.<severity|*>.<package|*>.<event|*>.#
"""
package = kwargs.get("package", "*")
event = kwargs.get("event", "*")
vendor = kwargs.get("vendor", "*")
product = kwargs.get("product", "*")
severity = kwargs.get("severity", "*")
if package == "*" and event != "*":
raise Exception("'package' parameter required if 'event' parameter"
" supplied")
# V1 key - can only filter on package (and event)
if package == "*":
key = "console.event.*.*." + str(package)
if event != "*":
key += "." + str(event)
key += ".#"
if key not in self.v1BindingKeyList:
self.v1BindingKeyList.append(key)
try:
# remove default wildcard binding
self.v1BindingKeyList.remove("console.event.#")
except:
pass
# V2 key - escape any "." in the filter strings
key = "agent.ind.event." + str(package).replace(".", "_") \
+ "." + str(event).replace(".", "_") \
+ "." + str(severity).replace(".", "_") \
+ "." + str(vendor).replace(".", "_") \
+ "." + str(product).replace(".", "_") \
+ ".#"
if key not in self.v2BindingKeyList:
self.v2BindingKeyList.append(key)
try:
# remove default wildcard binding
self.v2BindingKeyList.remove("agent.ind.event.#")
except:
pass
if package != "*":
if event != "*":
f = (package, event)
else:
f = (package, None)
if f not in self.event_filter:
self.event_filter.append(f)
def addAgentFilter(self, vendor, product=None):
""" Deprecate - use bindAgent() instead
"""
self.addHeartbeatFilter(vendor=vendor, product=product)
def addHeartbeatFilter(self, **kwargs):
""" Deprecate - use bindAgent() instead.
"""
vendor = kwargs.get("vendor")
product = kwargs.get("product")
if vendor is None:
raise Exception("vendor parameter required!")
# V1 heartbeats do not have any agent identifier - we cannot
# filter them by agent.
# build the binding key - escape "."s...
key = "agent.ind.heartbeat." + str(vendor).replace(".", "_")
if product is not None:
key += "." + str(product).replace(".", "_")
key += ".#"
if key not in self.v2BindingKeyList:
self.v2BindingKeyList.append(key)
self.agent_filter.append((vendor, product, None))
# be sure we don't ever filter the local broker
local_broker_key = "agent.ind.heartbeat." + "org.apache".replace(".", "_") \
+ "." + "qpidd".replace(".", "_") + ".#"
if local_broker_key not in self.v2BindingKeyList:
self.v2BindingKeyList.append(local_broker_key)
# remove the wildcard key if present
try:
self.v2BindingKeyList.remove("agent.ind.heartbeat.#")
except:
pass
def _bindingKeys(self):
v1KeyList = []
v2KeyList = []
v1KeyList.append("schema.#")
# note well: any binding that starts with 'agent.ind.heartbeat' will be
# bound to the heartbeat queue, otherwise it will be bound to the
# unsolicited indication queue. See _decOutstanding() for the binding.
if not self.userBindings:
if self.rcvObjects and self.rcvEvents and self.rcvHeartbeats:
v1KeyList.append("console.#")
v2KeyList.append("agent.ind.data.#")
v2KeyList.append("agent.ind.event.#")
v2KeyList.append("agent.ind.heartbeat.#")
else:
# need heartbeats for V2 newAgent()/delAgent()
v2KeyList.append("agent.ind.heartbeat.#")
if self.rcvObjects:
v1KeyList.append("console.obj.#")
v2KeyList.append("agent.ind.data.#")
else:
v1KeyList.append("console.obj.*.*.org.apache.qpid.broker.agent")
if self.rcvEvents:
v1KeyList.append("console.event.#")
v2KeyList.append("agent.ind.event.#")
else:
v1KeyList.append("console.event.*.*.org.apache.qpid.broker.agent")
if self.rcvHeartbeats:
v1KeyList.append("console.heartbeat.#")
else:
# mandatory bindings
v1KeyList.append("console.obj.*.*.org.apache.qpid.broker.agent")
v1KeyList.append("console.event.*.*.org.apache.qpid.broker.agent")
v1KeyList.append("console.heartbeat.#") # no way to turn this on later
v2KeyList.append("agent.ind.heartbeat.org_apache.qpidd.#")
return (v1KeyList, v2KeyList)
def _handleBrokerConnect(self, broker):
if self.console:
for agent in broker.getAgents():
self._newAgentCallback(agent)
self.console.brokerConnected(broker)
def _handleBrokerDisconnect(self, broker):
if self.console:
for agent in broker.getAgents():
self._delAgentCallback(agent)
self.console.brokerDisconnected(broker)
def _handleBrokerResp(self, broker, codec, seq):
broker.brokerId = codec.read_uuid()
if self.console != None:
self.console.brokerInfo(broker)
# Send a package request
# (effectively inc and dec outstanding by not doing anything)
sendCodec = Codec()
seq = self.seqMgr._reserve(self._CONTEXT_STARTUP)
broker._setHeader(sendCodec, 'P', seq)
smsg = broker._message(sendCodec.encoded)
broker._send(smsg)
def _handlePackageInd(self, broker, codec, seq):
pname = str(codec.read_str8())
notify = self.schemaCache.declarePackage(pname)
if notify and self.console != None:
self._newPackageCallback(pname)
# Send a class request
broker._incOutstanding()
sendCodec = Codec()
seq = self.seqMgr._reserve(self._CONTEXT_STARTUP)
broker._setHeader(sendCodec, 'Q', seq)
sendCodec.write_str8(pname)
smsg = broker._message(sendCodec.encoded)
broker._send(smsg)
def _handleCommandComplete(self, broker, codec, seq, agent):
code = codec.read_uint32()
text = codec.read_str8()
context = self.seqMgr._release(seq)
if context == self._CONTEXT_STARTUP:
broker._decOutstanding()
elif context == self._CONTEXT_SYNC and seq == broker.syncSequence:
try:
broker.cv.acquire()
broker.syncInFlight = False
broker.cv.notify()
finally:
broker.cv.release()
elif context == self._CONTEXT_MULTIGET and seq in self.syncSequenceList:
try:
self.cv.acquire()
self.syncSequenceList.remove(seq)
if len(self.syncSequenceList) == 0:
self.cv.notify()
finally:
self.cv.release()
if agent:
agent._handleV1Completion(seq, code, text)
def _handleClassInd(self, broker, codec, seq):
kind = codec.read_uint8()
classKey = ClassKey(codec)
classKey._setType(kind)
schema = self.schemaCache.getSchema(classKey)
if not schema:
# Send a schema request for the unknown class
broker._incOutstanding()
sendCodec = Codec()
seq = self.seqMgr._reserve(self._CONTEXT_STARTUP)
broker._setHeader(sendCodec, 'S', seq)
classKey.encode(sendCodec)
smsg = broker._message(sendCodec.encoded)
broker._send(smsg)
def _handleHeartbeatInd(self, broker, codec, seq, msg):
brokerBank = 1
agentBank = 0
dp = msg.get("delivery_properties")
if dp:
key = dp["routing_key"]
if key:
keyElements = key.split(".")
if len(keyElements) == 4:
brokerBank = int(keyElements[2])
agentBank = int(keyElements[3])
else:
# If there's no routing key in the delivery properties,
# assume the message is from the broker.
brokerBank = 1
agentBank = 0
agent = broker.getAgent(brokerBank, agentBank)
if self.rcvHeartbeats and self.console != None and agent != None:
timestamp = codec.read_uint64()
self._heartbeatCallback(agent, timestamp)
def _handleSchemaResp(self, broker, codec, seq, agent_addr):
kind = codec.read_uint8()
classKey = ClassKey(codec)
classKey._setType(kind)
_class = SchemaClass(kind, classKey, codec, self)
new_pkg, new_cls = self.schemaCache.declareClass(classKey, _class)
ctx = self.seqMgr._release(seq)
if ctx:
broker._decOutstanding()
if self.console != None:
if new_pkg:
self._newPackageCallback(classKey.getPackageName())
if new_cls:
self._newClassCallback(kind, classKey)
if agent_addr and (agent_addr.__class__ == str or agent_addr.__class__ == unicode):
agent = self._getAgentForAgentAddr(agent_addr)
if agent:
agent._schemaInfoFromV2Agent()
def _v2HandleHeartbeatInd(self, broker, mp, ah, content):
try:
agentName = ah["qmf.agent"]
values = content["_values"]
if '_timestamp' in values:
timestamp = values["_timestamp"]
else:
timestamp = values['timestamp']
if '_heartbeat_interval' in values:
interval = values['_heartbeat_interval']
else:
interval = values['heartbeat_interval']
epoch = 0
if '_epoch' in values:
epoch = values['_epoch']
elif 'epoch' in values:
epoch = values['epoch']
except Exception:
return
if self.agent_filter:
# only allow V2 agents that satisfy the filter
v = agentName.split(":", 2)
if len(v) != 3 or ((v[0], None, None) not in self.agent_filter
and (v[0], v[1], None) not in self.agent_filter
and (v[0], v[1], v[2]) not in self.agent_filter):
return
##
## We already have the "local-broker" agent in our list as ['0'].
##
if '_vendor' in values and values['_vendor'] == 'apache.org' and \
'_product' in values and values['_product'] == 'qpidd':
agent = broker.getBrokerAgent()
else:
agent = broker.getAgent(1, agentName)
if agent == None:
agent = Agent(broker, agentName, "QMFv2 Agent", True, interval)
agent.setEpoch(epoch)
broker._addAgent(agentName, agent)
else:
agent.touch()
if self.rcvHeartbeats and self.console and agent:
self._heartbeatCallback(agent, timestamp)
agent.update_schema_timestamp(values.get("_schema_updated", 0))
def _v2HandleAgentLocateRsp(self, broker, mp, ah, content):
self._v2HandleHeartbeatInd(broker, mp, ah, content)
def _handleError(self, error):
try:
self.cv.acquire()
if len(self.syncSequenceList) > 0:
self.error = error
self.syncSequenceList = []
self.cv.notify()
finally:
self.cv.release()
def _selectMatch(self, object):
""" Check the object against self.getSelect to check for a match """
for key, value in self.getSelect:
for prop, propval in object.getProperties():
if key == prop.name and value != propval:
return False
return True
def _decodeValue(self, codec, typecode, broker=None):
""" Decode, from the codec, a value based on its typecode. """
if typecode == 1: data = codec.read_uint8() # U8
elif typecode == 2: data = codec.read_uint16() # U16
elif typecode == 3: data = codec.read_uint32() # U32
elif typecode == 4: data = codec.read_uint64() # U64
elif typecode == 6: data = codec.read_str8() # SSTR
elif typecode == 7: data = codec.read_str16() # LSTR
elif typecode == 8: data = codec.read_int64() # ABSTIME
elif typecode == 9: data = codec.read_uint64() # DELTATIME
elif typecode == 10: data = ObjectId(codec) # REF
elif typecode == 11: data = codec.read_uint8() != 0 # BOOL
elif typecode == 12: data = codec.read_float() # FLOAT
elif typecode == 13: data = codec.read_double() # DOUBLE
elif typecode == 14: data = codec.read_uuid() # UUID
elif typecode == 16: data = codec.read_int8() # S8
elif typecode == 17: data = codec.read_int16() # S16
elif typecode == 18: data = codec.read_int32() # S32
    elif typecode == 19: data = codec.read_int64()      # S64
elif typecode == 15: data = codec.read_map() # FTABLE
elif typecode == 20: # OBJECT
      # Peek at the inner type code; if it is still 20, decode the value as an
      # embedded Object. Otherwise, recurse into _decodeValue for the inner type.
inner_type_code = codec.read_uint8()
if inner_type_code == 20:
classKey = ClassKey(codec)
schema = self.schemaCache.getSchema(classKey)
if not schema:
return None
data = Object(self, broker, schema, codec, True, True, False)
else:
data = self._decodeValue(codec, inner_type_code, broker)
elif typecode == 21: data = codec.read_list() # List
elif typecode == 22: #Array
#taken from codec10.read_array
sc = Codec(codec.read_vbin32())
count = sc.read_uint32()
type = sc.read_uint8()
data = []
while count > 0:
data.append(self._decodeValue(sc,type,broker))
count -= 1
else:
raise ValueError("Invalid type code: %d" % typecode)
return data
def _encodeValue(self, codec, value, typecode):
""" Encode, into the codec, a value based on its typecode. """
if typecode == 1: codec.write_uint8 (int(value)) # U8
elif typecode == 2: codec.write_uint16 (int(value)) # U16
elif typecode == 3: codec.write_uint32 (long(value)) # U32
elif typecode == 4: codec.write_uint64 (long(value)) # U64
elif typecode == 6: codec.write_str8 (value) # SSTR
elif typecode == 7: codec.write_str16 (value) # LSTR
elif typecode == 8: codec.write_int64 (long(value)) # ABSTIME
elif typecode == 9: codec.write_uint64 (long(value)) # DELTATIME
elif typecode == 10: value.encode (codec) # REF
elif typecode == 11: codec.write_uint8 (int(value)) # BOOL
elif typecode == 12: codec.write_float (float(value)) # FLOAT
elif typecode == 13: codec.write_double (float(value)) # DOUBLE
elif typecode == 14: codec.write_uuid (value.bytes) # UUID
elif typecode == 16: codec.write_int8 (int(value)) # S8
elif typecode == 17: codec.write_int16 (int(value)) # S16
elif typecode == 18: codec.write_int32 (int(value)) # S32
elif typecode == 19: codec.write_int64 (int(value)) # S64
elif typecode == 20: value._encodeUnmanaged(codec) # OBJECT
elif typecode == 15: codec.write_map (value) # FTABLE
elif typecode == 21: codec.write_list (value) # List
elif typecode == 22: # Array
sc = Codec()
self._encodeValue(sc, len(value), 3)
if len(value) > 0:
ltype = self.encoding(value[0])
self._encodeValue(sc,ltype,1)
for o in value:
self._encodeValue(sc, o, ltype)
codec.write_vbin32(sc.encoded)
else:
raise ValueError ("Invalid type code: %d" % typecode)
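  # Example (illustrative, not from the original code): the typecodes above
  # follow the QMFv1 wire encoding, so encoding and then decoding a value
  # round-trips:
  #
  #   codec = Codec()
  #   self._encodeValue(codec, 42, 3)                # typecode 3 = U32
  #   self._decodeValue(Codec(codec.encoded), 3)     # -> 42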
def encoding(self, value):
return self._encoding(value.__class__)
def _encoding(self, klass):
if Session.ENCODINGS.has_key(klass):
return self.ENCODINGS[klass]
for base in klass.__bases__:
result = self._encoding(base)
if result != None:
return result
def _displayValue(self, value, typecode):
""" """
if typecode == 1: return unicode(value)
elif typecode == 2: return unicode(value)
elif typecode == 3: return unicode(value)
elif typecode == 4: return unicode(value)
elif typecode == 6: return value
elif typecode == 7: return value
elif typecode == 8: return unicode(strftime("%c", gmtime(value / 1000000000)))
elif typecode == 9: return unicode(value)
elif typecode == 10: return unicode(value.__repr__())
elif typecode == 11:
if value: return u"T"
else: return u"F"
elif typecode == 12: return unicode(value)
elif typecode == 13: return unicode(value)
elif typecode == 14: return unicode(value.__repr__())
elif typecode == 15: return unicode(value.__repr__())
elif typecode == 16: return unicode(value)
elif typecode == 17: return unicode(value)
elif typecode == 18: return unicode(value)
elif typecode == 19: return unicode(value)
elif typecode == 20: return unicode(value.__repr__())
elif typecode == 21: return unicode(value.__repr__())
elif typecode == 22: return unicode(value.__repr__())
else:
raise ValueError ("Invalid type code: %d" % typecode)
def _defaultValue(self, stype, broker=None, kwargs={}):
""" """
typecode = stype.type
if typecode == 1: return 0
elif typecode == 2: return 0
elif typecode == 3: return 0
elif typecode == 4: return 0
elif typecode == 6: return ""
elif typecode == 7: return ""
elif typecode == 8: return 0
elif typecode == 9: return 0
elif typecode == 10: return ObjectId(None)
elif typecode == 11: return False
elif typecode == 12: return 0.0
elif typecode == 13: return 0.0
elif typecode == 14: return UUID(bytes=[0 for i in range(16)])
elif typecode == 15: return {}
elif typecode == 16: return 0
elif typecode == 17: return 0
elif typecode == 18: return 0
elif typecode == 19: return 0
elif typecode == 21: return []
elif typecode == 22: return []
elif typecode == 20:
try:
if "classKeys" in kwargs:
keyList = kwargs["classKeys"]
else:
keyList = None
classKey = self._bestClassKey(stype.refPackage, stype.refClass, keyList)
if classKey:
return self.makeObject(classKey, broker, kwargs)
except:
pass
return None
else:
raise ValueError ("Invalid type code: %d" % typecode)
def _bestClassKey(self, pname, cname, preferredList):
""" """
if pname == None or cname == None:
if len(preferredList) == 0:
return None
return preferredList[0]
for p in preferredList:
if p.getPackageName() == pname and p.getClassName() == cname:
return p
clist = self.getClasses(pname)
for c in clist:
if c.getClassName() == cname:
return c
return None
def _sendMethodRequest(self, broker, schemaKey, objectId, name, argList):
""" This is a legacy function that is used by qpid-tool to invoke methods
using the broker, objectId and schema.
Methods are now invoked on the object itself.
"""
objs = self.getObjects(_objectId=objectId)
if objs:
return objs[0]._sendMethodRequest(name, argList, {})
return None
def _newPackageCallback(self, pname):
"""
Invokes the console.newPackage() callback if the callback is present and
the package is not filtered.
"""
if self.console:
if len(self.class_filter) == 0 and len(self.event_filter) == 0:
self.console.newPackage(pname)
else:
for x in self.class_filter:
if x[0] == pname:
self.console.newPackage(pname)
return
for x in self.event_filter:
if x[0] == pname:
self.console.newPackage(pname)
return
def _newClassCallback(self, ctype, ckey):
"""
Invokes the console.newClass() callback if the callback is present and the
class is not filtered.
"""
if self.console:
if ctype == ClassKey.TYPE_DATA:
if (len(self.class_filter) == 0
or (ckey.getPackageName(), ckey.getClassName()) in self.class_filter):
self.console.newClass(ctype, ckey)
elif ctype == ClassKey.TYPE_EVENT:
if (len(self.event_filter) == 0
or (ckey.getPackageName(), ckey.getClassName()) in self.event_filter):
self.console.newClass(ctype, ckey)
else: # old class keys did not contain type info, check both filters
if ((len(self.class_filter) == 0 and len(self.event_filter) == 0)
or (ckey.getPackageName(), ckey.getClassName()) in self.class_filter
or (ckey.getPackageName(), ckey.getClassName()) in self.event_filter):
self.console.newClass(ctype, ckey)
def _agentAllowed(self, agentName, isV2):
""" True if the agent is NOT filtered.
"""
if self.agent_filter:
if isV2:
v = agentName.split(":", 2)
return ((len(v) > 2 and (v[0], v[1], v[2]) in self.agent_filter)
or (len(v) > 1 and (v[0], v[1], None) in self.agent_filter)
                or (v and (v[0], None, None) in self.agent_filter))
else:
return agentName in self.agent_filter
return True
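  # Example (illustrative, values assumed): a V2 agent name has the form
  # "<vendor>:<product>:<instance>" and agent_filter holds (vendor, product,
  # instance) tuples, with None acting as a wildcard:
  #
  #   self.agent_filter = [("apache.org", "qpidd", None)]
  #   self._agentAllowed("apache.org:qpidd:1234-5678", True)   # -> True
  #   self._agentAllowed("example.com:otherd:abcd", True)      # -> False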
def _heartbeatCallback(self, agent, timestamp):
"""
Invokes the console.heartbeat() callback if the callback is present and the
agent is not filtered.
"""
if self.console and self.rcvHeartbeats:
if ((agent.isV2 and self._agentAllowed(agent.agentBank, True))
or ((not agent.isV2) and self._agentAllowed(agent.label, False))):
self.console.heartbeat(agent, timestamp)
def _newAgentCallback(self, agent):
"""
Invokes the console.newAgent() callback if the callback is present and the
agent is not filtered.
"""
if self.console:
if ((agent.isV2 and self._agentAllowed(agent.agentBank, True))
or ((not agent.isV2) and self._agentAllowed(agent.label, False))):
self.console.newAgent(agent)
def _delAgentCallback(self, agent):
"""
Invokes the console.delAgent() callback if the callback is present and the
agent is not filtered.
"""
if self.console:
if ((agent.isV2 and self._agentAllowed(agent.agentBank, True))
or ((not agent.isV2) and self._agentAllowed(agent.label, False))):
self.console.delAgent(agent)
#===================================================================================================
# SessionGetRequest
#===================================================================================================
class SessionGetRequest(object):
"""
This class is used to track get-object queries at the Session level.
"""
def __init__(self, agentCount):
self.agentCount = agentCount
self.result = []
self.cv = Condition()
self.waiting = True
def __call__(self, **kwargs):
"""
Callable entry point for gathering collected objects.
"""
try:
self.cv.acquire()
if 'qmf_object' in kwargs:
self.result.append(kwargs['qmf_object'])
elif 'qmf_complete' in kwargs or 'qmf_exception' in kwargs:
self.agentCount -= 1
if self.agentCount == 0:
self.waiting = None
self.cv.notify()
finally:
self.cv.release()
def wait(self, timeout):
starttime = time()
try:
self.cv.acquire()
while self.waiting:
if (time() - starttime) > timeout:
raise Exception("Timed out after %d seconds" % timeout)
self.cv.wait(1)
finally:
self.cv.release()
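  # Example (illustrative, hypothetical usage): a SessionGetRequest is handed
  # to several agents as the 'notifiable' callback and then waited on:
  #
  #   req = SessionGetRequest(agentCount=len(agents))
  #   for agent in agents:
  #       agent.getObjects(notifiable=req, _class="queue")
  #   req.wait(timeout=30)
  #   objects = req.result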
#===================================================================================================
# SchemaCache
#===================================================================================================
class SchemaCache(object):
"""
The SchemaCache is a data structure that stores learned schema information.
"""
def __init__(self):
"""
Create a map of schema packages and a lock to protect this data structure.
    Note that this lock is at the bottom of any lock hierarchy: while it is held,
    no attempt should be made to acquire any other lock in the system.
"""
self.packages = {}
self.lock = Lock()
def getPackages(self):
""" Get the list of known QMF packages """
list = []
try:
self.lock.acquire()
for package in self.packages:
list.append(package)
finally:
self.lock.release()
return list
def getClasses(self, packageName):
""" Get the list of known classes within a QMF package """
list = []
try:
self.lock.acquire()
if packageName in self.packages:
for pkey in self.packages[packageName]:
if isinstance(self.packages[packageName][pkey], SchemaClass):
list.append(self.packages[packageName][pkey].getKey())
elif self.packages[packageName][pkey] is not None:
# schema not present yet, but we have schema type
list.append(ClassKey({"_package_name": packageName,
"_class_name": pkey[0],
"_hash": pkey[1],
"_type": self.packages[packageName][pkey]}))
finally:
self.lock.release()
return list
def getSchema(self, classKey):
""" Get the schema for a QMF class, return None if schema not available """
pname = classKey.getPackageName()
pkey = classKey.getPackageKey()
try:
self.lock.acquire()
if pname in self.packages:
if (pkey in self.packages[pname] and
isinstance(self.packages[pname][pkey], SchemaClass)):
# hack: value may be schema type info if schema not available
return self.packages[pname][pkey]
finally:
self.lock.release()
return None
def declarePackage(self, pname):
""" Maybe add a package to the cache. Return True if package was added, None if it pre-existed. """
try:
self.lock.acquire()
if pname in self.packages:
return None
self.packages[pname] = {}
finally:
self.lock.release()
return True
def declareClass(self, classKey, classDef=None):
""" Add a class definition to the cache, if supplied. Return a pair
indicating if the package or class is new.
"""
new_package = False
new_class = False
pname = classKey.getPackageName()
pkey = classKey.getPackageKey()
try:
self.lock.acquire()
if pname not in self.packages:
self.packages[pname] = {}
new_package = True
packageMap = self.packages[pname]
if pkey not in packageMap or not isinstance(packageMap[pkey], SchemaClass):
if classDef is not None:
new_class = True
packageMap[pkey] = classDef
elif classKey.getType() is not None:
# hack: don't indicate "new_class" to caller unless the classKey type
# information is present. "new_class" causes the console.newClass()
# callback to be invoked, which -requires- a valid classKey type!
new_class = True
# store the type for the getClasses() method:
packageMap[pkey] = classKey.getType()
finally:
self.lock.release()
return (new_package, new_class)
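  # Example (illustrative, names assumed): typical use of the cache, given a
  # ClassKey 'classKey' and a SchemaClass 'schemaClass' learned from a broker:
  #
  #   cache = SchemaCache()
  #   cache.declarePackage("org.apache.qpid.broker")   # -> True (new package)
  #   cache.declareClass(classKey, schemaClass)        # -> (False, True)
  #   cache.getSchema(classKey)                        # -> schemaClass
  #   cache.getClasses("org.apache.qpid.broker")       # -> [classKey, ...]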
#===================================================================================================
# ClassKey
#===================================================================================================
class ClassKey:
""" A ClassKey uniquely identifies a class from the schema. """
TYPE_DATA = "_data"
TYPE_EVENT = "_event"
def __init__(self, constructor):
if constructor.__class__ == str:
# construct from __repr__ string
try:
# supports two formats:
# type present = P:C:T(H)
# no type present = P:C(H)
tmp = constructor.split(":")
if len(tmp) == 3:
self.pname, self.cname, rem = tmp
self.type, hsh = rem.split("(")
else:
self.pname, rem = tmp
self.cname, hsh = rem.split("(")
self.type = None
hsh = hsh.strip(")")
hexValues = hsh.split("-")
h0 = int(hexValues[0], 16)
h1 = int(hexValues[1], 16)
h2 = int(hexValues[2], 16)
h3 = int(hexValues[3], 16)
h4 = int(hexValues[4][0:4], 16)
h5 = int(hexValues[4][4:12], 16)
self.hash = UUID(bytes=struct.pack("!LHHHHL", h0, h1, h2, h3, h4, h5))
except:
raise Exception("Invalid ClassKey format")
elif constructor.__class__ == dict:
# construct from QMFv2 map
try:
self.pname = constructor['_package_name']
self.cname = constructor['_class_name']
self.hash = constructor['_hash']
self.type = constructor.get('_type')
except:
raise Exception("Invalid ClassKey map format %s" % str(constructor))
else:
# construct from codec
codec = constructor
self.pname = str(codec.read_str8())
self.cname = str(codec.read_str8())
self.hash = UUID(bytes=codec.read_bin128())
# old V1 codec did not include "type"
self.type = None
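  # Example (illustrative, hash value assumed): the string constructor accepts
  # either __repr__ format described above, where the hash is a UUID string:
  #
  #   ClassKey("org.apache.qpid.broker:queue(11111111-2222-3333-4444-555555555555)")
  #   ClassKey("org.apache.qpid.broker:queue:_data(11111111-2222-3333-4444-555555555555)")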
def encode(self, codec):
# old V1 codec did not include "type"
codec.write_str8(self.pname)
codec.write_str8(self.cname)
codec.write_bin128(self.hash.bytes)
def asMap(self):
m = {'_package_name': self.pname,
'_class_name': self.cname,
'_hash': self.hash}
if self.type is not None:
m['_type'] = self.type
return m
def getPackageName(self):
return self.pname
def getClassName(self):
return self.cname
def getHash(self):
return self.hash
def getType(self):
return self.type
def getHashString(self):
return str(self.hash)
def getPackageKey(self):
return (self.cname, self.hash)
def __repr__(self):
if self.type is None:
return self.pname + ":" + self.cname + "(" + self.getHashString() + ")"
return self.pname + ":" + self.cname + ":" + self.type + "(" + self.getHashString() + ")"
def _setType(self, _type):
if _type == 2 or _type == ClassKey.TYPE_EVENT:
self.type = ClassKey.TYPE_EVENT
else:
self.type = ClassKey.TYPE_DATA
def __hash__(self):
ss = self.pname + self.cname + self.getHashString()
return ss.__hash__()
def __eq__(self, other):
return self.__repr__() == other.__repr__()
#===================================================================================================
# SchemaClass
#===================================================================================================
class SchemaClass:
""" """
CLASS_KIND_TABLE = 1
CLASS_KIND_EVENT = 2
def __init__(self, kind, key, codec, session):
self.kind = kind
self.classKey = key
self.properties = []
self.statistics = []
self.methods = []
self.arguments = []
self.session = session
hasSupertype = 0 #codec.read_uint8()
if self.kind == self.CLASS_KIND_TABLE:
propCount = codec.read_uint16()
statCount = codec.read_uint16()
methodCount = codec.read_uint16()
if hasSupertype == 1:
self.superTypeKey = ClassKey(codec)
else:
self.superTypeKey = None ;
for idx in range(propCount):
self.properties.append(SchemaProperty(codec))
for idx in range(statCount):
self.statistics.append(SchemaStatistic(codec))
for idx in range(methodCount):
self.methods.append(SchemaMethod(codec))
elif self.kind == self.CLASS_KIND_EVENT:
argCount = codec.read_uint16()
if (hasSupertype):
self.superTypeKey = ClassKey(codec)
else:
self.superTypeKey = None ;
for idx in range(argCount):
self.arguments.append(SchemaArgument(codec, methodArg=False))
def __repr__(self):
if self.kind == self.CLASS_KIND_TABLE:
kindStr = "Table"
elif self.kind == self.CLASS_KIND_EVENT:
kindStr = "Event"
else:
kindStr = "Unsupported"
result = "%s Class: %s " % (kindStr, self.classKey.__repr__())
return result
def getKey(self):
""" Return the class-key for this class. """
return self.classKey
def getProperties(self):
""" Return the list of properties for the class. """
if (self.superTypeKey == None):
return self.properties
else:
return self.properties + self.session.getSchema(self.superTypeKey).getProperties()
def getStatistics(self):
""" Return the list of statistics for the class. """
if (self.superTypeKey == None):
return self.statistics
else:
return self.statistics + self.session.getSchema(self.superTypeKey).getStatistics()
def getMethods(self):
""" Return the list of methods for the class. """
if (self.superTypeKey == None):
return self.methods
else:
return self.methods + self.session.getSchema(self.superTypeKey).getMethods()
def getArguments(self):
""" Return the list of events for the class. """
""" Return the list of methods for the class. """
if (self.superTypeKey == None):
return self.arguments
else:
return self.arguments + self.session.getSchema(self.superTypeKey).getArguments()
#===================================================================================================
# SchemaProperty
#===================================================================================================
class SchemaProperty:
""" """
def __init__(self, codec):
map = codec.read_map()
self.name = str(map["name"])
self.type = map["type"]
self.access = str(map["access"])
self.index = map["index"] != 0
self.optional = map["optional"] != 0
self.refPackage = None
self.refClass = None
self.unit = None
self.min = None
self.max = None
self.maxlen = None
self.desc = None
for key, value in map.items():
if key == "unit" : self.unit = value
elif key == "min" : self.min = value
elif key == "max" : self.max = value
elif key == "maxlen" : self.maxlen = value
elif key == "desc" : self.desc = value
elif key == "refPackage" : self.refPackage = value
elif key == "refClass" : self.refClass = value
def __repr__(self):
return self.name
#===================================================================================================
# SchemaStatistic
#===================================================================================================
class SchemaStatistic:
""" """
def __init__(self, codec):
map = codec.read_map()
self.name = str(map["name"])
self.type = map["type"]
self.unit = None
self.desc = None
for key, value in map.items():
if key == "unit" : self.unit = value
elif key == "desc" : self.desc = value
def __repr__(self):
return self.name
#===================================================================================================
# SchemaMethod
#===================================================================================================
class SchemaMethod:
""" """
def __init__(self, codec):
map = codec.read_map()
self.name = str(map["name"])
argCount = map["argCount"]
if "desc" in map:
self.desc = map["desc"]
else:
self.desc = None
self.arguments = []
for idx in range(argCount):
self.arguments.append(SchemaArgument(codec, methodArg=True))
def __repr__(self):
result = self.name + "("
first = True
for arg in self.arguments:
if arg.dir.find("I") != -1:
if first:
first = False
else:
result += ", "
result += arg.name
result += ")"
return result
#===================================================================================================
# SchemaArgument
#===================================================================================================
class SchemaArgument:
""" """
def __init__(self, codec, methodArg):
map = codec.read_map()
self.name = str(map["name"])
self.type = map["type"]
if methodArg:
self.dir = str(map["dir"]).upper()
self.unit = None
self.min = None
self.max = None
self.maxlen = None
self.desc = None
self.default = None
self.refPackage = None
self.refClass = None
for key, value in map.items():
if key == "unit" : self.unit = value
elif key == "min" : self.min = value
elif key == "max" : self.max = value
elif key == "maxlen" : self.maxlen = value
elif key == "desc" : self.desc = value
elif key == "default" : self.default = value
elif key == "refPackage" : self.refPackage = value
elif key == "refClass" : self.refClass = value
#===================================================================================================
# ObjectId
#===================================================================================================
class ObjectId:
""" Object that represents QMF object identifiers """
def __init__(self, constructor, first=0, second=0, agentName=None):
if constructor.__class__ == dict:
self.isV2 = True
self.agentName = agentName
self.agentEpoch = 0
if '_agent_name' in constructor: self.agentName = constructor['_agent_name']
if '_agent_epoch' in constructor: self.agentEpoch = constructor['_agent_epoch']
if '_object_name' not in constructor:
raise Exception("QMFv2 OBJECT_ID must have the '_object_name' field.")
self.objectName = constructor['_object_name']
else:
self.isV2 = None
if not constructor:
first = first
second = second
else:
first = constructor.read_uint64()
second = constructor.read_uint64()
self.agentName = str(first & 0x000000000FFFFFFF)
self.agentEpoch = (first & 0x0FFF000000000000) >> 48
self.objectName = str(second)
def _create(cls, agent_name, object_name, epoch=0):
oid = {"_agent_name": agent_name,
"_object_name": object_name,
"_agent_epoch": epoch}
return cls(oid)
create = classmethod(_create)
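  # Example (illustrative, names assumed): building a V2 object-id for an
  # object hosted by a named agent:
  #
  #   oid = ObjectId.create("apache.org:qpidd:1234", "org.apache.qpid.broker:queue:q1")
  #   oid.getAgentBank()   # -> "apache.org:qpidd:1234"
  #   oid.getObject()      # -> "org.apache.qpid.broker:queue:q1"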
def __cmp__(self, other):
if other == None or not isinstance(other, ObjectId) :
return 1
if self.objectName < other.objectName:
return -1
if self.objectName > other.objectName:
return 1
if self.agentName < other.agentName:
return -1
if self.agentName > other.agentName:
return 1
if self.agentEpoch < other.agentEpoch:
return -1
if self.agentEpoch > other.agentEpoch:
return 1
return 0
def __repr__(self):
return "%d-%d-%d-%s-%s" % (self.getFlags(), self.getSequence(),
self.getBrokerBank(), self.getAgentBank(), self.getObject())
def index(self):
return self.__repr__()
def getFlags(self):
return 0
def getSequence(self):
return self.agentEpoch
def getBrokerBank(self):
return 1
def getAgentBank(self):
return self.agentName
def getV2RoutingKey(self):
if self.agentName == '0':
return "broker"
return self.agentName
def getObject(self):
return self.objectName
def isDurable(self):
return self.getSequence() == 0
def encode(self, codec):
first = (self.agentEpoch << 48) + (1 << 28)
second = 0
try:
first += int(self.agentName)
except:
pass
try:
second = int(self.objectName)
except:
pass
codec.write_uint64(first)
codec.write_uint64(second)
def asMap(self):
omap = {'_agent_name': self.agentName, '_object_name': self.objectName}
if self.agentEpoch != 0:
omap['_agent_epoch'] = self.agentEpoch
return omap
def __hash__(self):
return self.__repr__().__hash__()
def __eq__(self, other):
    return self.__repr__() == other.__repr__()
#===================================================================================================
# MethodResult
#===================================================================================================
class MethodResult(object):
""" """
def __init__(self, status, text, outArgs):
""" """
self.status = status
self.text = text
self.outArgs = outArgs
def __getattr__(self, name):
if name in self.outArgs:
return self.outArgs[name]
def __repr__(self):
return "%s (%d) - %s" % (self.text, self.status, self.outArgs)
#===================================================================================================
# Broker
#===================================================================================================
class Broker(Thread):
""" This object represents a connection (or potential connection) to a QMF broker. """
SYNC_TIME = 60
nextSeq = 1
# for connection recovery
DELAY_MIN = 1
DELAY_MAX = 128
DELAY_FACTOR = 2
class _q_item:
""" Broker-private class to encapsulate data sent to the broker thread
queue.
"""
type_wakeup = 0
type_v1msg = 1
type_v2msg = 2
def __init__(self, typecode, data):
self.typecode = typecode
self.data = data
def __init__(self, session, host, port, authMechs, authUser, authPass,
ssl=False, connTimeout=None, sessTimeout=None, **connectArgs):
""" Create a broker proxy and setup a connection to the broker. Will raise
an exception if the connection fails and the session is not configured to
retry connection setup (manageConnections = False).
Spawns a thread to manage the broker connection. Call _shutdown() to
shutdown the thread when releasing the broker.
"""
Thread.__init__(self)
self.session = session
self.host = host
self.port = port
self.mechanisms = authMechs
self.ssl = ssl
if connTimeout is not None:
connTimeout = float(connTimeout)
self.connTimeout = connTimeout
if sessTimeout is not None:
sessTimeout = float(sessTimeout)
else:
sessTimeout = self.SYNC_TIME
self.sessTimeout = sessTimeout
self.authUser = authUser
self.authPass = authPass
self.saslUser = None
self.cv = Condition()
self.seqToAgentMap = {}
self.error = None
self.conn_exc = None # exception hit by _tryToConnect()
self.brokerId = None
self.connected = False
self.brokerAgent = None
self.brokerSupportsV2 = None
self.rcv_queue = Queue() # for msg received on session
self.conn = None
self.amqpSession = None
self.amqpSessionId = "%s.%d.%d" % (platform.uname()[1], os.getpid(), Broker.nextSeq)
Broker.nextSeq += 1
self.last_age_check = time()
self.connectArgs = connectArgs
# thread control
self.setDaemon(True)
self.setName("Thread for broker: %s:%d" % (host, port))
self.canceled = False
self.ready = Semaphore(0)
self.start()
if not self.session.manageConnections:
# wait for connection setup to complete in subthread.
# On failure, propagate exception to caller
self.ready.acquire()
if self.conn_exc:
self._shutdown() # wait for the subthread to clean up...
raise self.conn_exc
# connection up - wait for stable...
try:
self._waitForStable()
agent = self.getBrokerAgent()
if agent:
agent.getObjects(_class="agent")
except:
self._shutdown() # wait for the subthread to clean up...
raise
def isConnected(self):
""" Return True if there is an active connection to the broker. """
return self.connected
def getError(self):
""" Return the last error message seen while trying to connect to the broker. """
return self.error
def getBrokerId(self):
""" Get broker's unique identifier (UUID) """
return self.brokerId
def getBrokerBank(self):
""" Return the broker-bank value. This is the value that the broker assigns to
objects within its control. This value appears as a field in the ObjectId
of objects created by agents controlled by this broker. """
return 1
def getAgent(self, brokerBank, agentBank):
""" Return the agent object associated with a particular broker and agent bank value."""
bankKey = str(agentBank)
try:
self.cv.acquire()
if bankKey in self.agents:
return self.agents[bankKey]
finally:
self.cv.release()
return None
def getBrokerAgent(self):
return self.brokerAgent
def getSessionId(self):
""" Get the identifier of the AMQP session to the broker """
return self.amqpSessionId
def getAgents(self):
""" Get the list of agents reachable via this broker """
try:
self.cv.acquire()
return self.agents.values()
finally:
self.cv.release()
def getAmqpSession(self):
""" Get the AMQP session object for this connected broker. """
return self.amqpSession
def getUrl(self):
""" """
return BrokerURL(host=self.host, port=self.port)
def getFullUrl(self, noAuthIfGuestDefault=True):
""" """
if self.ssl:
scheme = "amqps"
else:
scheme = "amqp"
if self.authUser == "" or \
(noAuthIfGuestDefault and self.authUser == "guest" and self.authPass == "guest"):
return BrokerURL(scheme=scheme, host=self.host, port=(self.port or 5672))
else:
return BrokerURL(scheme=scheme, user=self.authUser, password=self.authPass, host=self.host, port=(self.port or 5672))
def __repr__(self):
if self.connected:
return "Broker connected at: %s" % self.getUrl()
else:
return "Disconnected Broker"
def _setSequence(self, sequence, agent):
try:
self.cv.acquire()
self.seqToAgentMap[sequence] = agent
finally:
self.cv.release()
def _clearSequence(self, sequence):
try:
self.cv.acquire()
self.seqToAgentMap.pop(sequence)
finally:
self.cv.release()
def _tryToConnect(self):
""" Connect to the broker. Returns True if connection setup completes
successfully, otherwise returns False and sets self.error/self.conn_exc
with error info. Does not raise exceptions.
"""
self.error = None
self.conn_exc = None
try:
try:
self.cv.acquire()
self.agents = {}
finally:
self.cv.release()
self.topicBound = False
self.syncInFlight = False
self.syncRequest = 0
self.syncResult = None
self.reqsOutstanding = 1
try:
if self.amqpSession:
self.amqpSession.close()
except:
pass
self.amqpSession = None
try:
if self.conn:
self.conn.close(5)
except:
pass
self.conn = None
sock = connect(self.host, self.port)
sock.settimeout(5)
oldTimeout = sock.gettimeout()
sock.settimeout(self.connTimeout)
connSock = None
force_blocking = False
if self.ssl:
# Bug (QPID-4337): the "old" implementation of python SSL
# fails if the socket is set to non-blocking (which settimeout()
# may change).
if sys.version_info[:2] < (2, 6): # 2.6+ uses openssl - it's ok
force_blocking = True
sock.setblocking(1)
certfile = None
if 'ssl_certfile' in self.connectArgs:
certfile = self.connectArgs['ssl_certfile']
keyfile = None
if 'ssl_keyfile' in self.connectArgs:
keyfile = self.connectArgs['ssl_keyfile']
connSock = ssl(sock, certfile=certfile, keyfile=keyfile)
else:
connSock = sock
if not 'service' in self.connectArgs:
self.connectArgs['service'] = 'qpidd'
self.conn = Connection(connSock, username=self.authUser, password=self.authPass,
mechanism = self.mechanisms, host=self.host,
**self.connectArgs)
def aborted():
raise Timeout("Waiting for connection to be established with broker")
oldAborted = self.conn.aborted
self.conn.aborted = aborted
self.conn.start()
# Bug (QPID-4337): don't enable non-blocking (timeouts) for old SSL
if not force_blocking:
sock.settimeout(oldTimeout)
self.conn.aborted = oldAborted
uid = self.conn.user_id
if uid.__class__ == tuple and len(uid) == 2:
self.saslUser = uid[1]
elif type(uid) is str:
self.saslUser = uid;
else:
self.saslUser = None
# prevent topic queues from filling up (and causing the agents to
# disconnect) by discarding the oldest queued messages when full.
topic_queue_options = {"qpid.policy_type":"ring"}
self.replyName = "reply-%s" % self.amqpSessionId
self.amqpSession = self.conn.session(self.amqpSessionId)
self.amqpSession.timeout = self.sessTimeout
self.amqpSession.auto_sync = True
self.amqpSession.queue_declare(queue=self.replyName, exclusive=True, auto_delete=True)
self.amqpSession.exchange_bind(exchange="amq.direct",
queue=self.replyName, binding_key=self.replyName)
self.amqpSession.message_subscribe(queue=self.replyName, destination="rdest",
accept_mode=self.amqpSession.accept_mode.none,
acquire_mode=self.amqpSession.acquire_mode.pre_acquired)
self.amqpSession.incoming("rdest").listen(self._v1Cb, self._exceptionCb)
self.amqpSession.message_set_flow_mode(destination="rdest", flow_mode=self.amqpSession.flow_mode.window)
self.amqpSession.message_flow(destination="rdest", unit=self.amqpSession.credit_unit.byte, value=0xFFFFFFFF)
self.amqpSession.message_flow(destination="rdest", unit=self.amqpSession.credit_unit.message, value=200)
self.topicName = "topic-%s" % self.amqpSessionId
self.amqpSession.queue_declare(queue=self.topicName, exclusive=True,
auto_delete=True,
arguments=topic_queue_options)
self.amqpSession.message_subscribe(queue=self.topicName, destination="tdest",
accept_mode=self.amqpSession.accept_mode.none,
acquire_mode=self.amqpSession.acquire_mode.pre_acquired)
self.amqpSession.incoming("tdest").listen(self._v1Cb, self._exceptionCb)
self.amqpSession.message_set_flow_mode(destination="tdest", flow_mode=self.amqpSession.flow_mode.window)
self.amqpSession.message_flow(destination="tdest", unit=self.amqpSession.credit_unit.byte, value=0xFFFFFFFF)
self.amqpSession.message_flow(destination="tdest", unit=self.amqpSession.credit_unit.message, value=200)
##
## Check to see if the broker has QMFv2 exchanges configured
##
direct_result = self.amqpSession.exchange_query("qmf.default.direct")
topic_result = self.amqpSession.exchange_query("qmf.default.topic")
self.brokerSupportsV2 = not (direct_result.not_found or topic_result.not_found)
try:
self.cv.acquire()
self.agents = {}
self.brokerAgent = Agent(self, 0, "BrokerAgent", isV2=self.brokerSupportsV2)
self.agents['0'] = self.brokerAgent
finally:
self.cv.release()
##
## Set up connectivity for QMFv2
##
if self.brokerSupportsV2:
# set up 3 queues:
# 1 direct queue - for responses destined to this console.
# 2 topic queues - one for heartbeats (hb), one for unsolicited data
# and event indications (ui).
self.v2_direct_queue = "qmfc-v2-%s" % self.amqpSessionId
self.amqpSession.queue_declare(queue=self.v2_direct_queue, exclusive=True, auto_delete=True)
self.v2_topic_queue_ui = "qmfc-v2-ui-%s" % self.amqpSessionId
self.amqpSession.queue_declare(queue=self.v2_topic_queue_ui,
exclusive=True, auto_delete=True,
arguments=topic_queue_options)
self.v2_topic_queue_hb = "qmfc-v2-hb-%s" % self.amqpSessionId
self.amqpSession.queue_declare(queue=self.v2_topic_queue_hb,
exclusive=True, auto_delete=True,
arguments=topic_queue_options)
self.amqpSession.exchange_bind(exchange="qmf.default.direct",
queue=self.v2_direct_queue, binding_key=self.v2_direct_queue)
## Other bindings here...
self.amqpSession.message_subscribe(queue=self.v2_direct_queue, destination="v2dest",
accept_mode=self.amqpSession.accept_mode.none,
acquire_mode=self.amqpSession.acquire_mode.pre_acquired)
self.amqpSession.incoming("v2dest").listen(self._v2Cb, self._exceptionCb)
self.amqpSession.message_set_flow_mode(destination="v2dest", flow_mode=self.amqpSession.flow_mode.window)
self.amqpSession.message_flow(destination="v2dest", unit=self.amqpSession.credit_unit.byte, value=0xFFFFFFFF)
self.amqpSession.message_flow(destination="v2dest", unit=self.amqpSession.credit_unit.message, value=50)
self.amqpSession.message_subscribe(queue=self.v2_topic_queue_ui, destination="v2TopicUI",
accept_mode=self.amqpSession.accept_mode.none,
acquire_mode=self.amqpSession.acquire_mode.pre_acquired)
self.amqpSession.incoming("v2TopicUI").listen(self._v2Cb, self._exceptionCb)
self.amqpSession.message_set_flow_mode(destination="v2TopicUI", flow_mode=self.amqpSession.flow_mode.window)
self.amqpSession.message_flow(destination="v2TopicUI", unit=self.amqpSession.credit_unit.byte, value=0xFFFFFFFF)
self.amqpSession.message_flow(destination="v2TopicUI", unit=self.amqpSession.credit_unit.message, value=25)
self.amqpSession.message_subscribe(queue=self.v2_topic_queue_hb, destination="v2TopicHB",
accept_mode=self.amqpSession.accept_mode.none,
acquire_mode=self.amqpSession.acquire_mode.pre_acquired)
self.amqpSession.incoming("v2TopicHB").listen(self._v2Cb, self._exceptionCb)
self.amqpSession.message_set_flow_mode(destination="v2TopicHB", flow_mode=self.amqpSession.flow_mode.window)
self.amqpSession.message_flow(destination="v2TopicHB", unit=self.amqpSession.credit_unit.byte, value=0xFFFFFFFF)
self.amqpSession.message_flow(destination="v2TopicHB", unit=self.amqpSession.credit_unit.message, value=100)
codec = Codec()
self._setHeader(codec, 'B')
msg = self._message(codec.encoded)
self._send(msg)
return True # connection complete
except Exception as e:
self.error = "Exception during connection setup: %s - %s" % (e.__class__.__name__, e)
self.conn_exc = e
if self.session.console:
self.session.console.brokerConnectionFailed(self)
return False # connection failed
def _updateAgent(self, obj):
"""
Just received an object of class "org.apache.qpid.broker:agent", which
represents a V1 agent. Add or update the list of agent proxies.
"""
bankKey = str(obj.agentBank)
agent = None
if obj._deleteTime == 0:
try:
self.cv.acquire()
if bankKey not in self.agents:
# add new agent only if label is not filtered
if len(self.session.agent_filter) == 0 or obj.label in self.session.agent_filter:
agent = Agent(self, obj.agentBank, obj.label)
self.agents[bankKey] = agent
finally:
self.cv.release()
if agent and self.session.console:
self.session._newAgentCallback(agent)
else:
try:
self.cv.acquire()
agent = self.agents.pop(bankKey, None)
if agent:
agent.close()
finally:
self.cv.release()
if agent and self.session.console:
self.session._delAgentCallback(agent)
def _addAgent(self, name, agent):
try:
self.cv.acquire()
self.agents[name] = agent
finally:
self.cv.release()
if self.session.console:
self.session._newAgentCallback(agent)
def _ageAgents(self):
if (time() - self.last_age_check) < self.session.agent_heartbeat_min:
# don't age if it's too soon
return
self.cv.acquire()
try:
to_delete = []
to_notify = []
for key in self.agents:
if self.agents[key].isOld():
to_delete.append(key)
for key in to_delete:
agent = self.agents.pop(key)
agent.close()
to_notify.append(agent)
self.last_age_check = time()
finally:
self.cv.release()
if self.session.console:
for agent in to_notify:
self.session._delAgentCallback(agent)
def _v2SendAgentLocate(self, predicate=[]):
"""
Broadcast an agent-locate request to cause all agents in the domain to tell us who they are.
"""
# @todo: send locate only to those agents in agent_filter?
dp = self.amqpSession.delivery_properties()
dp.routing_key = "console.request.agent_locate"
mp = self.amqpSession.message_properties()
mp.content_type = "amqp/list"
if self.saslUser:
mp.user_id = self.saslUser
mp.app_id = "qmf2"
mp.reply_to = self.amqpSession.reply_to("qmf.default.direct", self.v2_direct_queue)
mp.application_headers = {'qmf.opcode':'_agent_locate_request'}
sendCodec = Codec()
sendCodec.write_list(predicate)
msg = Message(dp, mp, sendCodec.encoded)
self._send(msg, "qmf.default.topic")
def _setHeader(self, codec, opcode, seq=0):
""" Compose the header of a management message. """
codec.write_uint8(ord('A'))
codec.write_uint8(ord('M'))
codec.write_uint8(ord('2'))
codec.write_uint8(ord(opcode))
codec.write_uint32(seq)
def _checkHeader(self, codec):
""" Check the header of a management message and extract the opcode and class. """
try:
octet = chr(codec.read_uint8())
if octet != 'A':
return None, None
octet = chr(codec.read_uint8())
if octet != 'M':
return None, None
octet = chr(codec.read_uint8())
if octet != '2':
return None, None
opcode = chr(codec.read_uint8())
seq = codec.read_uint32()
return opcode, seq
except:
return None, None
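  # Example (illustrative): a V1 management frame starts with the 3-byte magic
  # "AM2", a 1-byte opcode and a 32-bit sequence number, so a round trip
  # through the two helpers above looks like:
  #
  #   codec = Codec()
  #   self._setHeader(codec, 'B', seq=42)
  #   self._checkHeader(Codec(codec.encoded))   # -> ('B', 42)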
def _message (self, body, routing_key="broker", ttl=None):
dp = self.amqpSession.delivery_properties()
dp.routing_key = routing_key
if ttl:
dp.ttl = ttl
mp = self.amqpSession.message_properties()
mp.content_type = "x-application/qmf"
if self.saslUser:
mp.user_id = self.saslUser
mp.reply_to = self.amqpSession.reply_to("amq.direct", self.replyName)
return Message(dp, mp, body)
def _send(self, msg, dest="qpid.management"):
self.amqpSession.message_transfer(destination=dest, message=msg)
def _disconnect(self, err_info=None):
""" Called when the remote broker has disconnected. Re-initializes all
state associated with the broker.
"""
# notify any waiters, and callback
self.cv.acquire()
try:
if err_info is not None:
self.error = err_info
_agents = self.agents
self.agents = {}
for agent in _agents.itervalues():
agent.close()
self.syncInFlight = False
self.reqsOutstanding = 0
self.cv.notifyAll()
finally:
self.cv.release()
if self.session.console:
for agent in _agents.itervalues():
self.session._delAgentCallback(agent)
def _shutdown(self, _timeout=10):
""" Disconnect from a broker, and release its resources. Errors are
ignored.
"""
if self.isAlive():
# kick the thread
self.canceled = True
self.rcv_queue.put(Broker._q_item(Broker._q_item.type_wakeup, None))
self.join(_timeout)
# abort any pending transactions and delete agents
self._disconnect("broker shutdown")
try:
if self.amqpSession:
self.amqpSession.close();
except:
pass
self.amqpSession = None
try:
if self.conn:
self.conn.close(_timeout)
except:
pass
self.conn = None
self.connected = False
def _waitForStable(self):
try:
self.cv.acquire()
if not self.connected:
return
if self.reqsOutstanding == 0:
return
self.syncInFlight = True
starttime = time()
while self.reqsOutstanding != 0:
self.cv.wait(self.SYNC_TIME)
if time() - starttime > self.SYNC_TIME:
raise RuntimeError("Timed out waiting for broker to synchronize")
finally:
self.cv.release()
def _incOutstanding(self):
try:
self.cv.acquire()
self.reqsOutstanding += 1
finally:
self.cv.release()
def _decOutstanding(self):
try:
self.cv.acquire()
self.reqsOutstanding -= 1
if self.reqsOutstanding == 0 and not self.topicBound:
self.topicBound = True
for key in self.session.v1BindingKeyList:
self.amqpSession.exchange_bind(exchange="qpid.management",
queue=self.topicName, binding_key=key)
if self.brokerSupportsV2:
# do not drop heartbeat indications when under load from data
# or event indications. Put heartbeats on their own dedicated
# queue.
#
for key in self.session.v2BindingKeyList:
if key.startswith("agent.ind.heartbeat"):
self.amqpSession.exchange_bind(exchange="qmf.default.topic",
queue=self.v2_topic_queue_hb,
binding_key=key)
else:
self.amqpSession.exchange_bind(exchange="qmf.default.topic",
queue=self.v2_topic_queue_ui,
binding_key=key)
# solicit an agent locate now, after we bind to agent.ind.data,
# because the agent locate will cause the agent to publish a
# data indication - and now we're able to receive it!
self._v2SendAgentLocate()
if self.reqsOutstanding == 0 and self.syncInFlight:
self.syncInFlight = False
self.cv.notify()
finally:
self.cv.release()
def _v1Cb(self, msg):
""" Callback from session receive thread for V1 messages
"""
self.rcv_queue.put(Broker._q_item(Broker._q_item.type_v1msg, msg))
def _v1Dispatch(self, msg):
try:
self._v1DispatchProtected(msg)
except Exception as e:
print("EXCEPTION in Broker._v1Cb:", e)
import traceback
traceback.print_exc()
def _v1DispatchProtected(self, msg):
"""
This is the general message handler for messages received via the QMFv1 exchanges.
"""
try:
agent = None
agent_addr = None
mp = msg.get("message_properties")
ah = mp.application_headers
if ah and 'qmf.agent' in ah:
agent_addr = ah['qmf.agent']
if not agent_addr:
#
# See if we can determine the agent identity from the routing key
#
dp = msg.get("delivery_properties")
rkey = None
if dp and dp.routing_key:
rkey = dp.routing_key
items = rkey.split('.')
if len(items) >= 4:
if items[0] == 'console' and items[3].isdigit():
agent_addr = str(items[3]) # The QMFv1 Agent Bank
if agent_addr != None and agent_addr in self.agents:
agent = self.agents[agent_addr]
codec = Codec(msg.body)
alreadyTried = None
while True:
opcode, seq = self._checkHeader(codec)
if not agent and not alreadyTried:
alreadyTried = True
try:
self.cv.acquire()
if seq in self.seqToAgentMap:
agent = self.seqToAgentMap[seq]
finally:
self.cv.release()
if opcode == None: break
if opcode == 'b': self.session._handleBrokerResp (self, codec, seq)
elif opcode == 'p': self.session._handlePackageInd (self, codec, seq)
elif opcode == 'q': self.session._handleClassInd (self, codec, seq)
elif opcode == 's': self.session._handleSchemaResp (self, codec, seq, agent_addr)
elif opcode == 'h': self.session._handleHeartbeatInd (self, codec, seq, msg)
elif opcode == 'z': self.session._handleCommandComplete (self, codec, seq, agent)
elif agent:
agent._handleQmfV1Message(opcode, seq, mp, ah, codec)
agent.touch() # mark agent as being alive
finally: # always ack the message!
try:
# ignore failures as the session may be shutting down...
self.amqpSession.receiver._completed.add(msg.id)
self.amqpSession.channel.session_completed(self.amqpSession.receiver._completed)
except:
pass
def _v2Cb(self, msg):
""" Callback from session receive thread for V2 messages
"""
self.rcv_queue.put(Broker._q_item(Broker._q_item.type_v2msg, msg))
def _v2Dispatch(self, msg):
try:
self._v2DispatchProtected(msg)
except Exception as e:
print("EXCEPTION in Broker._v2Cb:", e)
import traceback
traceback.print_exc()
def _v2DispatchProtected(self, msg):
"""
This is the general message handler for messages received via QMFv2 exchanges.
"""
try:
mp = msg.get("message_properties")
ah = mp["application_headers"]
codec = Codec(msg.body)
if 'qmf.opcode' in ah:
opcode = ah['qmf.opcode']
if mp.content_type == "amqp/list":
try:
content = codec.read_list()
if not content:
content = []
except:
# malformed list - ignore
content = None
elif mp.content_type == "amqp/map":
try:
content = codec.read_map()
if not content:
content = {}
except:
# malformed map - ignore
content = None
else:
content = None
if content != None:
##
## Directly handle agent heartbeats and agent locate responses as these are broker-scope (they are
          ## used to maintain the broker's list of agent proxies).
##
if opcode == '_agent_heartbeat_indication': self.session._v2HandleHeartbeatInd(self, mp, ah, content)
elif opcode == '_agent_locate_response': self.session._v2HandleAgentLocateRsp(self, mp, ah, content)
else:
##
## All other opcodes are agent-scope and are forwarded to the agent proxy representing the sender
## of the message.
##
# the broker's agent is mapped to index ['0']
agentName = ah['qmf.agent']
v = agentName.split(":")
if agentName == 'broker' or (len(v) >= 2 and v[0] == 'apache.org'
and v[1] == 'qpidd'):
agentName = '0'
if agentName in self.agents:
agent = self.agents[agentName]
agent._handleQmfV2Message(opcode, mp, ah, content)
agent.touch()
finally: # always ack the message!
try:
# ignore failures as the session may be shutting down...
self.amqpSession.receiver._completed.add(msg.id)
self.amqpSession.channel.session_completed(self.amqpSession.receiver._completed)
except:
pass
def _exceptionCb(self, data):
""" Exception notification callback from session receive thread.
"""
self.cv.acquire()
try:
self.connected = False
self.error = "exception received from messaging layer: %s" % str(data)
finally:
self.cv.release()
self.rcv_queue.put(Broker._q_item(Broker._q_item.type_wakeup, None))
def run(self):
""" Main body of the running thread. """
# First, attempt a connection. In the unmanaged case,
# failure to connect needs to cause the Broker()
# constructor to raise an exception.
delay = self.DELAY_MIN
while not self.canceled:
if self._tryToConnect(): # connection up
break
# unmanaged connection - fail & wake up constructor
if not self.session.manageConnections:
self.ready.release()
return
# managed connection - try again
count = 0
while not self.canceled and count < delay:
sleep(1)
count += 1
if delay < self.DELAY_MAX:
delay *= self.DELAY_FACTOR
if self.canceled:
self.ready.release()
return
# connection successful!
self.cv.acquire()
try:
self.connected = True
finally:
self.cv.release()
self.session._handleBrokerConnect(self)
self.ready.release()
while not self.canceled:
try:
item = self.rcv_queue.get(timeout=self.session.agent_heartbeat_min)
except Empty:
item = None
while not self.canceled and item is not None:
if not self.connected:
# connection failure
while item:
# drain the queue
try:
item = self.rcv_queue.get(block=False)
except Empty:
item = None
break
self._disconnect() # clean up any pending agents
self.session._handleError(self.error)
self.session._handleBrokerDisconnect(self)
if not self.session.manageConnections:
return # do not attempt recovery
# retry connection setup
delay = self.DELAY_MIN
while not self.canceled:
if self._tryToConnect():
break
# managed connection - try again
count = 0
while not self.canceled and count < delay:
sleep(1)
count += 1
if delay < self.DELAY_MAX:
delay *= self.DELAY_FACTOR
if self.canceled:
return
# connection successful!
self.cv.acquire()
try:
self.connected = True
finally:
self.cv.release()
self.session._handleBrokerConnect(self)
elif item.typecode == Broker._q_item.type_v1msg:
self._v1Dispatch(item.data)
elif item.typecode == Broker._q_item.type_v2msg:
self._v2Dispatch(item.data)
try:
item = self.rcv_queue.get(block=False)
except Empty:
item = None
# queue drained, age the agents...
if not self.canceled:
self._ageAgents()
#===================================================================================================
# Agent
#===================================================================================================
class Agent:
"""
This class represents a proxy for a remote agent being managed
"""
def __init__(self, broker, agentBank, label, isV2=False, interval=0):
self.broker = broker
self.session = broker.session
self.schemaCache = self.session.schemaCache
self.brokerBank = broker.getBrokerBank()
self.agentBank = str(agentBank)
self.label = label
self.isV2 = isV2
self.heartbeatInterval = 0
if interval:
if interval < self.session.agent_heartbeat_min:
self.heartbeatInterval = self.session.agent_heartbeat_min
else:
self.heartbeatInterval = interval
self.lock = Lock()
self.seqMgr = self.session.seqMgr
self.contextMap = {}
self.unsolicitedContext = RequestContext(self, self)
self.lastSeenTime = time()
self.closed = None
self.epoch = 0
self.schema_timestamp = None
def _checkClosed(self):
if self.closed:
raise Exception("Agent is disconnected")
def __call__(self, **kwargs):
"""
    This is the handler for unsolicited data (objects, statistics, events and schema ids) received from the agent.
"""
if 'qmf_object' in kwargs:
if self.session.console:
obj = kwargs['qmf_object']
if self.session.class_filter and obj.getClassKey():
          # slow path: check classKey against class_filter
pname = obj.getClassKey().getPackageName()
cname = obj.getClassKey().getClassName()
if ((pname, cname) not in self.session.class_filter
and (pname, None) not in self.session.class_filter):
return
if obj.getProperties():
self.session.console.objectProps(self.broker, obj)
if obj.getStatistics():
# QMFv2 objects may also contain statistic updates
self.session.console.objectStats(self.broker, obj)
elif 'qmf_object_stats' in kwargs:
if self.session.console:
obj = kwargs['qmf_object_stats']
if len(self.session.class_filter) == 0:
self.session.console.objectStats(self.broker, obj)
elif obj.getClassKey():
          # slow path: check classKey against class_filter
pname = obj.getClassKey().getPackageName()
cname = obj.getClassKey().getClassName()
if ((pname, cname) in self.session.class_filter
or (pname, None) in self.session.class_filter):
self.session.console.objectStats(self.broker, obj)
elif 'qmf_event' in kwargs:
if self.session.console:
event = kwargs['qmf_event']
if len(self.session.event_filter) == 0:
self.session.console.event(self.broker, event)
elif event.classKey:
# slow path: check classKey against event_filter
pname = event.classKey.getPackageName()
ename = event.classKey.getClassName()
if ((pname, ename) in self.session.event_filter
or (pname, None) in self.session.event_filter):
self.session.console.event(self.broker, event)
elif 'qmf_schema_id' in kwargs:
ckey = kwargs['qmf_schema_id']
new_pkg, new_cls = self.session.schemaCache.declareClass(ckey)
if self.session.console:
if new_pkg:
self.session._newPackageCallback(ckey.getPackageName())
if new_cls:
# translate V2's string based type value to legacy
# integer value for backward compatibility
cls_type = ckey.getType()
if str(cls_type) == ckey.TYPE_DATA:
cls_type = 1
elif str(cls_type) == ckey.TYPE_EVENT:
cls_type = 2
self.session._newClassCallback(cls_type, ckey)
def touch(self):
if self.heartbeatInterval:
self.lastSeenTime = time()
def setEpoch(self, epoch):
self.epoch = epoch
def update_schema_timestamp(self, timestamp):
""" Check the latest schema timestamp from the agent V2 heartbeat. Issue a
query for all packages & classes should the timestamp change.
"""
self.lock.acquire()
try:
if self.schema_timestamp == timestamp:
return
self.schema_timestamp = timestamp
context = RequestContext(self, self)
sequence = self.seqMgr._reserve(context)
self.contextMap[sequence] = context
context.setSequence(sequence)
finally:
self.lock.release()
self._v2SendSchemaIdQuery(sequence, {})
def epochMismatch(self, epoch):
if epoch == 0 or self.epoch == 0:
return None
if epoch == self.epoch:
return None
return True
def isOld(self):
if self.heartbeatInterval == 0:
return None
if time() - self.lastSeenTime > (self.session.agent_heartbeat_miss * self.heartbeatInterval):
return True
return None
def close(self):
self.closed = True
copy = {}
try:
self.lock.acquire()
for seq in self.contextMap:
copy[seq] = self.contextMap[seq]
finally:
self.lock.release()
for seq in copy:
context = copy[seq]
context.cancel("Agent disconnected")
self.seqMgr._release(seq)
def __repr__(self):
if self.isV2:
ver = "v2"
else:
ver = "v1"
return "Agent(%s) at bank %d.%s (%s)" % (ver, self.brokerBank, self.agentBank, self.label)
def getBroker(self):
return self.broker
def getBrokerBank(self):
return self.brokerBank
def getAgentBank(self):
return self.agentBank
def getV2RoutingKey(self):
if self.agentBank == '0':
return 'broker'
return self.agentBank
def getObjects(self, notifiable=None, **kwargs):
""" Get a list of objects from QMF agents.
All arguments are passed by name(keyword).
If 'notifiable' is None (default), this call will block until completion or timeout.
If supplied, notifiable is assumed to be a callable object that will be called when the
list of queried objects arrives. The single argument to the call shall be a list of
the returned objects.
The class for queried objects may be specified in one of the following ways:
_schema = <schema> - supply a schema object returned from getSchema.
_key = <key> - supply a classKey from the list returned by getClasses.
_class = <name> - supply a class name as a string. If the class name exists
in multiple packages, a _package argument may also be supplied.
_objectId = <id> - get the object referenced by the object-id
The default timeout for this synchronous operation is 60 seconds. To change the timeout,
use the following argument:
_timeout = <time in seconds>
If additional arguments are supplied, they are used as property selectors. For example,
if the argument name="test" is supplied, only objects whose "name" property is "test"
will be returned in the result.
"""
self._checkClosed()
if notifiable:
if not callable(notifiable):
raise Exception("notifiable object must be callable")
#
# Isolate the selectors from the kwargs
#
selectors = {}
for key in kwargs:
value = kwargs[key]
if key[0] != '_':
selectors[key] = value
#
# Allocate a context to track this asynchronous request.
#
context = RequestContext(self, notifiable, selectors)
sequence = self.seqMgr._reserve(context)
try:
self.lock.acquire()
self.contextMap[sequence] = context
context.setSequence(sequence)
finally:
self.lock.release()
#
# Compose and send the query message to the agent using the appropriate protocol for the
# agent's QMF version.
#
if self.isV2:
self._v2SendGetQuery(sequence, kwargs)
else:
self.broker._setSequence(sequence, self)
self._v1SendGetQuery(sequence, kwargs)
#
# If this is a synchronous call, block and wait for completion.
#
if not notifiable:
timeout = 60
if '_timeout' in kwargs:
timeout = kwargs['_timeout']
context.waitForSignal(timeout)
if context.exception:
raise Exception(context.exception)
result = context.queryResults
return result
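# Illustrative sketch of typical getObjects() calls (the agent reference and the
# class/package names below are hypothetical examples, not defined in this file):
#
# # synchronous form: blocks until the agent replies or the timeout expires
# queues = agent.getObjects(_class="queue", _package="org.apache.qpid.broker", _timeout=30)
#
# # asynchronous form: pass a callable as 'notifiable'; as implemented by
# # RequestContext below, it is invoked with keyword arguments such as
# # qmf_object=<object> per result and qmf_complete=True when the query finishes
# agent.getObjects(_class="queue", notifiable=my_callback)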
def _clearContext(self, sequence):
try:
self.lock.acquire()
try:
self.contextMap.pop(sequence)
self.seqMgr._release(sequence)
except KeyError:
pass # @todo - shouldn't happen, log a warning.
finally:
self.lock.release()
def _schemaInfoFromV2Agent(self):
"""
We have just received new schema information from this agent. Check to see if there's
more work that can now be done.
"""
try:
self.lock.acquire()
copy_of_map = {}
for item in self.contextMap:
copy_of_map[item] = self.contextMap[item]
finally:
self.lock.release()
self.unsolicitedContext.reprocess()
for context in copy_of_map:
copy_of_map[context].reprocess()
def _handleV1Completion(self, sequence, code, text):
"""
Called if one of this agent's V1 commands completed
"""
context = None
try:
self.lock.acquire()
if sequence in self.contextMap:
context = self.contextMap[sequence]
finally:
self.lock.release()
if context:
if code != 0:
ex = "Error %d: %s" % (code, text)
context.setException(ex)
context.signal()
self.broker._clearSequence(sequence)
def _v1HandleMethodResp(self, codec, seq):
"""
Handle a QMFv1 method response
"""
code = codec.read_uint32()
text = codec.read_str16()
outArgs = {}
self.broker._clearSequence(seq)
pair = self.seqMgr._release(seq)
if pair is None:
return
method, synchronous = pair
if code == 0:
for arg in method.arguments:
if arg.dir.find("O") != -1:
outArgs[arg.name] = self.session._decodeValue(codec, arg.type, self.broker)
result = MethodResult(code, text, outArgs)
if synchronous:
try:
self.broker.cv.acquire()
self.broker.syncResult = result
self.broker.syncInFlight = False
self.broker.cv.notify()
finally:
self.broker.cv.release()
else:
if self.session.console:
self.session.console.methodResponse(self.broker, seq, result)
def _v1HandleEventInd(self, codec, seq):
"""
Handle a QMFv1 event indication
"""
event = Event(self, codec)
self.unsolicitedContext.doEvent(event)
def _v1HandleContentInd(self, codec, sequence, prop=False, stat=False):
"""
Handle a QMFv1 content indication
"""
classKey = ClassKey(codec)
schema = self.schemaCache.getSchema(classKey)
if not schema:
return
obj = Object(self, schema, codec, prop, stat)
if classKey.getPackageName() == "org.apache.qpid.broker" and classKey.getClassName() == "agent" and prop:
self.broker._updateAgent(obj)
context = self.unsolicitedContext
try:
self.lock.acquire()
if sequence in self.contextMap:
context = self.contextMap[sequence]
finally:
self.lock.release()
context.addV1QueryResult(obj, prop, stat)
def _v2HandleDataInd(self, mp, ah, content):
"""
Handle a QMFv2 data indication from the agent. Note: called from context
of the Broker thread.
"""
if content.__class__ != list:
return
if mp.correlation_id:
try:
self.lock.acquire()
sequence = int(mp.correlation_id)
if sequence not in self.contextMap:
return
context = self.contextMap[sequence]
finally:
self.lock.release()
else:
context = self.unsolicitedContext
kind = "_data"
if "qmf.content" in ah:
kind = ah["qmf.content"]
if kind == "_data":
for omap in content:
context.addV2QueryResult(omap)
context.processV2Data()
if 'partial' not in ah:
context.signal()
elif kind == "_event":
for omap in content:
event = Event(self, v2Map=omap)
if event.classKey is None or event.schema:
# schema optional or present
context.doEvent(event)
else:
# schema not optional and not present
if context.addPendingEvent(event):
self._v2SendSchemaRequest(event.classKey)
elif kind == "_schema_id":
for sid in content:
try:
ckey = ClassKey(sid)
except:
# @todo: log error
ckey = None
if ckey is not None:
# @todo: for now, the application cannot directly send a query for
# _schema_id. This request _must_ have been initiated by the framework
# in order to update the schema cache.
context.notifiable(qmf_schema_id=ckey)
def _v2HandleMethodResp(self, mp, ah, content):
"""
Handle a QMFv2 method response from the agent
"""
context = None
sequence = None
if mp.correlation_id:
try:
self.lock.acquire()
seq = int(mp.correlation_id)
finally:
self.lock.release()
else:
return
pair = self.seqMgr._release(seq)
if pair is None:
return
method, synchronous = pair
result = MethodResult(0, 'OK', content['_arguments'])
if synchronous:
try:
self.broker.cv.acquire()
self.broker.syncResult = result
self.broker.syncInFlight = False
self.broker.cv.notify()
finally:
self.broker.cv.release()
else:
if self.session.console:
self.session.console.methodResponse(self.broker, seq, result)
def _v2HandleException(self, mp, ah, content):
"""
Handle a QMFv2 exception
"""
context = None
if mp.correlation_id:
try:
self.lock.acquire()
seq = int(mp.correlation_id)
finally:
self.lock.release()
else:
return
values = {}
if '_values' in content:
values = content['_values']
code = 7
text = "error"
if 'error_code' in values:
code = values['error_code']
if 'error_text' in values:
text = values['error_text']
pair = self.seqMgr._release(seq)
if pair is None:
return
if pair.__class__ == RequestContext:
pair.cancel(text)
return
method, synchronous = pair
result = MethodResult(code, text, {})
if synchronous:
try:
self.broker.cv.acquire()
self.broker.syncResult = result
self.broker.syncInFlight = False
self.broker.cv.notify()
finally:
self.broker.cv.release()
else:
if self.session.console:
self.session.console.methodResponse(self.broker, seq, result)
def _v1SendGetQuery(self, sequence, kwargs):
"""
Send a get query to a QMFv1 agent.
"""
#
# Build the query map
#
query = {}
if '_class' in kwargs:
query['_class'] = kwargs['_class']
if '_package' in kwargs:
query['_package'] = kwargs['_package']
elif '_key' in kwargs:
key = kwargs['_key']
query['_class'] = key.getClassName()
query['_package'] = key.getPackageName()
elif '_objectId' in kwargs:
query['_objectid'] = kwargs['_objectId'].__repr__()
#
# Construct and transmit the message
#
sendCodec = Codec()
self.broker._setHeader(sendCodec, 'G', sequence)
sendCodec.write_map(query)
smsg = self.broker._message(sendCodec.encoded, "agent.%d.%s" % (self.brokerBank, self.agentBank))
self.broker._send(smsg)
def _v2SendQuery(self, query, sequence):
"""
Given a query map, construct and send a V2 Query message.
"""
dp = self.broker.amqpSession.delivery_properties()
dp.routing_key = self.getV2RoutingKey()
mp = self.broker.amqpSession.message_properties()
mp.content_type = "amqp/map"
if self.broker.saslUser:
mp.user_id = self.broker.saslUser
mp.correlation_id = str(sequence)
mp.app_id = "qmf2"
mp.reply_to = self.broker.amqpSession.reply_to("qmf.default.direct", self.broker.v2_direct_queue)
mp.application_headers = {'qmf.opcode':'_query_request'}
sendCodec = Codec()
sendCodec.write_map(query)
msg = Message(dp, mp, sendCodec.encoded)
self.broker._send(msg, "qmf.default.direct")
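# The resulting V2 query is an amqp/map message: the caller-supplied query map
# (for example {'_what': 'OBJECT', '_schema_id': {'_class_name': 'queue'}}, class
# name given only as an illustration) forms the body, the reserved sequence number
# rides in correlation_id, and the agent replies to the console's direct queue
# named in reply_to.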
def _v2SendGetQuery(self, sequence, kwargs):
"""
Send a get query to a QMFv2 agent.
"""
#
# Build the query map
#
query = {'_what': 'OBJECT'}
if '_class' in kwargs:
schemaMap = {'_class_name': kwargs['_class']}
if '_package' in kwargs:
schemaMap['_package_name'] = kwargs['_package']
query['_schema_id'] = schemaMap
elif '_key' in kwargs:
query['_schema_id'] = kwargs['_key'].asMap()
elif '_objectId' in kwargs:
query['_object_id'] = kwargs['_objectId'].asMap()
self._v2SendQuery(query, sequence)
def _v2SendSchemaIdQuery(self, sequence, kwargs):
"""
Send a query for all schema ids to a QMFv2 agent.
"""
#
# Build the query map
#
query = {'_what': 'SCHEMA_ID'}
# @todo - predicate support. For now, return all known schema ids.
self._v2SendQuery(query, sequence)
def _v2SendSchemaRequest(self, schemaId):
"""
Send a query to an agent to request details on a particular schema class.
IMPORTANT: This function currently sends a QMFv1 schema-request to the address of
the agent. The agent will send its response to amq.direct/<our-key>.
Eventually, this will be converted to a proper QMFv2 schema query.
"""
sendCodec = Codec()
seq = self.seqMgr._reserve(None)
self.broker._setHeader(sendCodec, 'S', seq)
schemaId.encode(sendCodec)
smsg = self.broker._message(sendCodec.encoded, self.agentBank)
self.broker._send(smsg, "qmf.default.direct")
def _handleQmfV1Message(self, opcode, seq, mp, ah, codec):
"""
Process QMFv1 messages arriving from an agent. Note well: this method is
called from the context of the Broker thread.
"""
if opcode == 'm': self._v1HandleMethodResp(codec, seq)
elif opcode == 'e': self._v1HandleEventInd(codec, seq)
elif opcode == 'c': self._v1HandleContentInd(codec, seq, prop=True)
elif opcode == 'i': self._v1HandleContentInd(codec, seq, stat=True)
elif opcode == 'g': self._v1HandleContentInd(codec, seq, prop=True, stat=True)
def _handleQmfV2Message(self, opcode, mp, ah, content):
"""
Process QMFv2 messages arriving from an agent. Note well: this method is
called from the context of the Broker thread.
"""
if opcode == '_data_indication': self._v2HandleDataInd(mp, ah, content)
elif opcode == '_query_response': self._v2HandleDataInd(mp, ah, content)
elif opcode == '_method_response': self._v2HandleMethodResp(mp, ah, content)
elif opcode == '_exception': self._v2HandleException(mp, ah, content)
#===================================================================================================
# RequestContext
#===================================================================================================
class RequestContext(object):
"""
This class tracks an asynchronous request sent to an agent.
TODO: Add logic for client-side selection and filtering deleted objects from get-queries
"""
def __init__(self, agent, notifiable, selectors={}):
self.sequence = None
self.agent = agent
self.schemaCache = self.agent.schemaCache
self.notifiable = notifiable
self.selectors = selectors
self.startTime = time()
self.rawQueryResults = []
self.queryResults = []
self.pendingEvents = {}
self.exception = None
self.waitingForSchema = None
self.pendingSignal = None
self.cv = Condition()
self.blocked = notifiable is None
def setSequence(self, sequence):
self.sequence = sequence
def addV1QueryResult(self, data, has_props, has_stats):
values = {}
if has_props:
for prop, val in data.getProperties():
values[prop.name] = val
if has_stats:
for stat, val in data.getStatistics():
values[stat.name] = val
for key in values:
val = values[key]
if key in self.selectors and val != self.selectors[key]:
return
if self.notifiable:
if has_props:
self.notifiable(qmf_object=data)
if has_stats:
self.notifiable(qmf_object_stats=data)
else:
self.queryResults.append(data)
def addV2QueryResult(self, data):
values = data['_values']
for key in values:
val = values[key]
if key in self.selectors:
sel_val = self.selectors[key]
if sel_val.__class__ == ObjectId:
val = ObjectId(val, agentName=self.agent.getAgentBank())
if val != sel_val:
return
self.rawQueryResults.append(data)
def addPendingEvent(self, event):
""" Stores a received event that is pending a schema. Returns True if this
event is the first instance of a given schema identifier.
"""
self.cv.acquire()
try:
if event.classKey in self.pendingEvents:
self.pendingEvents[event.classKey].append((event, time()))
return False
self.pendingEvents[event.classKey] = [(event, time())]
return True
finally:
self.cv.release()
def processPendingEvents(self):
""" Walk the pending events looking for schemas that are now
available. Remove any events that now have schema, and process them.
"""
keysToDelete = []
events = []
self.cv.acquire()
try:
for key in self.pendingEvents.iterkeys():
schema = self.schemaCache.getSchema(key)
if schema:
keysToDelete.append(key)
for item in self.pendingEvents[key]:
# item is (event-obj, timestamp) tuple.
# hack: I have no idea what a valid lifetime for an event
# should be. 60 seconds???
if (time() - item[1]) < 60:
item[0].schema = schema
events.append(item[0])
for key in keysToDelete:
self.pendingEvents.pop(key)
finally:
self.cv.release()
for event in events:
self.doEvent(event)
def doEvent(self, data):
if self.notifiable:
self.notifiable(qmf_event=data)
def setException(self, ex):
self.exception = ex
def getAge(self):
return time() - self.startTime
def cancel(self, exception):
self.setException(exception)
try:
self.cv.acquire()
self.blocked = None
self.waitingForSchema = None
self.cv.notify()
finally:
self.cv.release()
self._complete()
def waitForSignal(self, timeout):
try:
self.cv.acquire()
while self.blocked:
if (time() - self.startTime) > timeout:
self.exception = "Request timed out after %d seconds" % timeout
return
self.cv.wait(1)
finally:
self.cv.release()
def signal(self):
try:
self.cv.acquire()
if self.waitingForSchema:
self.pendingSignal = True
return
else:
self.blocked = None
self.cv.notify()
finally:
self.cv.release()
self._complete()
def _complete(self):
if self.notifiable:
if self.exception:
self.notifiable(qmf_exception=self.exception)
else:
self.notifiable(qmf_complete=True)
if self.sequence:
self.agent._clearContext(self.sequence)
def processV2Data(self):
"""
Attempt to make progress on the entries in the raw_query_results queue. If an entry has a schema
that is in our schema cache, process it. Otherwise, send a request for the schema information
to the agent that manages the object.
"""
schemaId = None
queryResults = []
try:
self.cv.acquire()
if self.waitingForSchema:
return
while (not self.waitingForSchema) and len(self.rawQueryResults) > 0:
head = self.rawQueryResults[0]
schemaId = self._getSchemaIdforV2ObjectLH(head)
schema = self.schemaCache.getSchema(schemaId)
if schema:
obj = Object(self.agent, schema, v2Map=head, agentName=self.agent.agentBank)
queryResults.append(obj)
self.rawQueryResults.pop(0)
else:
self.waitingForSchema = True
finally:
self.cv.release()
if self.waitingForSchema:
self.agent._v2SendSchemaRequest(schemaId)
for result in queryResults:
key = result.getClassKey()
if key.getPackageName() == "org.apache.qpid.broker" and key.getClassName() == "agent":
self.agent.broker._updateAgent(result)
if self.notifiable:
self.notifiable(qmf_object=result)
else:
self.queryResults.append(result)
complete = None
try:
self.cv.acquire()
if not self.waitingForSchema and self.pendingSignal:
self.blocked = None
self.cv.notify()
complete = True
finally:
self.cv.release()
if complete:
self._complete()
def reprocess(self):
"""
New schema information has been added to the schema-cache. Clear our 'waiting' status
and see if we can make more progress on any pending inbound events/objects.
"""
try:
self.cv.acquire()
self.waitingForSchema = None
finally:
self.cv.release()
self.processV2Data()
self.processPendingEvents()
def _getSchemaIdforV2ObjectLH(self, data):
"""
Given a data map, extract the schema-identifier.
"""
if data.__class__ != dict:
return None
if '_schema_id' in data:
return ClassKey(data['_schema_id'])
return None
#===================================================================================================
# Event
#===================================================================================================
class Event:
"""A QMF event received from an agent, decoded from either a V1 codec (QMFv1) or a V2 map (QMFv2)."""
def __init__(self, agent, codec=None, v2Map=None):
self.agent = agent
self.session = agent.session
self.broker = agent.broker
if isinstance(v2Map,dict):
self.isV2 = True
self.classKey = None
self.schema = None
try:
self.arguments = v2Map["_values"]
self.timestamp = long(v2Map["_timestamp"])
self.severity = v2Map["_severity"]
if "_schema_id" in v2Map:
self.classKey = ClassKey(v2Map["_schema_id"])
self.classKey._setType(ClassKey.TYPE_EVENT)
except:
raise Exception("Invalid event object: %s " % str(v2Map))
if self.classKey is not None:
self.schema = self.session.schemaCache.getSchema(self.classKey)
elif codec is not None:
self.isV2 = None
self.classKey = ClassKey(codec)
self.classKey._setType(ClassKey.TYPE_EVENT)
self.timestamp = codec.read_int64()
self.severity = codec.read_uint8()
self.arguments = {}
self.schema = self.session.schemaCache.getSchema(self.classKey)
if not self.schema:
return
for arg in self.schema.arguments:
self.arguments[arg.name] = self.session._decodeValue(codec, arg.type,
self.broker)
else:
raise Exception("No constructor for event object.")
def __repr__(self):
if self.schema is None:
return "<uninterpretable>"
out = strftime("%c", gmtime(self.timestamp / 1000000000))
out += " " + self._sevName() + " " + self.classKey.getPackageName() + ":" + self.classKey.getClassName()
out += " broker=" + str(self.broker.getUrl())
for arg in self.schema.arguments:
disp = self.session._displayValue(self.arguments[arg.name], arg.type).encode("utf8")
if " " in disp:
disp = "\"" + disp + "\""
out += " " + arg.name + "=" + disp
return out
def _sevName(self):
if self.severity == 0 : return "EMER "
if self.severity == 1 : return "ALERT"
if self.severity == 2 : return "CRIT "
if self.severity == 3 : return "ERROR"
if self.severity == 4 : return "WARN "
if self.severity == 5 : return "NOTIC"
if self.severity == 6 : return "INFO "
if self.severity == 7 : return "DEBUG"
return "INV-%d" % self.severity
def getClassKey(self):
return self.classKey
def getArguments(self):
return self.arguments
def getTimestamp(self):
return self.timestamp
def getSchema(self):
return self.schema
#===================================================================================================
# SequenceManager
#===================================================================================================
class SequenceManager:
""" Manage sequence numbers for asynchronous method calls """
def __init__(self):
self.lock = Lock()
self.sequence = long(time()) # pseudo-randomize the start
self.pending = {}
def _reserve(self, data):
""" Reserve a unique sequence number """
try:
self.lock.acquire()
result = self.sequence
self.sequence = self.sequence + 1
self.pending[result] = data
finally:
self.lock.release()
return result
def _release(self, seq):
""" Release a reserved sequence number """
data = None
try:
self.lock.acquire()
if seq in self.pending:
data = self.pending[seq]
del self.pending[seq]
finally:
self.lock.release()
return data
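# Minimal usage sketch (illustrative only): a request context is parked under a
# freshly reserved sequence number and looked up again when the reply arrives.
#
# seq_mgr = SequenceManager()
# seq = seq_mgr._reserve(context) # attach the context to a new sequence number
# ... # send the request carrying 'seq' as its correlation id
# context = seq_mgr._release(seq) # on reply, recover and detach the context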
#===================================================================================================
# DebugConsole
#===================================================================================================
class DebugConsole(Console):
"""Console implementation that simply prints every callback it receives; useful for debugging."""
def brokerConnected(self, broker):
print("brokerConnected:", broker)
def brokerConnectionFailed(self, broker):
print("brokerConnectionFailed:", broker)
def brokerDisconnected(self, broker):
print("brokerDisconnected:", broker)
def newPackage(self, name):
print("newPackage:", name)
def newClass(self, kind, classKey):
print("newClass:", kind, classKey)
def newAgent(self, agent):
print("newAgent:", agent)
def delAgent(self, agent):
print("delAgent:", agent)
def objectProps(self, broker, record):
print("objectProps:", record)
def objectStats(self, broker, record):
print("objectStats:", record)
def event(self, broker, event):
print("event:", event)
def heartbeat(self, agent, timestamp):
print("heartbeat:", agent)
def brokerInfo(self, broker):
print("brokerInfo:", broker)
| apache-2.0 | -1,822,485,711,944,551,400 | 32.826634 | 125 | 0.600932 | false |
roijo/C-PAC_complexitytools | doc/sphinxext/numpy_ext/tests/test_docscrape.py | 35 | 15328 | # -*- encoding:utf-8 -*-
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from docscrape import NumpyDocString, FunctionDoc, ClassDoc
from docscrape_sphinx import SphinxDocString, SphinxClassDoc
from nose.tools import *
doc_txt = '''\
numpy.multivariate_normal(mean, cov, shape=None, spam=None)
Draw values from a multivariate normal distribution with specified
mean and covariance.
The multivariate normal or Gaussian distribution is a generalisation
of the one-dimensional normal distribution to higher dimensions.
Parameters
----------
mean : (N,) ndarray
Mean of the N-dimensional distribution.
.. math::
(1+2+3)/3
cov : (N,N) ndarray
Covariance matrix of the distribution.
shape : tuple of ints
Given a shape of, for example, (m,n,k), m*n*k samples are
generated, and packed in an m-by-n-by-k arrangement. Because
each sample is N-dimensional, the output shape is (m,n,k,N).
Returns
-------
out : ndarray
The drawn samples, arranged according to `shape`. If the
shape given is (m,n,...), then the shape of `out` is is
(m,n,...,N).
In other words, each entry ``out[i,j,...,:]`` is an N-dimensional
value drawn from the distribution.
Other Parameters
----------------
spam : parrot
A parrot off its mortal coil.
Raises
------
RuntimeError
Some error
Warns
-----
RuntimeWarning
Some warning
Warnings
--------
Certain warnings apply.
Notes
-----
Instead of specifying the full covariance matrix, popular
approximations include:
- Spherical covariance (`cov` is a multiple of the identity matrix)
- Diagonal covariance (`cov` has non-negative elements only on the diagonal)
This geometrical property can be seen in two dimensions by plotting
generated data-points:
>>> mean = [0,0]
>>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis
>>> x,y = multivariate_normal(mean,cov,5000).T
>>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show()
Note that the covariance matrix must be symmetric and non-negative
definite.
References
----------
.. [1] A. Papoulis, "Probability, Random Variables, and Stochastic
Processes," 3rd ed., McGraw-Hill Companies, 1991
.. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification,"
2nd ed., Wiley, 2001.
See Also
--------
some, other, funcs
otherfunc : relationship
Examples
--------
>>> mean = (1,2)
>>> cov = [[1,0],[1,0]]
>>> x = multivariate_normal(mean,cov,(3,3))
>>> print x.shape
(3, 3, 2)
The following is probably true, given that 0.6 is roughly twice the
standard deviation:
>>> print list( (x[0,0,:] - mean) < 0.6 )
[True, True]
.. index:: random
:refguide: random;distributions, random;gauss
'''
doc = NumpyDocString(doc_txt)
def test_signature():
assert doc['Signature'].startswith('numpy.multivariate_normal(')
assert doc['Signature'].endswith('spam=None)')
def test_summary():
assert doc['Summary'][0].startswith('Draw values')
assert doc['Summary'][-1].endswith('covariance.')
def test_extended_summary():
assert doc['Extended Summary'][0].startswith('The multivariate normal')
def test_parameters():
assert_equal(len(doc['Parameters']), 3)
assert_equal([n for n,_,_ in doc['Parameters']], ['mean','cov','shape'])
arg, arg_type, desc = doc['Parameters'][1]
assert_equal(arg_type, '(N,N) ndarray')
assert desc[0].startswith('Covariance matrix')
assert doc['Parameters'][0][-1][-2] == ' (1+2+3)/3'
def test_other_parameters():
assert_equal(len(doc['Other Parameters']), 1)
assert_equal([n for n,_,_ in doc['Other Parameters']], ['spam'])
arg, arg_type, desc = doc['Other Parameters'][0]
assert_equal(arg_type, 'parrot')
assert desc[0].startswith('A parrot off its mortal coil')
def test_returns():
assert_equal(len(doc['Returns']), 1)
arg, arg_type, desc = doc['Returns'][0]
assert_equal(arg, 'out')
assert_equal(arg_type, 'ndarray')
assert desc[0].startswith('The drawn samples')
assert desc[-1].endswith('distribution.')
def test_notes():
assert doc['Notes'][0].startswith('Instead')
assert doc['Notes'][-1].endswith('definite.')
assert_equal(len(doc['Notes']), 17)
def test_references():
assert doc['References'][0].startswith('..')
assert doc['References'][-1].endswith('2001.')
def test_examples():
assert doc['Examples'][0].startswith('>>>')
assert doc['Examples'][-1].endswith('True]')
def test_index():
assert_equal(doc['index']['default'], 'random')
print doc['index']
assert_equal(len(doc['index']), 2)
assert_equal(len(doc['index']['refguide']), 2)
def non_blank_line_by_line_compare(a,b):
a = [l for l in a.split('\n') if l.strip()]
b = [l for l in b.split('\n') if l.strip()]
for n,line in enumerate(a):
if line != b[n]:
raise AssertionError("Lines %s of a and b differ: "
"\n>>> %s\n<<< %s\n" %
(n,line,b[n]))
def test_str():
non_blank_line_by_line_compare(str(doc),
"""numpy.multivariate_normal(mean, cov, shape=None, spam=None)
Draw values from a multivariate normal distribution with specified
mean and covariance.
The multivariate normal or Gaussian distribution is a generalisation
of the one-dimensional normal distribution to higher dimensions.
Parameters
----------
mean : (N,) ndarray
Mean of the N-dimensional distribution.
.. math::
(1+2+3)/3
cov : (N,N) ndarray
Covariance matrix of the distribution.
shape : tuple of ints
Given a shape of, for example, (m,n,k), m*n*k samples are
generated, and packed in an m-by-n-by-k arrangement. Because
each sample is N-dimensional, the output shape is (m,n,k,N).
Returns
-------
out : ndarray
The drawn samples, arranged according to `shape`. If the
shape given is (m,n,...), then the shape of `out` is is
(m,n,...,N).
In other words, each entry ``out[i,j,...,:]`` is an N-dimensional
value drawn from the distribution.
Other Parameters
----------------
spam : parrot
A parrot off its mortal coil.
Raises
------
RuntimeError :
Some error
Warns
-----
RuntimeWarning :
Some warning
Warnings
--------
Certain warnings apply.
See Also
--------
`some`_, `other`_, `funcs`_
`otherfunc`_
relationship
Notes
-----
Instead of specifying the full covariance matrix, popular
approximations include:
- Spherical covariance (`cov` is a multiple of the identity matrix)
- Diagonal covariance (`cov` has non-negative elements only on the diagonal)
This geometrical property can be seen in two dimensions by plotting
generated data-points:
>>> mean = [0,0]
>>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis
>>> x,y = multivariate_normal(mean,cov,5000).T
>>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show()
Note that the covariance matrix must be symmetric and non-negative
definite.
References
----------
.. [1] A. Papoulis, "Probability, Random Variables, and Stochastic
Processes," 3rd ed., McGraw-Hill Companies, 1991
.. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification,"
2nd ed., Wiley, 2001.
Examples
--------
>>> mean = (1,2)
>>> cov = [[1,0],[1,0]]
>>> x = multivariate_normal(mean,cov,(3,3))
>>> print x.shape
(3, 3, 2)
The following is probably true, given that 0.6 is roughly twice the
standard deviation:
>>> print list( (x[0,0,:] - mean) < 0.6 )
[True, True]
.. index:: random
:refguide: random;distributions, random;gauss""")
def test_sphinx_str():
sphinx_doc = SphinxDocString(doc_txt)
non_blank_line_by_line_compare(str(sphinx_doc),
"""
.. index:: random
single: random;distributions, random;gauss
Draw values from a multivariate normal distribution with specified
mean and covariance.
The multivariate normal or Gaussian distribution is a generalisation
of the one-dimensional normal distribution to higher dimensions.
:Parameters:
**mean** : (N,) ndarray
Mean of the N-dimensional distribution.
.. math::
(1+2+3)/3
**cov** : (N,N) ndarray
Covariance matrix of the distribution.
**shape** : tuple of ints
Given a shape of, for example, (m,n,k), m*n*k samples are
generated, and packed in an m-by-n-by-k arrangement. Because
each sample is N-dimensional, the output shape is (m,n,k,N).
:Returns:
**out** : ndarray
The drawn samples, arranged according to `shape`. If the
shape given is (m,n,...), then the shape of `out` is is
(m,n,...,N).
In other words, each entry ``out[i,j,...,:]`` is an N-dimensional
value drawn from the distribution.
:Other Parameters:
**spam** : parrot
A parrot off its mortal coil.
:Raises:
**RuntimeError** :
Some error
:Warns:
**RuntimeWarning** :
Some warning
.. warning::
Certain warnings apply.
.. seealso::
:obj:`some`, :obj:`other`, :obj:`funcs`
:obj:`otherfunc`
relationship
.. rubric:: Notes
Instead of specifying the full covariance matrix, popular
approximations include:
- Spherical covariance (`cov` is a multiple of the identity matrix)
- Diagonal covariance (`cov` has non-negative elements only on the diagonal)
This geometrical property can be seen in two dimensions by plotting
generated data-points:
>>> mean = [0,0]
>>> cov = [[1,0],[0,100]] # diagonal covariance, points lie on x or y-axis
>>> x,y = multivariate_normal(mean,cov,5000).T
>>> plt.plot(x,y,'x'); plt.axis('equal'); plt.show()
Note that the covariance matrix must be symmetric and non-negative
definite.
.. rubric:: References
.. [1] A. Papoulis, "Probability, Random Variables, and Stochastic
Processes," 3rd ed., McGraw-Hill Companies, 1991
.. [2] R.O. Duda, P.E. Hart, and D.G. Stork, "Pattern Classification,"
2nd ed., Wiley, 2001.
.. only:: latex
[1]_, [2]_
.. rubric:: Examples
>>> mean = (1,2)
>>> cov = [[1,0],[1,0]]
>>> x = multivariate_normal(mean,cov,(3,3))
>>> print x.shape
(3, 3, 2)
The following is probably true, given that 0.6 is roughly twice the
standard deviation:
>>> print list( (x[0,0,:] - mean) < 0.6 )
[True, True]
""")
doc2 = NumpyDocString("""
Returns array of indices of the maximum values of along the given axis.
Parameters
----------
a : {array_like}
Array to look in.
axis : {None, integer}
If None, the index is into the flattened array, otherwise along
the specified axis""")
def test_parameters_without_extended_description():
assert_equal(len(doc2['Parameters']), 2)
doc3 = NumpyDocString("""
my_signature(*params, **kwds)
Return this and that.
""")
def test_escape_stars():
signature = str(doc3).split('\n')[0]
assert_equal(signature, 'my_signature(\*params, \*\*kwds)')
doc4 = NumpyDocString(
"""a.conj()
Return an array with all complex-valued elements conjugated.""")
def test_empty_extended_summary():
assert_equal(doc4['Extended Summary'], [])
doc5 = NumpyDocString(
"""
a.something()
Raises
------
LinAlgException
If array is singular.
Warns
-----
SomeWarning
If needed
""")
def test_raises():
assert_equal(len(doc5['Raises']), 1)
name,_,desc = doc5['Raises'][0]
assert_equal(name,'LinAlgException')
assert_equal(desc,['If array is singular.'])
def test_warns():
assert_equal(len(doc5['Warns']), 1)
name,_,desc = doc5['Warns'][0]
assert_equal(name,'SomeWarning')
assert_equal(desc,['If needed'])
def test_see_also():
doc6 = NumpyDocString(
"""
z(x,theta)
See Also
--------
func_a, func_b, func_c
func_d : some equivalent func
foo.func_e : some other func over
multiple lines
func_f, func_g, :meth:`func_h`, func_j,
func_k
:obj:`baz.obj_q`
:class:`class_j`: fubar
foobar
""")
assert len(doc6['See Also']) == 12
for func, desc, role in doc6['See Also']:
if func in ('func_a', 'func_b', 'func_c', 'func_f',
'func_g', 'func_h', 'func_j', 'func_k', 'baz.obj_q'):
assert(not desc)
else:
assert(desc)
if func == 'func_h':
assert role == 'meth'
elif func == 'baz.obj_q':
assert role == 'obj'
elif func == 'class_j':
assert role == 'class'
else:
assert role is None
if func == 'func_d':
assert desc == ['some equivalent func']
elif func == 'foo.func_e':
assert desc == ['some other func over', 'multiple lines']
elif func == 'class_j':
assert desc == ['fubar', 'foobar']
def test_see_also_print():
class Dummy(object):
"""
See Also
--------
func_a, func_b
func_c : some relationship
goes here
func_d
"""
pass
obj = Dummy()
s = str(FunctionDoc(obj, role='func'))
assert(':func:`func_a`, :func:`func_b`' in s)
assert(' some relationship' in s)
assert(':func:`func_d`' in s)
doc7 = NumpyDocString("""
Doc starts on second line.
""")
def test_empty_first_line():
assert doc7['Summary'][0].startswith('Doc starts')
def test_no_summary():
str(SphinxDocString("""
Parameters
----------"""))
def test_unicode():
doc = SphinxDocString("""
öäöäöäöäöåååå
öäöäöäööäååå
Parameters
----------
ååå : äää
ööö
Returns
-------
ååå : ööö
äää
""")
assert doc['Summary'][0] == u'öäöäöäöäöåååå'.encode('utf-8')
def test_plot_examples():
cfg = dict(use_plots=True)
doc = SphinxDocString("""
Examples
--------
>>> import matplotlib.pyplot as plt
>>> plt.plot([1,2,3],[4,5,6])
>>> plt.show()
""", config=cfg)
assert 'plot::' in str(doc), str(doc)
doc = SphinxDocString("""
Examples
--------
.. plot::
import matplotlib.pyplot as plt
plt.plot([1,2,3],[4,5,6])
plt.show()
""", config=cfg)
assert str(doc).count('plot::') == 1, str(doc)
def test_class_members():
class Dummy(object):
"""
Dummy class.
"""
def spam(self, a, b):
"""Spam\n\nSpam spam."""
pass
def ham(self, c, d):
"""Cheese\n\nNo cheese."""
pass
for cls in (ClassDoc, SphinxClassDoc):
doc = cls(Dummy, config=dict(show_class_members=False))
assert 'Methods' not in str(doc), (cls, str(doc))
assert 'spam' not in str(doc), (cls, str(doc))
assert 'ham' not in str(doc), (cls, str(doc))
doc = cls(Dummy, config=dict(show_class_members=True))
assert 'Methods' in str(doc), (cls, str(doc))
assert 'spam' in str(doc), (cls, str(doc))
assert 'ham' in str(doc), (cls, str(doc))
if cls is SphinxClassDoc:
assert '.. autosummary::' in str(doc), str(doc)
if __name__ == "__main__":
import nose
nose.run()
| bsd-3-clause | -8,901,042,149,351,229,000 | 23.83252 | 80 | 0.60241 | false |
eaplatanios/tensorflow | tensorflow/python/kernel_tests/linalg/linear_operator_low_rank_update_test.py | 9 | 12310 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg as linalg_lib
from tensorflow.python.ops.linalg import linear_operator_test_util
from tensorflow.python.platform import test
linalg = linalg_lib
random_seed.set_random_seed(23)
rng = np.random.RandomState(0)
class BaseLinearOperatorLowRankUpdatetest(object):
"""Base test for this type of operator."""
# Subclasses should set these attributes to either True or False.
# If True, A = L + UDV^H
# If False, A = L + UV^H or A = L + UU^H, depending on _use_v.
_use_diag_update = None
# If True, diag is > 0, which means D is symmetric positive definite.
_is_diag_update_positive = None
# If True, A = L + UDV^H
# If False, A = L + UDU^H or A = L + UU^H, depending on _use_diag_update
_use_v = None
@property
def _dtypes_to_test(self):
# TODO(langmore) Test complex types once cholesky works with them.
# See comment in LinearOperatorLowRankUpdate.__init__.
return [dtypes.float32, dtypes.float64]
@property
def _operator_build_infos(self):
build_info = linear_operator_test_util.OperatorBuildInfo
# Previously we had a (2, 10, 10) shape at the end. We did this to test the
# inversion and determinant lemmas on not-tiny matrices, since these are
# known to have stability issues. This resulted in test timeouts, so this
# shape has been removed, but rest assured, the tests did pass.
return [
build_info((0, 0)),
build_info((1, 1)),
build_info((1, 3, 3)),
build_info((3, 4, 4)),
build_info((2, 1, 4, 4))]
def _operator_and_mat_and_feed_dict(self, build_info, dtype, use_placeholder):
# Recall A = L + UDV^H
shape = list(build_info.shape)
diag_shape = shape[:-1]
k = shape[-2] // 2 + 1
u_perturbation_shape = shape[:-1] + [k]
diag_update_shape = shape[:-2] + [k]
# base_operator L will be a symmetric positive definite diagonal linear
# operator, with condition number as high as 1e4.
base_diag = linear_operator_test_util.random_uniform(
diag_shape, minval=1e-4, maxval=1., dtype=dtype)
base_diag_ph = array_ops.placeholder(dtype=dtype)
# U
u = linear_operator_test_util.random_normal_correlated_columns(
u_perturbation_shape, dtype=dtype)
u_ph = array_ops.placeholder(dtype=dtype)
# V
v = linear_operator_test_util.random_normal_correlated_columns(
u_perturbation_shape, dtype=dtype)
v_ph = array_ops.placeholder(dtype=dtype)
# D
if self._is_diag_update_positive:
diag_update = linear_operator_test_util.random_uniform(
diag_update_shape, minval=1e-4, maxval=1., dtype=dtype)
else:
diag_update = linear_operator_test_util.random_normal(
diag_update_shape, stddev=1e-4, dtype=dtype)
diag_update_ph = array_ops.placeholder(dtype=dtype)
if use_placeholder:
# Evaluate here because (i) you cannot feed a tensor, and (ii)
# values are random and we want the same value used for both mat and
# feed_dict.
base_diag = base_diag.eval()
u = u.eval()
v = v.eval()
diag_update = diag_update.eval()
# In all cases, set base_operator to be positive definite.
base_operator = linalg.LinearOperatorDiag(
base_diag_ph, is_positive_definite=True)
operator = linalg.LinearOperatorLowRankUpdate(
base_operator,
u=u_ph,
v=v_ph if self._use_v else None,
diag_update=diag_update_ph if self._use_diag_update else None,
is_diag_update_positive=self._is_diag_update_positive)
feed_dict = {
base_diag_ph: base_diag,
u_ph: u,
v_ph: v,
diag_update_ph: diag_update}
else:
base_operator = linalg.LinearOperatorDiag(
base_diag, is_positive_definite=True)
operator = linalg.LinearOperatorLowRankUpdate(
base_operator,
u,
v=v if self._use_v else None,
diag_update=diag_update if self._use_diag_update else None,
is_diag_update_positive=self._is_diag_update_positive)
feed_dict = None
# The matrix representing L
base_diag_mat = array_ops.matrix_diag(base_diag)
# The matrix representing D
diag_update_mat = array_ops.matrix_diag(diag_update)
# Set up mat as some variant of A = L + UDV^H
if self._use_v and self._use_diag_update:
# In this case, we have L + UDV^H and it isn't symmetric.
expect_use_cholesky = False
mat = base_diag_mat + math_ops.matmul(
u, math_ops.matmul(diag_update_mat, v, adjoint_b=True))
elif self._use_v:
# In this case, we have L + UDV^H and it isn't symmetric.
expect_use_cholesky = False
mat = base_diag_mat + math_ops.matmul(u, v, adjoint_b=True)
elif self._use_diag_update:
# In this case, we have L + UDU^H, which is PD if D > 0, since L > 0.
expect_use_cholesky = self._is_diag_update_positive
mat = base_diag_mat + math_ops.matmul(
u, math_ops.matmul(diag_update_mat, u, adjoint_b=True))
else:
# In this case, we have L + UU^H, which is PD since L > 0.
expect_use_cholesky = True
mat = base_diag_mat + math_ops.matmul(u, u, adjoint_b=True)
if expect_use_cholesky:
self.assertTrue(operator._use_cholesky)
else:
self.assertFalse(operator._use_cholesky)
return operator, mat, feed_dict
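# For reference, the "inversion and determinant lemmas" mentioned above are the
# Woodbury matrix identity and the matrix determinant lemma, which for
# A = L + U D V^H read (comment only, not used directly by the tests):
# (L + U D V^H)^-1 = L^-1 - L^-1 U (D^-1 + V^H L^-1 U)^-1 V^H L^-1
# det(L + U D V^H) = det(D^-1 + V^H L^-1 U) * det(D) * det(L)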
class LinearOperatorLowRankUpdatetestWithDiagUseCholesky(
BaseLinearOperatorLowRankUpdatetest,
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""A = L + UDU^H, D > 0, L > 0 ==> A > 0 and we can use a Cholesky."""
_use_diag_update = True
_is_diag_update_positive = True
_use_v = False
def setUp(self):
# Decrease tolerance since we are testing with condition numbers as high as
# 1e4.
self._atol[dtypes.float32] = 1e-5
self._rtol[dtypes.float32] = 1e-5
self._atol[dtypes.float64] = 1e-10
self._rtol[dtypes.float64] = 1e-10
class LinearOperatorLowRankUpdatetestWithDiagCannotUseCholesky(
BaseLinearOperatorLowRankUpdatetest,
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""A = L + UDU^H, D !> 0, L > 0 ==> A !> 0 and we cannot use a Cholesky."""
_use_diag_update = True
_is_diag_update_positive = False
_use_v = False
def setUp(self):
# Decrease tolerance since we are testing with condition numbers as high as
# 1e4. This class does not use Cholesky, and thus needs even looser
# tolerance.
self._atol[dtypes.float32] = 1e-4
self._rtol[dtypes.float32] = 1e-4
self._atol[dtypes.float64] = 1e-9
self._rtol[dtypes.float64] = 1e-9
class LinearOperatorLowRankUpdatetestNoDiagUseCholesky(
BaseLinearOperatorLowRankUpdatetest,
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""A = L + UU^H, L > 0 ==> A > 0 and we can use a Cholesky."""
_use_diag_update = False
_is_diag_update_positive = None
_use_v = False
def setUp(self):
# Decrease tolerance since we are testing with condition numbers as high as
# 1e4.
self._atol[dtypes.float32] = 1e-5
self._rtol[dtypes.float32] = 1e-5
self._atol[dtypes.float64] = 1e-10
self._rtol[dtypes.float64] = 1e-10
class LinearOperatorLowRankUpdatetestNoDiagCannotUseCholesky(
BaseLinearOperatorLowRankUpdatetest,
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""A = L + UV^H, L > 0 ==> A is not symmetric and we cannot use a Cholesky."""
_use_diag_update = False
_is_diag_update_positive = None
_use_v = True
def setUp(self):
# Decrease tolerance since we are testing with condition numbers as high as
# 1e4. This class does not use Cholesky, and thus needs even looser
# tolerance.
self._atol[dtypes.float32] = 1e-4
self._rtol[dtypes.float32] = 1e-4
self._atol[dtypes.float64] = 1e-9
self._rtol[dtypes.float64] = 1e-9
class LinearOperatorLowRankUpdatetestWithDiagNotSquare(
BaseLinearOperatorLowRankUpdatetest,
linear_operator_test_util.NonSquareLinearOperatorDerivedClassTest):
"""A = L + UDU^H, D > 0, L > 0 ==> A > 0 and we can use a Cholesky."""
_use_diag_update = True
_is_diag_update_positive = True
_use_v = True
class LinearOpearatorLowRankUpdateBroadcastsShape(test.TestCase):
"""Test that the operator's shape is the broadcast of arguments."""
def test_static_shape_broadcasts_up_from_operator_to_other_args(self):
base_operator = linalg.LinearOperatorIdentity(num_rows=3)
u = array_ops.ones(shape=[2, 3, 2])
diag = array_ops.ones(shape=[2, 2])
operator = linalg.LinearOperatorLowRankUpdate(base_operator, u, diag)
# domain_dimension is 3
self.assertAllEqual([2, 3, 3], operator.shape)
with self.test_session():
self.assertAllEqual([2, 3, 3], operator.to_dense().eval().shape)
def test_dynamic_shape_broadcasts_up_from_operator_to_other_args(self):
num_rows_ph = array_ops.placeholder(dtypes.int32)
base_operator = linalg.LinearOperatorIdentity(num_rows=num_rows_ph)
u_shape_ph = array_ops.placeholder(dtypes.int32)
u = array_ops.ones(shape=u_shape_ph)
operator = linalg.LinearOperatorLowRankUpdate(base_operator, u)
feed_dict = {
num_rows_ph: 3,
u_shape_ph: [2, 3, 2], # batch_shape = [2]
}
with self.test_session():
shape_tensor = operator.shape_tensor().eval(feed_dict=feed_dict)
self.assertAllEqual([2, 3, 3], shape_tensor)
dense = operator.to_dense().eval(feed_dict=feed_dict)
self.assertAllEqual([2, 3, 3], dense.shape)
def test_u_and_v_incompatible_batch_shape_raises(self):
base_operator = linalg.LinearOperatorIdentity(num_rows=3, dtype=np.float64)
u = rng.rand(5, 3, 2)
v = rng.rand(4, 3, 2)
with self.assertRaisesRegexp(ValueError, "Incompatible shapes"):
linalg.LinearOperatorLowRankUpdate(base_operator, u=u, v=v)
def test_u_and_base_operator_incompatible_batch_shape_raises(self):
base_operator = linalg.LinearOperatorIdentity(
num_rows=3, batch_shape=[4], dtype=np.float64)
u = rng.rand(5, 3, 2)
with self.assertRaisesRegexp(ValueError, "Incompatible shapes"):
linalg.LinearOperatorLowRankUpdate(base_operator, u=u)
def test_u_and_base_operator_incompatible_domain_dimension(self):
base_operator = linalg.LinearOperatorIdentity(num_rows=3, dtype=np.float64)
u = rng.rand(5, 4, 2)
with self.assertRaisesRegexp(ValueError, "not compatible"):
linalg.LinearOperatorLowRankUpdate(base_operator, u=u)
def test_u_and_diag_incompatible_low_rank_raises(self):
base_operator = linalg.LinearOperatorIdentity(num_rows=3, dtype=np.float64)
u = rng.rand(5, 3, 2)
diag = rng.rand(5, 4) # Last dimension should be 2
with self.assertRaisesRegexp(ValueError, "not compatible"):
linalg.LinearOperatorLowRankUpdate(base_operator, u=u, diag_update=diag)
def test_diag_incompatible_batch_shape_raises(self):
base_operator = linalg.LinearOperatorIdentity(num_rows=3, dtype=np.float64)
u = rng.rand(5, 3, 2)
diag = rng.rand(4, 2) # First dimension should be 5
with self.assertRaisesRegexp(ValueError, "Incompatible shapes"):
linalg.LinearOperatorLowRankUpdate(base_operator, u=u, diag_update=diag)
if __name__ == "__main__":
test.main()
| apache-2.0 | 4,957,880,699,038,931,000 | 36.416413 | 80 | 0.677579 | false |
levinas/assembly | lib/assembly/utils.py | 2 | 2003 | import errno
import json
import os
import re
class Error(Exception):
"""Base class for exceptions in this module"""
pass
class URLError(Error, ValueError):
pass
def verify_url(url, port=8000):
"""Returns complete URL with http prefix and port number
"""
pattern = re.compile(
r'^(https?://)?' # capture 1: http prefix
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|' # domain
r'localhost|' # localhost
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # IP
r'(?::\d+)?' # optional port
r'(/?|[/?]\S+)$', # capture 2: trailing args
re.IGNORECASE)
match = pattern.search(url)
if not match:
raise URLError(url)
if not match.group(1):
url = 'http://' + url
if not match.group(2) and url.count(':') < 2 and port:
url += ":{}".format(port)
return url
def test_verify_url():
"""unittest: py.test client.py -v"""
assert verify_url('localhost') == 'http://localhost:8000'
assert verify_url('140.221.84.203') == 'http://140.221.84.203:8000'
assert verify_url('kbase.us/services/assembly') == 'http://kbase.us/services/assembly'
assert verify_url('http://kbase.us/services/assembly') == 'http://kbase.us/services/assembly'
assert verify_url('https://kbase.us/services/assembly') == 'https://kbase.us/services/assembly'
try:
import pytest
with pytest.raises(URLError):
verify_url('badURL')
verify_url('badURL/with/path:8000')
verify_url('http://very bad url.com')
verify_url('')
except ImportError:
pass
def verify_dir(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != errno.EEXIST:
raise
return path
def load_json_from_file(json_file):
try:
with open(json_file) as f: js = f.read()
doc = json.loads(js)
except (IOError, ValueError) as e:
raise Error(e)
return doc
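# Small usage sketch (file names and URLs below are hypothetical):
#
# url = verify_url('kbase.us/services/assembly') # -> 'http://kbase.us/services/assembly'
# outdir = verify_dir('/tmp/assembly-out') # created if missing, path is returned
# doc = load_json_from_file('job.json') # raises Error on a bad path or bad JSON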
| mit | -2,175,807,915,604,422,000 | 28.028986 | 99 | 0.568148 | false |
aio-libs/aiomonitor | examples/web_srv.py | 2 | 1066 | import asyncio
import aiomonitor
import uvloop
from aiohttp import web
async def simple(request):
loop = request.app.loop
await asyncio.sleep(10, loop=loop)
await asyncio.sleep(10, loop=loop)
return web.Response(text='Simple answer')
async def hello(request):
resp = web.StreamResponse()
name = request.match_info.get('name', 'Anonymous')
answer = ('Hello, ' + name).encode('utf8')
resp.content_length = len(answer)
resp.content_type = 'text/plain'
await resp.prepare(request)
await asyncio.sleep(100, loop=loop)
resp.write(answer)
await resp.write_eof()
return resp
async def init(loop):
app = web.Application(loop=loop)
app.router.add_get('/simple', simple)
app.router.add_get('/hello/{name}', hello)
app.router.add_get('/hello', hello)
return app
host, port = 'localhost', 8090
loop = uvloop.new_event_loop()
asyncio.set_event_loop(loop)
app = loop.run_until_complete(init(loop))
with aiomonitor.start_monitor(loop, locals=locals()):
web.run_app(app, port=port, host=host)
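# While the app is running, the aiomonitor REPL can be reached from another
# terminal, e.g. (assuming the default monitor port of 50101):
# $ nc localhost 50101
# or, if the console CLI is available in the installed version:
# $ python -m aiomonitor.cli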
| apache-2.0 | 4,463,106,482,035,735,600 | 24.380952 | 54 | 0.684803 | false |
easmetz/inasafe | safe/utilities/analysis_handler.py | 2 | 28522 | # coding=utf-8
"""
InaSAFE Disaster risk assessment tool by AusAid **GUI InaSAFE Wizard Dialog.**
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '[email protected]'
__revision__ = '$Format:%H$'
__date__ = '21/02/2011'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import os
import json
import logging
from collections import OrderedDict
# noinspection PyPackageRequirements
from qgis.core import (
QgsCoordinateTransform,
QgsRectangle,
QgsMapLayerRegistry,
QgsCoordinateReferenceSystem)
# noinspection PyPackageRequirements
from PyQt4 import QtGui, QtCore
# noinspection PyPackageRequirements
from PyQt4.QtCore import QObject, QSettings, pyqtSignal
from safe.utilities.keyword_io import KeywordIO
from safe.utilities.utilities import (
get_error_message,
impact_attribution)
from safe.utilities.gis import (
extent_string_to_array,
read_impact_layer,
viewport_geo_array)
from safe.utilities.resources import (
resources_path,
resource_url)
from safe.defaults import (
supporters_logo_path)
from safe.utilities.styling import (
setRasterStyle,
set_vector_graduated_style,
set_vector_categorized_style)
from safe.common.utilities import temp_dir
from safe.common.exceptions import ReadLayerError
from safe.common.signals import (
send_static_message,
send_error_message,
DYNAMIC_MESSAGE_SIGNAL,
STATIC_MESSAGE_SIGNAL,
ERROR_MESSAGE_SIGNAL,
BUSY_SIGNAL,
NOT_BUSY_SIGNAL,
ANALYSIS_DONE_SIGNAL)
from safe import messaging as m
from safe.messaging import styles
from safe.common.exceptions import (
InsufficientOverlapError, TemplateLoadingError)
from safe.report.impact_report import ImpactReport
from safe.gui.tools.impact_report_dialog import ImpactReportDialog
from safe_extras.pydispatch import dispatcher
from safe.utilities.extent import Extent
from safe.utilities.qgis_utilities import add_above_layer
from safe.impact_functions.impact_function_manager import ImpactFunctionManager
from safe.impact_template.utilities import get_report_template
PROGRESS_UPDATE_STYLE = styles.PROGRESS_UPDATE_STYLE
INFO_STYLE = styles.INFO_STYLE
WARNING_STYLE = styles.WARNING_STYLE
LOGO_ELEMENT = m.Image(
resource_url(
resources_path('img', 'logos', 'inasafe-logo.png')),
'InaSAFE Logo')
LOGGER = logging.getLogger('InaSAFE')
class AnalysisHandler(QObject):
"""Analysis handler for the dock and the wizard."""
analysisDone = pyqtSignal(bool)
# noinspection PyUnresolvedReferences
def __init__(self, parent):
"""Constructor for the class.
:param parent: Parent widget i.e. the wizard dialog.
:type parent: QWidget
"""
QtCore.QObject.__init__(self)
self.parent = parent
# Do not delete this
self.iface = parent.iface
self.keyword_io = KeywordIO()
self.impact_function_manager = ImpactFunctionManager()
self.extent = Extent(self.iface)
self.impact_function = None
self.composer = None
# Values for settings these get set in read_settings.
self.run_in_thread_flag = None
self.zoom_to_impact_flag = None
self.hide_exposure_flag = None
self.clip_hard = None
self.show_intermediate_layers = None
self.show_rubber_bands = False
self.last_analysis_rubberband = None
# This is a rubber band to show what the AOI of the
# next analysis will be. Also added in 2.1.0
self.next_analysis_rubberband = None
self.read_settings()
def enable_signal_receiver(self):
"""Setup dispatcher for all available signal from Analysis.
.. note:: Adapted from the dock
"""
dispatcher.connect(
self.show_busy,
signal=BUSY_SIGNAL)
dispatcher.connect(
self.hide_busy,
signal=NOT_BUSY_SIGNAL)
dispatcher.connect(
self.completed,
signal=ANALYSIS_DONE_SIGNAL)
# noinspection PyArgumentEqualDefault
dispatcher.connect(
self.show_dynamic_message,
signal=DYNAMIC_MESSAGE_SIGNAL)
# noinspection PyArgumentEqualDefault,PyUnresolvedReferences
dispatcher.connect(
self.parent.step_fc_analysis.wvResults.static_message_event,
signal=STATIC_MESSAGE_SIGNAL,
sender=dispatcher.Any)
# noinspection PyArgumentEqualDefault,PyUnresolvedReferences
dispatcher.connect(
self.parent.step_fc_analysis.wvResults.error_message_event,
signal=ERROR_MESSAGE_SIGNAL,
sender=dispatcher.Any)
def disable_signal_receiver(self):
"""Remove dispatcher for all available signal from Analysis.
.. note:: Adapted from the dock
"""
dispatcher.disconnect(
self.show_busy,
signal=BUSY_SIGNAL)
dispatcher.disconnect(
self.hide_busy,
signal=NOT_BUSY_SIGNAL)
dispatcher.disconnect(
self.completed,
signal=ANALYSIS_DONE_SIGNAL)
dispatcher.disconnect(
self.show_dynamic_message,
signal=DYNAMIC_MESSAGE_SIGNAL)
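# For context: these handlers are driven by pydispatch. The analysis emits its
# signals roughly as
# dispatcher.send(signal=BUSY_SIGNAL, sender=self)
# dispatcher.send(signal=DYNAMIC_MESSAGE_SIGNAL, sender=self, message=message)
# (sketch only; the actual send sites live in safe.common.signals), and the
# connect()/disconnect() calls above subscribe or unsubscribe the matching slots.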
def show_dynamic_message(self, sender, message):
"""Send a dynamic message to the message viewer.
Dynamic messages are appended to any existing content in the
MessageViewer.
.. note:: Modified from the dock
:param sender: The object that sent the message.
:type sender: Object, None
:param message: An instance of our rich message class.
:type message: safe.messaging.Message
"""
# TODO Hardcoded step - may overflow if the number of messages increases
# noinspection PyUnresolvedReferences
self.parent.step_fc_analysis.pbProgress.setValue(
self.parent.step_fc_analysis.pbProgress.value() + 15)
# noinspection PyUnresolvedReferences
self.parent.step_fc_analysis.wvResults.dynamic_message_event(
sender, message)
def read_settings(self):
"""Restore settings from QSettings.
Do this on init and after changing options in the options dialog.
"""
settings = QSettings()
flag = bool(settings.value(
'inasafe/showRubberBands', False, type=bool))
self.extent.show_rubber_bands = flag
try:
extent = settings.value('inasafe/analysis_extent', '', type=str)
crs = settings.value('inasafe/analysis_extent_crs', '', type=str)
except TypeError:
# Any bogus stuff in settings and we just clear them
extent = ''
crs = ''
if extent != '' and crs != '':
extent = extent_string_to_array(extent)
try:
self.extent.user_extent = QgsRectangle(*extent)
self.extent.user_extent_crs = QgsCoordinateReferenceSystem(crs)
self.extent.show_user_analysis_extent()
except TypeError:
self.extent.user_extent = None
self.extent.user_extent_crs = None
flag = settings.value(
'inasafe/useThreadingFlag', False, type=bool)
self.run_in_thread_flag = flag
flag = settings.value(
'inasafe/setZoomToImpactFlag', True, type=bool)
self.zoom_to_impact_flag = flag
# whether exposure layer should be hidden after model completes
flag = settings.value(
'inasafe/setHideExposureFlag', False, type=bool)
self.hide_exposure_flag = flag
# whether to 'hard clip' layers (e.g. cut buildings in half if they
# lie partially in the AOI)
self.clip_hard = settings.value('inasafe/clip_hard', False, type=bool)
# whether to show or not postprocessing generated layers
self.show_intermediate_layers = settings.value(
'inasafe/show_intermediate_layers', False, type=bool)
# whether to show or not dev only options
# noinspection PyAttributeOutsideInit
self.developer_mode = settings.value(
'inasafe/developer_mode', False, type=bool)
# whether to show or not a custom Logo
# noinspection PyAttributeOutsideInit
self.organisation_logo_path = settings.value(
'inasafe/organisation_logo_path',
supporters_logo_path(),
type=str)
# noinspection PyUnresolvedReferences
def show_busy(self):
"""Lock buttons and enable the busy cursor."""
self.parent.pbnNext.setEnabled(False)
self.parent.pbnBack.setEnabled(False)
self.parent.pbnCancel.setEnabled(False)
QtGui.qApp.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
self.parent.repaint()
QtGui.qApp.processEvents()
# noinspection PyUnresolvedReferences
def hide_busy(self):
"""Unlock buttons A helper function to indicate processing is done."""
self.parent.pbnNext.setEnabled(True)
self.parent.pbnBack.setEnabled(True)
self.parent.pbnCancel.setEnabled(True)
self.parent.repaint()
QtGui.qApp.restoreOverrideCursor()
def analysis_error(self, exception, message):
"""A helper to spawn an error and halt processing.
An exception will be logged, busy status removed and a message
displayed.
.. note:: Copied from the dock
        :param exception: An exception that was raised
        :type exception: Exception
        :param message: an ErrorMessage to display
        :type message: ErrorMessage, Message
"""
self.hide_busy()
LOGGER.exception(message)
message = get_error_message(exception, context=message)
send_error_message(self, message)
self.analysisDone.emit(False)
def setup_and_run_analysis(self):
"""Setup and execute the analysis"""
self.enable_signal_receiver()
self.show_busy()
self.setup_analysis()
try:
clip_parameters = self.impact_function.clip_parameters
self.extent.show_last_analysis_extent(
clip_parameters['adjusted_geo_extent'])
# Start the analysis
self.impact_function.run_analysis()
except InsufficientOverlapError as e:
raise e
self.disable_signal_receiver()
# noinspection PyUnresolvedReferences
def setup_analysis(self):
"""Setup analysis to make it ready to work.
.. note:: Copied or adapted from the dock
"""
# Impact Function
self.impact_function = self.impact_function_manager.get(
self.parent.step_fc_function.selected_function()['id'])
self.impact_function.parameters = self.parent.step_fc_summary.if_params
# Layers
self.impact_function.hazard = self.parent.hazard_layer
self.impact_function.exposure = self.parent.exposure_layer
self.impact_function.aggregation = self.parent.aggregation_layer
        # TODO test if the implemented aggregation layer works!
# Variables
self.impact_function.clip_hard = self.clip_hard
self.impact_function.show_intermediate_layers = \
self.show_intermediate_layers
viewport = viewport_geo_array(self.iface.mapCanvas())
self.impact_function.viewport_extent = viewport
# Extent
if self.impact_function.requested_extent:
extent = self.extent
self.impact_function.requested_extent = extent.user_extent
self.impact_function.requested_extent_crs = extent.user_extent_crs
# noinspection PyUnresolvedReferences
def completed(self):
"""Slot activated when the process is done.
.. note:: Adapted from the dock
"""
# Try to run completion code
try:
from datetime import datetime
LOGGER.debug(datetime.now())
LOGGER.debug('get engine impact layer')
LOGGER.debug(self.impact_function is None)
# Load impact layer into QGIS
qgis_impact_layer = read_impact_layer(self.impact_function.impact)
report = self.show_results()
except Exception, e: # pylint: disable=W0703
# FIXME (Ole): This branch is not covered by the tests
self.analysis_error(e, self.tr('Error loading impact layer.'))
else:
# On success, display generated report
impact_path = qgis_impact_layer.source()
message = m.Message(report)
# message.add(m.Heading(self.tr('View processing log as HTML'),
# **INFO_STYLE))
# message.add(m.Link('file://%s' % self.parent.wvResults.log_path))
# noinspection PyTypeChecker
send_static_message(self, message)
self.parent.step_fc_analysis.wvResults.impact_path = impact_path
self.parent.step_fc_analysis.pbProgress.hide()
self.parent.step_fc_analysis.lblAnalysisStatus.setText(
'Analysis done.')
self.parent.step_fc_analysis.pbnReportWeb.show()
self.parent.step_fc_analysis.pbnReportPDF.show()
self.parent.step_fc_analysis.pbnReportComposer.show()
self.hide_busy()
self.analysisDone.emit(True)
def show_impact_report(self, qgis_impact_layer):
pass
def show_results(self):
"""Helper function for slot activated when the process is done.
.. versionchanged:: 3.4 - removed parameters.
.. note:: If you update this function, please report your change to
safe.gui.widgets.dock.show_results too.
:returns: Provides a report for writing to the dock.
        :rtype: safe.messaging.Message
"""
qgis_exposure = self.impact_function.exposure.qgis_layer()
qgis_hazard = self.impact_function.hazard.qgis_layer()
qgis_aggregation = self.impact_function.aggregation.qgis_layer()
safe_impact_layer = self.impact_function.impact
qgis_impact_layer = read_impact_layer(safe_impact_layer)
keywords = self.keyword_io.read_keywords(qgis_impact_layer)
json_path = os.path.splitext(qgis_impact_layer.source())[0] + '.json'
# write postprocessing report to keyword
postprocessor_data = self.impact_function.postprocessor_manager.\
get_json_data(self.impact_function.aggregator.aoi_mode)
post_processing_report = m.Message()
if os.path.exists(json_path):
with open(json_path) as json_file:
impact_data = json.load(
json_file, object_pairs_hook=OrderedDict)
impact_data['post processing'] = postprocessor_data
with open(json_path, 'w') as json_file_2:
json.dump(impact_data, json_file_2, indent=2)
else:
post_processing_report = self.impact_function.\
postprocessor_manager.get_output(
self.impact_function.aggregator.aoi_mode)
keywords['postprocessing_report'] = post_processing_report.to_html(
suppress_newlines=True)
self.keyword_io.write_keywords(qgis_impact_layer, keywords)
# Get tabular information from impact layer
report = m.Message()
report.add(LOGO_ELEMENT)
report.add(m.Heading(self.tr('Analysis Results'), **INFO_STYLE))
# If JSON Impact Data Exist, use JSON
json_path = qgis_impact_layer.source()[:-3] + 'json'
LOGGER.debug('JSON Path %s' % json_path)
if os.path.exists(json_path):
impact_template = get_report_template(json_file=json_path)
impact_report = impact_template.generate_message_report()
report.add(impact_report)
else:
report.add(self.keyword_io.read_keywords(
qgis_impact_layer, 'impact_summary'))
# append postprocessing report
report.add(post_processing_report.to_html())
# Layer attribution comes last
report.add(impact_attribution(keywords).to_html(True))
# Get requested style for impact layer of either kind
style = safe_impact_layer.get_style_info()
style_type = safe_impact_layer.get_style_type()
# Determine styling for QGIS layer
if safe_impact_layer.is_vector:
if not style:
# Set default style if possible
pass
elif style_type == 'categorizedSymbol':
LOGGER.debug('use categorized')
set_vector_categorized_style(qgis_impact_layer, style)
elif style_type == 'graduatedSymbol':
LOGGER.debug('use graduated')
set_vector_graduated_style(qgis_impact_layer, style)
elif safe_impact_layer.is_raster:
if not style:
qgis_impact_layer.setDrawingStyle("SingleBandPseudoColor")
else:
setRasterStyle(qgis_impact_layer, style)
else:
message = self.tr(
                'Impact layer %s was neither a raster nor a vector layer') % (
qgis_impact_layer.source())
# noinspection PyExceptionInherit
raise ReadLayerError(message)
legend = self.iface.legendInterface()
# Insert the aggregation output above the input aggregation layer
if self.show_intermediate_layers:
add_above_layer(
self.impact_function.aggregator.layer,
qgis_aggregation)
legend.setLayerVisible(self.impact_function.aggregator.layer, True)
if self.hide_exposure_flag:
# Insert the impact always above the hazard
add_above_layer(
qgis_impact_layer,
qgis_hazard)
else:
# Insert the impact above the hazard and the exposure if
# we don't hide the exposure. See #2899
add_above_layer(
qgis_impact_layer,
qgis_exposure,
qgis_hazard)
# In QGIS 2.14.2 and GDAL 1.11.3, if the exposure is in 3857,
# the impact layer is in 54004, we need to change it. See issue #2790.
if qgis_exposure.crs().authid() == 'EPSG:3857':
if qgis_impact_layer.crs().authid() != 'EPSG:3857':
epsg_3857 = QgsCoordinateReferenceSystem(3857)
qgis_impact_layer.setCrs(epsg_3857)
# make sure it is active in the legend - needed since QGIS 2.4
self.iface.setActiveLayer(qgis_impact_layer)
# then zoom to it
if self.zoom_to_impact_flag:
self.iface.zoomToActiveLayer()
if self.hide_exposure_flag:
exposure_layer = self.get_exposure_layer()
legend.setLayerVisible(exposure_layer, False)
# Make the layer visible. Might be hidden by default. See #2925
legend.setLayerVisible(qgis_impact_layer, True)
# Return text to display in report panel
return report
def print_map(self, mode='pdf'):
"""Open impact report dialog that used define report options.
:param mode: Mode for report - defaults to PDF.
        :type mode: str
"""
# Check if selected layer is valid
impact_layer = self.iface.activeLayer()
if impact_layer is None:
# noinspection PyCallByClass,PyTypeChecker,PyArgumentList
QtGui.QMessageBox.warning(
self.parent,
self.tr('InaSAFE'),
self.tr(
'Please select a valid impact layer before trying to '
'print.'))
return
# Open Impact Report Dialog
print_dialog = ImpactReportDialog(self.iface)
print_dialog.button_ok = QtGui.QPushButton(self.tr('OK'))
print_dialog.button_box.addButton(
print_dialog.button_ok,
QtGui.QDialogButtonBox.ActionRole)
# noinspection PyUnresolvedReferences
print_dialog.button_ok.clicked.connect(print_dialog.accept)
print_dialog.button_save_pdf.hide()
print_dialog.button_open_composer.hide()
if not print_dialog.exec_() == QtGui.QDialog.Accepted:
# noinspection PyTypeChecker
self.show_dynamic_message(
self,
m.Message(
m.Heading(self.tr('Map Creator'), **WARNING_STYLE),
m.Text(self.tr('Report generation cancelled!'))))
return
# Get the extent of the map for report
use_full_extent = print_dialog.analysis_extent_radio.isChecked()
if use_full_extent:
map_crs = self.iface.mapCanvas().mapRenderer().destinationCrs()
layer_crs = self.iface.activeLayer().crs()
layer_extent = self.iface.activeLayer().extent()
if map_crs != layer_crs:
# noinspection PyCallingNonCallable
transform = QgsCoordinateTransform(layer_crs, map_crs)
layer_extent = transform.transformBoundingBox(layer_extent)
area_extent = layer_extent
else:
area_extent = self.iface.mapCanvas().extent()
# Get selected template path to use
if print_dialog.default_template_radio.isChecked():
template_path = print_dialog.template_combo.itemData(
print_dialog.template_combo.currentIndex())
else:
template_path = print_dialog.template_path.text()
if not os.path.exists(template_path):
# noinspection PyCallByClass,PyTypeChecker,PyArgumentList
QtGui.QMessageBox.warning(
self.parent,
self.tr('InaSAFE'),
self.tr('Please select a valid template before printing. '
'The template you choose does not exist.'))
return
# Instantiate and prepare Report
# noinspection PyTypeChecker
self.show_dynamic_message(
self,
m.Message(
m.Heading(self.tr('Map Creator'), **PROGRESS_UPDATE_STYLE),
m.Text(self.tr('Preparing map and report'))))
impact_report = ImpactReport(self.iface, template_path, impact_layer)
impact_report.extent = area_extent
# Get other setting
settings = QSettings()
logo_path = settings.value(
'inasafe/organisation_logo_path', '', type=str)
impact_report.organisation_logo = logo_path
disclaimer_text = settings.value(
'inasafe/reportDisclaimer', '', type=str)
impact_report.disclaimer = disclaimer_text
north_arrow_path = settings.value(
'inasafe/north_arrow_path', '', type=str)
impact_report.north_arrow = north_arrow_path
template_warning_verbose = bool(settings.value(
'inasafe/template_warning_verbose', True, type=bool))
# Check if there's missing elements needed in the template
component_ids = ['safe-logo', 'north-arrow', 'organisation-logo',
'impact-map', 'impact-legend']
impact_report.component_ids = component_ids
if template_warning_verbose and \
len(impact_report.missing_elements) != 0:
title = self.tr('Template is missing some elements')
question = self.tr(
'The composer template you are printing to is missing '
'these elements: %s. Do you still want to continue') % (
', '.join(impact_report.missing_elements))
# noinspection PyCallByClass,PyTypeChecker
answer = QtGui.QMessageBox.question(
self.parent,
title,
question,
QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
if answer == QtGui.QMessageBox.No:
return
create_pdf_flag = bool(mode == 'pdf')
self.show_busy()
if create_pdf_flag:
self.print_map_to_pdf(impact_report)
else:
self.open_map_in_composer(impact_report)
self.hide_busy()
def print_map_to_pdf(self, impact_report):
"""Print map to PDF given MapReport instance.
:param impact_report: Impact Report instance that is ready to print
:type impact_report: ImpactReport
"""
impact_report.setup_composition()
# Get Filename
map_title = impact_report.map_title
if map_title is not None:
default_file_name = map_title + '.pdf'
default_file_name = default_file_name.replace(' ', '_')
else:
send_error_message(
self, self.tr('Keyword "map_title" not found.'))
return
# Get output path
# noinspection PyCallByClass,PyTypeChecker
output_path = QtGui.QFileDialog.getSaveFileName(
self.parent,
self.tr('Write to PDF'),
os.path.join(temp_dir(), default_file_name),
self.tr('Pdf File (*.pdf)'))
output_path = str(output_path)
if output_path is None or output_path == '':
# noinspection PyTypeChecker
self.show_dynamic_message(
self,
m.Message(
m.Heading(self.tr('Map Creator'), **WARNING_STYLE),
m.Text(self.tr('Printing cancelled!'))))
return
try:
map_pdf_path, table_pdf_path = impact_report.print_to_pdf(
output_path)
# Make sure the file paths can wrap nicely:
wrapped_map_path = map_pdf_path.replace(os.sep, '<wbr>' + os.sep)
wrapped_table_path = table_pdf_path.replace(
os.sep, '<wbr>' + os.sep)
status = m.Message(
m.Heading(self.tr('Map Creator'), **INFO_STYLE),
m.Paragraph(self.tr('Your PDF was created....')),
m.Paragraph(self.tr(
'Opening using the default PDF viewer on your system. '
'The generated pdfs were saved as:')),
m.Paragraph(wrapped_map_path),
m.Paragraph(self.tr('and')),
m.Paragraph(wrapped_table_path))
# noinspection PyCallByClass,PyTypeChecker,PyArgumentList
QtGui.QDesktopServices.openUrl(
QtCore.QUrl.fromLocalFile(table_pdf_path))
# noinspection PyCallByClass,PyTypeChecker,PyArgumentList
QtGui.QDesktopServices.openUrl(
QtCore.QUrl.fromLocalFile(map_pdf_path))
# noinspection PyTypeChecker
self.show_dynamic_message(self, status)
except TemplateLoadingError, e:
send_error_message(self, get_error_message(e))
except Exception, e: # pylint: disable=broad-except
send_error_message(self, get_error_message(e))
def open_map_in_composer(self, impact_report):
"""Open map in composer given MapReport instance.
..note:: (AG) See https://github.com/AIFDR/inasafe/issues/911. We
need to set the composition to the composer before loading the
template.
:param impact_report: Impact Report to be opened in composer.
:type impact_report: ImpactReport
"""
impact_report.setup_composition()
self.composer = self.iface.createNewComposer()
self.composer.setComposition(impact_report.composition)
impact_report.load_template()
impact_report.draw_composition()
# Fit In View
number_pages = impact_report.composition.numPages()
paper_height = impact_report.composition.paperHeight()
paper_width = impact_report.composition.paperWidth()
space_between_pages = impact_report.composition.spaceBetweenPages()
if number_pages > 0:
height = (paper_height * number_pages) + (
space_between_pages * (number_pages - 1))
self.composer.fitInView(
0, 0, paper_width + 1, height + 1, QtCore.Qt.KeepAspectRatio)
| gpl-3.0 | -4,055,814,745,100,731,400 | 37.029333 | 79 | 0.616997 | false |
znick/anytask | anytask/mail/views.py | 1 | 10115 | # -*- coding: utf-8 -*-
import json
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseForbidden, HttpResponse
from django.shortcuts import render
from django.utils import timezone
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_POST, require_GET
from pytz import timezone as timezone_pytz
from courses.models import Course
from groups.models import Group
from mail.common import EmailRenderer
from mail.models import Message
from users.model_user_status import get_statuses
from users.models import UserProfile
MONTH = {
1: _(u"january"),
2: _(u"february"),
3: _(u"march"),
4: _(u"april"),
5: _(u"may"),
6: _(u"june"),
7: _(u"july"),
8: _(u"august"),
9: _(u"september"),
10: _(u"october"),
11: _(u"november"),
12: _(u"december")
}
@require_GET
@login_required
def mail_page(request):
user = request.user
user_profile = user.profile
users_from_staff_len = {}
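    # A staff view can pre-select recipients; their ids are stored in the
    # session under 'user_ids_send_mail_<index>' and only the count is
    # passed to the template here.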
if user.is_staff and 'from_staff' in request.GET and 'user_ids_send_mail_counter' in request.session:
key = 'user_ids_send_mail_' + request.GET['from_staff']
if key in request.session:
users_from_staff_len = {
'index': request.GET['from_staff'],
'length': len(request.session[key]),
}
if user.is_staff:
courses_teacher = Course.objects.filter(is_active=True)
else:
courses_teacher = Course.objects.filter(teachers=user, is_active=True)
context = {
"user": user,
"user_profile": user_profile,
"courses_teacher": courses_teacher,
'user_statuses': get_statuses(),
"users_from_staff_len": users_from_staff_len,
"snow_alert_message_fulltext": hasattr(settings, 'SEND_MESSAGE_FULLTEXT') and settings.SEND_MESSAGE_FULLTEXT,
}
return render(request, 'mail.html', context)
@require_GET
@login_required
def ajax_get_mailbox(request):
response = dict()
user = request.user
user_profile = user.profile
datatable_data = dict(request.GET)
if "draw" not in datatable_data:
return HttpResponseForbidden()
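    # Apply bulk actions (mark read/unread, move to or restore from trash)
    # requested by the client before building the table data.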
if "make_read[]" in datatable_data:
if datatable_data["make_read[]"][0] == "all":
user_profile.unread_messages.clear()
user_profile.send_notify_messages.clear()
else:
user_profile.unread_messages = list(
user_profile.unread_messages
.exclude(id__in=datatable_data["make_read[]"])
.values_list("id", flat=True)
)
user_profile.send_notify_messages = list(
user_profile.send_notify_messages
.exclude(id__in=datatable_data["make_read[]"])
.values_list("id", flat=True)
)
if "make_unread[]" in datatable_data:
user_profile.unread_messages.add(*Message.objects.filter(id__in=datatable_data["make_unread[]"]))
if "make_delete[]" in datatable_data:
user_profile.deleted_messages.add(*Message.objects.filter(id__in=datatable_data["make_delete[]"]))
if "make_undelete[]" in datatable_data:
user_profile.deleted_messages = list(
user_profile.deleted_messages
.exclude(id__in=datatable_data["make_undelete[]"])
.values_list("id", flat=True)
)
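    # Pick the requested folder: trash shows deleted messages, while inbox
    # and sent exclude them.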
messages = Message.objects.none()
messages_deleted = user_profile.deleted_messages.all()
type_msg = datatable_data['type'][0]
if type_msg == "inbox":
messages = Message.objects.filter(recipients=user).exclude(id__in=messages_deleted)
elif type_msg == "sent":
messages = Message.objects.filter(sender=user).exclude(id__in=messages_deleted)
elif type_msg == "trash":
messages = messages_deleted
data = list()
start = int(datatable_data['start'][0])
end = start + int(datatable_data['length'][0])
unread = user_profile.unread_messages.all()
for msg in messages[start:end]:
data.append({
"0": "",
"1": u'%s %s' % (msg.sender.last_name, msg.sender.first_name),
"2": msg.title,
"3": format_date(msg.create_time.astimezone(timezone_pytz(user_profile.time_zone))),
"DT_RowClass": "unread" if msg in unread else "",
"DT_RowId": "row_msg_" + type_msg + "_" + str(msg.id),
"DT_RowData": {
"id": msg.id
},
})
response['draw'] = datatable_data['draw']
response['recordsTotal'] = messages.count()
response['recordsFiltered'] = messages.count()
response['data'] = data
response['unread_count'] = user_profile.get_unread_count()
response['type'] = type_msg
return HttpResponse(json.dumps(response),
content_type="application/json")
def format_date(date):
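    """Return time for today, 'day month' for this year, else 'dd.mm.yy'."""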
date_str = ""
now = timezone.now()
if now.year == date.year:
if now.day == date.day and now.month == date.month:
date_str = date.strftime("%H:%M")
else:
date_str = unicode(date.day) + u" " + MONTH[date.month]
else:
date_str = date.strftime("%d.%m.%y")
return date_str
@require_GET
@login_required
def ajax_get_message(request):
response = dict()
user = request.user
user_profile = user.profile
if "msg_id" not in request.GET:
return HttpResponseForbidden()
msg_id = int(request.GET["msg_id"])
message = Message.objects.get(id=msg_id)
if message.sender != user and user not in message.recipients.all():
return HttpResponseForbidden()
unread_count = int(request.GET["unread_count"])
if message in user_profile.unread_messages.all():
message.read_message(user)
unread_count -= 1
recipients_user = []
recipients_group = []
recipients_course = []
recipients_status = []
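    # For hidden copies recipients must not see each other, so only the
    # current user is listed unless the viewer is the sender.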
if message.hidden_copy and message.sender != user:
recipients_user.append({
"id": user.id,
"fullname": u'%s %s' % (user.last_name, user.first_name),
"url": user.get_absolute_url()
})
else:
for recipient in message.recipients_user.all():
recipients_user.append({
"id": recipient.id,
"fullname": u'%s %s' % (recipient.last_name, recipient.first_name),
"url": recipient.get_absolute_url()
})
for group in message.recipients_group.all():
recipients_group.append({
"id": group.id,
"name": group.name
})
for course in message.recipients_course.all():
recipients_course.append({
"id": course.id,
"name": course.name,
"url": course.get_absolute_url(),
})
for status in message.recipients_status.all():
recipients_status.append({
"id": status.id,
"name": status.name
})
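    # Recipients and the inbox view get the text rendered through
    # EmailRenderer.fill_name; the sender's outbox shows the original text.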
if message.sender != user or request.GET["mailbox"] == 'inbox':
text = EmailRenderer.fill_name(message, user)
else:
text = message.text
response['sender'] = {
"id": message.sender.id,
"fullname": u'%s %s' % (message.sender.last_name, message.sender.first_name),
"url": message.sender.get_absolute_url(),
"avatar": message.sender.profile.avatar.url if message.sender.profile.avatar else "",
}
response['recipients_user'] = recipients_user
response['recipients_group'] = recipients_group
response['recipients_course'] = recipients_course
response['recipients_status'] = recipients_status
response['date'] = message.create_time.astimezone(timezone_pytz(user_profile.time_zone))\
.strftime("%d.%m.%y %H:%M:%S")
response['text'] = text
response['unread_count'] = unread_count
return HttpResponse(json.dumps(response),
content_type="application/json")
@require_POST
@login_required
def ajax_send_message(request):
user = request.user
data = dict(request.POST)
hidden_copy = False
if 'hidden_copy' in data and data['hidden_copy'][0]:
hidden_copy = True
variable = False
if 'variable' in data and data['variable'][0]:
variable = True
message = Message()
message.sender = user
message.title = data['new_title'][0]
message.text = data['new_text'][0]
message.hidden_copy = hidden_copy
message.variable = variable
message.save()
recipients_ids = set()
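    # Resolve all addressees to a set of user ids: explicit users, members
    # of the selected groups and courses, and users with the given statuses.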
if "new_recipients_user[]" in data or "new_recipients_preinit[]" in data:
users = data.get("new_recipients_user[]", [])
if "new_recipients_preinit[]" in data:
users += request.session.get('user_ids_send_mail_' + data["new_recipients_preinit[]"][0], [])
message.recipients_user = users
recipients_ids.update(message.recipients_user.values_list('id', flat=True))
group_ids = []
if "new_recipients_group[]" in data:
message.recipients_group = data["new_recipients_group[]"]
for group in Group.objects.filter(id__in=data["new_recipients_group[]"]):
recipients_ids.update(group.students.exclude(id=user.id).values_list('id', flat=True))
group_ids.append(group.id)
if "new_recipients_course[]" in data:
message.recipients_course = data["new_recipients_course[]"]
for course in Course.objects.filter(id__in=data["new_recipients_course[]"]):
for group in course.groups.exclude(id__in=group_ids).distinct():
recipients_ids.update(group.students.exclude(id=user.id).values_list('id', flat=True))
if "new_recipients_status[]" in data:
message.recipients_status = data["new_recipients_status[]"]
recipients_ids.update(UserProfile.objects.filter(user_status__in=data["new_recipients_status[]"])
.values_list('user__id', flat=True))
message.recipients = list(recipients_ids)
return HttpResponse("OK")
| mit | -2,002,390,479,573,201,000 | 33.288136 | 117 | 0.606228 | false |
wikimedia/pywikibot-core | tests/page_tests.py | 1 | 48914 | # -*- coding: utf-8 -*-
"""Tests for the page module."""
#
# (C) Pywikibot team, 2008-2020
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals
import pickle
import re
import pywikibot
import pywikibot.page
from pywikibot import config
from pywikibot import InvalidTitle
from pywikibot.tools import (
PY2,
StringTypes as basestring,
suppress_warnings,
UnicodeType as unicode,
)
from tests.aspects import (
unittest, TestCase, DefaultSiteTestCase, SiteAttributeTestCase,
DefaultDrySiteTestCase, DeprecationTestCase,
)
from tests import mock
EMPTY_TITLE_RE = r'Title must be specified and not empty if source is a Site\.'
INVALID_TITLE_RE = r'The link does not contain a page title'
NO_PAGE_RE = r"doesn't exist\."
class TestLinkObject(SiteAttributeTestCase):
"""Test cases for Link objects."""
sites = {
'enwiki': {
'family': 'wikipedia',
'code': 'en',
},
'frwiki': {
'family': 'wikipedia',
'code': 'fr',
},
'itwikt': {
'family': 'wiktionary',
'code': 'it',
},
'enws': {
'family': 'wikisource',
'code': 'en',
},
'itws': {
'family': 'wikisource',
'code': 'it',
},
}
cached = True
namespaces = {0: [''], # en.wikipedia.org namespaces for testing
1: ['Talk:'], # canonical form first, then others
2: ['User:'], # must end with :
3: ['User talk:', 'User_talk:'],
4: ['Wikipedia:', 'Project:', 'WP:'],
5: ['Wikipedia talk:', 'Project talk:', 'Wikipedia_talk:',
'Project_talk:', 'WT:'],
6: ['File:'],
7: ['Image talk:', 'Image_talk:'],
8: ['MediaWiki:'],
9: ['MediaWiki talk:', 'MediaWiki_talk:'],
10: ['Template:'],
11: ['Template talk:', 'Template_talk:'],
12: ['Help:'],
13: ['Help talk:', 'Help_talk:'],
14: ['Category:'],
15: ['Category talk:', 'Category_talk:'],
100: ['Portal:'],
101: ['Portal talk:', 'Portal_talk:'],
}
titles = {
# just a bunch of randomly selected titles
# input format : expected output format
'Cities in Burkina Faso': 'Cities in Burkina Faso',
'eastern Sayan': 'Eastern Sayan',
'The_Addams_Family_(pinball)': 'The Addams Family (pinball)',
'Hispanic (U.S. Census)': 'Hispanic (U.S. Census)',
'Stołpce': 'Stołpce',
'Nowy_Sącz': 'Nowy Sącz',
'battle of Węgierska Górka': 'Battle of Węgierska Górka',
}
# random bunch of possible section titles
sections = ['',
'#Phase_2',
'#History',
'#later life',
]
def testNamespaces(self):
"""Test that Link() normalizes namespace names."""
for num in self.namespaces:
for prefix in self.namespaces[num]:
link = pywikibot.page.Link(
prefix + list(self.titles.keys())[0], self.enwiki)
self.assertEqual(link.namespace, num)
# namespace prefixes are case-insensitive
lowered_link = pywikibot.page.Link(
prefix.lower() + list(self.titles.keys())[1], self.enwiki)
self.assertEqual(lowered_link.namespace, num)
def testTitles(self):
"""Test that Link() normalizes titles."""
for title in self.titles:
for num in (0, 1):
link = pywikibot.page.Link(self.namespaces[num][0] + title,
self.enwiki)
self.assertEqual(link.title, self.titles[title])
# prefixing name with ":" shouldn't change result
prefixed_link = pywikibot.page.Link(
':' + self.namespaces[num][0] + title, self.enwiki)
self.assertEqual(prefixed_link.title, self.titles[title])
def testHashCmp(self):
"""Test hash comparison."""
# All links point to en:wikipedia:Test
l1 = pywikibot.page.Link('Test', source=self.enwiki)
l2 = pywikibot.page.Link('en:Test', source=self.frwiki)
l3 = pywikibot.page.Link('wikipedia:en:Test', source=self.itwikt)
def assertHashCmp(link1, link2):
self.assertEqual(link1, link2)
self.assertEqual(hash(link1), hash(link2))
assertHashCmp(l1, l2)
assertHashCmp(l1, l3)
assertHashCmp(l2, l3)
# fr:wikipedia:Test
other = pywikibot.page.Link('Test', source=self.frwiki)
self.assertNotEqual(l1, other)
self.assertNotEqual(hash(l1), hash(other))
def test_ns_title(self):
"""Test that title is returned with correct namespace."""
l1 = pywikibot.page.Link('Indice:Test', source=self.itws)
self.assertEqual(l1.ns_title(), 'Index:Test')
self.assertEqual(l1.ns_title(onsite=self.enws), 'Index:Test')
# wikisource:it kept Autore as canonical name
l2 = pywikibot.page.Link('Autore:Albert Einstein', source=self.itws)
self.assertEqual(l2.ns_title(), 'Autore:Albert Einstein')
self.assertEqual(l2.ns_title(onsite=self.enws),
'Author:Albert Einstein')
# Translation namespace does not exist on wikisource:it
l3 = pywikibot.page.Link('Translation:Albert Einstein',
source=self.enws)
self.assertEqual(l3.ns_title(), 'Translation:Albert Einstein')
self.assertRaisesRegex(pywikibot.Error,
'No corresponding namespace found for '
'namespace Translation: on wikisource:it.',
l3.ns_title,
onsite=self.itws)
class TestPageObjectEnglish(TestCase):
"""Test Page Object using English Wikipedia."""
family = 'wikipedia'
code = 'en'
cached = True
def testGeneral(self):
"""Test general features of a page."""
site = self.get_site()
mainpage = self.get_mainpage()
maintalk = mainpage.toggleTalkPage()
family_name = (site.family.name + ':'
if pywikibot.config2.family != site.family.name
else '')
self.assertEqual(str(mainpage), '[[{}{}:{}]]'
.format(family_name, site.code,
mainpage.title()))
self.assertLess(mainpage, maintalk)
def testHelpTitle(self):
"""Test title() method options in Help namespace."""
site = self.get_site()
p1 = pywikibot.Page(site, 'Help:Test page#Testing')
ns_name = 'Help'
if site.namespaces[12][0] != ns_name:
ns_name = site.namespaces[12][0]
self.assertEqual(p1.title(),
ns_name + ':Test page#Testing')
self.assertEqual(p1.title(underscore=True),
ns_name + ':Test_page#Testing')
self.assertEqual(p1.title(with_ns=False),
'Test page#Testing')
self.assertEqual(p1.title(with_section=False),
ns_name + ':Test page')
self.assertEqual(p1.title(with_ns=False, with_section=False),
'Test page')
self.assertEqual(p1.title(as_url=True),
ns_name + '%3ATest_page%23Testing')
self.assertEqual(p1.title(as_link=True, insite=site),
'[[' + ns_name + ':Test page#Testing]]')
self.assertEqual(
p1.title(as_link=True, force_interwiki=True, insite=site),
'[[en:' + ns_name + ':Test page#Testing]]')
self.assertEqual(p1.title(as_link=True, textlink=True, insite=site),
p1.title(as_link=True, textlink=False, insite=site))
self.assertEqual(p1.title(as_link=True, with_ns=False, insite=site),
'[[' + ns_name + ':Test page#Testing|Test page]]')
self.assertEqual(p1.title(as_link=True, force_interwiki=True,
with_ns=False, insite=site),
'[[en:' + ns_name + ':Test page#Testing|Test page]]')
self.assertEqual(p1.title(as_link=True, textlink=True,
with_ns=False, insite=site),
p1.title(as_link=True, textlink=False,
with_ns=False, insite=site))
def testFileTitle(self):
"""Test title() method options in File namespace."""
# also test a page with non-ASCII chars and a different namespace
site = self.get_site()
p2 = pywikibot.Page(site, 'File:Jean-Léon Gérôme 003.jpg')
ns_name = 'File'
if site.namespaces[6][0] != ns_name:
ns_name = site.namespaces[6][0]
self.assertEqual(p2.title(),
'File:Jean-Léon Gérôme 003.jpg')
self.assertEqual(p2.title(underscore=True),
'File:Jean-Léon_Gérôme_003.jpg')
self.assertEqual(p2.title(with_ns=False),
'Jean-Léon Gérôme 003.jpg')
self.assertEqual(p2.title(with_section=False),
'File:Jean-Léon Gérôme 003.jpg')
self.assertEqual(p2.title(with_ns=False, with_section=False),
'Jean-Léon Gérôme 003.jpg')
self.assertEqual(p2.title(as_url=True),
'File%3AJean-L%C3%A9on_G%C3%A9r%C3%B4me_003.jpg')
self.assertEqual(p2.title(as_link=True, insite=site),
'[[File:Jean-Léon Gérôme 003.jpg]]')
self.assertEqual(
p2.title(as_link=True, force_interwiki=True, insite=site),
'[[en:File:Jean-Léon Gérôme 003.jpg]]')
self.assertEqual(p2.title(as_link=True, textlink=True, insite=site),
'[[:File:Jean-Léon Gérôme 003.jpg]]')
self.assertEqual(p2.title(as_filename=True),
'File_Jean-Léon_Gérôme_003.jpg')
self.assertEqual(
p2.title(as_link=True, with_ns=False, insite=site),
'[[File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]')
self.assertEqual(
p2.title(as_link=True, force_interwiki=True,
with_ns=False, insite=site),
'[[en:File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]')
self.assertEqual(
p2.title(as_link=True, textlink=True,
with_ns=False, insite=site),
'[[:File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]')
def testImageAndDataRepository(self):
"""Test image_repository and data_repository page attributes."""
site = self.get_site()
p1 = pywikibot.Page(site, 'Help:Test page#Testing')
self.assertIsInstance(p1.image_repository, pywikibot.site.APISite)
self.assertEqual(p1.image_repository,
pywikibot.site.APISite('commons', 'commons'))
p2 = pywikibot.Page(site, 'File:Jean-Léon Gérôme 003.jpg')
self.assertIsInstance(p2.data_repository, pywikibot.site.APISite)
self.assertEqual(p2.data_repository,
pywikibot.site.APISite('wikidata', 'wikidata'))
def test_creation(self):
"""Test Page.oldest_revision."""
mainpage = self.get_mainpage()
self.assertEqual(mainpage.oldest_revision.user, 'TwoOneTwo')
self.assertIsInstance(mainpage.oldest_revision.timestamp,
pywikibot.Timestamp)
class TestPageObject(DefaultSiteTestCase):
"""Test Page object."""
cached = True
def testSite(self):
"""Test site() method."""
mainpage = self.get_mainpage()
self.assertEqual(mainpage.site, self.site)
def testNamespace(self):
"""Test namespace() method."""
mainpage = self.get_mainpage()
maintalk = mainpage.toggleTalkPage()
if ':' not in mainpage.title():
self.assertEqual(mainpage.namespace(), 0)
self.assertEqual(maintalk.namespace(), mainpage.namespace() + 1)
badpage = self.get_missing_article()
self.assertEqual(badpage.namespace(), 0)
def testBasePageConstructor(self):
"""Test BasePage constructor."""
site = self.get_site()
# Should not raise an error as the constructor only requires
# the site parameter.
# Empty string or None as title raises error.
page = pywikibot.page.BasePage(site)
self.assertRaisesRegex(InvalidTitle, INVALID_TITLE_RE, page.title)
page = pywikibot.page.BasePage(site, title='')
self.assertRaisesRegex(InvalidTitle, INVALID_TITLE_RE, page.title)
self.assertRaisesRegex(ValueError, 'Title cannot be None.',
pywikibot.page.BasePage, site, title=None)
def testPageConstructor(self):
"""Test Page constructor."""
site = self.get_site()
mainpage = self.get_mainpage()
# Test that Page() needs a title when Site is used as source.
self.assertRaisesRegex(ValueError, EMPTY_TITLE_RE,
pywikibot.Page, site)
self.assertRaisesRegex(ValueError, EMPTY_TITLE_RE,
pywikibot.Page, site, '')
# Test Page as source.
p1 = pywikibot.Page(mainpage)
self.assertEqual(p1, mainpage)
# Test not valid source.
self.assertRaisesRegex(pywikibot.Error,
r"Invalid argument type '<\w* '\w*'>' in "
'Page initializer: dummy',
pywikibot.Page, 'dummy')
def testTitle(self):
"""Test title() method options in article namespace."""
# at last test article namespace
site = self.get_site()
p2 = pywikibot.Page(site, 'Test page')
self.assertEqual(p2.title(),
'Test page')
self.assertEqual(p2.title(underscore=True),
'Test_page')
self.assertEqual(p2.title(),
p2.title(with_ns=False))
self.assertEqual(p2.title(),
p2.title(with_section=False))
self.assertEqual(p2.title(as_url=True),
p2.title(underscore=True))
self.assertEqual(p2.title(as_link=True, insite=site),
'[[Test page]]')
self.assertEqual(p2.title(as_filename=True),
p2.title(underscore=True))
self.assertEqual(p2.title(underscore=True),
p2.title(underscore=True, with_ns=False))
self.assertEqual(p2.title(underscore=True),
p2.title(underscore=True, with_section=False))
self.assertEqual(p2.title(underscore=True, as_url=True),
p2.title(underscore=True))
self.assertEqual(p2.title(underscore=True, as_link=True, insite=site),
p2.title(as_link=True, insite=site))
self.assertEqual(p2.title(underscore=True, as_filename=True),
p2.title(underscore=True))
self.assertEqual(p2.title(),
p2.title(with_ns=False, with_section=False))
self.assertEqual(p2.title(as_url=True),
p2.title(with_ns=False, as_url=True))
self.assertEqual(p2.title(as_link=True, insite=site),
p2.title(with_ns=False, as_link=True, insite=site))
self.assertEqual(p2.title(as_filename=True),
p2.title(with_ns=False, as_filename=True))
self.assertEqual(p2.title(with_ns=False, as_link=True,
force_interwiki=True, insite=site),
'[[' + site.code + ':Test page|Test page]]')
title1 = 'Test Page (bracketed)'
title2 = 'Test Page (bracketed) (bracketed)'
self.assertEqual(
pywikibot.Page(site, title1).title(without_brackets=True),
'Test Page'
)
self.assertEqual(
pywikibot.Page(site, title2).title(without_brackets=True),
'Test Page (bracketed)'
)
def testSection(self):
"""Test section() method."""
# use same pages as in previous test
site = self.get_site()
p1 = pywikibot.Page(site, 'Help:Test page#Testing')
p2 = pywikibot.Page(site, 'File:Jean-Léon Gérôme 003.jpg')
self.assertEqual(p1.section(), 'Testing')
self.assertIsNone(p2.section())
def testIsTalkPage(self):
"""Test isTalkPage() method."""
site = self.get_site()
p1 = pywikibot.Page(site, 'First page')
p2 = pywikibot.Page(site, 'Talk:First page')
p3 = pywikibot.Page(site, 'User:Second page')
p4 = pywikibot.Page(site, 'User talk:Second page')
self.assertFalse(p1.isTalkPage())
self.assertTrue(p2.isTalkPage())
self.assertFalse(p3.isTalkPage())
self.assertTrue(p4.isTalkPage())
def testIsCategory(self):
"""Test is_categorypage method."""
site = self.get_site()
p1 = pywikibot.Page(site, 'First page')
p2 = pywikibot.Page(site, 'Category:Second page')
p3 = pywikibot.Page(site, 'Category talk:Second page')
self.assertEqual(p1.is_categorypage(), False)
self.assertEqual(p2.is_categorypage(), True)
self.assertEqual(p3.is_categorypage(), False)
def testIsFile(self):
"""Test C{Page.is_filepage} check."""
site = self.get_site()
p1 = pywikibot.Page(site, 'First page')
p2 = pywikibot.Page(site, 'File:Second page')
p3 = pywikibot.Page(site, 'Image talk:Second page')
self.assertEqual(p1.is_filepage(), False)
self.assertEqual(p2.is_filepage(), True)
self.assertEqual(p3.is_filepage(), False)
def testApiMethods(self):
"""Test various methods that rely on API."""
mainpage = self.get_mainpage()
# since there is no way to predict what data the wiki will return,
# we only check that the returned objects are of correct type.
self.assertIsInstance(mainpage.get(), unicode)
with suppress_warnings(
r'pywikibot\.page\.BasePage\.latestRevision '
r'is deprecated[\s\w]+; '
r'use latest_revision_id instead\.'):
self.assertIsInstance(mainpage.latestRevision(), int)
self.assertIsInstance(mainpage.userName(), unicode)
self.assertIsInstance(mainpage.isIpEdit(), bool)
self.assertIsInstance(mainpage.exists(), bool)
self.assertIsInstance(mainpage.isRedirectPage(), bool)
with suppress_warnings(
r'pywikibot\.page\.BasePage\.isEmpty is deprecated[\s\w]+; '
r'use interwiki\.page_empty_check\(page\) instead\.'):
self.assertIsInstance(mainpage.isEmpty(), bool)
self.assertIsInstance(mainpage.isDisambig(), bool)
self.assertIsInstance(mainpage.has_permission(), bool)
self.assertIsInstance(mainpage.botMayEdit(), bool)
self.assertIsInstance(mainpage.editTime(), pywikibot.Timestamp)
self.assertIsInstance(mainpage.permalink(), basestring)
def test_talk_page(self):
"""Test various methods that rely on API: talk page."""
mainpage = self.get_mainpage()
maintalk = mainpage.toggleTalkPage()
if not maintalk.exists():
self.skipTest("No talk page for {}'s main page"
.format(self.get_site()))
self.assertIsInstance(maintalk.get(get_redirect=True), unicode)
self.assertEqual(mainpage.toggleTalkPage(), maintalk)
self.assertEqual(maintalk.toggleTalkPage(), mainpage)
def test_bad_page(self):
"""Test various methods that rely on API: bad page."""
badpage = self.get_missing_article()
self.assertRaisesRegex(pywikibot.NoPage, NO_PAGE_RE, badpage.get)
def testIsDisambig(self):
"""Test the integration with Extension:Disambiguator."""
site = self.get_site()
if not site.has_extension('Disambiguator'):
self.skipTest('Disambiguator extension not loaded on test site')
pg = pywikibot.Page(site, 'Random')
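        # Fake the page props to cover both the flagged and the unflagged
        # case without relying on live wiki content.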
pg._pageprops = {'disambiguation', ''}
self.assertTrue(pg.isDisambig())
pg._pageprops = set()
self.assertFalse(pg.isDisambig())
def testReferences(self):
"""Test references to a page."""
mainpage = self.get_mainpage()
count = 0
# Ignore redirects for time considerations
for p in mainpage.getReferences(follow_redirects=False):
count += 1
self.assertIsInstance(p, pywikibot.Page)
if count >= 10:
break
count = 0
for p in mainpage.backlinks(follow_redirects=False):
count += 1
self.assertIsInstance(p, pywikibot.Page)
if count >= 10:
break
count = 0
for p in mainpage.embeddedin():
count += 1
self.assertIsInstance(p, pywikibot.Page)
if count >= 10:
break
def testLinks(self):
"""Test the different types of links from a page."""
if self.site.family.name in ('wpbeta', 'wsbeta'):
self.skipTest('Test fails on betawiki; T69931; T160308')
mainpage = self.get_mainpage()
for p in mainpage.linkedPages():
self.assertIsInstance(p, pywikibot.Page)
iw = list(mainpage.interwiki(expand=True))
for p in iw:
self.assertIsInstance(p, pywikibot.Link)
for p2 in mainpage.interwiki(expand=False):
self.assertIsInstance(p2, pywikibot.Link)
self.assertIn(p2, iw)
for p in mainpage.langlinks():
self.assertIsInstance(p, pywikibot.Link)
for p in mainpage.imagelinks():
self.assertIsInstance(p, pywikibot.FilePage)
for p in mainpage.templates():
self.assertIsInstance(p, pywikibot.Page)
for t, params in mainpage.templatesWithParams():
self.assertIsInstance(t, pywikibot.Page)
self.assertIsInstance(params, list)
for p in mainpage.categories():
self.assertIsInstance(p, pywikibot.Category)
for p in mainpage.extlinks():
self.assertIsInstance(p, unicode)
def testPickleAbility(self):
"""Test the ability to pickle the page."""
mainpage = self.get_mainpage()
mainpage_str = pickle.dumps(mainpage, protocol=config.pickle_protocol)
mainpage_unpickled = pickle.loads(mainpage_str)
self.assertEqual(mainpage, mainpage_unpickled)
def test_redirect(self):
"""Test that the redirect option is set correctly."""
site = self.get_site()
for page in site.allpages(filterredir=True, total=1):
break
else:
self.skipTest('No redirect pages on site {0!r}'.format(site))
# This page is already initialised
self.assertTrue(hasattr(page, '_isredir'))
# call api.update_page without prop=info
del page._isredir
page.isDisambig()
self.assertTrue(page.isRedirectPage())
page_copy = pywikibot.Page(site, page.title())
self.assertFalse(hasattr(page_copy, '_isredir'))
page_copy.isDisambig()
self.assertTrue(page_copy.isRedirectPage())
def test_depth(self):
"""Test page depth calculation."""
site = self.get_site()
page_d0 = pywikibot.Page(site, '/home/test/')
if site.namespaces[0].subpages:
self.assertEqual(page_d0.depth, 3)
else:
self.assertEqual(page_d0.depth, 0)
page_user_d0 = pywikibot.Page(site, 'User:Sn1per')
self.assertEqual(page_user_d0.depth, 0)
page_d3 = pywikibot.Page(site, 'User:Sn1per/ProtectTest1/test/test')
self.assertEqual(page_d3.depth, 3)
def test_page_image(self):
"""
Test C{Page.page_image} function.
        Since we are not sure what the wiki will return, we mainly test types.
"""
site = self.get_site()
mainpage = self.get_mainpage()
image = pywikibot.FilePage(site, 'File:Jean-Léon Gérôme 003.jpg')
if site.mw_version < '1.20':
self.assertRaises(NotImplementedError, mainpage.page_image)
elif site.has_extension('PageImages'):
mainpage_image = mainpage.page_image()
if mainpage_image is not None:
self.assertIsInstance(mainpage_image, pywikibot.FilePage)
# for file pages, the API should return the file itself
self.assertEqual(image.page_image(), image)
else:
self.assertRaisesRegex(pywikibot.UnknownExtension,
'Method "loadpageimage" is not implemented '
'without the extension PageImages',
mainpage.page_image)
class TestPageCoordinates(TestCase):
"""Test Page Object using German Wikipedia."""
family = 'wikipedia'
code = 'de'
cached = True
def test_coordinates(self):
"""Test C{Page.coodinates} method."""
page = pywikibot.Page(self.site, 'Berlin')
with self.subTest(primary_only=False):
coords = page.coordinates()
self.assertIsInstance(coords, list)
for coord in coords:
self.assertIsInstance(coord, pywikibot.Coordinate)
self.assertIsInstance(coord.primary, bool)
with self.subTest(primary_only=True):
coord = page.coordinates(primary_only=True)
self.assertIsInstance(coord, pywikibot.Coordinate)
self.assertTrue(coord.primary)
class TestPageDeprecation(DefaultSiteTestCase, DeprecationTestCase):
"""Test deprecation of Page attributes."""
def test_creator(self):
"""Test getCreator."""
mainpage = self.get_mainpage()
creator = mainpage.getCreator()
self.assertEqual(creator,
(mainpage.oldest_revision.user,
mainpage.oldest_revision.timestamp.isoformat()))
self.assertIsInstance(creator[0], unicode)
self.assertIsInstance(creator[1], unicode)
self._ignore_unknown_warning_packages = True # T163175
self.assertDeprecation()
self._reset_messages()
if self.site.mw_version >= '1.16':
self.assertIsInstance(mainpage.previous_revision_id, int)
self.assertEqual(mainpage.previous_revision_id,
mainpage.latest_revision.parent_id)
self.assertDeprecation()
class TestPageBaseUnicode(DefaultDrySiteTestCase):
"""Base class for tests requiring a page using a unicode title."""
@classmethod
def setUpClass(cls):
"""Initialize page instance."""
super(TestPageBaseUnicode, cls).setUpClass()
cls.page = pywikibot.Page(cls.site, 'Ō')
class TestPageGetFileHistory(DefaultDrySiteTestCase):
"""Test the get_file_history method of the FilePage class."""
def test_get_file_history_cache(self):
"""Test the cache mechanism of get_file_history."""
with mock.patch.object(self.site, 'loadimageinfo', autospec=True):
page = pywikibot.FilePage(self.site, 'File:Foo.jpg')
_file_revisions = page.get_file_history()
# On the first call the history is loaded via API
self.assertEqual(_file_revisions, {})
# Fill the cache
_file_revisions['foo'] = 'bar'
# On the second call page._file_revisions is returned
self.assertEqual(page.get_file_history(), {'foo': 'bar'})
self.site.loadimageinfo.assert_called_once_with(page, history=True)
class TestFilePage(DefaultSiteTestCase):
"""Test methods of the FilePage class."""
family = 'commons'
code = 'commons'
cached = True
def test_globalusage(self, key):
"""Test globalusage generator."""
page = pywikibot.FilePage(self.site, 'File:Example.jpg')
gen = page.globalusage(total=3)
pages = list(gen)
self.assertLength(pages, 3)
        self.assertTrue(all(isinstance(p, pywikibot.Page) for p in pages))
self.assertTrue(any(p.site != self.site for p in pages))
class TestPageRepr(TestPageBaseUnicode):
"""Test for Page's repr implementation."""
def setUp(self):
"""Force the console encoding to UTF-8."""
super(TestPageRepr, self).setUp()
self._old_encoding = config.console_encoding
config.console_encoding = 'utf8'
def tearDown(self):
"""Restore the original console encoding."""
config.console_encoding = self._old_encoding
super(TestPageRepr, self).tearDown()
def test_mainpage_type(self):
"""Test the return type of repr(Page(<main page>)) is str."""
mainpage = self.get_mainpage()
self.assertIsInstance(repr(mainpage), str)
def test_unicode_type(self):
"""Test the return type of repr(Page('<non-ascii>')) is str."""
page = pywikibot.Page(self.get_site(), 'Ō')
self.assertIsInstance(repr(page), str)
@unittest.skipIf(not PY2, 'Python 2 specific test')
def test_unicode_value(self):
"""Test repr(Page('<non-ascii>')) is represented simply as utf8."""
page = pywikibot.Page(self.get_site(), 'Ō')
self.assertEqual(repr(page), b'Page(\xc5\x8c)')
@unittest.skipIf(not PY2, 'Python 2 specific test')
def test_unicode_percent_r_failure(self):
"""Test '{x!r}'.format() raises exception for non-ASCII Page."""
# This raises an exception on Python 2, but passes on Python 3
page = pywikibot.Page(self.get_site(), 'Ō')
self.assertRaisesRegex(UnicodeDecodeError, '', unicode.format,
'{0!r}', page)
@unittest.skipIf(PY2, 'Python 3+ specific test')
def test_unicode_value_py3(self):
"""Test to capture actual Python 3 result pre unicode_literals."""
self.assertEqual(repr(self.page), "Page('Ō')")
self.assertEqual('%r' % self.page, "Page('Ō')")
self.assertEqual('{0!r}'.format(self.page), "Page('Ō')")
@unittest.skipIf(not PY2, 'Python 2 specific test')
@unittest.expectedFailure
def test_ASCII_compatible(self):
"""Test that repr returns ASCII compatible bytes in Python 2."""
page = pywikibot.Page(self.site, 'ä')
# Bug T95809, the repr in Python 2 should be decodable as ASCII
repr(page).decode('ascii')
class TestPageReprASCII(TestPageBaseUnicode):
"""Test for Page's repr implementation when using ASCII encoding."""
def setUp(self):
"""Patch the current console encoding to ASCII."""
super(TestPageReprASCII, self).setUp()
self._old_encoding = config.console_encoding
config.console_encoding = 'ascii'
def tearDown(self):
"""Restore the original console encoding."""
config.console_encoding = self._old_encoding
super(TestPageReprASCII, self).tearDown()
@unittest.skipIf(not PY2, 'Python 2 specific test')
def test_incapable_encoding(self):
"""Test that repr works even if console encoding does not."""
self.assertEqual(repr(self.page), b'Page(\\u014c)')
class TestPageBotMayEdit(TestCase):
"""Test Page.botMayEdit() method."""
family = 'wikipedia'
code = 'en'
cached = True
user = True
@mock.patch.object(config, 'ignore_bot_templates', False)
def test_bot_may_edit_general(self):
"""Test that bot is allowed to edit."""
site = self.get_site()
user = site.user()
page = pywikibot.Page(site, 'not_existent_page_for_pywikibot_tests')
if page.exists():
self.skipTest(
'Page {} exists! Change page name in tests/page_tests.py'
.format(page.title()))
# Ban all compliant bots (shortcut).
page.text = '{{nobots}}'
page._templates = [pywikibot.Page(site, 'Template:Nobots')]
self.assertFalse(page.botMayEdit())
# Ban all compliant bots not in the list, syntax for de wp.
page.text = '{{nobots|HagermanBot,Werdnabot}}'
self.assertTrue(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Ban all compliant bots not in the list, syntax for de wp.
page.text = '{{nobots|%s, HagermanBot,Werdnabot}}' % user
self.assertFalse(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Ban all bots, syntax for de wp.
page.text = '{{nobots|all}}'
self.assertFalse(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Allow all bots (shortcut).
page.text = '{{bots}}'
page._templates = [pywikibot.Page(site, 'Template:Bots')]
self.assertTrue(page.botMayEdit())
# Ban all compliant bots not in the list.
page.text = '{{bots|allow=HagermanBot,Werdnabot}}'
self.assertFalse(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Ban all compliant bots in the list.
page.text = '{{bots|deny=HagermanBot,Werdnabot}}'
self.assertTrue(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Ban all compliant bots not in the list.
page.text = '{{bots|allow=%s, HagermanBot}}' % user
self.assertTrue(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Ban all compliant bots in the list.
page.text = '{{bots|deny=%s, HagermanBot}}' % user
self.assertFalse(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Allow all bots.
page.text = '{{bots|allow=all}}'
self.assertTrue(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Ban all compliant bots.
page.text = '{{bots|allow=none}}'
self.assertFalse(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Ban all compliant bots.
page.text = '{{bots|deny=all}}'
self.assertFalse(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Allow all bots.
page.text = '{{bots|deny=none}}'
self.assertTrue(page.botMayEdit(),
'{}: {} but user={}'
.format(page.text, page.botMayEdit(), user))
# Ban all users including bots.
page.text = '{{in use}}'
page._templates = [pywikibot.Page(site, 'Template:In use')]
self.assertFalse(page.botMayEdit())
class TestPageHistory(DefaultSiteTestCase):
"""Test history related functionality."""
cached = True
def test_revisions(self):
"""Test Page.revisions()."""
mp = self.get_mainpage()
revs = mp.revisions()
revs = iter(revs) # implicit assertion
revs = list(revs)
self.assertGreater(len(revs), 1)
def test_contributors(self):
"""Test Page.contributors()."""
mp = self.get_mainpage()
cnt = mp.contributors()
self.assertIsInstance(cnt, dict)
self.assertGreater(len(cnt), 1)
def test_revision_count(self):
"""Test Page.edit_count()."""
mp = self.get_mainpage()
rev_count = len(list(mp.revisions()))
self.assertEqual(rev_count, mp.revision_count())
cnt = mp.contributors()
self.assertEqual(rev_count, sum(cnt.values()))
top_two = cnt.most_common(2)
self.assertIsInstance(top_two, list)
self.assertLength(top_two, 2)
self.assertIsInstance(top_two[0], tuple)
self.assertIsInstance(top_two[0][0], basestring)
self.assertIsInstance(top_two[0][1], int)
top_two_usernames = {top_two[0][0], top_two[1][0]}
self.assertLength(top_two_usernames, 2)
top_two_counts = ([top_two[0][1], top_two[1][1]])
top_two_edit_count = mp.revision_count(top_two_usernames)
self.assertIsInstance(top_two_edit_count, int)
self.assertEqual(top_two_edit_count, sum(top_two_counts))
class TestPageRedirects(TestCase):
"""
Test redirects.
This is using the pages 'User:Legoktm/R1', 'User:Legoktm/R2' and
'User:Legoktm/R3' on the English Wikipedia. 'R1' is redirecting to 'R2',
'R2' is a normal page and 'R3' does not exist.
"""
family = 'wikipedia'
code = 'en'
cached = True
def testIsRedirect(self):
"""Test C{Page.isRedirectPage()} and C{Page.getRedirectTarget}."""
site = self.get_site()
p1 = pywikibot.Page(site, 'User:Legoktm/R1')
p2 = pywikibot.Page(site, 'User:Legoktm/R2')
self.assertTrue(p1.isRedirectPage())
p3 = p1.getRedirectTarget()
self.assertEqual(p3, p2)
self.assertIsInstance(p3, pywikibot.User)
def testPageGet(self):
"""Test C{Page.get()} on different types of pages."""
site = self.get_site()
p1 = pywikibot.Page(site, 'User:Legoktm/R2')
p2 = pywikibot.Page(site, 'User:Legoktm/R1')
p3 = pywikibot.Page(site, 'User:Legoktm/R3')
text = ('This page is used in the [[mw:Manual:Pywikipediabot]] '
'testing suite.')
self.assertEqual(p1.get(), text)
self.assertRaisesRegex(pywikibot.exceptions.IsRedirectPage,
r'{0} is a redirect page\.'
.format(re.escape(str(p2))), p2.get)
self.assertRaisesRegex(
pywikibot.exceptions.NoPage, NO_PAGE_RE, p3.get)
def test_set_redirect_target(self):
"""Test set_redirect_target method."""
# R1 redirects to R2 and R3 doesn't exist.
site = self.get_site()
p1 = pywikibot.Page(site, 'User:Legoktm/R2')
p2 = pywikibot.Page(site, 'User:Legoktm/R1')
p3 = pywikibot.Page(site, 'User:Legoktm/R3')
text = p2.get(get_redirect=True)
self.assertRaisesRegex(pywikibot.exceptions.IsNotRedirectPage,
r'{0} is not a redirect page\.'
.format(re.escape(str(p1))),
p1.set_redirect_target, p2)
self.assertRaisesRegex(pywikibot.exceptions.NoPage, NO_PAGE_RE,
p3.set_redirect_target, p2)
p2.set_redirect_target(p1, save=False)
self.assertEqual(text, p2.get(get_redirect=True))
class TestPageUserAction(DefaultSiteTestCase):
"""Test page user actions."""
user = True
def test_purge(self):
"""Test purging the mainpage."""
mainpage = self.get_mainpage()
self.assertIsInstance(mainpage.purge(), bool)
self.assertEqual(mainpage.purge(),
mainpage.purge(forcelinkupdate=None))
def test_watch(self):
"""Test Page.watch, with and without unwatch enabled."""
# Note: this test uses the userpage, so that it is unwatched and
# therefore is not listed by script_tests test_watchlist_simulate.
userpage = self.get_userpage()
rv = userpage.watch()
self.assertIsInstance(rv, bool)
self.assertTrue(rv)
rv = userpage.watch(unwatch=True)
self.assertIsInstance(rv, bool)
self.assertTrue(rv)
class TestPageDelete(TestCase):
"""Test page delete / undelete actions."""
family = 'wikipedia'
code = 'test'
write = True
sysop = True
def test_delete(self):
"""Test the site.delete and site.undelete method."""
site = self.get_site()
p = pywikibot.Page(site, 'User:Unicodesnowman/DeleteTest')
# Ensure the page exists
p.text = 'pywikibot unit test page'
p.save('#redirect[[unit test]]', botflag=True)
self.assertEqual(p.isRedirectPage(), True)
# Test deletion
p.delete(reason='pywikibot unit test', prompt=False, mark=False)
self.assertEqual(p._pageid, 0)
self.assertEqual(p.isRedirectPage(), False)
self.assertRaisesRegex(pywikibot.NoPage,
NO_PAGE_RE, p.get, force=True)
# Test undeleting last two revisions
del_revs = list(p.loadDeletedRevisions())
revid = p.getDeletedRevision(del_revs[-1])['revid']
p.markDeletedRevision(del_revs[-1])
p.markDeletedRevision(del_revs[-2])
self.assertRaisesRegex(ValueError, 'is not a deleted revision',
p.markDeletedRevision, 123)
p.undelete(reason='pywikibot unit test')
revs = list(p.revisions())
self.assertLength(revs, 2)
self.assertEqual(revs[1].revid, revid)
class TestApplicablePageProtections(TestCase):
"""Test applicable restriction types."""
family = 'wikipedia'
code = 'test'
def test_applicable_protections(self):
"""Test Page.applicable_protections."""
site = self.get_site()
p1 = pywikibot.Page(site, 'User:Unicodesnowman/NonexistentPage')
p2 = pywikibot.Page(site, 'User:Unicodesnowman/ProtectTest')
p3 = pywikibot.Page(site, 'File:Wiki.png')
# from the API, since 1.25wmf14
pp1 = p1.applicable_protections()
pp2 = p2.applicable_protections()
pp3 = p3.applicable_protections()
self.assertEqual(pp1, {'create'})
self.assertIn('edit', pp2)
self.assertNotIn('create', pp2)
self.assertNotIn('upload', pp2)
self.assertIn('upload', pp3)
        # inferred locally for MediaWiki < 1.25wmf14
        # (simulated below by faking the site version)
site.version = lambda: '1.24'
self.assertEqual(pp1, p1.applicable_protections())
self.assertEqual(pp2, p2.applicable_protections())
self.assertEqual(pp3, p3.applicable_protections())
class TestPageProtect(TestCase):
"""Test page protect / unprotect actions."""
family = 'wikipedia'
code = 'test'
write = True
sysop = True
def test_protect(self):
"""Test Page.protect."""
site = self.get_site()
p1 = pywikibot.Page(site, 'User:Unicodesnowman/ProtectTest')
p1.protect(protections={'edit': 'sysop', 'move': 'autoconfirmed'},
reason='Pywikibot unit test')
self.assertEqual(p1.protection(),
{'edit': ('sysop', 'infinity'),
'move': ('autoconfirmed', 'infinity')})
p1.protect(protections={'edit': '', 'move': ''},
reason='Pywikibot unit test')
self.assertEqual(p1.protection(), {})
def test_protect_alt(self):
"""Test of Page.protect that works around T78522."""
site = self.get_site()
p1 = pywikibot.Page(site, 'User:Unicodesnowman/ProtectTest')
p1.protect(protections={'edit': 'sysop', 'move': 'autoconfirmed'},
reason='Pywikibot unit test')
self.assertEqual(p1.protection(),
{'edit': ('sysop', 'infinity'),
'move': ('autoconfirmed', 'infinity')})
# workaround
p1 = pywikibot.Page(site, 'User:Unicodesnowman/ProtectTest')
p1.protect(protections={'edit': '', 'move': ''},
reason='Pywikibot unit test')
self.assertEqual(p1.protection(), {})
class HtmlEntity(TestCase):
"""Test that HTML entities are correctly decoded."""
net = False
def test_no_entities(self):
"""Test that text is left unchanged."""
self.assertEqual(pywikibot.page.html2unicode('foobar'), 'foobar')
self.assertEqual(pywikibot.page.html2unicode(' '), ' ')
def test_valid_entities(self):
"""Test valid entities."""
        self.assertEqual(pywikibot.page.html2unicode('A&amp;O'), 'A&O')
        self.assertEqual(pywikibot.page.html2unicode('&#112;y'), 'py')
        self.assertEqual(pywikibot.page.html2unicode('&#x10000;'),
                         '\U00010000')
        self.assertEqual(pywikibot.page.html2unicode('p&amp;y'),
                         'p&y')
        self.assertEqual(pywikibot.page.html2unicode('&#8364;'), '€')
def test_ignore_entities(self):
"""Test ignore entities."""
        self.assertEqual(pywikibot.page.html2unicode('A&amp;O', [38]),
                         'A&amp;O')
        self.assertEqual(pywikibot.page.html2unicode('A&#38;O', [38]),
                         'A&#38;O')
        self.assertEqual(pywikibot.page.html2unicode('A&#x26;O', [38]),
                         'A&#x26;O')
        self.assertEqual(pywikibot.page.html2unicode('A&#38;O', [37]), 'A&O')
        self.assertEqual(pywikibot.page.html2unicode('&#128;', [128]),
                         '&#128;')
        self.assertEqual(pywikibot.page.html2unicode('&#8364;', [8364]),
                         '&#8364;')
        self.assertEqual(pywikibot.page.html2unicode(''),
                         '')
def test_recursive_entities(self):
"""Test recursive entities."""
        self.assertEqual(pywikibot.page.html2unicode('A&amp;amp;O'), 'A&amp;O')
def test_invalid_entities(self):
"""Test texts with invalid entities."""
        self.assertEqual(pywikibot.page.html2unicode('A&notaname;O'),
                         'A&notaname;O')
        self.assertEqual(pywikibot.page.html2unicode('A&#7f;O'), 'A&#7f;O')
        self.assertEqual(pywikibot.page.html2unicode('&#7f'), '&#7f')
        self.assertEqual(pywikibot.page.html2unicode('py'), 'py')
class TestPermalink(DefaultSiteTestCase):
"""Test that permalink links are correct."""
family = 'wikipedia'
code = 'test'
def test_permalink(self):
"""Test permalink function."""
site = self.get_site()
p1 = pywikibot.Page(site, 'User:Framawiki/pwb_tests/permalink')
self.assertEqual(p1.permalink(),
'//test.wikipedia.org/w/index.php?title=User%3A'
'Framawiki%2Fpwb_tests%2Fpermalink&oldid=340685')
self.assertEqual(p1.permalink(oldid='340684'),
'//test.wikipedia.org/w/index.php?title=User%3A'
'Framawiki%2Fpwb_tests%2Fpermalink&oldid=340684')
self.assertEqual(p1.permalink(percent_encoded=False),
'//test.wikipedia.org/w/index.php?title=User:'
'Framawiki/pwb_tests/permalink&oldid=340685')
self.assertEqual(p1.permalink(with_protocol=True),
'https://test.wikipedia.org/w/index.php?title=User%3A'
'Framawiki%2Fpwb_tests%2Fpermalink&oldid=340685')
class TestShortLink(DefaultSiteTestCase):
"""Test that short link management is correct."""
user = True
family = 'wikipedia'
code = 'test'
def test_create_short_link(self):
"""Test create_short_link function."""
site = self.get_site()
p1 = pywikibot.Page(site, 'User:Framawiki/pwb_tests/shortlink')
with self.subTest(parameters='defaulted'):
self.assertEqual(p1.create_short_link(), 'https://w.wiki/3Cy')
with self.subTest(with_protocol=True):
self.assertEqual(p1.create_short_link(with_protocol=True),
'https://w.wiki/3Cy')
with self.subTest(permalink=True):
self.assertEqual(p1.create_short_link(permalink=True,
with_protocol=False),
'w.wiki/3Cz')
if __name__ == '__main__': # pragma: no cover
try:
unittest.main()
except SystemExit:
pass
| mit | 7,108,309,833,884,328,000 | 38.609895 | 79 | 0.575544 | false |
vzer/ToughRADIUS | toughradius/console/admin/billing.py | 5 | 3696 | #!/usr/bin/env python
#coding=utf-8
from bottle import Bottle
from bottle import request
from bottle import response
from bottle import redirect
from bottle import run as runserver
from bottle import static_file
from bottle import abort
from hashlib import md5
from tablib import Dataset
from toughradius.console.base import *
from toughradius.console.libs import utils
from toughradius.console.websock import websock
from toughradius.console import models
import bottle
from toughradius.console.admin import forms
import decimal
import datetime
__prefix__ = "/billing"
decimal.getcontext().prec = 11
decimal.getcontext().rounding = decimal.ROUND_UP
app = Bottle()
app.config['__prefix__'] = __prefix__
###############################################################################
# billing log query
###############################################################################
@app.route('/', apply=auth_opr, method=['GET', 'POST'])
@app.post('/export', apply=auth_opr)
def billing_query(db, render):
node_id = request.params.get('node_id')
account_number = request.params.get('account_number')
query_begin_time = request.params.get('query_begin_time')
query_end_time = request.params.get('query_end_time')
opr_nodes = get_opr_nodes(db)
_query = db.query(
models.SlcRadBilling,
models.SlcMember.node_id,
models.SlcNode.node_name
).filter(
models.SlcRadBilling.account_number == models.SlcRadAccount.account_number,
models.SlcMember.member_id == models.SlcRadAccount.member_id,
models.SlcNode.id == models.SlcMember.node_id
)
if node_id:
_query = _query.filter(models.SlcMember.node_id == node_id)
else:
_query = _query.filter(models.SlcMember.node_id.in_(i.id for i in opr_nodes))
if account_number:
_query = _query.filter(models.SlcRadBilling.account_number.like('%' + account_number + '%'))
if query_begin_time:
_query = _query.filter(models.SlcRadBilling.create_time >= query_begin_time + ' 00:00:00')
if query_end_time:
_query = _query.filter(models.SlcRadBilling.create_time <= query_end_time + ' 23:59:59')
_query = _query.order_by(models.SlcRadBilling.create_time.desc())
if request.path == '/':
return render("bus_billing_list",
node_list=opr_nodes,
page_data=get_page_data(_query), **request.params)
elif request.path == '/export':
data = Dataset()
data.append((
u'区域', u'上网账号', u'BAS地址', u'会话编号', u'记账开始时间', u'会话时长',
u'已扣时长', u"已扣流量", u'应扣费用', u'实扣费用', u'剩余余额',
u'剩余时长', u'剩余流量', u'是否扣费', u'扣费时间'
))
_f2y = utils.fen2yuan
_fms = utils.fmt_second
_k2m = utils.kb2mb
_s2h = utils.sec2hour
for i, _, _node_name in _query:
data.append((
_node_name, i.account_number, i.nas_addr, i.acct_session_id,
i.acct_start_time, _fms(i.acct_session_time), _fms(i.acct_times), _k2m(i.acct_flows),
_f2y(i.acct_fee), _f2y(i.actual_fee), _f2y(i.balance),
_s2h(i.time_length), _k2m(i.flow_length),
(i.is_deduct == 0 and u'否' or u'是'), i.create_time
))
name = u"RADIUS-BILLING-" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S") + ".xls"
return export_file(name, data)
permit.add_route("/billing", u"用户计费查询", u"营业管理", is_menu=True, order=4)
permit.add_route("/billing/export", u"用户计费导出", u"营业管理", order=4.01) | agpl-3.0 | -6,135,470,895,693,383,000 | 38.3 | 101 | 0.60379 | false |
2015fallproject/2015fallcase1 | static/Brython3.2.0-20150701-214155/Lib/test/unittests/test_importlib/import_/test_packages.py | 34 | 4661 | from .. import util
from . import util as import_util
import sys
import unittest
import importlib
from test import support
class ParentModuleTests(unittest.TestCase):
"""Importing a submodule should import the parent modules."""
def test_import_parent(self):
with util.mock_modules('pkg.__init__', 'pkg.module') as mock:
with util.import_state(meta_path=[mock]):
module = import_util.import_('pkg.module')
self.assertIn('pkg', sys.modules)
def test_bad_parent(self):
with util.mock_modules('pkg.module') as mock:
with util.import_state(meta_path=[mock]):
with self.assertRaises(ImportError) as cm:
import_util.import_('pkg.module')
self.assertEqual(cm.exception.name, 'pkg')
def test_raising_parent_after_importing_child(self):
def __init__():
import pkg.module
1/0
mock = util.mock_modules('pkg.__init__', 'pkg.module',
module_code={'pkg': __init__})
with mock:
with util.import_state(meta_path=[mock]):
with self.assertRaises(ZeroDivisionError):
import_util.import_('pkg')
self.assertNotIn('pkg', sys.modules)
self.assertIn('pkg.module', sys.modules)
with self.assertRaises(ZeroDivisionError):
import_util.import_('pkg.module')
self.assertNotIn('pkg', sys.modules)
self.assertIn('pkg.module', sys.modules)
def test_raising_parent_after_relative_importing_child(self):
def __init__():
from . import module
1/0
mock = util.mock_modules('pkg.__init__', 'pkg.module',
module_code={'pkg': __init__})
with mock:
with util.import_state(meta_path=[mock]):
with self.assertRaises((ZeroDivisionError, ImportError)):
# This raises ImportError on the "from . import module"
# line, not sure why.
import_util.import_('pkg')
self.assertNotIn('pkg', sys.modules)
with self.assertRaises((ZeroDivisionError, ImportError)):
import_util.import_('pkg.module')
self.assertNotIn('pkg', sys.modules)
# XXX False
#self.assertIn('pkg.module', sys.modules)
def test_raising_parent_after_double_relative_importing_child(self):
def __init__():
from ..subpkg import module
1/0
mock = util.mock_modules('pkg.__init__', 'pkg.subpkg.__init__',
'pkg.subpkg.module',
module_code={'pkg.subpkg': __init__})
with mock:
with util.import_state(meta_path=[mock]):
with self.assertRaises((ZeroDivisionError, ImportError)):
# This raises ImportError on the "from ..subpkg import module"
# line, not sure why.
import_util.import_('pkg.subpkg')
self.assertNotIn('pkg.subpkg', sys.modules)
with self.assertRaises((ZeroDivisionError, ImportError)):
import_util.import_('pkg.subpkg.module')
self.assertNotIn('pkg.subpkg', sys.modules)
# XXX False
#self.assertIn('pkg.subpkg.module', sys.modules)
def test_module_not_package(self):
# Try to import a submodule from a non-package should raise ImportError.
assert not hasattr(sys, '__path__')
with self.assertRaises(ImportError) as cm:
import_util.import_('sys.no_submodules_here')
self.assertEqual(cm.exception.name, 'sys.no_submodules_here')
def test_module_not_package_but_side_effects(self):
# If a module injects something into sys.modules as a side-effect, then
# pick up on that fact.
name = 'mod'
subname = name + '.b'
def module_injection():
sys.modules[subname] = 'total bunk'
mock_modules = util.mock_modules('mod',
module_code={'mod': module_injection})
with mock_modules as mock:
with util.import_state(meta_path=[mock]):
try:
submodule = import_util.import_(subname)
finally:
support.unload(subname)
def test_main():
from test.support import run_unittest
run_unittest(ParentModuleTests)
if __name__ == '__main__':
test_main()
| agpl-3.0 | -7,042,494,867,377,825,000 | 40.616071 | 82 | 0.549024 | false |
Metaswitch/horizon | openstack_dashboard/dashboards/identity/users/urls.py | 57 | 1437 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.identity.users import views
VIEWS_MOD = 'openstack_dashboard.dashboards.identity.users.views'
urlpatterns = patterns(
VIEWS_MOD,
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<user_id>[^/]+)/update/$',
views.UpdateView.as_view(), name='update'),
url(r'^create/$', views.CreateView.as_view(), name='create'),
url(r'^(?P<user_id>[^/]+)/detail/$',
views.DetailView.as_view(), name='detail'),
url(r'^(?P<user_id>[^/]+)/change_password/$',
views.ChangePasswordView.as_view(), name='change_password'))
| apache-2.0 | 3,202,109,300,586,223,600 | 37.837838 | 78 | 0.699374 | false |
Jeff-Tian/mybnb | Python27/Lib/lib-tk/FixTk.py | 2 | 3099 | import sys, os
# Delay import _tkinter until we have set TCL_LIBRARY,
# so that Tcl_FindExecutable has a chance to locate its
# encoding directory.
# Unfortunately, we cannot know the TCL_LIBRARY directory
# if we don't know the tcl version, which we cannot find out
# without import Tcl. Fortunately, Tcl will itself look in
# <TCL_LIBRARY>\..\tcl<TCL_VERSION>, so anything close to
# the real Tcl library will do.
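#
# Illustrative end state (paths are examples only, not taken from this module):
# after the search below, the environment may end up with entries such as
#   TCL_LIBRARY = C:\Python27\tcl\tcl8.5
#   TK_LIBRARY  = C:\Python27\tcl\tk8.5
#   TIX_LIBRARY = C:\Python27\tcl\tix8.4.3
# which lets Tcl_FindExecutable locate its encoding directory.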
# Expand symbolic links on Vista
try:
import ctypes
ctypes.windll.kernel32.GetFinalPathNameByHandleW
except (ImportError, AttributeError):
def convert_path(s):
return s
else:
def convert_path(s):
assert isinstance(s, str) # sys.prefix contains only bytes
udir = s.decode("mbcs")
hdir = ctypes.windll.kernel32.\
CreateFileW(udir, 0x80, # FILE_READ_ATTRIBUTES
1, # FILE_SHARE_READ
None, 3, # OPEN_EXISTING
0x02000000, # FILE_FLAG_BACKUP_SEMANTICS
None)
if hdir == -1:
# Cannot open directory, give up
return s
buf = ctypes.create_unicode_buffer(u"", 32768)
res = ctypes.windll.kernel32.\
GetFinalPathNameByHandleW(hdir, buf, len(buf),
0) # VOLUME_NAME_DOS
ctypes.windll.kernel32.CloseHandle(hdir)
if res == 0:
# Conversion failed (e.g. network location)
return s
s = buf[:res].encode("mbcs")
# Ignore leading \\?\
if s.startswith("\\\\?\\"):
s = s[4:]
if s.startswith("UNC"):
s = "\\" + s[3:]
return s
prefix = os.path.join(sys.prefix,"tcl")
if not os.path.exists(prefix):
# devdir/externals/tcltk/lib
tcltk = 'tcltk'
if sys.maxsize > 2**31 - 1:
tcltk = 'tcltk64'
prefix = os.path.join(sys.prefix, "externals", tcltk, "lib")
prefix = os.path.abspath(prefix)
# if this does not exist, no further search is needed
if os.path.exists(prefix):
prefix = convert_path(prefix)
if "TCL_LIBRARY" not in os.environ:
for name in os.listdir(prefix):
if name.startswith("tcl"):
tcldir = os.path.join(prefix,name)
if os.path.isdir(tcldir):
os.environ["TCL_LIBRARY"] = tcldir
# Compute TK_LIBRARY, knowing that it has the same version
# as Tcl
import _tkinter
ver = str(_tkinter.TCL_VERSION)
if "TK_LIBRARY" not in os.environ:
v = os.path.join(prefix, 'tk'+ver)
if os.path.exists(os.path.join(v, "tclIndex")):
os.environ['TK_LIBRARY'] = v
# We don't know the Tix version, so we must search the entire
# directory
if "TIX_LIBRARY" not in os.environ:
for name in os.listdir(prefix):
if name.startswith("tix"):
tixdir = os.path.join(prefix,name)
if os.path.isdir(tixdir):
os.environ["TIX_LIBRARY"] = tixdir
| apache-2.0 | -1,922,404,316,920,299,800 | 36.259259 | 68 | 0.563408 | false |
sideeffects/stats_core | stats_main/management/commands/cleardatabase.py | 1 | 1319 | '''
Drop all tables in the database but keep the database itself.
'''
import optparse
from django.core.management.base import BaseCommand
from django.db.utils import IntegrityError
from django.db import connections, transaction
class Command(BaseCommand):
help = "Drop all tables in the database."
args = ""
option_list = BaseCommand.option_list + (
optparse.make_option(
"-d", "--database",
dest="database",
default="default",
help="Database name from settings.py",
),
)
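
    # Illustrative invocation (example only, not part of the original command):
    #   ./manage.py cleardatabase --database default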
def handle(self, *args, **options):
'''
Execute command.
'''
connection = connections[options["database"]]
cursor = connection.cursor()
while True:
remaining_tables = connection.introspection.table_names()
if len(remaining_tables) == 0:
break
for remaining_table in remaining_tables:
try:
cursor.execute("drop table %s cascade" % remaining_table)
except IntegrityError:
# We can't delete this table before deleting others, so
# try the next one
pass
else:
break
transaction.commit_unless_managed()
| mit | -5,727,684,000,737,978,000 | 27.06383 | 77 | 0.557998 | false |
ThoughtWorksInc/treadmill | treadmill/websocket/api/trace.py | 3 | 1874 | """A WebSocket handler for Treadmill trace.
"""
import logging
from treadmill import apptrace
from treadmill import schema
from treadmill.websocket import utils
from treadmill.apptrace import events as traceevents
_LOGGER = logging.getLogger(__name__)
class TraceAPI(object):
"""Handler for /trace topic."""
def __init__(self, sow=None):
"""init"""
self.sow = sow
self.sow_table = 'trace'
@schema.schema({'$ref': 'websocket/trace.json#/message'})
def subscribe(message):
"""Return filter based on message payload."""
parsed_filter = utils.parse_message_filter(message['filter'])
subscription = [('/trace/*', '%s,*' % parsed_filter.filter)]
_LOGGER.info('Adding trace subscription: %s', subscription)
return subscription
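
        # Illustrative example (assumed message shape): a message whose
        # 'filter' field parses to 'proid.app#0000000123' yields the
        # subscription [('/trace/*', 'proid.app#0000000123,*')].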
def on_event(filename, _operation, content):
"""Event handler."""
if not filename.startswith('/trace/'):
return
_shard, event = filename[len('/trace/'):].split('/')
(instanceid,
timestamp,
src_host,
event_type,
event_data) = event.split(',', 4)
trace_event = traceevents.AppTraceEvent.from_data(
timestamp=float(timestamp),
source=src_host,
instanceid=instanceid,
event_type=event_type,
event_data=event_data,
payload=content
)
if trace_event is None:
return
return {
'topic': '/trace',
'event': trace_event.to_dict()
}
self.subscribe = subscribe
self.on_event = on_event
def init():
"""API module init."""
return [('/trace', TraceAPI(sow=apptrace.TRACE_SOW_DIR), ['/trace/*'])]
| apache-2.0 | -5,940,922,184,473,888,000 | 27.830769 | 75 | 0.540555 | false |
erdincay/gmvault | src/gmv/cmdline_utils.py | 3 | 7686 | '''
Gmvault: a tool to backup and restore your gmail account.
Copyright (C) <2011> <guillaume Aubert (guillaume dot aubert at gmail do com)>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import argparse
import sys
import gmv.log_utils as log_utils
LOG = log_utils.LoggerFactory.get_logger('cmdline_utils')
class CmdLineParser(argparse.ArgumentParser): #pylint: disable=R0904
"""
    Added services on top of argparse.ArgumentParser.
Comments regarding usability of the lib.
    By default you want to print the defaults in the help if you have them, so the default formatter should print them.
    Also, new lines are eaten in the epilogue strings. You would usually use an epilogue to show examples, so you
    want the possibility to go to a new line. There should be a way to format the epilogue differently from the rest.
"""
BOOL_TRUE = ['yes', 'true', '1']
BOOL_FALSE = ['no', 'false', '0']
BOOL_VALS = BOOL_TRUE + BOOL_FALSE
def __init__(self, *args, **kwargs):
""" constructor """
argparse.ArgumentParser.__init__(self, *args, **kwargs) #pylint: disable=W0142
# I like my help option message better than the default...
#self.remove_option('-h')
#self.add_option('-h', '--help', action='help', help='Show this message and exit.')
self.epilogue = None
@classmethod
def convert_to_boolean(cls, val):
"""
Convert yes, True, true, YES to boolean True and
        no, False, false, NO to boolean False.
"""
lower_val = val.lower()
if lower_val in cls.BOOL_TRUE:
return True
elif lower_val in cls.BOOL_FALSE:
return False
else:
raise Exception("val %s should be in %s to be convertible to a boolean." % (val, cls.BOOL_VALS))
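
    # Illustrative behaviour of convert_to_boolean (examples added for clarity,
    # not present in the original source):
    #   CmdLineParser.convert_to_boolean('Yes')   -> True
    #   CmdLineParser.convert_to_boolean('FALSE') -> False
    #   CmdLineParser.convert_to_boolean('maybe') -> raises Exception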
def print_help(self, out=sys.stderr):
"""
Print the help message, followed by the epilogue (if set), to the
specified output file. You can define an epilogue by setting the
``epilogue`` field.
:param out: file desc where to write the usage message
"""
super(CmdLineParser, self).print_help(out)
if self.epilogue:
#print >> out, '\n%s' % textwrap.fill(self.epilogue, 100, replace_whitespace = False)
print >> out, '\n%s' % self.epilogue
out.flush()
def show_usage(self, msg=None):
"""
Print usage message
"""
self.die_with_usage(msg)
def die_with_usage(self, msg=None, exit_code=2):
"""
Display a usage message and exit.
:Parameters:
msg : str
If not set to ``None`` (the default), this message will be
displayed before the usage message
exit_code : int
The process exit code. Defaults to 2.
"""
if msg != None:
print >> sys.stderr, msg
self.print_help(sys.stderr)
sys.exit(exit_code)
def error(self, msg):
"""
Overrides parent ``OptionParser`` class's ``error()`` method and
forces the full usage message on error.
"""
self.die_with_usage("%s: error: %s\n" % (self.prog, msg))
def message(self, msg):
"""
Print a message
"""
print("%s: %s\n" % (self.prog, msg))
SYNC_HELP_EPILOGUE = """Examples:
a) full synchronisation with email and password login
#> gmvault --email [email protected] --passwd vrysecrtpasswd
b) full synchronisation for german users that have to use googlemail instead of gmail
#> gmvault --imap-server imap.googlemail.com --email [email protected] --passwd sosecrtpasswd
c) restrict synchronisation with an IMAP request
#> gmvault --imap-request 'Since 1-Nov-2011 Before 10-Nov-2011' --email [email protected] --passwd sosecrtpasswd
"""
def test_command_parser():
"""
Test the command parser
"""
#parser = argparse.ArgumentParser()
parser = CmdLineParser()
subparsers = parser.add_subparsers(help='commands')
# A sync command
sync_parser = subparsers.add_parser('sync', formatter_class=argparse.ArgumentDefaultsHelpFormatter, \
help='synchronize with given gmail account')
#email argument can be optional so it should be an option
sync_parser.add_argument('-l', '--email', action='store', dest='email', help='email to sync with')
# sync typ
sync_parser.add_argument('-t', '--type', action='store', default='full-sync', help='type of synchronisation')
sync_parser.add_argument("-i", "--imap-server", metavar = "HOSTNAME", \
help="Gmail imap server hostname. (default: imap.gmail.com)",\
dest="host", default="imap.gmail.com")
sync_parser.add_argument("-p", "--imap-port", metavar = "PORT", \
help="Gmail imap server port. (default: 993)",\
dest="port", default=993)
sync_parser.set_defaults(verb='sync')
sync_parser.epilogue = SYNC_HELP_EPILOGUE
# A restore command
restore_parser = subparsers.add_parser('restore', help='restore email to a given email account')
restore_parser.add_argument('email', action='store', help='email to sync with')
restore_parser.add_argument('--recursive', '-r', default=False, action='store_true',
help='Remove the contents of the directory, too',
)
restore_parser.set_defaults(verb='restore')
# A config command
config_parser = subparsers.add_parser('config', help='add/delete/modify properties in configuration')
config_parser.add_argument('dirname', action='store', help='New directory to create')
config_parser.add_argument('--read-only', default=False, action='store_true',
help='Set permissions to prevent writing to the directory',
)
config_parser.set_defaults(verb='config')
# global help
#print("================ Global Help (-h)================")
sys.argv = ['gmvault.py']
print(parser.parse_args())
#print("================ Global Help (--help)================")
#sys.argv = ['gmvault.py', '--help']
#print(parser.parse_args())
#print("================ Sync Help (--help)================")
#sys.argv = ['gmvault.py', 'sync', '-h']
#print(parser.parse_args())
#sys.argv = ['gmvault.py', 'sync', '[email protected]', '--type', 'quick-sync']
#print(parser.parse_args())
#print("options = %s\n" % (options))
#print("args = %s\n" % (args))
if __name__ == '__main__':
test_command_parser()
| agpl-3.0 | 7,865,789,259,453,203,000 | 34.915888 | 130 | 0.580536 | false |
willthames/ansible | lib/ansible/modules/cloud/amazon/rds_param_group.py | 12 | 11187 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rds_param_group
version_added: "1.5"
short_description: manage RDS parameter groups
description:
- Creates, modifies, and deletes RDS parameter groups. This module has a dependency on python-boto >= 2.5.
options:
state:
description:
- Specifies whether the group should be present or absent.
required: true
default: present
aliases: []
choices: [ 'present' , 'absent' ]
name:
description:
- Database parameter group identifier.
required: true
default: null
aliases: []
description:
description:
- Database parameter group description. Only set when a new group is added.
required: false
default: null
aliases: []
engine:
description:
- The type of database for this group. Required for state=present.
required: false
default: null
aliases: []
choices:
- 'aurora5.6'
- 'mariadb10.0'
- 'mariadb10.1'
- 'mysql5.1'
- 'mysql5.5'
- 'mysql5.6'
- 'mysql5.7'
- 'oracle-ee-11.2'
- 'oracle-ee-12.1'
- 'oracle-se-11.2'
- 'oracle-se-12.1'
- 'oracle-se1-11.2'
- 'oracle-se1-12.1'
- 'postgres9.3'
- 'postgres9.4'
- 'postgres9.5'
- 'postgres9.6'
- 'sqlserver-ee-10.5'
- 'sqlserver-ee-11.0'
- 'sqlserver-ex-10.5'
- 'sqlserver-ex-11.0'
- 'sqlserver-ex-12.0'
- 'sqlserver-se-10.5'
- 'sqlserver-se-11.0'
- 'sqlserver-se-12.0'
- 'sqlserver-web-10.5'
- 'sqlserver-web-11.0'
- 'sqlserver-web-12.0'
immediate:
description:
- Whether to apply the changes immediately, or after the next reboot of any associated instances.
required: false
default: null
aliases: []
params:
description:
- Map of parameter names and values. Numeric values may be represented as K for kilo (1024), M for mega (1024^2), G for giga (1024^3),
or T for tera (1024^4), and these values will be expanded into the appropriate number before being set in the parameter group.
required: false
default: null
aliases: []
author: "Scott Anderson (@tastychutney)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Add or change a parameter group, in this case setting auto_increment_increment to 42 * 1024
- rds_param_group:
state: present
name: norwegian_blue
description: 'My Fancy Ex Parrot Group'
engine: 'mysql5.6'
params:
auto_increment_increment: "42K"
# Remove a parameter group
- rds_param_group:
state: absent
name: norwegian_blue
'''
try:
import boto.rds
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import connect_to_aws, ec2_argument_spec, get_aws_connection_info
from ansible.module_utils.parsing.convert_bool import BOOLEANS_TRUE
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native
VALID_ENGINES = [
'aurora5.6',
'mariadb10.0',
'mariadb10.1',
'mysql5.1',
'mysql5.5',
'mysql5.6',
'mysql5.7',
'oracle-ee-11.2',
'oracle-ee-12.1',
'oracle-se-11.2',
'oracle-se-12.1',
'oracle-se1-11.2',
'oracle-se1-12.1',
'postgres9.3',
'postgres9.4',
'postgres9.5',
'postgres9.6',
'sqlserver-ee-10.5',
'sqlserver-ee-11.0',
'sqlserver-ex-10.5',
'sqlserver-ex-11.0',
'sqlserver-ex-12.0',
'sqlserver-se-10.5',
'sqlserver-se-11.0',
'sqlserver-se-12.0',
'sqlserver-web-10.5',
'sqlserver-web-11.0',
'sqlserver-web-12.0',
]
INT_MODIFIERS = {
'K': 1024,
'M': pow(1024, 2),
'G': pow(1024, 3),
'T': pow(1024, 4),
}
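
# Illustrative expansion of the suffix notation described in DOCUMENTATION
# (values are examples only): set_parameter() below turns '42K' into
# 42 * 1024 = 43008 and '10M' into 10 * 1024 ** 2 = 10485760 before applying
# the parameter.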
# modify_group() below returns a tuple: (dict of changed parameters, the remaining
# parameters that weren't found in this parameter group).
class NotModifiableError(Exception):
def __init__(self, error_message, *args):
super(NotModifiableError, self).__init__(error_message, *args)
self.error_message = error_message
def __repr__(self):
return 'NotModifiableError: %s' % self.error_message
def __str__(self):
return 'NotModifiableError: %s' % self.error_message
def set_parameter(param, value, immediate):
"""
Allows setting parameters with 10M = 10* 1024 * 1024 and so on.
"""
converted_value = value
if param.type == 'string':
converted_value = str(value)
elif param.type == 'integer':
if isinstance(value, string_types):
try:
for modifier in INT_MODIFIERS.keys():
if value.endswith(modifier):
converted_value = int(value[:-1]) * INT_MODIFIERS[modifier]
converted_value = int(converted_value)
except ValueError:
# may be based on a variable (ie. {foo*3/4}) so
# just pass it on through to boto
converted_value = str(value)
elif isinstance(value, bool):
converted_value = 1 if value else 0
else:
converted_value = int(value)
elif param.type == 'boolean':
if isinstance(value, string_types):
converted_value = to_native(value) in BOOLEANS_TRUE
else:
converted_value = bool(value)
param.value = converted_value
param.apply(immediate)
def modify_group(group, params, immediate=False):
""" Set all of the params in a group to the provided new params. Raises NotModifiableError if any of the
params to be changed are read only.
"""
changed = {}
new_params = dict(params)
for key in new_params.keys():
if key in group:
param = group[key]
new_value = new_params[key]
try:
old_value = param.value
except ValueError:
# some versions of boto have problems with retrieving
# integer values from params that may have their value
# based on a variable (ie. {foo*3/4}), so grab it in a
# way that bypasses the property functions
old_value = param._value
if old_value != new_value:
if not param.is_modifiable:
raise NotModifiableError('Parameter %s is not modifiable.' % key)
changed[key] = {'old': old_value, 'new': new_value}
set_parameter(param, new_value, immediate)
del new_params[key]
return changed, new_params
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state = dict(required=True, choices=['present', 'absent']),
name = dict(required=True),
engine = dict(required=False, choices=VALID_ENGINES),
description = dict(required=False),
params = dict(required=False, aliases=['parameters'], type='dict'),
immediate = dict(required=False, type='bool'),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
group_name = module.params.get('name').lower()
group_engine = module.params.get('engine')
group_description = module.params.get('description')
group_params = module.params.get('params') or {}
immediate = module.params.get('immediate') or False
if state == 'present':
for required in ['name', 'description', 'engine']:
if not module.params.get(required):
module.fail_json(msg = str("Parameter %s required for state='present'" % required))
else:
for not_allowed in ['description', 'engine', 'params']:
if module.params.get(not_allowed):
module.fail_json(msg = str("Parameter %s not allowed for state='absent'" % not_allowed))
# Retrieve any AWS settings from the environment.
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
if not region:
module.fail_json(msg = str("Either region or AWS_REGION or EC2_REGION environment variable or boto config aws_region or ec2_region must be set."))
try:
conn = connect_to_aws(boto.rds, region, **aws_connect_kwargs)
except boto.exception.BotoServerError as e:
module.fail_json(msg = e.error_message)
group_was_added = False
try:
changed = False
try:
all_groups = conn.get_all_dbparameter_groups(group_name, max_records=100)
exists = len(all_groups) > 0
except BotoServerError as e:
if e.error_code != 'DBParameterGroupNotFound':
module.fail_json(msg = e.error_message)
exists = False
if state == 'absent':
if exists:
conn.delete_parameter_group(group_name)
changed = True
else:
changed = {}
if not exists:
new_group = conn.create_parameter_group(group_name, engine=group_engine, description=group_description)
group_was_added = True
# If a "Marker" is present, this group has more attributes remaining to check. Get the next batch, but only
# if there are parameters left to set.
marker = None
while len(group_params):
next_group = conn.get_all_dbparameters(group_name, marker=marker)
changed_params, group_params = modify_group(next_group, group_params, immediate)
changed.update(changed_params)
if hasattr(next_group, 'Marker'):
marker = next_group.Marker
else:
break
except BotoServerError as e:
module.fail_json(msg = e.error_message)
except NotModifiableError as e:
msg = e.error_message
if group_was_added:
msg = '%s The group "%s" was added first.' % (msg, group_name)
module.fail_json(msg=msg)
module.exit_json(changed=changed)
if __name__ == '__main__':
main()
| gpl-3.0 | 306,011,070,857,280,300 | 31.054441 | 154 | 0.598194 | false |
odoo-arg/odoo_l10n_ar | l10n_ar_taxes/tests/test_invoice.py | 1 | 6723 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo.tests import common
from openerp.exceptions import ValidationError
import json
class TestInvoice(common.TransactionCase):
def _create_partners(self):
self.partner_ri = self.env['res.partner'].create({
'name': 'Customer',
'customer': True,
'supplier': True,
'property_account_position_id': self.iva_ri.id
})
def _create_invoices(self):
account = self.partner_ri.property_account_receivable_id
self.pos = self.pos_proxy.create({'name': 5})
self.document_book = self.document_book_proxy.create({
'name': 10,
'pos_ar_id': self.pos.id,
'category': 'invoice',
'book_type_id': self.env.ref('l10n_ar_point_of_sale.document_book_type_preprint_invoice').id,
'document_type_id': self.env.ref('l10n_ar_point_of_sale.document_type_invoice').id,
'denomination_id': self.env.ref('l10n_ar_afip_tables.account_denomination_a').id,
})
self.invoice = self.env['account.invoice'].create({
'partner_id': self.partner_ri.id,
'fiscal_position_id': self.partner_ri.property_account_position_id.id,
'account_id': account.id,
'type': 'out_invoice',
'state': 'draft'
})
def _create_invoice_lines(self):
account = self.partner_ri.property_account_receivable_id
self.env['account.invoice.line'].create({
'name': 'Producto',
'account_id': account.id,
'quantity': 1,
'price_unit': 500,
'invoice_id': self.invoice.id,
})
def _create_invoice_line_with_vat(self):
account = self.partner_ri.property_account_receivable_id
vat_21 = self.env.ref('l10n_ar.1_vat_21_ventas').id
return self.env['account.invoice.line'].create({
'name': 'Producto',
'account_id': account.id,
'quantity': 1,
'invoice_line_tax_ids': [(6, 0, [vat_21])],
'price_unit': 1000,
'invoice_id': self.invoice.id,
})
def _create_invoice_line_with_exempt_tax(self):
account = self.partner_ri.property_account_receivable_id
vat_ex = self.env.ref('l10n_ar.1_vat_exento_ventas').id
self.env['account.invoice.line'].create({
'name': 'Producto',
'account_id': account.id,
'quantity': 2,
'invoice_line_tax_ids': [(6, 0, [vat_ex])],
'price_unit': 750,
'invoice_id': self.invoice.id,
})
def setUp(self):
super(TestInvoice, self).setUp()
# Proxies
self.iva_ri = self.env.ref('l10n_ar_afip_tables.account_fiscal_position_ivari')
self.document_book_proxy = self.env['document.book']
self.pos_proxy = self.env['pos.ar']
self._create_partners()
self._create_invoices()
self._create_invoice_lines()
        # Set the RI fiscal position on the company
self.env.user.company_id.partner_id.property_account_position_id = self.iva_ri
def test_invoice_no_taxes(self):
self.invoice._onchange_invoice_line_ids()
assert self.invoice.amount_total == self.invoice.amount_not_taxable
assert self.invoice.amount_exempt == 0.0
assert self.invoice.amount_to_tax == 0.0
def test_invoice_with_vat(self):
self._create_invoice_line_with_vat()
self.invoice._onchange_invoice_line_ids()
assert self.invoice.amount_not_taxable == 500
assert self.invoice.amount_to_tax == 1000
assert self.invoice.amount_exempt == 0.0
assert self.invoice.amount_total == 1710
def test_exempt_invoice(self):
self._create_invoice_line_with_exempt_tax()
self.invoice._onchange_invoice_line_ids()
assert self.invoice.amount_not_taxable == 500
assert self.invoice.amount_to_tax == 0.0
assert self.invoice.amount_exempt == 1500
assert self.invoice.amount_total == 2000
def test_mixed_invoice(self):
self._create_invoice_line_with_exempt_tax()
self._create_invoice_line_with_vat()
self.invoice._onchange_invoice_line_ids()
assert self.invoice.amount_not_taxable == 500
assert self.invoice.amount_to_tax == 1000
assert self.invoice.amount_exempt == 1500
assert self.invoice.amount_total == 3210
def test_no_vat(self):
tax_group_vat = self.env.ref('l10n_ar.tax_group_vat')
# Desasignamos los impuestos creados al grupo de iva y luego borramos el grupo
vat_taxes = self.env['account.tax'].search([('tax_group_id', '=', tax_group_vat.id)])
vat_taxes.write({'tax_group_id': self.env['account.tax.group'].create({'name': 'test'}).id})
tax_group_vat.unlink()
with self.assertRaises(ValidationError):
self.invoice._onchange_invoice_line_ids()
def test_multiple_lines_with_vat(self):
line = self._create_invoice_line_with_vat()
vat_21 = self.env.ref('l10n_ar.1_vat_21_ventas').id
vat_105 = self.env.ref('l10n_ar.1_vat_105_ventas').id
line.invoice_line_tax_ids = [(6, 0, [vat_21, vat_105])]
with self.assertRaises(ValidationError):
self.invoice.check_more_than_one_vat_in_line()
def test_amount_json(self):
self._create_invoice_line_with_exempt_tax()
self._create_invoice_line_with_vat()
self.invoice._onchange_invoice_line_ids()
amounts_widget = json.loads(self.invoice.amounts_widget)
assert amounts_widget['content'][0].get('amount_to_tax') == 1000
assert amounts_widget['content'][0].get('amount_not_taxable') == 500
assert amounts_widget['content'][0].get('amount_exempt') == 1500
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 5,345,874,221,830,802,000 | 39.017857 | 105 | 0.603748 | false |
TC629/az-libra | App/update.py | 1 | 6069 | #!/usr/bin/python
# This program runs on the pcDuino before the Libra application daemon is
# created and started.
# It checks whether the file "update_pending" exists; that file consists of a
# single line with the name of the tarball containing the update.
# For now it only supports changes to the Python application code and to the
# Arduino application. It does not support changes to the database.
# Requires the programs: avrdude, gunzip/gzip, and tar.
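# Illustrative example (file names are assumptions): if the file at
# UPDATE_PENDING_PATH contains the single line "libra-1.2.3.tar.gz", the
# updater looks for <FILES_PATH>/updates/libra-1.2.3.tar.gz, unpacks it into
# /opt/libra-1.2.3 and finally points the APP_PATH symlink at that directory.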
from __future__ import print_function
import os
import subprocess
import shutil
from config import CONFIG
from config import UPDATE_PENDING_PATH
from config import FILES_PATH
from config import APP_PATH
def main():
    # The existence of the "update_pending" file indicates that
    # an update is pending.
if os.path.exists(UPDATE_PENDING_PATH) and os.path.isfile(UPDATE_PENDING_PATH):
print('Se encontro una actualizacion pendiente.')
with open(UPDATE_PENDING_PATH, 'r') as update_pending:
            # Check that the file with the update actually exists
tarball = update_pending.readline().rstrip('\n').lstrip(' ').rstrip(' ')
tarball = os.path.join(FILES_PATH, 'updates/'+tarball)
print('Analizando archivo: ', end=' ')
if os.path.exists(tarball) and os.path.isfile(tarball):
print('OK')
                # And if it exists, unpack it.
print('Descomprimiendo archivo: ', end=' ')
if untar(tarball):
print('OK')
                    new_dir = os.path.join('/opt', os.path.basename(tarball)[:-len('.tar.gz')])
                    # Check whether the Arduino code needs to be updated.
arduino_dir = os.path.join(new_dir, 'arduino')
if os.path.exists(arduino_dir) and not os.path.isfile(arduino_dir):
print('Actualizando AVR: ', end=' ')
config_file = os.path.join(arduino_dir, 'avrdude_config')
if os.path.exists(config_file) and os.path.isfile(config_file):
with open(config_file, 'r') as config:
                                # Read the configuration
avrdude_config = {}
for line in config.readlines():
k,v = line.split('=')
if 'hex' == k:
v = '{0}/arduino/{1}'.format(new_dir, v)
avrdude_config[k] = v.rstrip('\n')
                                # Update the Arduino.
if update_avr(avrdude_config):
print('OK')
update_symlink(new_dir, tarball)
else:
print('FALLIDO')
return
else:
print('FALLIDO')
return
else:
                            # No Arduino code update is required.
update_symlink(new_dir, tarball)
else:
print('FALLIDO')
return
else:
print('FALLIDO')
return
else:
print('No se encontro una actualizacion pendiente.')
def update_symlink(new_dir, tarball):
    ''' Update the symlink so that it points to the new version's directory. '''
print('Actualizando symlink: ', end=' ')
    # Get the previous directory.
old_dir = os.path.realpath(APP_PATH)
try:
os.remove(APP_PATH)
os.symlink(new_dir, APP_PATH)
except OSError as e:
        # Failed; try to point back to the previous directory.
try:
os.symlink(old_dir, APP_PATH)
except OSError:
print('FALLO NO RECUPERABLE')
else:
print('FALLO SIN ACTUALIZAR')
else:
        # Success, so delete the previous directory.
try:
shutil.rmtree(old_dir)
except Exception:
print('FALLO AL REMOVER VERSION ANTERIOR')
else:
print('OK')
    # Remove the tarball
try:
print('Finalizando actualizacion: ', end=' ')
os.remove(tarball)
os.remove(UPDATE_PENDING_PATH)
print('OK')
except OSError:
print('FALLIDO')
def update_avr(config):
    ''' Update the Arduino's flash memory using the avrdude tool, the same
    tool the Arduino IDE uses. Requires the Arduino to be connected over
    USB (not via serial using the pins).
    '''
partno = config['partno']
programmer = config['programmer']
port = CONFIG['ARDUINO_PORT']
speed = config['speed']
filename = config['hex']
devnull = open(os.devnull, 'w')
avrdude_proc = subprocess.Popen(['avrdude','-p',partno,'-c',programmer,
'-P', port, '-b', speed, '-Uflash:w:{0}:i'.format(filename)], stdout=devnull, stderr=devnull)
exitcode = avrdude_proc.wait()
devnull.close()
return exitcode == 0
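
# For reference, the subprocess call in update_avr() is roughly equivalent to
# running (all values illustrative):
#   avrdude -p atmega328p -c arduino -P /dev/ttyACM0 -b 115200 \
#           -Uflash:w:/opt/libra-1.2.3/arduino/firmware.hex:i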
def untar(tarball):
    ''' Use the "gunzip" and "tar" programs to unpack the update tarball.
    '''
    # Pipes are used here to feed gunzip's output into tar's input; the bash
    # equivalent would be something like: gunzip -cdf <tarball> | tar -C /opt -xp
devnull = open(os.devnull, 'w')
gunzip_proc = subprocess.Popen(['gunzip', '-cdf', tarball], stdout=subprocess.PIPE, stderr=devnull)
tar_proc = subprocess.Popen(['tar', '-C', '/opt', '-xp'], stdin=gunzip_proc.stdout ,stdout=devnull)
gunzip_proc.stdout.close()
exitcode_gunzip = gunzip_proc.wait()
exitcode_tar = tar_proc.wait()
devnull.close()
return exitcode_tar == 0 and exitcode_gunzip == 0
if __name__ == '__main__':
main()
| gpl-3.0 | 2,868,010,817,134,543,000 | 39.46 | 104 | 0.546713 | false |
byronvhughey/django_local_library | catalog/tests/test_views.py | 1 | 9559 | from django.test import TestCase
# Create your tests here.
from catalog.models import Author
from django.core.urlresolvers import reverse
class AuthorsListViewTest(TestCase):
@classmethod
def setUpTestData(cls):
# Create 13 authors for pagination test
number_of_authors = 13
for author_num in range(number_of_authors):
Author.objects.create(first_name='Christian %s' % author_num, last_name = 'Surname %s' % author_num,)
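
    # With 13 authors and a page size of 10, page 1 should list 10 entries and
    # page 2 the remaining 3; the pagination tests below rely on that split.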
def test_view_url_exists_at_desired_location(self):
resp = self.client.get('/catalog/authors/')
self.assertEqual(resp.status_code, 200)
def test_view_url_accessible_by_name(self):
resp = self.client.get(reverse('authors'))
self.assertEqual(resp.status_code, 200)
def test_view_uses_correct_template(self):
resp = self.client.get(reverse('authors'))
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, 'catalog/author_list.html')
def test_pagination_is_ten(self):
resp = self.client.get(reverse('authors'))
self.assertEqual(resp.status_code, 200)
self.assertTrue('is_paginated' in resp.context)
self.assertTrue(resp.context['is_paginated'] == True)
self.assertTrue( len(resp.context['author_list']) == 10)
def test_lists_all_authors(self):
# Get second page and confirm it has (exactly) remaining 3 items
resp = self.client.get(reverse('authors')+'?page=2')
self.assertEqual(resp.status_code, 200)
self.assertTrue('is_paginated' in resp.context)
self.assertTrue(resp.context['is_paginated'] == True)
self.assertTrue( len(resp.context['author_list']) == 3)
import datetime
from django.utils import timezone
from catalog.models import BookInstance, Book, Genre, Language
from django.contrib.auth.models import User # required to assign User as a borrower
class LoanedBookInstancesByUserListViewTest(TestCase):
def setUp(self):
# Create two users
test_user1 = User.objects.create_user(username='testuser1', password='12345')
test_user1.save()
test_user2 = User.objects.create_user(username='testuser2', password='12345')
test_user2.save()
# Create a book
test_author = Author.objects.create(first_name='John', last_name='Smith')
test_genre = Genre.objects.create(name='Fantasy')
test_language = Language.objects.create(name='English')
        test_book = Book.objects.create(title='Book Title', summary='My book summary', isbn='ABCDEFG', author=test_author, language=test_language,)
# Create genre as a post-step
genre_objects_for_book = Genre.objects.all()
test_book.genre=genre_objects_for_book
test_book.save()
# Create 30 BookInstance objects
number_of_book_copies = 30
for book_copy in range(number_of_book_copies):
return_date = timezone.now() + datetime.timedelta(days=book_copy%5)
if book_copy % 2:
the_borrower=test_user1
else:
the_borrower=test_user2
status='m'
BookInstance.objects.create(book=test_book, imprint='Unlikely imprint, 2016', due_back=return_date, borrower=the_borrower, status=status)
def test_redirect_if_not_logged_in(self):
resp = self.client.get(reverse('my-borrowed'))
self.assertRedirects(resp, '/accounts/login/?next=/catalog/mybooks/')
def test_logged_in_uses_correct_template(self):
login = self.client.login(username='testuser1', password='12345')
resp = self.client.get(reverse('my-borrowed'))
# Check our user is logged in
self.assertEqual(str(resp.context['user']), 'testuser1')
        # Check that we got a response "success"
        self.assertEqual(resp.status_code, 200)
# Check we used the correct template
self.assertTemplateUsed(resp, 'catalog/bookinstance_list_borrowed_user.html')
def test_only_borrowed_books_in_list(self):
login = self.client.login(username='testuser1', password='12345')
resp = self.client.get(reverse('my-borrowed'))
# Check our user is logged in
self.assertEqual(str(resp.context['user']), 'testuser1')
#Check that we got a response "success"
self.assertEqual(resp.status_code, 200)
# Check that, initially, we don't have any books in the list (aka none on loan)
        self.assertTrue('bookinstance_list' in resp.context)
self.assertEqual( len(resp.context['bookinstance_list']),0)
# Now change all books to be on loan
        get_ten_books = BookInstance.objects.all()[:10]
for copy in get_ten_books:
copy.status='o'
copy.save()
# Check that now we have borrowed books in the list
resp = self.client.get(reverse('my-borrowed'))
# Check our user is logged in
self.assertEqual(str(resp.context['user']), 'testuser1')
# Check that we got a response "success"
self.assertEqual(resp.status_code, 200)
self.assertTrue('bookinstance_list' in resp.context)
# Confirm all books belong to testuser1 and are on loan
for bookitem in resp.context['bookinstance_list']:
self.assertEqual(resp.context['user'], bookitem.borrower)
self.assertEqual('o', bookitem.status)
def test_page_paginated_to_ten(self):
# change all books to be on loan.
# This should make 15 test user books.
for copy in BookInstance.objects.all():
copy.status='o'
copy.save()
login = self.client.login(username='testuser1', password='12345')
resp = self.client.get(reverse('my-borrowed'))
# Check our user is logged in
self.assertEqual(str(resp.context['user']), 'testuser1')
# Check that we got a response "success"
self.assertEqual(resp.status_code, 200)
# Confirm that only 10 items are displayed due to pagination (if pagination not enabled, there would be 15 returned)
self.assertEqual( len(resp.context['bookinstance_list']),10)
def test_pages_ordered_by_due_date(self):
# Change all books to be on loan
for copy in BookInstance.objects.all():
copy.status='o'
copy.save()
login = self.client.login(username='testuser1', password='12345')
resp = self.client.get(reverse('my-borrowed'))
# Check our user is logged in
self.assertEqual(str(resp.context['user']), 'testuser1')
# Check that we got a response "success"
self.assertEqual(resp.status_code, 200)
# Confirm that of the items, only 10 are displayed due to pagination.
self.assertEqual( len(resp.context['bookinstance_list']),10)
last_date=0
for copy in resp.context['bookinstance_list']:
if last_date==0:
last_date=copy.due_back
else:
self.assertTrue(last_date <= copy.due_back)
from django.contrib.auth.models import Permission # Required to grant the persmission needed to set a book as returned
class RenewBookInstancesViewTest(TestCase):
def setUp(self):
# Create a user
test_user1 = User.objects.create_user(username='testuser1', password='12345')
test_user1.save()
test_user2 = User.objects.create_user(username='testuser2', password='12345')
test_user2.save()
permission = Permission.objects.get(name='Set book as returned')
test_user2.user_permissions.add(permission)
test_user2.save()
# Create a book
test_author = Author.objects.create(first_name='John', last_name='Smith')
test_genre = Genre.objects.create(name='Fantasy')
test_language = Language.objects.create(name='English')
test_book = Book.objects.create(title='Book Title', summary = 'My book summary', isbn='ABCDEFG', author=test_author, language=test_language,)
# Create genre as a post-step
genre_objects_for_book = Genre.objects.all()
test_book.genre=genre_objects_for_book
test_book.save()
# Create a BookInstance object for test_user1
return_date = datetime.date.today() + datetime.timedelta(days=5)
        self.test_bookinstance = BookInstance.objects.create(book=test_book, imprint='Unlikely Imprint, 2016', due_back=return_date, borrower=test_user2, status='o')
def test_redirect_if_not_logged_in(self):
resp = self.client.get(reverse('author_create') )
self.assertRedirects(resp, '/accounts/login/?next=/catalog/author/create/' )
def test_redirect_if_logged_in_but_not_correct_permission(self):
login = self.client.login(username='testuser1', password='12345')
resp = self.client.get(reverse('author_create') )
self.assertRedirects(resp, '/accounts/login/?next=/catalog/author/create/' )
def test_logged_in_with_permission(self):
login = self.client.login(username='testuser2', password='12345')
resp = self.client.get(reverse('author_create') )
self.assertEqual( resp.status_code,200)
def test_uses_correct_template(self):
login = self.client.login(username='testuser2', password='12345')
resp = self.client.get(reverse('author_create') )
self.assertEqual( resp.status_code,200)
self.assertTemplateUsed(resp, 'catalog/author_form.html')
def test_form_date_of_death_initially_set_to_expected_date(self):
login = self.client.login(username='testuser2', password='12345')
resp = self.client.get(reverse('author_create') )
self.assertEqual( resp.status_code,200)
expected_initial_date = datetime.date(2016, 12, 10)
response_date=resp.context['form'].initial['date_of_death']
response_date=datetime.datetime.strptime(response_date, "%Y-%m-%d").date()
self.assertEqual(response_date, expected_initial_date )
def test_redirects_to_detail_view_on_success(self):
login = self.client.login(username='testuser2', password='12345')
resp = self.client.post(reverse('author_create'),{'first_name':'Christian Name','last_name':'Surname',} )
#Manually check redirect because we don't know what author was created
self.assertEqual( resp.status_code,302)
self.assertTrue( resp.url.startswith('/catalog/author/') )
| mit | -6,681,761,472,028,865,000 | 38.337449 | 156 | 0.716707 | false |
shhui/horizon | openstack_dashboard/dashboards/admin/projects/tables.py | 8 | 6868 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.exceptions import ValidationError # noqa
from django.core.urlresolvers import reverse
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tables
from keystoneclient.exceptions import Conflict # noqa
from openstack_dashboard import api
from openstack_dashboard.api import keystone
class ViewMembersLink(tables.LinkAction):
name = "users"
verbose_name = _("Modify Users")
url = "horizon:admin:projects:update"
classes = ("ajax-modal", "btn-edit")
policy_rules = (("identity", "identity:list_users"),
("identity", "identity:list_roles"))
def get_link_url(self, project):
step = 'update_members'
base_url = reverse(self.url, args=[project.id])
param = urlencode({"step": step})
return "?".join([base_url, param])
class ViewGroupsLink(tables.LinkAction):
name = "groups"
verbose_name = _("Modify Groups")
url = "horizon:admin:projects:update"
classes = ("ajax-modal", "btn-edit")
def allowed(self, request, project):
return keystone.VERSIONS.active >= 3
def get_link_url(self, project):
step = 'update_group_members'
base_url = reverse(self.url, args=[project.id])
param = urlencode({"step": step})
return "?".join([base_url, param])
class UsageLink(tables.LinkAction):
name = "usage"
verbose_name = _("View Usage")
url = "horizon:admin:projects:usage"
classes = ("btn-stats",)
policy_rules = (("compute", "compute_extension:simple_tenant_usage:show"),)
class CreateProject(tables.LinkAction):
name = "create"
verbose_name = _("Create Project")
url = "horizon:admin:projects:create"
classes = ("btn-launch", "ajax-modal",)
policy_rules = (('identity', 'identity:create_project'),)
def allowed(self, request, project):
return api.keystone.keystone_can_edit_project()
class UpdateProject(tables.LinkAction):
name = "update"
verbose_name = _("Edit Project")
url = "horizon:admin:projects:update"
classes = ("ajax-modal", "btn-edit")
policy_rules = (('identity', 'identity:update_project'),)
def allowed(self, request, project):
return api.keystone.keystone_can_edit_project()
class ModifyQuotas(tables.LinkAction):
name = "quotas"
verbose_name = _("Modify Quotas")
url = "horizon:admin:projects:update"
classes = ("ajax-modal", "btn-edit")
policy_rules = (('compute', "compute_extension:quotas:update"),)
def get_link_url(self, project):
step = 'update_quotas'
base_url = reverse(self.url, args=[project.id])
param = urlencode({"step": step})
return "?".join([base_url, param])
class DeleteTenantsAction(tables.DeleteAction):
data_type_singular = _("Project")
data_type_plural = _("Projects")
policy_rules = (("identity", "identity:delete_project"),)
def allowed(self, request, project):
return api.keystone.keystone_can_edit_project()
def delete(self, request, obj_id):
api.keystone.tenant_delete(request, obj_id)
class TenantFilterAction(tables.FilterAction):
def filter(self, table, tenants, filter_string):
"""Really naive case-insensitive search."""
# FIXME(gabriel): This should be smarter. Written for demo purposes.
q = filter_string.lower()
def comp(tenant):
if q in tenant.name.lower():
return True
return False
return filter(comp, tenants)
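# Editor's sketch (not part of the original module): one way the FIXME above could be
# addressed -- matching case-insensitively against several tenant attributes instead of
# the name alone.  The attribute names (name, description, id) are taken from the
# TenantsTable columns defined below; everything else here is illustrative.
class SmarterTenantFilterAction(tables.FilterAction):
    def filter(self, table, tenants, filter_string):
        """Case-insensitive search across name, description and project ID."""
        q = filter_string.lower()
        def comp(tenant):
            fields = (tenant.name, getattr(tenant, 'description', '') or '', tenant.id)
            return any(q in (field or '').lower() for field in fields)
        return filter(comp, tenants)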
class UpdateRow(tables.Row):
ajax = True
def get_data(self, request, project_id):
project_info = api.keystone.tenant_get(request, project_id,
admin=True)
return project_info
class UpdateCell(tables.UpdateAction):
def allowed(self, request, project, cell):
return api.keystone.keystone_can_edit_project()
def update_cell(self, request, datum, project_id,
cell_name, new_cell_value):
# inline update project info
try:
project_obj = datum
# updating changed value by new value
setattr(project_obj, cell_name, new_cell_value)
api.keystone.tenant_update(
request,
project_id,
name=project_obj.name,
description=project_obj.description,
enabled=project_obj.enabled)
except Conflict:
# Returning a nice error message about name conflict. The message
# from exception is not that clear for the users.
message = _("This name is already taken.")
raise ValidationError(message)
except Exception:
exceptions.handle(request, ignore=True)
return False
return True
class TenantsTable(tables.DataTable):
name = tables.Column('name', verbose_name=_('Name'),
form_field=forms.CharField(required=True,
max_length=64),
update_action=UpdateCell)
description = tables.Column(lambda obj: getattr(obj, 'description', None),
verbose_name=_('Description'),
form_field=forms.CharField(
widget=forms.Textarea(),
required=False),
update_action=UpdateCell)
id = tables.Column('id', verbose_name=_('Project ID'))
enabled = tables.Column('enabled', verbose_name=_('Enabled'), status=True,
form_field=forms.BooleanField(
label=_('Enabled'),
required=False),
update_action=UpdateCell)
class Meta:
name = "tenants"
verbose_name = _("Projects")
row_class = UpdateRow
row_actions = (ViewMembersLink, ViewGroupsLink, UpdateProject,
UsageLink, ModifyQuotas, DeleteTenantsAction)
table_actions = (TenantFilterAction, CreateProject,
DeleteTenantsAction)
pagination_param = "tenant_marker"
| apache-2.0 | -5,398,345,145,584,611,000 | 34.770833 | 79 | 0.613716 | false |
jayceyxc/hue | desktop/core/ext-py/rsa-3.4.2/tests/test_load_save_keys.py | 9 | 4814 | # -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Unittest for saving and loading keys.'''
import base64
import unittest
import os.path
import pickle
from rsa._compat import b
import rsa.key
B64PRIV_DER = b('MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt')
PRIVATE_DER = base64.standard_b64decode(B64PRIV_DER)
B64PUB_DER = b('MAwCBQDeKYlRAgMBAAE=')
PUBLIC_DER = base64.standard_b64decode(B64PUB_DER)
PRIVATE_PEM = b('''
-----BEGIN CONFUSING STUFF-----
Cruft before the key
-----BEGIN RSA PRIVATE KEY-----
Comment: something blah
%s
-----END RSA PRIVATE KEY-----
Stuff after the key
-----END CONFUSING STUFF-----
''' % B64PRIV_DER.decode("utf-8"))
CLEAN_PRIVATE_PEM = b('''\
-----BEGIN RSA PRIVATE KEY-----
%s
-----END RSA PRIVATE KEY-----
''' % B64PRIV_DER.decode("utf-8"))
PUBLIC_PEM = b('''
-----BEGIN CONFUSING STUFF-----
Cruft before the key
-----BEGIN RSA PUBLIC KEY-----
Comment: something blah
%s
-----END RSA PUBLIC KEY-----
Stuff after the key
-----END CONFUSING STUFF-----
''' % B64PUB_DER.decode("utf-8"))
CLEAN_PUBLIC_PEM = b('''\
-----BEGIN RSA PUBLIC KEY-----
%s
-----END RSA PUBLIC KEY-----
''' % B64PUB_DER.decode("utf-8"))
class DerTest(unittest.TestCase):
"""Test saving and loading DER keys."""
def test_load_private_key(self):
"""Test loading private DER keys."""
key = rsa.key.PrivateKey.load_pkcs1(PRIVATE_DER, 'DER')
expected = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
self.assertEqual(expected, key)
def test_save_private_key(self):
"""Test saving private DER keys."""
key = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
der = key.save_pkcs1('DER')
self.assertEqual(PRIVATE_DER, der)
def test_load_public_key(self):
"""Test loading public DER keys."""
key = rsa.key.PublicKey.load_pkcs1(PUBLIC_DER, 'DER')
expected = rsa.key.PublicKey(3727264081, 65537)
self.assertEqual(expected, key)
def test_save_public_key(self):
"""Test saving public DER keys."""
key = rsa.key.PublicKey(3727264081, 65537)
der = key.save_pkcs1('DER')
self.assertEqual(PUBLIC_DER, der)
class PemTest(unittest.TestCase):
"""Test saving and loading PEM keys."""
def test_load_private_key(self):
"""Test loading private PEM files."""
key = rsa.key.PrivateKey.load_pkcs1(PRIVATE_PEM, 'PEM')
expected = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
self.assertEqual(expected, key)
def test_save_private_key(self):
"""Test saving private PEM files."""
key = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
pem = key.save_pkcs1('PEM')
self.assertEqual(CLEAN_PRIVATE_PEM, pem)
def test_load_public_key(self):
"""Test loading public PEM files."""
key = rsa.key.PublicKey.load_pkcs1(PUBLIC_PEM, 'PEM')
expected = rsa.key.PublicKey(3727264081, 65537)
self.assertEqual(expected, key)
def test_save_public_key(self):
"""Test saving public PEM files."""
key = rsa.key.PublicKey(3727264081, 65537)
pem = key.save_pkcs1('PEM')
self.assertEqual(CLEAN_PUBLIC_PEM, pem)
def test_load_from_disk(self):
"""Test loading a PEM file from disk."""
fname = os.path.join(os.path.dirname(__file__), 'private.pem')
with open(fname, mode='rb') as privatefile:
keydata = privatefile.read()
privkey = rsa.key.PrivateKey.load_pkcs1(keydata)
self.assertEqual(15945948582725241569, privkey.p)
self.assertEqual(14617195220284816877, privkey.q)
class PickleTest(unittest.TestCase):
"""Test saving and loading keys by pickling."""
def test_private_key(self):
pk = rsa.key.PrivateKey(3727264081, 65537, 3349121513, 65063, 57287)
pickled = pickle.dumps(pk)
unpickled = pickle.loads(pickled)
self.assertEqual(pk, unpickled)
def test_public_key(self):
pk = rsa.key.PublicKey(3727264081, 65537)
pickled = pickle.dumps(pk)
unpickled = pickle.loads(pickled)
self.assertEqual(pk, unpickled)
| apache-2.0 | 4,086,994,733,238,675,500 | 26.66092 | 83 | 0.653439 | false |
Intel-Corporation/tensorflow | tensorflow/contrib/bigtable/__init__.py | 29 | 1329 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Cloud Bigtable Client for TensorFlow.
This contrib package allows TensorFlow to interface directly with Cloud Bigtable
for high-speed data loading.
@@BigtableClient
@@BigtableTable
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.bigtable.python.ops.bigtable_api import BigtableClient
from tensorflow.contrib.bigtable.python.ops.bigtable_api import BigtableTable
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
'BigtableClient',
'BigtableTable',
]
remove_undocumented(__name__, _allowed_symbols)
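# Editor's sketch (not part of the original file): minimal intended usage of the two
# exported symbols.  The keyword argument names below are assumptions inferred from the
# docstring, not verified against the contrib API, and the project/instance/table names
# are placeholders.
def _example_bigtable_usage():
  client = BigtableClient(project_id="my-project", instance_id="my-instance")
  table = client.table("my-table")  # a BigtableTable wrapping one Cloud Bigtable table
  return table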
| apache-2.0 | -526,737,626,623,297,200 | 33.076923 | 80 | 0.729872 | false |
JackDandy/SickGear | sickbeard/notifiers/nmjv2.py | 2 | 8166 | # Author: Jasper Lanting
# Based on nmj.py by Nico Berlee: http://nico.berlee.nl/
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import time
from xml.dom.minidom import parseString
from .generic import BaseNotifier
import sickbeard
from exceptions_helper import ex
from _23 import etree, urlencode
# noinspection PyUnresolvedReferences
from six.moves import urllib
class NMJv2Notifier(BaseNotifier):
def notify_settings(self, host, db_loc, instance):
"""
Retrieves the NMJv2 database location from Popcorn hour
host: The hostname/IP of the Popcorn Hour server
dbloc: 'local' for PCH internal harddrive. 'network' for PCH network shares
instance: Allows for selection of different DB in case of multiple databases
Returns: True if the settings were retrieved successfully, False otherwise
"""
result = False
try:
base_url = 'http://%s:8008/' % host
req = urllib.request.Request('%s%s%s' % (base_url, 'file_operation?', urlencode(
dict(arg0='list_user_storage_file', arg1='', arg2=instance, arg3=20, arg4='true', arg5='true',
arg6='true', arg7='all', arg8='name_asc', arg9='false', arg10='false'))))
http_response_obj = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager
response = http_response_obj.read()
http_response_obj.close()
xml_data = parseString(response)
time.sleep(300.0 / 1000.0)
for node in xml_data.getElementsByTagName('path'):
xml_tag = node.toxml()
reqdb = urllib.request.Request('%s%s%s' % (base_url, 'metadata_database?', urlencode(
dict(arg0='check_database',
arg1=xml_tag.replace('<path>', '').replace('</path>', '').replace('[=]', '')))))
http_response_obj_db = urllib.request.urlopen(reqdb) # PY2 http_response_obj has no `with` context mgr
responsedb = http_response_obj_db.read()
                http_response_obj_db.close()  # close the per-path response handle (was closing the outer handle)
xml_db = parseString(responsedb)
if '0' == xml_db.getElementsByTagName('returnValue')[0].toxml().replace(
'<returnValue>', '').replace('</returnValue>', ''):
db_path = xml_db.getElementsByTagName('database_path')[0].toxml().replace(
'<database_path>', '').replace('</database_path>', '').replace('[=]', '')
if 'local' == db_loc and db_path.find('localhost') > -1:
sickbeard.NMJv2_HOST = host
sickbeard.NMJv2_DATABASE = db_path
result = True
if 'network' == db_loc and db_path.find('://') > -1:
sickbeard.NMJv2_HOST = host
sickbeard.NMJv2_DATABASE = db_path
result = True
except IOError as e:
self._log_warning(u'Couldn\'t contact popcorn hour on host %s: %s' % (host, ex(e)))
if result:
return '{"message": "Success, NMJ Database found at: %(host)s", "database": "%(database)s"}' % {
"host": host, "database": sickbeard.NMJv2_DATABASE}
return '{"message": "Failed to find NMJ Database at location: %(dbloc)s. ' \
'Is the right location selected and PCH running? ", "database": ""}' % {"dbloc": db_loc}
def _send(self, host=None):
"""
        Sends an NMJ update command to the specified machine
        host: The hostname/IP to send the request to (no port); when omitted,
              the configured sickbeard.NMJv2_HOST is used
        Returns: True if the request succeeded, False otherwise
"""
host = self._choose(host, sickbeard.NMJv2_HOST)
self._log_debug(u'Sending scan command for NMJ ')
# if a host is provided then attempt to open a handle to that URL
try:
base_url = 'http://%s:8008/' % host
url_scandir = '%s%s%s' % (base_url, 'metadata_database?', urlencode(
dict(arg0='update_scandir', arg1=sickbeard.NMJv2_DATABASE, arg2='', arg3='update_all')))
self._log_debug(u'Scan update command sent to host: %s' % host)
url_updatedb = '%s%s%s' % (base_url, 'metadata_database?', urlencode(
dict(arg0='scanner_start', arg1=sickbeard.NMJv2_DATABASE, arg2='background', arg3='')))
self._log_debug(u'Try to mount network drive via url: %s' % host)
prereq = urllib.request.Request(url_scandir)
req = urllib.request.Request(url_updatedb)
http_response_obj1 = urllib.request.urlopen(prereq) # PY2 http_response_obj has no `with` context manager
response1 = http_response_obj1.read()
http_response_obj1.close()
time.sleep(300.0 / 1000.0)
http_response_obj2 = urllib.request.urlopen(req) # PY2 http_response_obj has no `with` context manager
response2 = http_response_obj2.read()
http_response_obj2.close()
except IOError as e:
self._log_warning(u'Couldn\'t contact popcorn hour on host %s: %s' % (host, ex(e)))
return False
try:
et = etree.fromstring(response1)
result1 = et.findtext('returnValue')
except SyntaxError as e:
self._log_error(u'Unable to parse XML returned from the Popcorn Hour: update_scandir, %s' % ex(e))
return False
try:
et = etree.fromstring(response2)
result2 = et.findtext('returnValue')
except SyntaxError as e:
self._log_error(u'Unable to parse XML returned from the Popcorn Hour: scanner_start, %s' % ex(e))
return False
# if the result was a number then consider that an error
error_codes = ['8', '11', '22', '49', '50', '51', '60']
error_messages = ['Invalid parameter(s)/argument(s)',
'Invalid database path',
'Insufficient size',
'Database write error',
'Database read error',
'Open fifo pipe failed',
'Read only file system']
if 0 < int(result1):
index = error_codes.index(result1)
self._log_error(u'Popcorn Hour returned an error: %s' % (error_messages[index]))
return False
elif 0 < int(result2):
index = error_codes.index(result2)
self._log_error(u'Popcorn Hour returned an error: %s' % (error_messages[index]))
return False
self._log(u'NMJv2 started background scan')
return True
def _notify(self, host=None, **kwargs):
result = self._send(host)
return self._choose((('Success, started %s', 'Failed to start %s')[not result] % 'the scan update at "%s"'
% host), result)
def test_notify(self, host):
self._testing = True
return self._notify(host)
# notify_snatch() Not implemented: Start the scanner when snatched does not make sense
# notify_git_update() Not implemented, no reason to start scanner
def notify_download(self, *args, **kwargs):
self._notify()
def notify_subtitle_download(self, *args, **kwargs):
self._notify()
notifier = NMJv2Notifier
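# Editor's sketch (not part of the original SickGear module): how the settings helper and
# the notifier are typically exercised.  The host address and the database instance value
# passed below are assumptions for illustration only.
def _example_nmjv2_usage(host='192.168.1.50'):
    nmj = NMJv2Notifier()
    # Locate the NMJ database on the PCH internal drive (see notify_settings above).
    settings_json = nmj.notify_settings(host, db_loc='local', instance='0')
    # Ask the Popcorn Hour to start a background library scan and report the outcome.
    scan_result = nmj.test_notify(host)
    return settings_json, scan_result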
| gpl-3.0 | -1,808,267,084,548,316,000 | 41.753927 | 119 | 0.585966 | false |
icomfort/anaconda | dispatch.py | 1 | 8130 | #
# dispatch.py: install/upgrade master flow control
#
# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 Red Hat, Inc.
# All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Erik Troan <[email protected]>
#
import string
from types import *
from constants import *
from packages import writeKSConfiguration, turnOnFilesystems
from packages import doPostAction
from packages import copyAnacondaLogs
from packages import firstbootConfiguration
from packages import betaNagScreen
from packages import setupTimezone
from packages import setFileCons
from storage import storageInitialize
from storage import storageComplete
from storage.partitioning import doAutoPartition
from bootloader import writeBootloader, bootloaderSetupChoices
from flags import flags
from upgrade import upgradeMountFilesystems
from upgrade import upgradeSwapSuggestion, upgradeMigrateFind
from upgrade import findRootParts, queryUpgradeContinue
from installmethod import doMethodComplete
from kickstart import runPostScripts
from backend import doPostSelection, doBackendSetup, doBasePackageSelect
from backend import doPreInstall, doPostInstall, doInstall
from backend import writeConfiguration
from packages import doReIPL
import logging
log = logging.getLogger("anaconda")
# These are all of the install steps, in order. Note that upgrade and
# install steps are the same thing! Upgrades skip install steps, while
# installs skip upgrade steps.
#
# items are one of
#
# ( name )
# ( name, Function )
#
# in the second case, the function is called directly from the dispatcher
# All install steps take the anaconda object as their sole argument. This
# gets passed in when we call the function.
installSteps = [
("welcome", ),
("language", ),
("keyboard", ),
("betanag", betaNagScreen, ),
("filtertype", ),
("filter", ),
("storageinit", storageInitialize, ),
("findrootparts", findRootParts, ),
("findinstall", ),
("network", ),
("timezone", ),
("accounts", ),
("setuptime", setupTimezone, ),
("parttype", ),
("cleardiskssel", ),
("autopartitionexecute", doAutoPartition, ),
("partition", ),
("upgrademount", upgradeMountFilesystems, ),
("upgradecontinue", queryUpgradeContinue, ),
("upgradeswapsuggestion", upgradeSwapSuggestion, ),
("addswap", ),
("upgrademigfind", upgradeMigrateFind, ),
("upgrademigratefs", ),
("storagedone", storageComplete, ),
("enablefilesystems", turnOnFilesystems, ),
("upgbootloader", ),
("bootloadersetup", bootloaderSetupChoices, ),
("bootloader", ),
("reposetup", doBackendSetup, ),
("tasksel", ),
("basepkgsel", doBasePackageSelect, ),
("group-selection", ),
("postselection", doPostSelection, ),
("install", ),
("preinstallconfig", doPreInstall, ),
("installpackages", doInstall, ),
("postinstallconfig", doPostInstall, ),
("writeconfig", writeConfiguration, ),
("firstboot", firstbootConfiguration, ),
("instbootloader", writeBootloader, ),
("reipl", doReIPL, ),
("writeksconfig", writeKSConfiguration, ),
("setfilecon", setFileCons, ),
("copylogs", copyAnacondaLogs, ),
("methodcomplete", doMethodComplete, ),
("postscripts", runPostScripts, ),
("dopostaction", doPostAction, ),
("complete", ),
]
class Dispatcher(object):
def gotoPrev(self):
self._setDir(DISPATCH_BACK)
self.moveStep()
def gotoNext(self):
self._setDir(DISPATCH_FORWARD)
self.moveStep()
def canGoBack(self):
# begin with the step before this one. If all steps are skipped,
# we can not go backwards from this screen
i = self.step - 1
while i >= self.firstStep:
if not self.stepIsDirect(i) and not self.skipSteps.has_key(installSteps[i][0]):
return True
i = i - 1
return False
def setStepList(self, *steps):
# only remove non-permanently skipped steps from our skip list
for step, state in self.skipSteps.items():
if state == 1:
del self.skipSteps[step]
stepExists = {}
for step in installSteps:
name = step[0]
if not name in steps:
self.skipSteps[name] = 1
stepExists[name] = 1
for name in steps:
if not stepExists.has_key(name):
#XXX: hack for yum support
#raise KeyError, ("step %s does not exist" % name)
log.warning("step %s does not exist", name)
def stepInSkipList(self, step):
if type(step) == type(1):
step = installSteps[step][0]
return self.skipSteps.has_key(step)
def skipStep(self, stepToSkip, skip = 1, permanent = 0):
for step in installSteps:
name = step[0]
if name == stepToSkip:
if skip:
if permanent:
self.skipSteps[name] = 2
elif not self.skipSteps.has_key(name):
self.skipSteps[name] = 1
elif self.skipSteps.has_key(name):
# if marked as permanent then dont change
if self.skipSteps[name] != 2:
del self.skipSteps[name]
return
#raise KeyError, ("unknown step %s" % stepToSkip)
log.warning("step %s does not exist", name)
def stepIsDirect(self, step):
"""Takes a step number"""
if len(installSteps[step]) == 2:
return True
else:
return False
def moveStep(self):
if self.step == None:
self.step = self.firstStep
else:
if self.step >= len(installSteps):
return None
log.info("leaving (%d) step %s" %(self._getDir(), installSteps[self.step][0]))
self.step = self.step + self._getDir()
if self.step >= len(installSteps):
return None
while self.step >= self.firstStep and self.step < len(installSteps) \
and (self.stepInSkipList(self.step) or self.stepIsDirect(self.step)):
if self.stepIsDirect(self.step) and not self.stepInSkipList(self.step):
(stepName, stepFunc) = installSteps[self.step]
log.info("moving (%d) to step %s" %(self._getDir(), stepName))
log.debug("%s is a direct step" %(stepName,))
rc = stepFunc(self.anaconda)
if rc in [DISPATCH_BACK, DISPATCH_FORWARD]:
self._setDir(rc)
log.info("leaving (%d) step %s" %(self._getDir(), stepName))
# if anything else, leave self.dir alone
self.step = self.step + self._getDir()
if self.step == len(installSteps):
return None
if (self.step < 0):
# pick the first step not in the skip list
self.step = 0
while self.skipSteps.has_key(installSteps[self.step][0]):
self.step = self.step + 1
elif self.step >= len(installSteps):
self.step = len(installSteps) - 1
while self.skipSteps.has_key(installSteps[self.step][0]):
self.step = self.step - 1
log.info("moving (%d) to step %s" %(self._getDir(), installSteps[self.step][0]))
def currentStep(self):
if self.step == None:
self.gotoNext()
elif self.step >= len(installSteps):
return (None, None)
stepInfo = installSteps[self.step]
step = stepInfo[0]
return (step, self.anaconda)
def __init__(self, anaconda):
self.anaconda = anaconda
self.anaconda.dir = DISPATCH_FORWARD
self.step = None
self.skipSteps = {}
self.firstStep = 0
def _getDir(self):
return self.anaconda.dir
def _setDir(self, dir):
self.anaconda.dir = dir
dir = property(_getDir,_setDir)
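# Editor's sketch (not part of the original Anaconda module): the loop a frontend would
# run to drive the Dispatcher, tying together the step-list structure described in the
# comment block above.  The anaconda object and the step names passed to setStepList are
# assumptions for illustration.
def _example_dispatch_loop(anaconda):
    dispatch = Dispatcher(anaconda)
    dispatch.setStepList("welcome", "language", "keyboard", "complete")
    while True:
        step, _anaconda = dispatch.currentStep()
        if step is None:
            break
        # A real interface would render the screen for `step` here, then decide whether
        # to call gotoNext() or gotoPrev() based on user input.
        dispatch.gotoNext()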
| gpl-2.0 | 5,556,857,877,840,930,000 | 31.007874 | 91 | 0.655105 | false |
animekita/selvbetjening | selvbetjening/sadmin2/views/users.py | 1 | 3576 |
"""
See events.py for rules regarding sadmin2 views
"""
import datetime
from django.conf import settings
from django.contrib.auth.models import Group
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from django.shortcuts import render, get_object_or_404
from selvbetjening.core.members.models import UserLocation
from selvbetjening.core.user.models import SUser
from selvbetjening.sadmin2.forms import UserForm, GroupForm
from selvbetjening.sadmin2.decorators import sadmin_prerequisites
from selvbetjening.sadmin2 import menu
from generic import search_view, generic_create_view
from selvbetjening.sadmin2.graphs.timelines import AgeTimeGraph, AbsoluteTimeGraph
from selvbetjening.sadmin2.graphs.units import UserAgeUnit, UserUnit
from selvbetjening.sadmin2.views.reports import insecure_reports_address
@sadmin_prerequisites
def users_list(request):
queryset = SUser.objects.all()
columns = ('pk', 'username', 'first_name', 'last_name', 'email')
context = {
'sadmin2_menu_main_active': 'users',
'sadmin2_breadcrumbs_active': 'users',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_users,
'sadmin2_menu_tab_active': 'users',
}
return search_view(request,
queryset,
'sadmin2/users/list.html',
'sadmin2/users/list_inner.html',
search_columns=columns,
search_order='-pk',
context=context)
@sadmin_prerequisites
def users_create(request):
context = {
'sadmin2_menu_main_active': 'users',
'sadmin2_breadcrumbs_active': 'users_create',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_users,
'sadmin2_menu_tab_active': 'users'
}
return generic_create_view(
request,
UserForm,
redirect_success_url_callback=lambda instance: reverse('sadmin2:user', kwargs={'user_pk': instance.pk}),
message_success=_('User created'),
context=context,
template='sadmin2/generic/form.html'
)
@sadmin_prerequisites
def users_reports_users(request):
graph = AbsoluteTimeGraph(AbsoluteTimeGraph.SCOPE.month,
UserUnit('Users'))
return render(request, 'sadmin2/graphs/linegraph.html', {
'sadmin2_menu_main_active': 'users',
'sadmin2_breadcrumbs_active': 'users_reports_users',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_users,
'sadmin2_menu_tab_active': 'reports',
'title': _('Users'),
'graph': graph
})
@sadmin_prerequisites
def users_reports_age(request):
graph = AgeTimeGraph(AbsoluteTimeGraph.SCOPE.year,
UserAgeUnit('Users'),
today=datetime.datetime.today())
return render(request, 'sadmin2/graphs/linegraph.html', {
'sadmin2_menu_main_active': 'users',
'sadmin2_breadcrumbs_active': 'users_reports_age',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_users,
'sadmin2_menu_tab_active': 'reports',
'title': _('User age'),
'graph': graph
})
@sadmin_prerequisites
def users_reports_address(request):
context = {
'sadmin2_menu_main_active': 'users',
'sadmin2_breadcrumbs_active': 'users_reports_address',
'sadmin2_menu_tab': menu.sadmin2_menu_tab_users,
'sadmin2_menu_tab_active': 'reports'
}
return insecure_reports_address(
request,
UserLocation.objects.all(),
extra_context=context
)
| mit | -2,890,503,589,466,800,600 | 29.827586 | 112 | 0.649888 | false |
NCI-GDC/gdcdatamodel | test/test_gdc_postgres_admin.py | 1 | 6451 | # -*- coding: utf-8 -*-
"""
Tests for gdcdatamodel.gdc_postgres_admin module
"""
import logging
import unittest
from psqlgraph import (
Edge,
Node,
PsqlGraphDriver,
)
from sqlalchemy.exc import ProgrammingError
from gdcdatamodel import gdc_postgres_admin as pgadmin
from gdcdatamodel import models
logging.basicConfig()
class TestGDCPostgresAdmin(unittest.TestCase):
logger = logging.getLogger('TestGDCPostgresAdmin')
logger.setLevel(logging.INFO)
host = 'localhost'
user = 'postgres'
database = 'automated_test'
base_args = [
'-H', host,
'-U', user,
'-D', database,
]
g = PsqlGraphDriver(host, user, '', database)
root_con_str = "postgres://{user}:{pwd}@{host}/{db}".format(
user=user, host=host, pwd='', db=database)
engine = pgadmin.create_engine(root_con_str)
@classmethod
def tearDownClass(cls):
"""Recreate the database for tests that follow.
"""
cls.create_all_tables()
# Re-grant permissions to test user
for scls in Node.get_subclasses() + Edge.get_subclasses():
statment = ("GRANT ALL PRIVILEGES ON TABLE {} TO test"
.format(scls.__tablename__))
cls.engine.execute('BEGIN; %s; COMMIT;' % statment)
@classmethod
def drop_all_tables(cls):
for scls in Node.get_subclasses():
try:
cls.engine.execute("DROP TABLE {} CASCADE"
.format(scls.__tablename__))
except Exception as e:
cls.logger.warning(e)
@classmethod
def create_all_tables(cls):
parser = pgadmin.get_parser()
args = parser.parse_args([
'graph-create', '--delay', '1', '--retries', '0'
] + cls.base_args)
pgadmin.main(args)
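    # Editor's note (illustrative, not part of the original test): the arguments built in
    # create_all_tables correspond roughly to invoking the admin tool by hand, e.g.
    #   gdc_postgres_admin graph-create -H localhost -U postgres -D automated_test \
    #       --delay 1 --retries 0
    # The entry-point name is an assumption; only the flags are taken from the code above.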
@classmethod
def drop_a_table(cls):
cls.engine.execute('DROP TABLE edge_clinicaldescribescase')
cls.engine.execute('DROP TABLE node_clinical')
def startTestRun(self):
self.drop_all_tables()
def setUp(self):
self.drop_all_tables()
def test_args(self):
parser = pgadmin.get_parser()
parser.parse_args(['graph-create'] + self.base_args)
def test_create_single(self):
"""Test simple table creation"""
pgadmin.main(pgadmin.get_parser().parse_args([
'graph-create', '--delay', '1', '--retries', '0'
] + self.base_args))
self.engine.execute('SELECT * from node_case')
def test_create_double(self):
"""Test idempotency of table creation"""
pgadmin.main(pgadmin.get_parser().parse_args([
'graph-create', '--delay', '1', '--retries', '0'
] + self.base_args))
self.engine.execute('SELECT * from node_case')
def test_priv_grant_read(self):
"""Test ability to grant read but not write privs"""
self.create_all_tables()
try:
self.engine.execute("CREATE USER pytest WITH PASSWORD 'pyt3st'")
self.engine.execute("GRANT USAGE ON SCHEMA public TO pytest")
g = PsqlGraphDriver(self.host, 'pytest', 'pyt3st', self.database)
            #: If this fails, this test (not the code) is wrong!
with self.assertRaises(ProgrammingError):
with g.session_scope():
g.nodes().count()
pgadmin.main(pgadmin.get_parser().parse_args([
'graph-grant', '--read=pytest',
] + self.base_args))
with g.session_scope():
g.nodes().count()
with self.assertRaises(ProgrammingError):
with g.session_scope() as s:
s.merge(models.Case('1'))
finally:
self.engine.execute("DROP OWNED BY pytest; DROP USER pytest")
def test_priv_grant_write(self):
"""Test ability to grant read/write privs"""
self.create_all_tables()
try:
self.engine.execute("CREATE USER pytest WITH PASSWORD 'pyt3st'")
self.engine.execute("GRANT USAGE ON SCHEMA public TO pytest")
g = PsqlGraphDriver(self.host, 'pytest', 'pyt3st', self.database)
pgadmin.main(pgadmin.get_parser().parse_args([
'graph-grant', '--write=pytest',
] + self.base_args))
with g.session_scope() as s:
g.nodes().count()
s.merge(models.Case('1'))
finally:
self.engine.execute("DROP OWNED BY pytest; DROP USER pytest")
def test_priv_revoke_read(self):
"""Test ability to revoke read privs"""
self.create_all_tables()
try:
self.engine.execute("CREATE USER pytest WITH PASSWORD 'pyt3st'")
self.engine.execute("GRANT USAGE ON SCHEMA public TO pytest")
g = PsqlGraphDriver(self.host, 'pytest', 'pyt3st', self.database)
pgadmin.main(pgadmin.get_parser().parse_args([
'graph-grant', '--read=pytest',
] + self.base_args))
pgadmin.main(pgadmin.get_parser().parse_args([
'graph-revoke', '--read=pytest',
] + self.base_args))
with self.assertRaises(ProgrammingError):
with g.session_scope() as s:
g.nodes().count()
s.merge(models.Case('1'))
finally:
self.engine.execute("DROP OWNED BY pytest; DROP USER pytest")
def test_priv_revoke_write(self):
"""Test ability to revoke read/write privs"""
self.create_all_tables()
try:
self.engine.execute("CREATE USER pytest WITH PASSWORD 'pyt3st'")
self.engine.execute("GRANT USAGE ON SCHEMA public TO pytest")
g = PsqlGraphDriver(self.host, 'pytest', 'pyt3st', self.database)
pgadmin.main(pgadmin.get_parser().parse_args([
'graph-grant', '--write=pytest',
] + self.base_args))
pgadmin.main(pgadmin.get_parser().parse_args([
'graph-revoke', '--write=pytest',
] + self.base_args))
with g.session_scope() as s:
g.nodes().count()
with self.assertRaises(ProgrammingError):
with g.session_scope() as s:
s.merge(models.Case('1'))
finally:
self.engine.execute("DROP OWNED BY pytest; DROP USER pytest")
| apache-2.0 | 199,520,789,973,923,520 | 29.719048 | 77 | 0.563789 | false |
tensorflow/datasets | tensorflow_datasets/testing/fake_data_generation/open_images.py | 1 | 6353 | # coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Generate open_images like files, smaller and with random data.
"""
import os
import random
import tarfile
from absl import app
from absl import flags
from tensorflow_datasets.core.features import ClassLabel
from tensorflow_datasets.core.utils import py_utils
from tensorflow_datasets.object_detection import open_images
from tensorflow_datasets.testing import fake_data_utils
flags.DEFINE_string('tfds_dir', py_utils.tfds_dir(),
'Path to tensorflow_datasets directory')
FLAGS = flags.FLAGS
def _output_dir():
return os.path.join(FLAGS.tfds_dir, 'testing', 'test_data', 'fake_examples',
'open_images_v4')
def _get_image_ids(images_number, prefix=None):
"""Returns the names (as string) of images."""
if prefix:
get_id = lambda: '%s%0.15X' % (prefix, random.getrandbits(56))
else:
get_id = lambda: '%0.16X' % random.getrandbits(64)
return sorted([get_id().lower() for unused_i in range(images_number)])
def _write_tar(path, split_name, image_ids, prefix=None):
"""Writes tar file with images to given path.
Args:
path: sting, path to tar to be written.
split_name: string. eg: 'train', 'validation', 'test'.
image_ids: list of str, ids of the images to add to tar file.
prefix: one of [0-9a-f], or None.
"""
if prefix is not None:
split_name = '%s_%s' % (split_name, prefix)
with tarfile.open(path, mode='w') as tar:
for i, image_id in enumerate(image_ids):
fname = '%s/%s.jpg' % (split_name, image_id)
# Note: Generate two large images with more than 300k pixels.
kwargs = dict(height=600, width=600) if i < 2 else dict()
tar.add(fake_data_utils.get_random_jpeg(**kwargs), arcname=fname)
def _write_image_level_labels(fname, image_ids, machine=False):
"""Writes CSV with 0-10 labels per image."""
  lines = ['ImageID,Source,LabelName,Confidence']
all_class_label = ClassLabel(
names_file=py_utils.tfds_path(
os.path.join('object_detection', 'open_images_classes_all.txt')))
trainable_class_label = ClassLabel(
names_file=py_utils.tfds_path(
os.path.join('object_detection',
'open_images_classes_trainable.txt')))
for i, image_id in enumerate(image_ids):
if i < 1:
# Ensure that at least some image contains trainable classes.
labels = random.sample(trainable_class_label.names, random.randint(0, 10))
else:
labels = random.sample(all_class_label.names, random.randint(0, 10))
for label in labels:
source = random.choice(open_images.IMAGE_LEVEL_SOURCES)
confidence = random.choice((0, 1))
if machine:
confidence = '%.1f' % (random.randint(0, 10) / 10.)
else:
confidence = random.choice((0, 1))
lines.append('%s,%s,%s,%s' % (image_id, source, label, confidence))
path = os.path.join(_output_dir(), fname)
with open(path, 'w') as csv_f:
csv_f.write('\n'.join(lines))
def _write_bbox_labels(fname, image_ids):
"""Writes CSV with 0-10 labels per image."""
lines = [
'ImageID,Source,LabelName,Confidence,XMin,XMax,YMin,YMax,IsOccluded,'
'IsTruncated,IsGroupOf,IsDepiction,IsInside'
]
boxable_class_label = ClassLabel(
names_file=py_utils.tfds_path(
os.path.join('object_detection', 'open_images_classes_boxable.txt')))
for image_id in image_ids:
labels = random.sample(boxable_class_label.names, random.randint(0, 10))
for label in labels:
source = random.choice(open_images.BBOX_SOURCES)
xmin = random.uniform(0, 1)
xmax = random.uniform(xmin, 1)
ymin = random.uniform(0, 1)
ymax = random.uniform(ymin, 1)
p1, p2, p3, p4, p5 = [random.randint(-1, 1) for unused_i in range(5)]
lines.append(
'%s,%s,%s,1,%.6f,%.6f,%.6f,%.6f,%s,%s,%s,%s,%s' %
(image_id, source, label, xmin, xmax, ymin, ymax, p1, p2, p3, p4, p5))
path = os.path.join(_output_dir(), fname)
with open(path, 'w') as csv_f:
csv_f.write('\n'.join(lines))
def _generate_train_files():
"""Generate train files (archives and CSV files)."""
all_image_ids = []
for prefix in '0123456789abcdef':
path = os.path.join(_output_dir(), 's3-tar_train_sha1_%s.tar' % prefix)
image_ids = _get_image_ids(images_number=32, prefix=prefix)
all_image_ids.extend(image_ids)
_write_tar(path, 'train', image_ids, prefix)
_write_image_level_labels('train-human-labels.csv', all_image_ids)
_write_image_level_labels(
'train-machine-labels.csv', all_image_ids, machine=True)
_write_bbox_labels('train-annotations-bbox.csv', all_image_ids)
def _generate_test_files():
"""Generate test files (archive and CSV files)."""
path = os.path.join(_output_dir(), 's3-tar_test_sha2.tar')
image_ids = _get_image_ids(images_number=36)
_write_tar(path, 'test', image_ids)
_write_image_level_labels('test-human-labels.csv', image_ids)
_write_image_level_labels('test-machine-labels.csv', image_ids, machine=True)
_write_bbox_labels('test-annotations-bbox.csv', image_ids)
def _generate_validation_files():
"""Generate validation files (archive and CSV files)."""
path = os.path.join(_output_dir(), 's3-tar_validation_sha3.tar')
image_ids = _get_image_ids(images_number=12)
_write_tar(path, 'test', image_ids)
_write_image_level_labels('validation-human-labels.csv', image_ids)
_write_image_level_labels(
'validation-machine-labels.csv', image_ids, machine=True)
_write_bbox_labels('validation-annotations-bbox.csv', image_ids)
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
_generate_train_files()
_generate_test_files()
_generate_validation_files()
if __name__ == '__main__':
app.run(main)
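# Editor's note (illustrative, not part of the original generator): the script is run
# directly and writes its fake archives and CSV files under
# <tfds_dir>/testing/test_data/fake_examples/open_images_v4, e.g.
#   python open_images.py --tfds_dir=/path/to/tensorflow_datasets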
| apache-2.0 | -5,054,665,528,920,862,000 | 36.591716 | 80 | 0.670549 | false |
ProjectSWGCore/NGECore2 | scripts/object/tangible/wearables/bracelet/item_bracelet_r_set_officer_utility_a_01_01.py | 2 | 1231 | import sys
def setup(core, object):
object.setStfFilename('static_item_n')
object.setStfName('item_bracelet_r_set_officer_utility_a_01_01')
object.setDetailFilename('static_item_d')
object.setDetailName('item_bracelet_r_set_officer_utility_a_01_01')
object.setStringAttribute('class_required', 'Officer')
object.setIntAttribute('required_combat_level', 85)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:cooldown_percent_of_group_buff', 4)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:fast_attack_line_of_heal', 1)
	# object.setIntAttribute('cat_skill_mod_bonus.@stat_n:XXX', 1) Rallypoint Duration Increase / couldn't find the effect yet
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_action_line_of_group_buff', 2)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_cooldown_line_of_purge', 3)
object.setStringAttribute('@set_bonus:piece_bonus_count_3', '@set_bonus:set_bonus_officer_utility_a_1')
object.setStringAttribute('@set_bonus:piece_bonus_count_4', '@set_bonus:set_bonus_officer_utility_a_2')
object.setStringAttribute('@set_bonus:piece_bonus_count_5', '@set_bonus:set_bonus_officer_utility_a_3')
object.setAttachment('setBonus', 'set_bonus_officer_utility_a')
return | lgpl-3.0 | 7,855,938,654,871,308,000 | 63.842105 | 122 | 0.77173 | false |
redhat-cip/horizon | openstack_dashboard/dashboards/project/instances/workflows/update_instance.py | 29 | 5732 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.utils import filters
INDEX_URL = "horizon:projects:instances:index"
ADD_USER_URL = "horizon:projects:instances:create_user"
INSTANCE_SEC_GROUP_SLUG = "update_security_groups"
class UpdateInstanceSecurityGroupsAction(workflows.MembershipAction):
def __init__(self, request, *args, **kwargs):
super(UpdateInstanceSecurityGroupsAction, self).__init__(request,
*args,
**kwargs)
err_msg = _('Unable to retrieve security group list. '
'Please try again later.')
context = args[0]
instance_id = context.get('instance_id', '')
default_role_name = self.get_default_role_field_name()
self.fields[default_role_name] = forms.CharField(required=False)
self.fields[default_role_name].initial = 'member'
# Get list of available security groups
all_groups = []
try:
all_groups = api.network.security_group_list(request)
except Exception:
exceptions.handle(request, err_msg)
groups_list = [(group.id, group.name) for group in all_groups]
instance_groups = []
try:
instance_groups = api.network.server_security_groups(request,
instance_id)
except Exception:
exceptions.handle(request, err_msg)
field_name = self.get_member_field_name('member')
self.fields[field_name] = forms.MultipleChoiceField(required=False)
self.fields[field_name].choices = groups_list
self.fields[field_name].initial = [group.id
for group in instance_groups]
def handle(self, request, data):
instance_id = data['instance_id']
wanted_groups = map(filters.get_int_or_uuid, data['wanted_groups'])
try:
api.network.server_update_security_groups(request, instance_id,
wanted_groups)
except Exception as e:
exceptions.handle(request, str(e))
return False
return True
class Meta(object):
name = _("Security Groups")
slug = INSTANCE_SEC_GROUP_SLUG
class UpdateInstanceSecurityGroups(workflows.UpdateMembersStep):
action_class = UpdateInstanceSecurityGroupsAction
help_text = _("Add and remove security groups to this instance "
"from the list of available security groups.")
available_list_title = _("All Security Groups")
members_list_title = _("Instance Security Groups")
no_available_text = _("No security groups found.")
no_members_text = _("No security groups enabled.")
show_roles = False
depends_on = ("instance_id",)
contributes = ("wanted_groups",)
def contribute(self, data, context):
request = self.workflow.request
if data:
field_name = self.get_member_field_name('member')
context["wanted_groups"] = request.POST.getlist(field_name)
return context
class UpdateInstanceInfoAction(workflows.Action):
name = forms.CharField(label=_("Name"),
max_length=255)
def handle(self, request, data):
try:
api.nova.server_update(request,
data['instance_id'],
data['name'])
except Exception:
exceptions.handle(request, ignore=True)
return False
return True
class Meta(object):
name = _("Information")
slug = 'instance_info'
help_text = _("Edit the instance details.")
class UpdateInstanceInfo(workflows.Step):
action_class = UpdateInstanceInfoAction
depends_on = ("instance_id",)
contributes = ("name",)
class UpdateInstance(workflows.Workflow):
slug = "update_instance"
name = _("Edit Instance")
finalize_button_name = _("Save")
success_message = _('Modified instance "%s".')
failure_message = _('Unable to modify instance "%s".')
success_url = "horizon:project:instances:index"
default_steps = (UpdateInstanceInfo,
UpdateInstanceSecurityGroups)
def format_status_message(self, message):
return message % self.context.get('name', 'unknown instance')
# NOTE(kspear): nova doesn't support instance security group management
# by an admin. This isn't really the place for this code,
# but the other ways of special-casing this are even messier.
class AdminUpdateInstance(UpdateInstance):
success_url = "horizon:admin:instances:index"
default_steps = (UpdateInstanceInfo,)
| apache-2.0 | 1,865,101,745,255,435,800 | 37.469799 | 78 | 0.624738 | false |
RyanSkraba/beam | sdks/python/apache_beam/coders/coders_test.py | 1 | 6343 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import base64
import logging
import unittest
from builtins import object
from apache_beam.coders import proto2_coder_test_messages_pb2 as test_message
from apache_beam.coders import coders
from apache_beam.coders.avro_record import AvroRecord
from apache_beam.coders.typecoders import registry as coders_registry
class PickleCoderTest(unittest.TestCase):
def test_basics(self):
v = ('a' * 10, 'b' * 90)
pickler = coders.PickleCoder()
self.assertEqual(v, pickler.decode(pickler.encode(v)))
pickler = coders.Base64PickleCoder()
self.assertEqual(v, pickler.decode(pickler.encode(v)))
self.assertEqual(
coders.Base64PickleCoder().encode(v),
base64.b64encode(coders.PickleCoder().encode(v)))
def test_equality(self):
self.assertEqual(coders.PickleCoder(), coders.PickleCoder())
self.assertEqual(coders.Base64PickleCoder(), coders.Base64PickleCoder())
self.assertNotEqual(coders.Base64PickleCoder(), coders.PickleCoder())
self.assertNotEqual(coders.Base64PickleCoder(), object())
class CodersTest(unittest.TestCase):
def test_str_utf8_coder(self):
real_coder = coders_registry.get_coder(bytes)
expected_coder = coders.BytesCoder()
self.assertEqual(
real_coder.encode(b'abc'), expected_coder.encode(b'abc'))
self.assertEqual(b'abc', real_coder.decode(real_coder.encode(b'abc')))
# The test proto message file was generated by running the following:
#
# `cd <beam repo>`
# `cp sdks/java/core/src/proto/proto2_coder_test_message.proto
# sdks/python/apache_beam/coders`
# `cd sdks/python`
# `protoc apache_beam/coders/proto2_coder_test_messages.proto
# --python_out=apache_beam/coders
# `rm apache_beam/coders/proto2_coder_test_message.proto`
#
# Note: The protoc version should match the protobuf library version specified
# in setup.py.
#
# TODO(vikasrk): The proto file should be placed in a common directory
# that can be shared between java and python.
class ProtoCoderTest(unittest.TestCase):
def test_proto_coder(self):
ma = test_message.MessageA()
mb = ma.field2.add()
mb.field1 = True
ma.field1 = u'hello world'
expected_coder = coders.ProtoCoder(ma.__class__)
real_coder = coders_registry.get_coder(ma.__class__)
self.assertEqual(expected_coder, real_coder)
self.assertEqual(real_coder.encode(ma), expected_coder.encode(ma))
self.assertEqual(ma, real_coder.decode(real_coder.encode(ma)))
class DeterministicProtoCoderTest(unittest.TestCase):
def test_deterministic_proto_coder(self):
ma = test_message.MessageA()
mb = ma.field2.add()
mb.field1 = True
ma.field1 = u'hello world'
expected_coder = coders.DeterministicProtoCoder(ma.__class__)
real_coder = (coders_registry.get_coder(ma.__class__)
.as_deterministic_coder(step_label='unused'))
self.assertTrue(real_coder.is_deterministic())
self.assertEqual(expected_coder, real_coder)
self.assertEqual(real_coder.encode(ma), expected_coder.encode(ma))
self.assertEqual(ma, real_coder.decode(real_coder.encode(ma)))
def test_deterministic_proto_coder_determinism(self):
for _ in range(10):
keys = list(range(20))
mm_forward = test_message.MessageWithMap()
for key in keys:
mm_forward.field1[str(key)].field1 = str(key)
mm_reverse = test_message.MessageWithMap()
for key in reversed(keys):
mm_reverse.field1[str(key)].field1 = str(key)
coder = coders.DeterministicProtoCoder(mm_forward.__class__)
self.assertEqual(coder.encode(mm_forward), coder.encode(mm_reverse))
class AvroTestCoder(coders.AvroGenericCoder):
SCHEMA = """
{
"type": "record", "name": "testrecord",
"fields": [
{"name": "name", "type": "string"},
{"name": "age", "type": "int"}
]
}
"""
def __init__(self):
super(AvroTestCoder, self).__init__(self.SCHEMA)
class AvroTestRecord(AvroRecord):
pass
coders_registry.register_coder(AvroTestRecord, AvroTestCoder)
class AvroCoderTest(unittest.TestCase):
def test_avro_record_coder(self):
real_coder = coders_registry.get_coder(AvroTestRecord)
expected_coder = AvroTestCoder()
self.assertEqual(
real_coder.encode(
AvroTestRecord({"name": "Daenerys targaryen", "age": 23})),
expected_coder.encode(
AvroTestRecord({"name": "Daenerys targaryen", "age": 23}))
)
self.assertEqual(
AvroTestRecord({"name": "Jon Snow", "age": 23}),
real_coder.decode(
real_coder.encode(
AvroTestRecord({"name": "Jon Snow", "age": 23}))
)
)
class DummyClass(object):
"""A class with no registered coder."""
def __init__(self):
pass
def __eq__(self, other):
if isinstance(other, self.__class__):
return True
return False
def __ne__(self, other):
# TODO(BEAM-5949): Needed for Python 2 compatibility.
return not self == other
def __hash__(self):
return hash(type(self))
class FallbackCoderTest(unittest.TestCase):
def test_default_fallback_path(self):
"""Test fallback path picks a matching coder if no coder is registered."""
coder = coders_registry.get_coder(DummyClass)
# No matching coder, so picks the last fallback coder which is a
# FastPrimitivesCoder.
self.assertEqual(coder, coders.FastPrimitivesCoder())
self.assertEqual(DummyClass(), coder.decode(coder.encode(DummyClass())))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
| apache-2.0 | -2,264,454,649,619,296,000 | 32.384211 | 78 | 0.698408 | false |
keflavich/pyregion | setup.py | 1 | 4190 | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import glob
import os
import sys
import ah_bootstrap
from setuptools import setup
# A dirty hack to get around some early import/configurations ambiguities
if sys.version_info[0] >= 3:
import builtins
else:
import __builtin__ as builtins
builtins._ASTROPY_SETUP_ = True
from astropy_helpers.setup_helpers import (register_commands, get_debug_option,
get_package_info)
from astropy_helpers.git_helpers import get_git_devstr
from astropy_helpers.version_helpers import generate_version_py
# Get some values from the setup.cfg
try:
from ConfigParser import ConfigParser
except ImportError:
from configparser import ConfigParser
conf = ConfigParser()
conf.read(['setup.cfg'])
metadata = dict(conf.items('metadata'))
PACKAGENAME = metadata.get('package_name', 'packagename')
DESCRIPTION = metadata.get('description', 'Astropy affiliated package')
AUTHOR = metadata.get('author', '')
AUTHOR_EMAIL = metadata.get('author_email', '')
LICENSE = metadata.get('license', 'unknown')
URL = metadata.get('url', 'http://astropy.org')
# Get the long description from the package's docstring
__import__(PACKAGENAME)
package = sys.modules[PACKAGENAME]
LONG_DESCRIPTION = package.__doc__
# Store the package name in a built-in variable so it's easy
# to get from other parts of the setup infrastructure
builtins._ASTROPY_PACKAGE_NAME_ = PACKAGENAME
# VERSION should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386)
VERSION = '2.0.dev'
# Indicates if this version is a release version
RELEASE = 'dev' not in VERSION
if not RELEASE:
VERSION += get_git_devstr(False)
# Populate the dict of setup command overrides; this should be done before
# invoking any other functionality from distutils since it can potentially
# modify distutils' behavior.
cmdclassd = register_commands(PACKAGENAME, VERSION, RELEASE)
# Freeze build information in version.py
generate_version_py(PACKAGENAME, VERSION, RELEASE,
get_debug_option(PACKAGENAME))
# Treat everything in scripts except README.rst as a script to be installed
scripts = [fname for fname in glob.glob(os.path.join('scripts', '*'))
if os.path.basename(fname) != 'README.rst']
# Get configuration information from all of the various subpackages.
# See the docstring for setup_helpers.update_package_files for more
# details.
package_info = get_package_info()
# Add the project-global data
package_info['package_data'].setdefault(PACKAGENAME, [])
package_info['package_data'][PACKAGENAME].append('data/*')
# Include all .c files, recursively, including those generated by
# Cython, since we can not do this in MANIFEST.in with a "dynamic"
# directory name.
c_files = []
for root, dirs, files in os.walk(PACKAGENAME):
for filename in files:
if filename.endswith('.c'):
c_files.append(
os.path.join(
os.path.relpath(root, PACKAGENAME), filename))
package_info['package_data'][PACKAGENAME].extend(c_files)
install_requires = [
'pyparsing>=2.0',
'numpy',
'Cython',
'astropy>=1.0',
]
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Programming Language :: Cython',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
]
setup(
name=PACKAGENAME,
version=VERSION,
description=DESCRIPTION,
scripts=scripts,
install_requires=install_requires,
provides=[PACKAGENAME],
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license=LICENSE,
url=URL,
long_description=LONG_DESCRIPTION,
cmdclass=cmdclassd,
zip_safe=False,
use_2to3=False,
classifiers=classifiers,
**package_info
)
| mit | 9,046,240,946,080,474,000 | 30.742424 | 79 | 0.702625 | false |
chokribr/invenioold | modules/miscutil/lib/upgrades/invenio_2012_10_31_WebAuthorProfile_bibformat_dependency_update.py | 17 | 1778 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import warnings
from invenio.dbquery import run_sql
from invenio.textutils import wait_for_user
depends_on = ['invenio_release_1_1_0']
def info():
return "New hidden format added to serve as helper in generating WebAuthorProfile pages"
def do_upgrade():
""" Implement your upgrades here """
present = run_sql("select * from format where code='wapdat'")
if not present:
run_sql("INSERT INTO format (name,code,description,content_type,visibility) VALUES ('WebAuthorProfile data helper','wapdat', 'cPickled dicts', 'text', 0)")
def estimate():
""" Estimate running time of upgrade in seconds (optional). """
return 1
def pre_upgrade():
""" Run pre-upgrade checks (optional). """
# Example of raising errors:
# raise RuntimeError("Description of error 1", "Description of error 2")
def post_upgrade():
""" Run post-upgrade checks (optional). """
# Example of issuing warnings:
# warnings.warn("A continuable error occurred")
| gpl-2.0 | 7,223,307,451,009,441,000 | 36.041667 | 163 | 0.708099 | false |
fnp/redakcja | src/catalogue/migrations/0028_auto_20201102_1109.py | 1 | 1140 | # Generated by Django 3.0.4 on 2020-11-02 11:09
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('catalogue', '0027_auto_20201027_1501'),
]
operations = [
migrations.CreateModel(
name='CollectionCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='children', to='catalogue.CollectionCategory')),
],
options={
'verbose_name': 'collection category',
'verbose_name_plural': 'collection categories',
},
),
migrations.AddField(
model_name='collection',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='catalogue.CollectionCategory'),
),
]
| agpl-3.0 | 1,853,706,410,826,060,300 | 35.774194 | 175 | 0.599123 | false |
gymnasium/edx-platform | common/djangoapps/student/management/commands/bulk_change_enrollment.py | 19 | 4548 | """Management command to change many user enrollments at once."""
import logging
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from six import text_type
from course_modes.models import CourseMode
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from student.models import CourseEnrollment
from xmodule.modulestore.django import modulestore
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class Command(BaseCommand):
"""
Management command to change many user enrollments at once.
"""
help = """
Change the enrollment status for all users enrolled in a
particular mode for a course. Similar to the change_enrollment
script, but more useful for bulk moves.
Example:
Change enrollment for all audit users to honor in the given course.
$ ... bulk_change_enrollment -c course-v1:SomeCourse+SomethingX+2016 --from_mode audit --to_mode honor --commit
Without the --commit option, the command will have no effect.
"""
def add_arguments(self, parser):
group = parser.add_mutually_exclusive_group()
group.add_argument(
'-c', '--course',
help='The course to change enrollments in')
group.add_argument(
'-o', '--org',
help='All courses belonging to this org will be selected for changing the enrollments')
parser.add_argument(
'-f', '--from_mode',
required=True,
help='Move from this enrollment mode')
parser.add_argument(
'-t', '--to_mode',
required=True,
help='Move to this enrollment mode')
parser.add_argument(
'--commit',
action='store_true',
help='Save the changes, without this flag only a dry run will be performed and nothing will be changed')
def handle(self, *args, **options):
course_id = options['course']
org = options['org']
from_mode = options['from_mode']
to_mode = options['to_mode']
commit = options['commit']
course_keys = []
if course_id:
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
raise CommandError('Course ID {} is invalid.'.format(course_id))
if modulestore().get_course(course_key) is None:
raise CommandError('The given course {} does not exist.'.format(course_id))
course_keys.append(course_key)
else:
course_keys = [course.id for course in CourseOverview.get_all_courses(orgs=[org])]
if not course_keys:
raise CommandError('No courses exist for the org "{}".'.format(org))
for course_key in course_keys:
self.move_users_for_course(course_key, from_mode, to_mode, commit)
if not commit:
logger.info('Dry run, changes have not been saved. Run again with "commit" argument to save changes')
def move_users_for_course(self, course_key, from_mode, to_mode, commit):
"""
Change the enrollment mode of users for a course.
Arguments:
course_key (CourseKey): to lookup the course.
from_mode (str): the enrollment mode to change.
to_mode (str): the enrollment mode to change to.
commit (bool): required to make the change to the database. Otherwise
just a count will be displayed.
"""
unicode_course_key = text_type(course_key)
if CourseMode.mode_for_course(course_key, to_mode) is None:
logger.info('Mode ({}) does not exist for course ({}).'.format(to_mode, unicode_course_key))
return
course_enrollments = CourseEnrollment.objects.filter(course_id=course_key, mode=from_mode)
logger.info(
'Moving %d users from %s to %s in course %s.',
course_enrollments.count(), from_mode, to_mode, unicode_course_key
)
if commit:
# call `change_mode` which will change the mode and also emit tracking event
for enrollment in course_enrollments:
with transaction.atomic():
enrollment.change_mode(mode=to_mode)
logger.info('Finished moving users from %s to %s in course %s.', from_mode, to_mode, unicode_course_key)
| agpl-3.0 | -649,933,176,231,585,500 | 38.894737 | 119 | 0.624011 | false |
vjmac15/Lyilis | lib/pymongo/son_manipulator.py | 2 | 6701 | # Copyright 2009-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""**DEPRECATED**: Manipulators that can edit SON objects as they enter and exit
a database.
The :class:`~pymongo.son_manipulator.SONManipulator` API has limitations as a
technique for transforming your data. Instead, it is more flexible and
straightforward to transform outgoing documents in your own code before passing
them to PyMongo, and transform incoming documents after receiving them from
PyMongo. SON Manipulators will be removed from PyMongo in 4.0.
PyMongo does **not** apply SON manipulators to documents passed to
the modern methods :meth:`~pymongo.collection.Collection.bulk_write`,
:meth:`~pymongo.collection.Collection.insert_one`,
:meth:`~pymongo.collection.Collection.insert_many`,
:meth:`~pymongo.collection.Collection.update_one`, or
:meth:`~pymongo.collection.Collection.update_many`. SON manipulators are
**not** applied to documents returned by the modern methods
:meth:`~pymongo.collection.Collection.find_one_and_delete`,
:meth:`~pymongo.collection.Collection.find_one_and_replace`, and
:meth:`~pymongo.collection.Collection.find_one_and_update`.
"""
import collections
from bson.dbref import DBRef
from bson.objectid import ObjectId
from bson.son import SON
class SONManipulator(object):
"""A base son manipulator.
This manipulator just saves and restores objects without changing them.
"""
def will_copy(self):
"""Will this SON manipulator make a copy of the incoming document?
Derived classes that do need to make a copy should override this
method, returning True instead of False. All non-copying manipulators
will be applied first (so that the user's document will be updated
appropriately), followed by copying manipulators.
"""
return False
def transform_incoming(self, son, collection):
"""Manipulate an incoming SON object.
:Parameters:
- `son`: the SON object to be inserted into the database
- `collection`: the collection the object is being inserted into
"""
if self.will_copy():
return SON(son)
return son
def transform_outgoing(self, son, collection):
"""Manipulate an outgoing SON object.
:Parameters:
- `son`: the SON object being retrieved from the database
- `collection`: the collection this object was stored in
"""
if self.will_copy():
return SON(son)
return son
class ObjectIdInjector(SONManipulator):
"""A son manipulator that adds the _id field if it is missing.
.. versionchanged:: 2.7
ObjectIdInjector is no longer used by PyMongo, but remains in this
module for backwards compatibility.
"""
def transform_incoming(self, son, collection):
"""Add an _id field if it is missing.
"""
if not "_id" in son:
son["_id"] = ObjectId()
return son
# This is now handled during BSON encoding (for performance reasons),
# but I'm keeping this here as a reference for those implementing new
# SONManipulators.
class ObjectIdShuffler(SONManipulator):
"""A son manipulator that moves _id to the first position.
"""
def will_copy(self):
"""We need to copy to be sure that we are dealing with SON, not a dict.
"""
return True
def transform_incoming(self, son, collection):
"""Move _id to the front if it's there.
"""
if not "_id" in son:
return son
transformed = SON({"_id": son["_id"]})
transformed.update(son)
return transformed
class NamespaceInjector(SONManipulator):
"""A son manipulator that adds the _ns field.
"""
def transform_incoming(self, son, collection):
"""Add the _ns field to the incoming object
"""
son["_ns"] = collection.name
return son
class AutoReference(SONManipulator):
"""Transparently reference and de-reference already saved embedded objects.
This manipulator should probably only be used when the NamespaceInjector is
also being used, otherwise it doesn't make too much sense - documents can
only be auto-referenced if they have an *_ns* field.
NOTE: this will behave poorly if you have a circular reference.
TODO: this only works for documents that are in the same database. To fix
this we'll need to add a DatabaseInjector that adds *_db* and then make
use of the optional *database* support for DBRefs.
"""
def __init__(self, db):
self.database = db
def will_copy(self):
"""We need to copy so the user's document doesn't get transformed refs.
"""
return True
def transform_incoming(self, son, collection):
"""Replace embedded documents with DBRefs.
"""
def transform_value(value):
if isinstance(value, collections.MutableMapping):
if "_id" in value and "_ns" in value:
return DBRef(value["_ns"], transform_value(value["_id"]))
else:
return transform_dict(SON(value))
elif isinstance(value, list):
return [transform_value(v) for v in value]
return value
def transform_dict(object):
for (key, value) in object.items():
object[key] = transform_value(value)
return object
return transform_dict(SON(son))
def transform_outgoing(self, son, collection):
"""Replace DBRefs with embedded documents.
"""
def transform_value(value):
if isinstance(value, DBRef):
return self.database.dereference(value)
elif isinstance(value, list):
return [transform_value(v) for v in value]
elif isinstance(value, collections.MutableMapping):
return transform_dict(SON(value))
return value
def transform_dict(object):
for (key, value) in object.items():
object[key] = transform_value(value)
return object
return transform_dict(SON(son))
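# A hypothetical illustration of the subclassing pattern described in the
# module docstring: a manipulator that stamps every incoming document. The
# "last_modified" field name is an assumption made purely for this sketch; it
# is not part of PyMongo's API. A manipulator like this would be registered on
# a Database with add_son_manipulator().
class TimestampInjector(SONManipulator):
    """Example manipulator (sketch) that adds a last-modified timestamp."""

    def transform_incoming(self, son, collection):
        import datetime  # local import keeps the sketch self-contained
        son["last_modified"] = datetime.datetime.utcnow()
        return son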
| gpl-3.0 | -3,053,943,644,362,221,600 | 33.901042 | 80 | 0.659454 | false |
drufat/sympy | sympy/physics/quantum/tests/test_printing.py | 20 | 29885 | # -*- encoding: utf-8 -*-
"""
TODO:
* Address Issue 2251, printing of spin states
"""
from sympy.physics.quantum.anticommutator import AntiCommutator
from sympy.physics.quantum.cg import CG, Wigner3j, Wigner6j, Wigner9j
from sympy.physics.quantum.commutator import Commutator
from sympy.physics.quantum.constants import hbar
from sympy.physics.quantum.dagger import Dagger
from sympy.physics.quantum.gate import CGate, CNotGate, IdentityGate, UGate, XGate
from sympy.physics.quantum.hilbert import ComplexSpace, FockSpace, HilbertSpace, L2
from sympy.physics.quantum.innerproduct import InnerProduct
from sympy.physics.quantum.operator import Operator, OuterProduct, DifferentialOperator
from sympy.physics.quantum.qexpr import QExpr
from sympy.physics.quantum.qubit import Qubit, IntQubit
from sympy.physics.quantum.spin import Jz, J2, JzBra, JzBraCoupled, JzKet, JzKetCoupled, Rotation, WignerD
from sympy.physics.quantum.state import Bra, Ket, TimeDepBra, TimeDepKet
from sympy.physics.quantum.tensorproduct import TensorProduct
from sympy.physics.quantum.sho1d import RaisingOp
from sympy import Derivative, Function, Interval, Matrix, Pow, S, symbols, Symbol, oo
from sympy.core.compatibility import exec_
from sympy.utilities.pytest import XFAIL
# Imports used in srepr strings
from sympy.physics.quantum.constants import HBar
from sympy.physics.quantum.hilbert import DirectSumHilbertSpace, TensorProductHilbertSpace, TensorPowerHilbertSpace
from sympy.physics.quantum.spin import JzOp, J2Op
from sympy import Add, Integer, Mul, Rational, Tuple
from sympy.printing import srepr
from sympy.printing.pretty import pretty as xpretty
from sympy.printing.latex import latex
from sympy.core.compatibility import u_decode as u
MutableDenseMatrix = Matrix
ENV = {}
exec_("from sympy import *", ENV)
def sT(expr, string):
"""
sT := sreprTest
from sympy/printing/tests/test_repr.py
"""
assert srepr(expr) == string
assert eval(string) == expr
def pretty(expr):
"""ASCII pretty-printing"""
return xpretty(expr, use_unicode=False, wrap_line=False)
def upretty(expr):
"""Unicode pretty-printing"""
return xpretty(expr, use_unicode=True, wrap_line=False)
def test_anticommutator():
A = Operator('A')
B = Operator('B')
ac = AntiCommutator(A, B)
ac_tall = AntiCommutator(A**2, B)
assert str(ac) == '{A,B}'
assert pretty(ac) == '{A,B}'
assert upretty(ac) == u'{A,B}'
assert latex(ac) == r'\left\{A,B\right\}'
sT(ac, "AntiCommutator(Operator(Symbol('A')),Operator(Symbol('B')))")
assert str(ac_tall) == '{A**2,B}'
ascii_str = \
"""\
/ 2 \\\n\
<A ,B>\n\
\\ /\
"""
ucode_str = \
u("""\
⎧ 2 ⎫\n\
⎨A ,B⎬\n\
⎩ ⎭\
""")
assert pretty(ac_tall) == ascii_str
assert upretty(ac_tall) == ucode_str
assert latex(ac_tall) == r'\left\{A^{2},B\right\}'
sT(ac_tall, "AntiCommutator(Pow(Operator(Symbol('A')), Integer(2)),Operator(Symbol('B')))")
def test_cg():
cg = CG(1, 2, 3, 4, 5, 6)
wigner3j = Wigner3j(1, 2, 3, 4, 5, 6)
wigner6j = Wigner6j(1, 2, 3, 4, 5, 6)
wigner9j = Wigner9j(1, 2, 3, 4, 5, 6, 7, 8, 9)
assert str(cg) == 'CG(1, 2, 3, 4, 5, 6)'
ascii_str = \
"""\
5,6 \n\
C \n\
1,2,3,4\
"""
ucode_str = \
u("""\
5,6 \n\
C \n\
1,2,3,4\
""")
assert pretty(cg) == ascii_str
assert upretty(cg) == ucode_str
assert latex(cg) == r'C^{5,6}_{1,2,3,4}'
sT(cg, "CG(Integer(1), Integer(2), Integer(3), Integer(4), Integer(5), Integer(6))")
assert str(wigner3j) == 'Wigner3j(1, 2, 3, 4, 5, 6)'
ascii_str = \
"""\
/1 3 5\\\n\
| |\n\
\\2 4 6/\
"""
ucode_str = \
u("""\
⎛1 3 5⎞\n\
⎜ ⎟\n\
⎝2 4 6⎠\
""")
assert pretty(wigner3j) == ascii_str
assert upretty(wigner3j) == ucode_str
assert latex(wigner3j) == \
r'\left(\begin{array}{ccc} 1 & 3 & 5 \\ 2 & 4 & 6 \end{array}\right)'
sT(wigner3j, "Wigner3j(Integer(1), Integer(2), Integer(3), Integer(4), Integer(5), Integer(6))")
assert str(wigner6j) == 'Wigner6j(1, 2, 3, 4, 5, 6)'
ascii_str = \
"""\
/1 2 3\\\n\
< >\n\
\\4 5 6/\
"""
ucode_str = \
u("""\
⎧1 2 3⎫\n\
⎨ ⎬\n\
⎩4 5 6⎭\
""")
assert pretty(wigner6j) == ascii_str
assert upretty(wigner6j) == ucode_str
assert latex(wigner6j) == \
r'\left\{\begin{array}{ccc} 1 & 2 & 3 \\ 4 & 5 & 6 \end{array}\right\}'
sT(wigner6j, "Wigner6j(Integer(1), Integer(2), Integer(3), Integer(4), Integer(5), Integer(6))")
assert str(wigner9j) == 'Wigner9j(1, 2, 3, 4, 5, 6, 7, 8, 9)'
ascii_str = \
"""\
/1 2 3\\\n\
| |\n\
<4 5 6>\n\
| |\n\
\\7 8 9/\
"""
ucode_str = \
u("""\
⎧1 2 3⎫\n\
⎪ ⎪\n\
⎨4 5 6⎬\n\
⎪ ⎪\n\
⎩7 8 9⎭\
""")
assert pretty(wigner9j) == ascii_str
assert upretty(wigner9j) == ucode_str
assert latex(wigner9j) == \
r'\left\{\begin{array}{ccc} 1 & 2 & 3 \\ 4 & 5 & 6 \\ 7 & 8 & 9 \end{array}\right\}'
sT(wigner9j, "Wigner9j(Integer(1), Integer(2), Integer(3), Integer(4), Integer(5), Integer(6), Integer(7), Integer(8), Integer(9))")
def test_commutator():
A = Operator('A')
B = Operator('B')
c = Commutator(A, B)
c_tall = Commutator(A**2, B)
assert str(c) == '[A,B]'
assert pretty(c) == '[A,B]'
assert upretty(c) == u'[A,B]'
assert latex(c) == r'\left[A,B\right]'
sT(c, "Commutator(Operator(Symbol('A')),Operator(Symbol('B')))")
assert str(c_tall) == '[A**2,B]'
ascii_str = \
"""\
[ 2 ]\n\
[A ,B]\
"""
ucode_str = \
u("""\
⎡ 2 ⎤\n\
⎣A ,B⎦\
""")
assert pretty(c_tall) == ascii_str
assert upretty(c_tall) == ucode_str
assert latex(c_tall) == r'\left[A^{2},B\right]'
sT(c_tall, "Commutator(Pow(Operator(Symbol('A')), Integer(2)),Operator(Symbol('B')))")
def test_constants():
assert str(hbar) == 'hbar'
assert pretty(hbar) == 'hbar'
assert upretty(hbar) == u'ℏ'
assert latex(hbar) == r'\hbar'
sT(hbar, "HBar()")
def test_dagger():
x = symbols('x')
expr = Dagger(x)
assert str(expr) == 'Dagger(x)'
ascii_str = \
"""\
+\n\
x \
"""
ucode_str = \
u("""\
†\n\
x \
""")
assert pretty(expr) == ascii_str
assert upretty(expr) == ucode_str
assert latex(expr) == r'x^{\dag}'
sT(expr, "Dagger(Symbol('x'))")
@XFAIL
def test_gate_failing():
a, b, c, d = symbols('a,b,c,d')
uMat = Matrix([[a, b], [c, d]])
g = UGate((0,), uMat)
assert str(g) == 'U(0)'
def test_gate():
a, b, c, d = symbols('a,b,c,d')
uMat = Matrix([[a, b], [c, d]])
q = Qubit(1, 0, 1, 0, 1)
g1 = IdentityGate(2)
g2 = CGate((3, 0), XGate(1))
g3 = CNotGate(1, 0)
g4 = UGate((0,), uMat)
assert str(g1) == '1(2)'
assert pretty(g1) == '1 \n 2'
assert upretty(g1) == u'1 \n 2'
assert latex(g1) == r'1_{2}'
sT(g1, "IdentityGate(Integer(2))")
assert str(g1*q) == '1(2)*|10101>'
ascii_str = \
"""\
1 *|10101>\n\
2 \
"""
ucode_str = \
u("""\
1 ⋅❘10101⟩\n\
2 \
""")
assert pretty(g1*q) == ascii_str
assert upretty(g1*q) == ucode_str
assert latex(g1*q) == r'1_{2} {\left|10101\right\rangle }'
sT(g1*q, "Mul(IdentityGate(Integer(2)), Qubit(Integer(1),Integer(0),Integer(1),Integer(0),Integer(1)))")
assert str(g2) == 'C((3,0),X(1))'
ascii_str = \
"""\
C /X \\\n\
3,0\\ 1/\
"""
ucode_str = \
u("""\
C ⎛X ⎞\n\
3,0⎝ 1⎠\
""")
assert pretty(g2) == ascii_str
assert upretty(g2) == ucode_str
assert latex(g2) == r'C_{3,0}{\left(X_{1}\right)}'
sT(g2, "CGate(Tuple(Integer(3), Integer(0)),XGate(Integer(1)))")
assert str(g3) == 'CNOT(1,0)'
ascii_str = \
"""\
CNOT \n\
1,0\
"""
ucode_str = \
u("""\
CNOT \n\
1,0\
""")
assert pretty(g3) == ascii_str
assert upretty(g3) == ucode_str
assert latex(g3) == r'CNOT_{1,0}'
sT(g3, "CNotGate(Integer(1),Integer(0))")
ascii_str = \
"""\
U \n\
0\
"""
ucode_str = \
u("""\
U \n\
0\
""")
assert str(g4) == \
"""\
U((0,),Matrix([\n\
[a, b],\n\
[c, d]]))\
"""
assert pretty(g4) == ascii_str
assert upretty(g4) == ucode_str
assert latex(g4) == r'U_{0}'
sT(g4, "UGate(Tuple(Integer(0)),MutableDenseMatrix([[Symbol('a'), Symbol('b')], [Symbol('c'), Symbol('d')]]))")
def test_hilbert():
h1 = HilbertSpace()
h2 = ComplexSpace(2)
h3 = FockSpace()
h4 = L2(Interval(0, oo))
assert str(h1) == 'H'
assert pretty(h1) == 'H'
assert upretty(h1) == u'H'
assert latex(h1) == r'\mathcal{H}'
sT(h1, "HilbertSpace()")
assert str(h2) == 'C(2)'
ascii_str = \
"""\
2\n\
C \
"""
ucode_str = \
u("""\
2\n\
C \
""")
assert pretty(h2) == ascii_str
assert upretty(h2) == ucode_str
assert latex(h2) == r'\mathcal{C}^{2}'
sT(h2, "ComplexSpace(Integer(2))")
assert str(h3) == 'F'
assert pretty(h3) == 'F'
assert upretty(h3) == u'F'
assert latex(h3) == r'\mathcal{F}'
sT(h3, "FockSpace()")
assert str(h4) == 'L2([0, oo))'
ascii_str = \
"""\
2\n\
L \
"""
ucode_str = \
u("""\
2\n\
L \
""")
assert pretty(h4) == ascii_str
assert upretty(h4) == ucode_str
assert latex(h4) == r'{\mathcal{L}^2}\left( \left[0, \infty\right) \right)'
sT(h4, "L2(Interval(Integer(0), oo, S.false, S.true))")
assert str(h1 + h2) == 'H+C(2)'
ascii_str = \
"""\
2\n\
H + C \
"""
ucode_str = \
u("""\
2\n\
H ⊕ C \
""")
assert pretty(h1 + h2) == ascii_str
assert upretty(h1 + h2) == ucode_str
assert latex(h1 + h2)
sT(h1 + h2, "DirectSumHilbertSpace(HilbertSpace(),ComplexSpace(Integer(2)))")
assert str(h1*h2) == "H*C(2)"
ascii_str = \
"""\
2\n\
H x C \
"""
ucode_str = \
u("""\
2\n\
H ⨂ C \
""")
assert pretty(h1*h2) == ascii_str
assert upretty(h1*h2) == ucode_str
assert latex(h1*h2)
sT(h1*h2,
"TensorProductHilbertSpace(HilbertSpace(),ComplexSpace(Integer(2)))")
assert str(h1**2) == 'H**2'
ascii_str = \
"""\
x2\n\
H \
"""
ucode_str = \
u("""\
⨂2\n\
H \
""")
assert pretty(h1**2) == ascii_str
assert upretty(h1**2) == ucode_str
assert latex(h1**2) == r'{\mathcal{H}}^{\otimes 2}'
sT(h1**2, "TensorPowerHilbertSpace(HilbertSpace(),Integer(2))")
def test_innerproduct():
x = symbols('x')
ip1 = InnerProduct(Bra(), Ket())
ip2 = InnerProduct(TimeDepBra(), TimeDepKet())
ip3 = InnerProduct(JzBra(1, 1), JzKet(1, 1))
ip4 = InnerProduct(JzBraCoupled(1, 1, (1, 1)), JzKetCoupled(1, 1, (1, 1)))
ip_tall1 = InnerProduct(Bra(x/2), Ket(x/2))
ip_tall2 = InnerProduct(Bra(x), Ket(x/2))
ip_tall3 = InnerProduct(Bra(x/2), Ket(x))
assert str(ip1) == '<psi|psi>'
assert pretty(ip1) == '<psi|psi>'
assert upretty(ip1) == u'⟨ψ❘ψ⟩'
assert latex(
ip1) == r'\left\langle \psi \right. {\left|\psi\right\rangle }'
sT(ip1, "InnerProduct(Bra(Symbol('psi')),Ket(Symbol('psi')))")
assert str(ip2) == '<psi;t|psi;t>'
assert pretty(ip2) == '<psi;t|psi;t>'
assert upretty(ip2) == u'⟨ψ;t❘ψ;t⟩'
assert latex(ip2) == \
r'\left\langle \psi;t \right. {\left|\psi;t\right\rangle }'
sT(ip2, "InnerProduct(TimeDepBra(Symbol('psi'),Symbol('t')),TimeDepKet(Symbol('psi'),Symbol('t')))")
assert str(ip3) == "<1,1|1,1>"
assert pretty(ip3) == '<1,1|1,1>'
assert upretty(ip3) == u'⟨1,1❘1,1⟩'
assert latex(ip3) == r'\left\langle 1,1 \right. {\left|1,1\right\rangle }'
sT(ip3, "InnerProduct(JzBra(Integer(1),Integer(1)),JzKet(Integer(1),Integer(1)))")
assert str(ip4) == "<1,1,j1=1,j2=1|1,1,j1=1,j2=1>"
assert pretty(ip4) == '<1,1,j1=1,j2=1|1,1,j1=1,j2=1>'
assert upretty(ip4) == u'⟨1,1,j₁=1,j₂=1❘1,1,j₁=1,j₂=1⟩'
assert latex(ip4) == \
r'\left\langle 1,1,j_{1}=1,j_{2}=1 \right. {\left|1,1,j_{1}=1,j_{2}=1\right\rangle }'
sT(ip4, "InnerProduct(JzBraCoupled(Integer(1),Integer(1),Tuple(Integer(1), Integer(1)),Tuple(Tuple(Integer(1), Integer(2), Integer(1)))),JzKetCoupled(Integer(1),Integer(1),Tuple(Integer(1), Integer(1)),Tuple(Tuple(Integer(1), Integer(2), Integer(1)))))")
assert str(ip_tall1) == '<x/2|x/2>'
ascii_str = \
"""\
/ | \\ \n\
/ x|x \\\n\
\\ -|- /\n\
\\2|2/ \
"""
ucode_str = \
u("""\
╱ │ ╲ \n\
╱ x│x ╲\n\
╲ ─│─ ╱\n\
╲2│2╱ \
""")
assert pretty(ip_tall1) == ascii_str
assert upretty(ip_tall1) == ucode_str
assert latex(ip_tall1) == \
r'\left\langle \frac{x}{2} \right. {\left|\frac{x}{2}\right\rangle }'
sT(ip_tall1, "InnerProduct(Bra(Mul(Rational(1, 2), Symbol('x'))),Ket(Mul(Rational(1, 2), Symbol('x'))))")
assert str(ip_tall2) == '<x|x/2>'
ascii_str = \
"""\
/ | \\ \n\
/ |x \\\n\
\\ x|- /\n\
\\ |2/ \
"""
ucode_str = \
u("""\
╱ │ ╲ \n\
╱ │x ╲\n\
╲ x│─ ╱\n\
╲ │2╱ \
""")
assert pretty(ip_tall2) == ascii_str
assert upretty(ip_tall2) == ucode_str
assert latex(ip_tall2) == \
r'\left\langle x \right. {\left|\frac{x}{2}\right\rangle }'
sT(ip_tall2,
"InnerProduct(Bra(Symbol('x')),Ket(Mul(Rational(1, 2), Symbol('x'))))")
assert str(ip_tall3) == '<x/2|x>'
ascii_str = \
"""\
/ | \\ \n\
/ x| \\\n\
\\ -|x /\n\
\\2| / \
"""
ucode_str = \
u("""\
╱ │ ╲ \n\
╱ x│ ╲\n\
╲ ─│x ╱\n\
╲2│ ╱ \
""")
assert pretty(ip_tall3) == ascii_str
assert upretty(ip_tall3) == ucode_str
assert latex(ip_tall3) == \
r'\left\langle \frac{x}{2} \right. {\left|x\right\rangle }'
sT(ip_tall3,
"InnerProduct(Bra(Mul(Rational(1, 2), Symbol('x'))),Ket(Symbol('x')))")
def test_operator():
a = Operator('A')
b = Operator('B', Symbol('t'), S(1)/2)
inv = a.inv()
f = Function('f')
x = symbols('x')
d = DifferentialOperator(Derivative(f(x), x), f(x))
op = OuterProduct(Ket(), Bra())
assert str(a) == 'A'
assert pretty(a) == 'A'
assert upretty(a) == u'A'
assert latex(a) == 'A'
sT(a, "Operator(Symbol('A'))")
assert str(inv) == 'A**(-1)'
ascii_str = \
"""\
-1\n\
A \
"""
ucode_str = \
u("""\
-1\n\
A \
""")
assert pretty(inv) == ascii_str
assert upretty(inv) == ucode_str
assert latex(inv) == r'A^{-1}'
sT(inv, "Pow(Operator(Symbol('A')), Integer(-1))")
assert str(d) == 'DifferentialOperator(Derivative(f(x), x),f(x))'
ascii_str = \
"""\
/d \\\n\
DifferentialOperator|--(f(x)),f(x)|\n\
\dx /\
"""
ucode_str = \
u("""\
⎛d ⎞\n\
DifferentialOperator⎜──(f(x)),f(x)⎟\n\
⎝dx ⎠\
""")
assert pretty(d) == ascii_str
assert upretty(d) == ucode_str
assert latex(d) == \
r'DifferentialOperator\left(\frac{d}{d x} f{\left (x \right )},f{\left (x \right )}\right)'
sT(d, "DifferentialOperator(Derivative(Function('f')(Symbol('x')), Symbol('x')),Function('f')(Symbol('x')))")
assert str(b) == 'Operator(B,t,1/2)'
assert pretty(b) == 'Operator(B,t,1/2)'
assert upretty(b) == u'Operator(B,t,1/2)'
assert latex(b) == r'Operator\left(B,t,\frac{1}{2}\right)'
sT(b, "Operator(Symbol('B'),Symbol('t'),Rational(1, 2))")
assert str(op) == '|psi><psi|'
assert pretty(op) == '|psi><psi|'
assert upretty(op) == u'❘ψ⟩⟨ψ❘'
assert latex(op) == r'{\left|\psi\right\rangle }{\left\langle \psi\right|}'
sT(op, "OuterProduct(Ket(Symbol('psi')),Bra(Symbol('psi')))")
def test_qexpr():
q = QExpr('q')
assert str(q) == 'q'
assert pretty(q) == 'q'
assert upretty(q) == u'q'
assert latex(q) == r'q'
sT(q, "QExpr(Symbol('q'))")
def test_qubit():
q1 = Qubit('0101')
q2 = IntQubit(8)
assert str(q1) == '|0101>'
assert pretty(q1) == '|0101>'
assert upretty(q1) == u'❘0101⟩'
assert latex(q1) == r'{\left|0101\right\rangle }'
sT(q1, "Qubit(Integer(0),Integer(1),Integer(0),Integer(1))")
assert str(q2) == '|8>'
assert pretty(q2) == '|8>'
assert upretty(q2) == u'❘8⟩'
assert latex(q2) == r'{\left|8\right\rangle }'
sT(q2, "IntQubit(8)")
def test_spin():
lz = JzOp('L')
ket = JzKet(1, 0)
bra = JzBra(1, 0)
cket = JzKetCoupled(1, 0, (1, 2))
cbra = JzBraCoupled(1, 0, (1, 2))
cket_big = JzKetCoupled(1, 0, (1, 2, 3))
cbra_big = JzBraCoupled(1, 0, (1, 2, 3))
rot = Rotation(1, 2, 3)
bigd = WignerD(1, 2, 3, 4, 5, 6)
smalld = WignerD(1, 2, 3, 0, 4, 0)
assert str(lz) == 'Lz'
ascii_str = \
"""\
L \n\
z\
"""
ucode_str = \
u("""\
L \n\
z\
""")
assert pretty(lz) == ascii_str
assert upretty(lz) == ucode_str
assert latex(lz) == 'L_z'
sT(lz, "JzOp(Symbol('L'))")
assert str(J2) == 'J2'
ascii_str = \
"""\
2\n\
J \
"""
ucode_str = \
u("""\
2\n\
J \
""")
assert pretty(J2) == ascii_str
assert upretty(J2) == ucode_str
assert latex(J2) == r'J^2'
sT(J2, "J2Op(Symbol('J'))")
assert str(Jz) == 'Jz'
ascii_str = \
"""\
J \n\
z\
"""
ucode_str = \
u("""\
J \n\
z\
""")
assert pretty(Jz) == ascii_str
assert upretty(Jz) == ucode_str
assert latex(Jz) == 'J_z'
sT(Jz, "JzOp(Symbol('J'))")
assert str(ket) == '|1,0>'
assert pretty(ket) == '|1,0>'
assert upretty(ket) == u'❘1,0⟩'
assert latex(ket) == r'{\left|1,0\right\rangle }'
sT(ket, "JzKet(Integer(1),Integer(0))")
assert str(bra) == '<1,0|'
assert pretty(bra) == '<1,0|'
assert upretty(bra) == u'⟨1,0❘'
assert latex(bra) == r'{\left\langle 1,0\right|}'
sT(bra, "JzBra(Integer(1),Integer(0))")
assert str(cket) == '|1,0,j1=1,j2=2>'
assert pretty(cket) == '|1,0,j1=1,j2=2>'
assert upretty(cket) == u'❘1,0,j₁=1,j₂=2⟩'
assert latex(cket) == r'{\left|1,0,j_{1}=1,j_{2}=2\right\rangle }'
sT(cket, "JzKetCoupled(Integer(1),Integer(0),Tuple(Integer(1), Integer(2)),Tuple(Tuple(Integer(1), Integer(2), Integer(1))))")
assert str(cbra) == '<1,0,j1=1,j2=2|'
assert pretty(cbra) == '<1,0,j1=1,j2=2|'
assert upretty(cbra) == u'⟨1,0,j₁=1,j₂=2❘'
assert latex(cbra) == r'{\left\langle 1,0,j_{1}=1,j_{2}=2\right|}'
sT(cbra, "JzBraCoupled(Integer(1),Integer(0),Tuple(Integer(1), Integer(2)),Tuple(Tuple(Integer(1), Integer(2), Integer(1))))")
assert str(cket_big) == '|1,0,j1=1,j2=2,j3=3,j(1,2)=3>'
# TODO: Fix non-unicode pretty printing
# i.e. j1,2 -> j(1,2)
assert pretty(cket_big) == '|1,0,j1=1,j2=2,j3=3,j1,2=3>'
assert upretty(cket_big) == u'❘1,0,j₁=1,j₂=2,j₃=3,j₁,₂=3⟩'
assert latex(cket_big) == \
r'{\left|1,0,j_{1}=1,j_{2}=2,j_{3}=3,j_{1,2}=3\right\rangle }'
sT(cket_big, "JzKetCoupled(Integer(1),Integer(0),Tuple(Integer(1), Integer(2), Integer(3)),Tuple(Tuple(Integer(1), Integer(2), Integer(3)), Tuple(Integer(1), Integer(3), Integer(1))))")
assert str(cbra_big) == '<1,0,j1=1,j2=2,j3=3,j(1,2)=3|'
assert pretty(cbra_big) == u'<1,0,j1=1,j2=2,j3=3,j1,2=3|'
assert upretty(cbra_big) == u'⟨1,0,j₁=1,j₂=2,j₃=3,j₁,₂=3❘'
assert latex(cbra_big) == \
r'{\left\langle 1,0,j_{1}=1,j_{2}=2,j_{3}=3,j_{1,2}=3\right|}'
sT(cbra_big, "JzBraCoupled(Integer(1),Integer(0),Tuple(Integer(1), Integer(2), Integer(3)),Tuple(Tuple(Integer(1), Integer(2), Integer(3)), Tuple(Integer(1), Integer(3), Integer(1))))")
assert str(rot) == 'R(1,2,3)'
assert pretty(rot) == 'R (1,2,3)'
assert upretty(rot) == u'ℛ (1,2,3)'
assert latex(rot) == r'\mathcal{R}\left(1,2,3\right)'
sT(rot, "Rotation(Integer(1),Integer(2),Integer(3))")
assert str(bigd) == 'WignerD(1, 2, 3, 4, 5, 6)'
ascii_str = \
"""\
1 \n\
D (4,5,6)\n\
2,3 \
"""
ucode_str = \
u("""\
1 \n\
D (4,5,6)\n\
2,3 \
""")
assert pretty(bigd) == ascii_str
assert upretty(bigd) == ucode_str
assert latex(bigd) == r'D^{1}_{2,3}\left(4,5,6\right)'
sT(bigd, "WignerD(Integer(1), Integer(2), Integer(3), Integer(4), Integer(5), Integer(6))")
assert str(smalld) == 'WignerD(1, 2, 3, 0, 4, 0)'
ascii_str = \
"""\
1 \n\
d (4)\n\
2,3 \
"""
ucode_str = \
u("""\
1 \n\
d (4)\n\
2,3 \
""")
assert pretty(smalld) == ascii_str
assert upretty(smalld) == ucode_str
assert latex(smalld) == r'd^{1}_{2,3}\left(4\right)'
sT(smalld, "WignerD(Integer(1), Integer(2), Integer(3), Integer(0), Integer(4), Integer(0))")
def test_state():
x = symbols('x')
bra = Bra()
ket = Ket()
bra_tall = Bra(x/2)
ket_tall = Ket(x/2)
tbra = TimeDepBra()
tket = TimeDepKet()
assert str(bra) == '<psi|'
assert pretty(bra) == '<psi|'
assert upretty(bra) == u'⟨ψ❘'
assert latex(bra) == r'{\left\langle \psi\right|}'
sT(bra, "Bra(Symbol('psi'))")
assert str(ket) == '|psi>'
assert pretty(ket) == '|psi>'
assert upretty(ket) == u'❘ψ⟩'
assert latex(ket) == r'{\left|\psi\right\rangle }'
sT(ket, "Ket(Symbol('psi'))")
assert str(bra_tall) == '<x/2|'
ascii_str = \
"""\
/ |\n\
/ x|\n\
\\ -|\n\
\\2|\
"""
ucode_str = \
u("""\
╱ │\n\
╱ x│\n\
╲ ─│\n\
╲2│\
""")
assert pretty(bra_tall) == ascii_str
assert upretty(bra_tall) == ucode_str
assert latex(bra_tall) == r'{\left\langle \frac{x}{2}\right|}'
sT(bra_tall, "Bra(Mul(Rational(1, 2), Symbol('x')))")
assert str(ket_tall) == '|x/2>'
ascii_str = \
"""\
| \\ \n\
|x \\\n\
|- /\n\
|2/ \
"""
ucode_str = \
u("""\
│ ╲ \n\
│x ╲\n\
│─ ╱\n\
│2╱ \
""")
assert pretty(ket_tall) == ascii_str
assert upretty(ket_tall) == ucode_str
assert latex(ket_tall) == r'{\left|\frac{x}{2}\right\rangle }'
sT(ket_tall, "Ket(Mul(Rational(1, 2), Symbol('x')))")
assert str(tbra) == '<psi;t|'
assert pretty(tbra) == u'<psi;t|'
assert upretty(tbra) == u'⟨ψ;t❘'
assert latex(tbra) == r'{\left\langle \psi;t\right|}'
sT(tbra, "TimeDepBra(Symbol('psi'),Symbol('t'))")
assert str(tket) == '|psi;t>'
assert pretty(tket) == '|psi;t>'
assert upretty(tket) == u'❘ψ;t⟩'
assert latex(tket) == r'{\left|\psi;t\right\rangle }'
sT(tket, "TimeDepKet(Symbol('psi'),Symbol('t'))")
def test_tensorproduct():
tp = TensorProduct(JzKet(1, 1), JzKet(1, 0))
assert str(tp) == '|1,1>x|1,0>'
assert pretty(tp) == '|1,1>x |1,0>'
assert upretty(tp) == u'❘1,1⟩⨂ ❘1,0⟩'
assert latex(tp) == \
r'{{\left|1,1\right\rangle }}\otimes {{\left|1,0\right\rangle }}'
sT(tp, "TensorProduct(JzKet(Integer(1),Integer(1)), JzKet(Integer(1),Integer(0)))")
def test_big_expr():
f = Function('f')
x = symbols('x')
e1 = Dagger(AntiCommutator(Operator('A') + Operator('B'), Pow(DifferentialOperator(Derivative(f(x), x), f(x)), 3))*TensorProduct(Jz**2, Operator('A') + Operator('B')))*(JzBra(1, 0) + JzBra(1, 1))*(JzKet(0, 0) + JzKet(1, -1))
e2 = Commutator(Jz**2, Operator('A') + Operator('B'))*AntiCommutator(Dagger(Operator('C')*Operator('D')), Operator('E').inv()**2)*Dagger(Commutator(Jz, J2))
e3 = Wigner3j(1, 2, 3, 4, 5, 6)*TensorProduct(Commutator(Operator('A') + Dagger(Operator('B')), Operator('C') + Operator('D')), Jz - J2)*Dagger(OuterProduct(Dagger(JzBra(1, 1)), JzBra(1, 0)))*TensorProduct(JzKetCoupled(1, 1, (1, 1)) + JzKetCoupled(1, 0, (1, 1)), JzKetCoupled(1, -1, (1, 1)))
e4 = (ComplexSpace(1)*ComplexSpace(2) + FockSpace()**2)*(L2(Interval(
0, oo)) + HilbertSpace())
assert str(e1) == '(Jz**2)x(Dagger(A) + Dagger(B))*{Dagger(DifferentialOperator(Derivative(f(x), x),f(x)))**3,Dagger(A) + Dagger(B)}*(<1,0| + <1,1|)*(|0,0> + |1,-1>)'
ascii_str = \
"""\
/ 3 \\ \n\
|/ +\\ | \n\
2 / + +\\ <| /d \\ | + +> \n\
/J \\ x \\A + B /*||DifferentialOperator|--(f(x)),f(x)| | ,A + B |*(<1,0| + <1,1|)*(|0,0> + |1,-1>)\n\
\\ z/ \\\\ \dx / / / \
"""
ucode_str = \
u("""\
⎧ 3 ⎫ \n\
⎪⎛ †⎞ ⎪ \n\
2 ⎛ † †⎞ ⎨⎜ ⎛d ⎞ ⎟ † †⎬ \n\
⎛J ⎞ ⨂ ⎝A + B ⎠⋅⎪⎜DifferentialOperator⎜──(f(x)),f(x)⎟ ⎟ ,A + B ⎪⋅(⟨1,0❘ + ⟨1,1❘)⋅(❘0,0⟩ + ❘1,-1⟩)\n\
⎝ z⎠ ⎩⎝ ⎝dx ⎠ ⎠ ⎭ \
""")
assert pretty(e1) == ascii_str
assert upretty(e1) == ucode_str
assert latex(e1) == \
r'{J_z^{2}}\otimes \left({A^{\dag} + B^{\dag}}\right) \left\{\left(DifferentialOperator\left(\frac{d}{d x} f{\left (x \right )},f{\left (x \right )}\right)^{\dag}\right)^{3},A^{\dag} + B^{\dag}\right\} \left({\left\langle 1,0\right|} + {\left\langle 1,1\right|}\right) \left({\left|0,0\right\rangle } + {\left|1,-1\right\rangle }\right)'
sT(e1, "Mul(TensorProduct(Pow(JzOp(Symbol('J')), Integer(2)), Add(Dagger(Operator(Symbol('A'))), Dagger(Operator(Symbol('B'))))), AntiCommutator(Pow(Dagger(DifferentialOperator(Derivative(Function('f')(Symbol('x')), Symbol('x')),Function('f')(Symbol('x')))), Integer(3)),Add(Dagger(Operator(Symbol('A'))), Dagger(Operator(Symbol('B'))))), Add(JzBra(Integer(1),Integer(0)), JzBra(Integer(1),Integer(1))), Add(JzKet(Integer(0),Integer(0)), JzKet(Integer(1),Integer(-1))))")
assert str(e2) == '[Jz**2,A + B]*{E**(-2),Dagger(D)*Dagger(C)}*[J2,Jz]'
ascii_str = \
"""\
[ 2 ] / -2 + +\\ [ 2 ]\n\
[/J \\ ,A + B]*<E ,D *C >*[J ,J ]\n\
[\\ z/ ] \\ / [ z]\
"""
ucode_str = \
u("""\
⎡ 2 ⎤ ⎧ -2 † †⎫ ⎡ 2 ⎤\n\
⎢⎛J ⎞ ,A + B⎥⋅⎨E ,D ⋅C ⎬⋅⎢J ,J ⎥\n\
⎣⎝ z⎠ ⎦ ⎩ ⎭ ⎣ z⎦\
""")
assert pretty(e2) == ascii_str
assert upretty(e2) == ucode_str
assert latex(e2) == \
r'\left[J_z^{2},A + B\right] \left\{E^{-2},D^{\dag} C^{\dag}\right\} \left[J^2,J_z\right]'
sT(e2, "Mul(Commutator(Pow(JzOp(Symbol('J')), Integer(2)),Add(Operator(Symbol('A')), Operator(Symbol('B')))), AntiCommutator(Pow(Operator(Symbol('E')), Integer(-2)),Mul(Dagger(Operator(Symbol('D'))), Dagger(Operator(Symbol('C'))))), Commutator(J2Op(Symbol('J')),JzOp(Symbol('J'))))")
assert str(e3) == \
"Wigner3j(1, 2, 3, 4, 5, 6)*[Dagger(B) + A,C + D]x(-J2 + Jz)*|1,0><1,1|*(|1,0,j1=1,j2=1> + |1,1,j1=1,j2=1>)x|1,-1,j1=1,j2=1>"
ascii_str = \
"""\
[ + ] / 2 \\ \n\
/1 3 5\\*[B + A,C + D]x |- J + J |*|1,0><1,1|*(|1,0,j1=1,j2=1> + |1,1,j1=1,j2=1>)x |1,-1,j1=1,j2=1>\n\
| | \\ z/ \n\
\\2 4 6/ \
"""
ucode_str = \
u("""\
⎡ † ⎤ ⎛ 2 ⎞ \n\
⎛1 3 5⎞⋅⎣B + A,C + D⎦⨂ ⎜- J + J ⎟⋅❘1,0⟩⟨1,1❘⋅(❘1,0,j₁=1,j₂=1⟩ + ❘1,1,j₁=1,j₂=1⟩)⨂ ❘1,-1,j₁=1,j₂=1⟩\n\
⎜ ⎟ ⎝ z⎠ \n\
⎝2 4 6⎠ \
""")
assert pretty(e3) == ascii_str
assert upretty(e3) == ucode_str
assert latex(e3) == \
r'\left(\begin{array}{ccc} 1 & 3 & 5 \\ 2 & 4 & 6 \end{array}\right) {\left[B^{\dag} + A,C + D\right]}\otimes \left({- J^2 + J_z}\right) {\left|1,0\right\rangle }{\left\langle 1,1\right|} \left({{\left|1,0,j_{1}=1,j_{2}=1\right\rangle } + {\left|1,1,j_{1}=1,j_{2}=1\right\rangle }}\right)\otimes {{\left|1,-1,j_{1}=1,j_{2}=1\right\rangle }}'
sT(e3, "Mul(Wigner3j(Integer(1), Integer(2), Integer(3), Integer(4), Integer(5), Integer(6)), TensorProduct(Commutator(Add(Dagger(Operator(Symbol('B'))), Operator(Symbol('A'))),Add(Operator(Symbol('C')), Operator(Symbol('D')))), Add(Mul(Integer(-1), J2Op(Symbol('J'))), JzOp(Symbol('J')))), OuterProduct(JzKet(Integer(1),Integer(0)),JzBra(Integer(1),Integer(1))), TensorProduct(Add(JzKetCoupled(Integer(1),Integer(0),Tuple(Integer(1), Integer(1)),Tuple(Tuple(Integer(1), Integer(2), Integer(1)))), JzKetCoupled(Integer(1),Integer(1),Tuple(Integer(1), Integer(1)),Tuple(Tuple(Integer(1), Integer(2), Integer(1))))), JzKetCoupled(Integer(1),Integer(-1),Tuple(Integer(1), Integer(1)),Tuple(Tuple(Integer(1), Integer(2), Integer(1))))))")
assert str(e4) == '(C(1)*C(2)+F**2)*(L2([0, oo))+H)'
ascii_str = \
"""\
// 1 2\\ x2\\ / 2 \\\n\
\\\\C x C / + F / x \L + H/\
"""
ucode_str = \
u("""\
⎛⎛ 1 2⎞ ⨂2⎞ ⎛ 2 ⎞\n\
⎝⎝C ⨂ C ⎠ ⊕ F ⎠ ⨂ ⎝L ⊕ H⎠\
""")
assert pretty(e4) == ascii_str
assert upretty(e4) == ucode_str
assert latex(e4) == \
r'\left(\left(\mathcal{C}^{1}\otimes \mathcal{C}^{2}\right)\oplus {\mathcal{F}}^{\otimes 2}\right)\otimes \left({\mathcal{L}^2}\left( \left[0, \infty\right) \right)\oplus \mathcal{H}\right)'
sT(e4, "TensorProductHilbertSpace((DirectSumHilbertSpace(TensorProductHilbertSpace(ComplexSpace(Integer(1)),ComplexSpace(Integer(2))),TensorPowerHilbertSpace(FockSpace(),Integer(2)))),(DirectSumHilbertSpace(L2(Interval(Integer(0), oo, S.false, S.true)),HilbertSpace())))")
def _test_sho1d():
ad = RaisingOp('a')
assert pretty(ad) == u' \N{DAGGER}\na '
assert latex(ad) == 'a^{\\dag}'
| bsd-3-clause | -8,748,918,769,740,066,000 | 31.988726 | 738 | 0.522299 | false |
darkonie/dcos | packages/adminrouter/extra/src/test-harness/modules/mocker/endpoints/reflectors.py | 3 | 3938 | # Copyright (C) Mesosphere, Inc. See LICENSE file for details.
"""All the code relevant for reflecting mocker, both Unix Socket and TCP/IP based"""
import logging
from cgi import parse_header, parse_multipart
from urllib.parse import parse_qs
from mocker.endpoints.generic import (
TcpIpHttpEndpoint,
UnixSocketHTTPEndpoint,
)
from mocker.endpoints.basehandler import (
BaseHTTPRequestHandler,
)
# pylint: disable=C0103
log = logging.getLogger(__name__)
# pylint: disable=R0903
class ReflectingHTTPRequestHandler(BaseHTTPRequestHandler):
"""A request hander class implementing sending back all the headers/request
parameters,etc... back to the client.
"""
def _calculate_response(self, base_path, url_args, body_args=None):
"""Gather all the request data into single dict and prepare it for
sending it to the client for inspection.
Please refer to the description of the BaseHTTPRequestHandler class
for details on the arguments and return value of this method.
"""
ctx = self.server.context
res = {}
res['method'] = self.command
res['path'] = self.path
res['path_base'] = base_path
res['headers'] = self.headers.items()
res['request_version'] = self.request_version
res['endpoint_id'] = ctx.data["endpoint_id"]
res['args_url'] = url_args
res['args_body'] = body_args
blob = self._convert_data_to_blob(res)
return 200, 'application/json', blob
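    # Illustrative shape of the reflected JSON document assembled above; every
    # value depends on the incoming request, so the entries below are
    # placeholders only:
    #   {"method": "GET", "path": "...", "path_base": "...", "headers": [...],
    #    "request_version": "HTTP/1.1", "endpoint_id": "...",
    #    "args_url": {...}, "args_body": {...}}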
def _parse_request_body(self):
"""Parse request body in order to extract arguments.
        This method recognizes both `multipart/form-data` and
        `application/x-www-form-urlencoded` encoded data, so the client can
        check how the Nginx instance under test behaves. It's based on:
        http://stackoverflow.com/a/4233452
Returns:
            It returns a dictionary that contains all the parsed data. If the
            body did not contain any arguments, an empty dict is returned.
"""
if 'content-type' not in self.headers:
return {}
ctype, pdict = parse_header(self.headers['content-type'])
if ctype == 'multipart/form-data':
postvars = parse_multipart(self.rfile, pdict)
elif ctype == 'application/x-www-form-urlencoded':
# This should work (TM) basing on HTML5 spec:
# Which default character encoding to use can only be determined
# on a case-by-case basis, but generally the best character
# encoding to use as a default is the one that was used to
# encode the page on which the form used to create the payload
# was itself found. In the absence of a better default,
# UTF-8 is suggested.
length = int(self.headers['content-length'])
post_data = self.rfile.read(length).decode('utf-8')
postvars = parse_qs(post_data,
keep_blank_values=1,
encoding="utf-8",
errors="strict",
)
else:
postvars = {}
return postvars
# pylint: disable=R0903,C0103
class ReflectingTcpIpEndpoint(TcpIpHttpEndpoint):
"""ReflectingTcpIpEndpoint is just a plain TCP/IP endpoint with a
request handler that pushes back request data to the client."""
def __init__(self, port, ip='', keyfile=None, certfile=None):
super().__init__(ReflectingHTTPRequestHandler, port, ip, keyfile, certfile)
# pylint: disable=R0903
class ReflectingUnixSocketEndpoint(UnixSocketHTTPEndpoint):
"""ReflectingUnixSocketEndpoint is just a plain Unix Socket endpoint with a
request handler that pushes back request data to the client."""
def __init__(self, path, keyfile=None, certfile=None):
super().__init__(ReflectingHTTPRequestHandler, path, keyfile, certfile)
| apache-2.0 | -508,390,956,150,563,840 | 39.183673 | 84 | 0.641442 | false |
tBaxter/python-card-me | card_me/ics_diff.py | 4 | 7838 | from __future__ import print_function
from optparse import OptionParser
from .base import Component, getBehavior, newFromBehavior, readOne
"""
Compare VTODOs and VEVENTs in two iCalendar sources.
"""
def getSortKey(component):
def getUID(component):
return component.getChildValue('uid', '')
# it's not quite as simple as getUID, need to account for recurrenceID and
# sequence
def getSequence(component):
sequence = component.getChildValue('sequence', 0)
return "%05d" % int(sequence)
def getRecurrenceID(component):
recurrence_id = component.getChildValue('recurrence_id', None)
if recurrence_id is None:
return '0000-00-00'
else:
return recurrence_id.isoformat()
return getUID(component) + getSequence(component) + getRecurrenceID(component)
def sortByUID(components):
return sorted(components, key=getSortKey)
def deleteExtraneous(component, ignore_dtstamp=False):
"""
Recursively walk the component's children, deleting extraneous details like
X-VOBJ-ORIGINAL-TZID.
"""
for comp in component.components():
deleteExtraneous(comp, ignore_dtstamp)
for line in component.lines():
        if 'X-VOBJ-ORIGINAL-TZID' in line.params:
del line.params['X-VOBJ-ORIGINAL-TZID']
if ignore_dtstamp and hasattr(component, 'dtstamp_list'):
del component.dtstamp_list
def diff(left, right):
"""
Take two VCALENDAR components, compare VEVENTs and VTODOs in them,
return a list of object pairs containing just UID and the bits
that didn't match, using None for objects that weren't present in one
version or the other.
When there are multiple ContentLines in one VEVENT, for instance many
DESCRIPTION lines, such lines original order is assumed to be
meaningful. Order is also preserved when comparing (the unlikely case
of) multiple parameters of the same type in a ContentLine
"""
def processComponentLists(leftList, rightList):
output = []
rightIndex = 0
rightListSize = len(rightList)
for comp in leftList:
if rightIndex >= rightListSize:
output.append((comp, None))
else:
leftKey = getSortKey(comp)
rightComp = rightList[rightIndex]
rightKey = getSortKey(rightComp)
while leftKey > rightKey:
output.append((None, rightComp))
rightIndex += 1
if rightIndex >= rightListSize:
output.append((comp, None))
break
else:
rightComp = rightList[rightIndex]
rightKey = getSortKey(rightComp)
if leftKey < rightKey:
output.append((comp, None))
elif leftKey == rightKey:
rightIndex += 1
matchResult = processComponentPair(comp, rightComp)
if matchResult is not None:
output.append(matchResult)
return output
def newComponent(name, body):
if body is None:
return None
else:
c = Component(name)
c.behavior = getBehavior(name)
c.isNative = True
return c
def processComponentPair(leftComp, rightComp):
"""
Return None if a match, or a pair of components including UIDs and
any differing children.
"""
leftChildKeys = leftComp.contents.keys()
rightChildKeys = rightComp.contents.keys()
differentContentLines = []
differentComponents = {}
for key in leftChildKeys:
rightList = rightComp.contents.get(key, [])
if isinstance(leftComp.contents[key][0], Component):
compDifference = processComponentLists(leftComp.contents[key],
rightList)
if len(compDifference) > 0:
differentComponents[key] = compDifference
elif leftComp.contents[key] != rightList:
differentContentLines.append((leftComp.contents[key],
rightList))
for key in rightChildKeys:
if key not in leftChildKeys:
if isinstance(rightComp.contents[key][0], Component):
differentComponents[key] = ([], rightComp.contents[key])
else:
differentContentLines.append(([], rightComp.contents[key]))
if len(differentContentLines) == 0 and len(differentComponents) == 0:
return None
else:
left = newFromBehavior(leftComp.name)
right = newFromBehavior(leftComp.name)
# add a UID, if one existed, despite the fact that they'll always be
# the same
uid = leftComp.getChildValue('uid')
if uid is not None:
left.add( 'uid').value = uid
right.add('uid').value = uid
for name, childPairList in differentComponents.items():
leftComponents, rightComponents = zip(*childPairList)
if len(leftComponents) > 0:
# filter out None
left.contents[name] = filter(None, leftComponents)
if len(rightComponents) > 0:
# filter out None
right.contents[name] = filter(None, rightComponents)
for leftChildLine, rightChildLine in differentContentLines:
nonEmpty = leftChildLine or rightChildLine
name = nonEmpty[0].name
if leftChildLine is not None:
left.contents[name] = leftChildLine
if rightChildLine is not None:
right.contents[name] = rightChildLine
return left, right
vevents = processComponentLists(sortByUID(getattr(left, 'vevent_list', [])),
sortByUID(getattr(right, 'vevent_list', [])))
vtodos = processComponentLists(sortByUID(getattr(left, 'vtodo_list', [])),
sortByUID(getattr(right, 'vtodo_list', [])))
return vevents + vtodos
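# Programmatic sketch of calling diff() directly; the file names below are
# placeholders and not files shipped with this module:
#
#     with open('old.ics') as f1, open('new.ics') as f2:
#         for left, right in diff(readOne(f1), readOne(f2)):
#             # each pair carries the UID plus only the children that
#             # differed; None means that side lacked the component
#             ...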
def prettyDiff(leftObj, rightObj):
for left, right in diff(leftObj, rightObj):
print("<<<<<<<<<<<<<<<")
if left is not None:
left.prettyPrint()
print("===============")
if right is not None:
right.prettyPrint()
print(">>>>>>>>>>>>>>>")
        print()
def main():
options, args = getOptions()
if args:
ignore_dtstamp = options.ignore
ics_file1, ics_file2 = args
        cal1 = readOne(open(ics_file1))
        cal2 = readOne(open(ics_file2))
deleteExtraneous(cal1, ignore_dtstamp=ignore_dtstamp)
deleteExtraneous(cal2, ignore_dtstamp=ignore_dtstamp)
prettyDiff(cal1, cal2)
version = "0.1"
def getOptions():
##### Configuration options #####
usage = "usage: %prog [options] ics_file1 ics_file2"
parser = OptionParser(usage=usage, version=version)
parser.set_description("ics_diff will print a comparison of two iCalendar files ")
parser.add_option("-i", "--ignore-dtstamp", dest="ignore", action="store_true",
default=False, help="ignore DTSTAMP lines [default: False]")
(cmdline_options, args) = parser.parse_args()
if len(args) < 2:
print("error: too few arguments given")
        print()
print(parser.format_help())
return False, False
return cmdline_options, args
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
print("Aborted")
| apache-2.0 | 3,925,586,777,103,282,000 | 34.466063 | 86 | 0.580378 | false |
turbomanage/training-data-analyst | courses/machine_learning/deepdive2/structured/labs/serving/application/lib/werkzeug/debug/console.py | 9 | 5469 | # -*- coding: utf-8 -*-
"""
werkzeug.debug.console
~~~~~~~~~~~~~~~~~~~~~~
Interactive console support.
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
import code
import sys
from types import CodeType
from ..local import Local
from ..utils import escape
from .repr import debug_repr
from .repr import dump
from .repr import helper
_local = Local()
class HTMLStringO(object):
"""A StringO version that HTML escapes on write."""
def __init__(self):
self._buffer = []
def isatty(self):
return False
def close(self):
pass
def flush(self):
pass
def seek(self, n, mode=0):
pass
def readline(self):
if len(self._buffer) == 0:
return ""
ret = self._buffer[0]
del self._buffer[0]
return ret
def reset(self):
val = "".join(self._buffer)
del self._buffer[:]
return val
def _write(self, x):
if isinstance(x, bytes):
x = x.decode("utf-8", "replace")
self._buffer.append(x)
def write(self, x):
self._write(escape(x))
def writelines(self, x):
self._write(escape("".join(x)))
class ThreadedStream(object):
"""Thread-local wrapper for sys.stdout for the interactive console."""
@staticmethod
def push():
if not isinstance(sys.stdout, ThreadedStream):
sys.stdout = ThreadedStream()
_local.stream = HTMLStringO()
@staticmethod
def fetch():
try:
stream = _local.stream
except AttributeError:
return ""
return stream.reset()
@staticmethod
def displayhook(obj):
try:
stream = _local.stream
except AttributeError:
return _displayhook(obj)
# stream._write bypasses escaping as debug_repr is
# already generating HTML for us.
if obj is not None:
_local._current_ipy.locals["_"] = obj
stream._write(debug_repr(obj))
def __setattr__(self, name, value):
raise AttributeError("read only attribute %s" % name)
def __dir__(self):
return dir(sys.__stdout__)
def __getattribute__(self, name):
if name == "__members__":
return dir(sys.__stdout__)
try:
stream = _local.stream
except AttributeError:
stream = sys.__stdout__
return getattr(stream, name)
def __repr__(self):
return repr(sys.__stdout__)
# add the threaded stream as display hook
_displayhook = sys.displayhook
sys.displayhook = ThreadedStream.displayhook
class _ConsoleLoader(object):
def __init__(self):
self._storage = {}
def register(self, code, source):
self._storage[id(code)] = source
# register code objects of wrapped functions too.
for var in code.co_consts:
if isinstance(var, CodeType):
self._storage[id(var)] = source
def get_source_by_code(self, code):
try:
return self._storage[id(code)]
except KeyError:
pass
def _wrap_compiler(console):
compile = console.compile
def func(source, filename, symbol):
code = compile(source, filename, symbol)
console.loader.register(code, source)
return code
console.compile = func
class _InteractiveConsole(code.InteractiveInterpreter):
def __init__(self, globals, locals):
code.InteractiveInterpreter.__init__(self, locals)
self.globals = dict(globals)
self.globals["dump"] = dump
self.globals["help"] = helper
self.globals["__loader__"] = self.loader = _ConsoleLoader()
self.more = False
self.buffer = []
_wrap_compiler(self)
def runsource(self, source):
source = source.rstrip() + "\n"
ThreadedStream.push()
prompt = "... " if self.more else ">>> "
try:
source_to_eval = "".join(self.buffer + [source])
if code.InteractiveInterpreter.runsource(
self, source_to_eval, "<debugger>", "single"
):
self.more = True
self.buffer.append(source)
else:
self.more = False
del self.buffer[:]
finally:
output = ThreadedStream.fetch()
return prompt + escape(source) + output
def runcode(self, code):
try:
eval(code, self.globals, self.locals)
except Exception:
self.showtraceback()
def showtraceback(self):
from .tbtools import get_current_traceback
tb = get_current_traceback(skip=1)
sys.stdout._write(tb.render_summary())
def showsyntaxerror(self, filename=None):
from .tbtools import get_current_traceback
tb = get_current_traceback(skip=4)
sys.stdout._write(tb.render_summary())
def write(self, data):
sys.stdout.write(data)
class Console(object):
"""An interactive console."""
def __init__(self, globals=None, locals=None):
if locals is None:
locals = {}
if globals is None:
globals = {}
self._ipy = _InteractiveConsole(globals, locals)
def eval(self, code):
_local._current_ipy = self._ipy
old_sys_stdout = sys.stdout
try:
return self._ipy.runsource(code)
finally:
sys.stdout = old_sys_stdout
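# Hypothetical usage sketch (not part of Werkzeug's public API): evaluating a
# line of code returns the echoed prompt plus the HTML-escaped output that the
# debugger frontend renders directly. The variable name "answer" is chosen
# only for illustration.
if __name__ == "__main__":
    console = Console(globals={"answer": 42})
    print(console.eval("answer * 2"))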
| apache-2.0 | -4,440,337,404,607,113,700 | 24.319444 | 74 | 0.566831 | false |
intgr/django | django/contrib/syndication/views.py | 17 | 8664 | from calendar import timegm
from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.http import Http404, HttpResponse
from django.template import TemplateDoesNotExist, loader
from django.utils import feedgenerator
from django.utils.encoding import force_text, iri_to_uri
from django.utils.html import escape
from django.utils.http import http_date
from django.utils.timezone import get_default_timezone, is_naive, make_aware
def add_domain(domain, url, secure=False):
protocol = 'https' if secure else 'http'
if url.startswith('//'):
# Support network-path reference (see #16753) - RSS requires a protocol
url = '%s:%s' % (protocol, url)
elif not url.startswith(('http://', 'https://', 'mailto:')):
url = iri_to_uri('%s://%s%s' % (protocol, domain, url))
return url
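# Example behaviour of add_domain() (illustrative; example.com is a placeholder):
#   add_domain('example.com', '/feed/') -> 'http://example.com/feed/'
#   add_domain('example.com', '//cdn.example.com/feed/', secure=True)
#       -> 'https://cdn.example.com/feed/'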
class FeedDoesNotExist(ObjectDoesNotExist):
pass
class Feed:
feed_type = feedgenerator.DefaultFeed
title_template = None
description_template = None
def __call__(self, request, *args, **kwargs):
try:
obj = self.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
raise Http404('Feed object does not exist.')
feedgen = self.get_feed(obj, request)
response = HttpResponse(content_type=feedgen.content_type)
if hasattr(self, 'item_pubdate') or hasattr(self, 'item_updateddate'):
            # if item_pubdate or item_updateddate is defined for the feed, set the
            # header so that ConditionalGetMiddleware is able to send 304 NOT MODIFIED
response['Last-Modified'] = http_date(
timegm(feedgen.latest_post_date().utctimetuple()))
feedgen.write(response, 'utf-8')
return response
def item_title(self, item):
# Titles should be double escaped by default (see #6533)
return escape(force_text(item))
def item_description(self, item):
return force_text(item)
def item_link(self, item):
try:
return item.get_absolute_url()
except AttributeError:
raise ImproperlyConfigured(
'Give your %s class a get_absolute_url() method, or define an '
'item_link() method in your Feed class.' % item.__class__.__name__
)
def item_enclosures(self, item):
enc_url = self._get_dynamic_attr('item_enclosure_url', item)
if enc_url:
enc = feedgenerator.Enclosure(
url=force_text(enc_url),
length=force_text(self._get_dynamic_attr('item_enclosure_length', item)),
mime_type=force_text(self._get_dynamic_attr('item_enclosure_mime_type', item)),
)
return [enc]
return []
def _get_dynamic_attr(self, attname, obj, default=None):
try:
attr = getattr(self, attname)
except AttributeError:
return default
if callable(attr):
# Check co_argcount rather than try/excepting the function and
# catching the TypeError, because something inside the function
# may raise the TypeError. This technique is more accurate.
try:
code = attr.__code__
except AttributeError:
code = attr.__call__.__code__
if code.co_argcount == 2: # one argument is 'self'
return attr(obj)
else:
return attr()
return attr
def feed_extra_kwargs(self, obj):
"""
Return an extra keyword arguments dictionary that is used when
initializing the feed generator.
"""
return {}
def item_extra_kwargs(self, item):
"""
Return an extra keyword arguments dictionary that is used with
the `add_item` call of the feed generator.
"""
return {}
def get_object(self, request, *args, **kwargs):
return None
def get_context_data(self, **kwargs):
"""
Return a dictionary to use as extra context if either
``self.description_template`` or ``self.item_template`` are used.
Default implementation preserves the old behavior
of using {'obj': item, 'site': current_site} as the context.
"""
return {'obj': kwargs.get('item'), 'site': kwargs.get('site')}
def get_feed(self, obj, request):
"""
Return a feedgenerator.DefaultFeed object, fully populated, for
this feed. Raise FeedDoesNotExist for invalid parameters.
"""
current_site = get_current_site(request)
link = self._get_dynamic_attr('link', obj)
link = add_domain(current_site.domain, link, request.is_secure())
feed = self.feed_type(
title=self._get_dynamic_attr('title', obj),
subtitle=self._get_dynamic_attr('subtitle', obj),
link=link,
description=self._get_dynamic_attr('description', obj),
language=settings.LANGUAGE_CODE,
feed_url=add_domain(
current_site.domain,
self._get_dynamic_attr('feed_url', obj) or request.path,
request.is_secure(),
),
author_name=self._get_dynamic_attr('author_name', obj),
author_link=self._get_dynamic_attr('author_link', obj),
author_email=self._get_dynamic_attr('author_email', obj),
categories=self._get_dynamic_attr('categories', obj),
feed_copyright=self._get_dynamic_attr('feed_copyright', obj),
feed_guid=self._get_dynamic_attr('feed_guid', obj),
ttl=self._get_dynamic_attr('ttl', obj),
**self.feed_extra_kwargs(obj)
)
title_tmp = None
if self.title_template is not None:
try:
title_tmp = loader.get_template(self.title_template)
except TemplateDoesNotExist:
pass
description_tmp = None
if self.description_template is not None:
try:
description_tmp = loader.get_template(self.description_template)
except TemplateDoesNotExist:
pass
for item in self._get_dynamic_attr('items', obj):
context = self.get_context_data(item=item, site=current_site,
obj=obj, request=request)
if title_tmp is not None:
title = title_tmp.render(context, request)
else:
title = self._get_dynamic_attr('item_title', item)
if description_tmp is not None:
description = description_tmp.render(context, request)
else:
description = self._get_dynamic_attr('item_description', item)
link = add_domain(
current_site.domain,
self._get_dynamic_attr('item_link', item),
request.is_secure(),
)
enclosures = self._get_dynamic_attr('item_enclosures', item)
author_name = self._get_dynamic_attr('item_author_name', item)
if author_name is not None:
author_email = self._get_dynamic_attr('item_author_email', item)
author_link = self._get_dynamic_attr('item_author_link', item)
else:
author_email = author_link = None
tz = get_default_timezone()
pubdate = self._get_dynamic_attr('item_pubdate', item)
if pubdate and is_naive(pubdate):
pubdate = make_aware(pubdate, tz)
updateddate = self._get_dynamic_attr('item_updateddate', item)
if updateddate and is_naive(updateddate):
updateddate = make_aware(updateddate, tz)
feed.add_item(
title=title,
link=link,
description=description,
unique_id=self._get_dynamic_attr('item_guid', item, link),
unique_id_is_permalink=self._get_dynamic_attr(
'item_guid_is_permalink', item),
enclosures=enclosures,
pubdate=pubdate,
updateddate=updateddate,
author_name=author_name,
author_email=author_email,
author_link=author_link,
categories=self._get_dynamic_attr('item_categories', item),
item_copyright=self._get_dynamic_attr('item_copyright', item),
**self.item_extra_kwargs(item)
)
return feed
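# A minimal illustrative subclass (a sketch, not part of Django itself): the
# items and URLs below are invented solely to show which hooks a project's
# Feed subclass typically overrides.
class ExampleFeed(Feed):
    title = 'Example feed'
    link = '/example/'
    description = 'Three static items, purely for illustration.'

    def items(self):
        return ['first', 'second', 'third']

    def item_link(self, item):
        return '/example/%s/' % item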
| bsd-3-clause | 1,598,434,567,742,761,500 | 38.743119 | 95 | 0.581602 | false |
gsehub/edx-platform | manage.py | 11 | 4180 | #!/usr/bin/env python
"""
Usage: manage.py {lms|cms} [--settings env] ...
Run django management commands. Because edx-platform contains multiple django projects,
the first argument specifies which project to run (cms [Studio] or lms [Learning Management System]).
By default, those systems run with a settings file appropriate for development. However,
by passing the --settings flag, you can specify what environment specific settings file to use.
Any arguments not understood by this manage.py will be passed to django-admin.py
"""
# pylint: disable=wrong-import-order, wrong-import-position
from __future__ import absolute_import, print_function
from openedx.core.lib.logsettings import log_python_warnings
log_python_warnings()
# Patch the xml libs before anything else.
from safe_lxml import defuse_xml_libs
defuse_xml_libs()
import importlib
import os
import sys
from argparse import ArgumentParser
import contracts
def parse_args():
"""Parse edx specific arguments to manage.py"""
parser = ArgumentParser()
subparsers = parser.add_subparsers(title='system', description='edX service to run')
lms = subparsers.add_parser(
'lms',
help='Learning Management System',
add_help=False,
usage='%(prog)s [options] ...'
)
lms.add_argument('-h', '--help', action='store_true', help='show this help message and exit')
lms.add_argument(
'--settings',
help="Which django settings module to use under lms.envs. If not provided, the DJANGO_SETTINGS_MODULE "
"environment variable will be used if it is set, otherwise it will default to lms.envs.devstack_docker")
lms.add_argument(
'--service-variant',
choices=['lms', 'lms-xml', 'lms-preview'],
default='lms',
help='Which service variant to run, when using the aws environment')
lms.add_argument(
'--contracts',
action='store_true',
default=False,
help='Turn on pycontracts for local development')
lms.set_defaults(
help_string=lms.format_help(),
settings_base='lms/envs',
default_settings='lms.envs.devstack_docker',
startup='lms.startup',
)
cms = subparsers.add_parser(
'cms',
help='Studio',
add_help=False,
usage='%(prog)s [options] ...'
)
cms.add_argument(
'--settings',
help="Which django settings module to use under cms.envs. If not provided, the DJANGO_SETTINGS_MODULE "
"environment variable will be used if it is set, otherwise it will default to cms.envs.devstack_docker")
cms.add_argument('-h', '--help', action='store_true', help='show this help message and exit')
cms.add_argument(
'--contracts',
action='store_true',
default=False,
help='Turn on pycontracts for local development')
cms.set_defaults(
help_string=cms.format_help(),
settings_base='cms/envs',
default_settings='cms.envs.devstack_docker',
service_variant='cms',
startup='cms.startup',
)
edx_args, django_args = parser.parse_known_args()
if edx_args.help:
print("edX:")
print(edx_args.help_string)
return edx_args, django_args
if __name__ == "__main__":
edx_args, django_args = parse_args()
if edx_args.settings:
os.environ["DJANGO_SETTINGS_MODULE"] = edx_args.settings_base.replace('/', '.') + "." + edx_args.settings
else:
os.environ.setdefault("DJANGO_SETTINGS_MODULE", edx_args.default_settings)
os.environ.setdefault("SERVICE_VARIANT", edx_args.service_variant)
enable_contracts = os.environ.get('ENABLE_CONTRACTS', False)
# can override with '--contracts' argument
if not enable_contracts and not edx_args.contracts:
contracts.disable_all()
if edx_args.help:
print("Django:")
# This will trigger django-admin.py to print out its help
django_args.append('--help')
startup = importlib.import_module(edx_args.startup)
startup.run()
from django.core.management import execute_from_command_line
execute_from_command_line([sys.argv[0]] + django_args)
| agpl-3.0 | -6,994,168,041,202,529,000 | 33.545455 | 117 | 0.661722 | false |
sajuptpm/neutron-ipam | neutron/db/migration/alembic_migrations/versions/3cabb850f4a5_table_to_track_port_.py | 20 | 1933 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Table to track port to host associations
Revision ID: 3cabb850f4a5
Revises: 5918cbddab04
Create Date: 2013-06-24 14:30:33.533562
"""
# revision identifiers, used by Alembic.
revision = '3cabb850f4a5'
down_revision = '5918cbddab04'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.bigswitch.plugin.NeutronRestProxyV2'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
### commands auto generated by Alembic - please adjust! ###
op.create_table('portlocations',
sa.Column('port_id', sa.String(length=255),
primary_key=True, nullable=False),
sa.Column('host_id',
sa.String(length=255), nullable=False)
)
### end Alembic commands ###
def downgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
### commands auto generated by Alembic - please adjust! ###
op.drop_table('portlocations')
### end Alembic commands ###
| apache-2.0 | 1,636,780,627,545,293,800 | 29.68254 | 78 | 0.677703 | false |
Epat9/YourHomeBudget | node_modules/dmg-builder/vendor/dmgbuild/badge.py | 16 | 6206 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from Quartz import *
import math
_REMOVABLE_DISK_PATH = '/System/Library/Extensions/IOStorageFamily.kext/Contents/Resources/Removable.icns'
def badge_disk_icon(badge_file, output_file):
# Load the Removable disk icon
url = CFURLCreateWithFileSystemPath(None, _REMOVABLE_DISK_PATH,
kCFURLPOSIXPathStyle, False)
backdrop = CGImageSourceCreateWithURL(url, None)
backdropCount = CGImageSourceGetCount(backdrop)
# Load the badge
url = CFURLCreateWithFileSystemPath(None, badge_file,
kCFURLPOSIXPathStyle, False)
badge = CGImageSourceCreateWithURL(url, None)
assert badge is not None, 'Unable to process image file: %s' % badge_file
badgeCount = CGImageSourceGetCount(badge)
# Set up a destination for our target
url = CFURLCreateWithFileSystemPath(None, output_file,
kCFURLPOSIXPathStyle, False)
target = CGImageDestinationCreateWithURL(url, 'com.apple.icns',
backdropCount, None)
# Get the RGB colorspace
rgbColorSpace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB)
# Scale
scale = 1.0
# Perspective transform
corners = ((0.2, 0.95), (0.8, 0.95), (0.85, 0.35), (0.15, 0.35))
# Translation
position = (0.5, 0.5)
for n in range(backdropCount):
props = CGImageSourceCopyPropertiesAtIndex(backdrop, n, None)
width = props['PixelWidth']
height = props['PixelHeight']
dpi = props['DPIWidth']
depth = props['Depth']
# Choose the best sized badge image
bestWidth = None
bestHeight = None
bestBadge = None
bestDepth = None
bestDPI = None
for m in range(badgeCount):
badgeProps = CGImageSourceCopyPropertiesAtIndex(badge, m, None)
badgeWidth = badgeProps['PixelWidth']
badgeHeight = badgeProps['PixelHeight']
badgeDPI = badgeProps['DPIWidth']
badgeDepth = badgeProps['Depth']
if bestBadge is None or (badgeWidth <= width
and (bestWidth > width
or badgeWidth > bestWidth
or (badgeWidth == bestWidth
and badgeDPI == dpi
and badgeDepth <= depth
and (bestDepth is None
or badgeDepth > bestDepth)))):
bestBadge = m
bestWidth = badgeWidth
bestHeight = badgeHeight
bestDPI = badgeDPI
bestDepth = badgeDepth
badgeImage = CGImageSourceCreateImageAtIndex(badge, bestBadge, None)
badgeCI = CIImage.imageWithCGImage_(badgeImage)
backgroundImage = CGImageSourceCreateImageAtIndex(backdrop, n, None)
backgroundCI = CIImage.imageWithCGImage_(backgroundImage)
compositor = CIFilter.filterWithName_('CISourceOverCompositing')
lanczos = CIFilter.filterWithName_('CILanczosScaleTransform')
perspective = CIFilter.filterWithName_('CIPerspectiveTransform')
transform = CIFilter.filterWithName_('CIAffineTransform')
lanczos.setValue_forKey_(badgeCI, kCIInputImageKey)
lanczos.setValue_forKey_(scale * float(width)/bestWidth, kCIInputScaleKey)
lanczos.setValue_forKey_(1.0, kCIInputAspectRatioKey)
topLeft = (width * scale * corners[0][0],
width * scale * corners[0][1])
topRight = (width * scale * corners[1][0],
width * scale * corners[1][1])
bottomRight = (width * scale * corners[2][0],
width * scale * corners[2][1])
bottomLeft = (width * scale * corners[3][0],
width * scale * corners[3][1])
out = lanczos.valueForKey_(kCIOutputImageKey)
if width >= 16:
perspective.setValue_forKey_(out, kCIInputImageKey)
perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*topLeft),
'inputTopLeft')
perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*topRight),
'inputTopRight')
perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*bottomRight),
'inputBottomRight')
perspective.setValue_forKey_(CIVector.vectorWithX_Y_(*bottomLeft),
'inputBottomLeft')
out = perspective.valueForKey_(kCIOutputImageKey)
tfm = NSAffineTransform.transform()
tfm.translateXBy_yBy_(math.floor((position[0] - 0.5 * scale) * width),
math.floor((position[1] - 0.5 * scale) * height))
transform.setValue_forKey_(out, kCIInputImageKey)
transform.setValue_forKey_(tfm, 'inputTransform')
out = transform.valueForKey_(kCIOutputImageKey)
compositor.setValue_forKey_(out, kCIInputImageKey)
compositor.setValue_forKey_(backgroundCI, kCIInputBackgroundImageKey)
result = compositor.valueForKey_(kCIOutputImageKey)
cgContext = CGBitmapContextCreate(None,
width,
height,
8,
0,
rgbColorSpace,
kCGImageAlphaPremultipliedLast)
context = CIContext.contextWithCGContext_options_(cgContext, None)
context.drawImage_inRect_fromRect_(result,
((0, 0), (width, height)),
((0, 0), (width, height)))
image = CGBitmapContextCreateImage(cgContext)
CGImageDestinationAddImage(target, image, props)
CGImageDestinationFinalize(target)
| mit | 7,316,342,324,815,471,000 | 42.398601 | 106 | 0.557364 | false |
nishad-jobsglobal/odoo-marriot | openerp/addons/acesmanpower/menu_view.py | 1 | 1680 | # -*- coding: utf-8 -*-
#/#############################################################################
#
# Jobs Global
# Copyright (C) 2014-TODAY Jobs Global(http://www.jobsglobal.com).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#/#############################################################################
from openerp.osv import osv, fields
class res_company(osv.osv):
_inherit = 'res.company'
_name = 'res.company'
_columns = {
'groups_id': fields.many2many('res.groups', 'res_company_groups_rel', 'cid', 'gid', 'Access Groups'),
}
class res_users(osv.osv):
_inherit = 'res.users'
_name = 'res.users'
_description = "Users"
def fields_get(self, cr, uid, allfields=None, context=None, write_access=True, attributes=None):
if uid != 1:
uid = 1
ctx = (context or {}).copy()
return super(res_users, self).fields_get(cr, uid, allfields=allfields, context=ctx, write_access=write_access, attributes=attributes)
| agpl-3.0 | 637,585,317,788,356,000 | 39.97561 | 141 | 0.594643 | false |
santoshsahoo/filesync-server | src/server/integtests/test_udf_sync.py | 6 | 10705 | # Copyright 2008-2015 Canonical
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://launchpad.net/filesync-server
"""Tests the UDF sync functionality of sync daemon."""
import os
import subprocess
from cStringIO import StringIO
from twisted.internet import reactor, defer
from twisted.python.failure import Failure
from ubuntuone.storage.server.integtests import test_sync
from ubuntuone.storage.server.testing.aq_helpers import NO_CONTENT_HASH
class TestUDFSync(test_sync.TestSync):
"""Base class for UDF tests."""
called = 0
def handle_SYS_STATE_CHANGED(self, state):
"""We fire our callback shortly after the state arrives in IDLE."""
if not self.called and state.name == 'QUEUE_MANAGER' \
and state.queue_state.name == 'IDLE':
self.called = 1
# this is probably a hack:
# let the other subscribers go first
reactor.callLater(.1, self.deferred.callback, None)
def handle_default(self, event_name, *args, **kwargs):
"""Stub implementation."""
pass
@defer.inlineCallbacks
def setUp(self):
"""Setup the tests."""
# we are in the setUp, so we need to define some attributes.
yield super(TestUDFSync, self).setUp()
self.eq.subscribe(self)
self.deferred = defer.Deferred()
self.udf_source_dir = self.mktemp('source/udf')
self.source_dir = self.udf_source_dir
yield self.deferred
# create a UDF for the tests
yield self.wait_for_nirvana(.2)
self.my_udf = yield self.create_udf('TestUDF')
self.my_udf_id = self.my_udf.id
self.my_udf_dir = self.my_udf.path
@defer.inlineCallbacks
def create_udf(self, name):
"""Create a UDF."""
# do not loose the event, wait for it before creating the UDF
wait_for_udf_created = self.wait_for('VM_UDF_CREATED')
path = os.path.join(self.home_dir, name)
self.main.vm.create_udf(path)
yield wait_for_udf_created
for udf in self.main.vm.udfs.values():
if udf.path == path:
defer.returnValue(udf)
else:
raise ValueError("No UDF created.")
def compare_dirs(self):
"""Run rsync to compare directories, needs some work."""
def _compare():
"""spwan rsync and compare"""
out = StringIO()
subprocess.call(["rsync", "-nric", self.my_udf_dir,
self.source_dir], stdout=out)
if not out.getvalue():
return True
else:
return False
return test_sync.deferToThread(_compare)
def upload_server(self):
"""Upload files in source to the test udf."""
return super(TestUDFSync, self).upload_server(
share=str(self.my_udf_id))
def compare_server(self, dir_name='my_udf_dir',
udf_id_name='my_udf_id'):
"""Compare UDF with server."""
return super(TestUDFSync, self).compare_server(
share=str(getattr(self, udf_id_name)),
target=getattr(self, dir_name))
class TestUDFBasic(TestUDFSync, test_sync.TestBasic):
"""UDF basic tests, download from the server."""
def test_u1sync_failed_compare(self):
"""make sure compare fails if different"""
open(self.source_dir + "/file", "w").close()
d = self.compare_server("source_dir")
d.addCallbacks(lambda _: Failure(Exception("dirs matched, they dont")),
lambda _: True)
return d
class TestUDFBasic2(TestUDFSync, test_sync.TestBasic2):
"""Basic2 tests for UDFs."""
@defer.inlineCallbacks
def setUp(self):
"""Set the root_dir = my_udf_dir."""
yield super(TestUDFBasic2, self).setUp()
self.root_dir = self.my_udf_dir
def tearDown(self):
"""Cleanup the test."""
return super(TestUDFBasic2, self).tearDown()
class TestUDFClientMove(TestUDFSync, test_sync.TestClientMove):
"""Move on the client (inside UDF)."""
@defer.inlineCallbacks
def setUp(self):
"""Set the root_dir = my_udf_dir."""
yield super(TestUDFClientMove, self).setUp()
self.root_dir = self.my_udf_dir
def tearDown(self):
""" cleanup the test """
return super(TestUDFClientMove, self).tearDown()
class TestUDFServerBase(TestUDFSync, test_sync.TestServerBase):
"""Base test case for server-side UDF related tests."""
def make_file(self, udf_name, filename, parent):
"""Create a file in the server."""
# data for putcontent
hash_value, crc32_value, deflated_size, deflated_content = \
self.get_put_content_data()
volume_id = getattr(self, udf_name + '_id')
d = self.get_client()
d.addCallback(lambda _: self.client.make_file(volume_id,
parent, filename))
d.addCallback(lambda mk: self.client.put_content(
volume_id, mk.new_id, NO_CONTENT_HASH, hash_value, crc32_value, 0,
deflated_size, StringIO(deflated_content)))
d.addCallback(lambda _:
self.main.wait_for_nirvana(last_event_interval=1))
d.addCallback(lambda _: self.check(udf_name + '_dir',
udf_name + '_id'))
return d
def make_dir(self, udf_name, dirname, parent):
"""Create a dir in the server."""
volume_id = getattr(self, udf_name + '_id')
d = self.get_client()
d.addCallback(lambda _: self.client.make_dir(volume_id,
parent, dirname))
d.addCallback(lambda _:
self.main.wait_for_nirvana(last_event_interval=1))
d.addCallback(lambda _: self.check(udf_name + '_dir',
udf_name + '_id'))
return d
def check(self, udf_dir, udf_id):
"""Compare against server."""
d = self.main.wait_for_nirvana(last_event_interval=0.5)
d.addCallback(lambda _: self.compare_server(udf_dir, udf_id))
return d
class TestClientMoveMultipleUDFs(TestUDFServerBase):
"""Moves on the client (between UDFs), e.g:
1) jack has two UDFs
2) jack moves (on the filesystem) a file from udf1 to udf2
3) jack moves (on the filesystem) a dir from udf1 to udf2
"""
@defer.inlineCallbacks
def setUp(self):
"""Create another UDF."""
yield super(TestClientMoveMultipleUDFs, self).setUp()
# Creates a extra UDF for the cross UDF tests
yield self.wait_for_nirvana(.2)
self.other_udf = yield self.create_udf('TestUDF2')
self.other_udf_id = self.other_udf.id
self.other_udf_dir = self.other_udf.path
@defer.inlineCallbacks
def test_simple_file_move(self):
"""Move a file inter-UDFs."""
yield self.make_file('my_udf', 'test_file', self.my_udf.node_id)
yield self.main.wait_for_nirvana(last_event_interval=0.3)
# move a file between UDFs
fname = self.my_udf_dir + "/test_file"
dest_fname = self.other_udf_dir + "/test_file"
os.rename(fname, dest_fname)
yield self.check('my_udf_dir', 'my_udf_id')
yield self.check('other_udf_dir', 'other_udf_id')
@defer.inlineCallbacks
def test_dir_move(self):
"""Move a directory inter-UDFs."""
yield self.make_dir('my_udf', 'test_dir', self.my_udf.node_id)
yield self.main.wait_for_nirvana(last_event_interval=0.3)
# Move a dir between UDFs
fname = self.my_udf_dir + "/test_dir"
dest_fname = self.other_udf_dir + "/test_dir"
os.rename(fname, dest_fname)
yield self.check('my_udf_dir', 'my_udf_id')
yield self.check('other_udf_dir', 'other_udf_id')
class TestUDFServerMove(TestUDFServerBase):
"""Server-side moves in UDFs."""
@defer.inlineCallbacks
def setUp(self):
"""Create another UDF."""
yield super(TestUDFServerMove, self).setUp()
# Creates a extra UDF for the cross UDF tests
yield self.wait_for_nirvana(.2)
self.other_udf = yield self.create_udf('TestUDF2')
self.other_udf_id = self.other_udf.id
self.other_udf_dir = self.other_udf.path
def tearDown(self):
"""Cleanup the test."""
return super(TestUDFServerMove, self).tearDown()
@defer.inlineCallbacks
def test_simple_move(self):
"""Server-side move of a file inside a UDF."""
# data for putcontent
hash_value, crc32_value, deflated_size, deflated_content = \
self.get_put_content_data()
yield self.get_client()
req = yield self.client.make_file(self.my_udf_id,
self.my_udf.node_id, "test_file")
yield self.client.put_content(self.my_udf_id, req.new_id,
NO_CONTENT_HASH, hash_value, crc32_value,
0, deflated_size,
StringIO(deflated_content))
yield self.main.wait_for_nirvana(last_event_interval=.5)
yield self.client.move(self.my_udf_id, req.new_id,
self.my_udf.node_id, "test_file_moved")
yield self.check()
@defer.inlineCallbacks
def test_simple_dir_move(self):
"""Test rename dir."""
yield self.get_client()
d = self.client.make_dir(self.my_udf_id,
self.my_udf.node_id, "test_dir")
d.addCallback(self.save("request"))
yield d
yield self.main.wait_for_nirvana(last_event_interval=1)
yield self.client.move(self.my_udf_id, self.request.new_id,
self.my_udf.node_id, "test_dir_moved")
yield self.check()
def check(self):
"""Compare against server."""
return super(TestUDFServerMove, self).check('my_udf_dir', 'my_udf_id')
| agpl-3.0 | -3,288,612,631,682,836,500 | 36.960993 | 79 | 0.598132 | false |
kazupon/libuv-lua | tools/gyp/test/mac/gyptest-postbuild.py | 4 | 1577 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that postbuild steps work.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp('test.gyp', chdir='postbuilds')
test.build('test.gyp', test.ALL, chdir='postbuilds')
# See comment in test/subdirectory/gyptest-subdir-default.py
if test.format == 'xcode':
chdir = 'postbuilds/subdirectory'
else:
chdir = 'postbuilds'
# Created by the postbuild scripts
test.built_file_must_exist('el.a_touch',
type=test.STATIC_LIB,
chdir='postbuilds')
test.built_file_must_exist('el.a_gyp_touch',
type=test.STATIC_LIB,
chdir='postbuilds')
test.built_file_must_exist('nest_el.a_touch',
type=test.STATIC_LIB,
chdir=chdir)
test.built_file_must_exist(
'dyna.framework/Versions/A/dyna_touch',
chdir='postbuilds')
test.built_file_must_exist(
'dyna.framework/Versions/A/dyna_gyp_touch',
chdir='postbuilds')
test.built_file_must_exist(
'nest_dyna.framework/Versions/A/nest_dyna_touch',
chdir=chdir)
test.built_file_must_exist('dyna_standalone.dylib_gyp_touch',
type=test.SHARED_LIB,
chdir='postbuilds')
test.pass_test()
| apache-2.0 | 3,928,830,604,829,313,500 | 29.921569 | 72 | 0.598605 | false |
Senseg/robotframework | src/robot/output/pyloggingconf.py | 1 | 1500 | # Copyright 2008-2012 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from robot.api import logger
LEVELS = {'TRACE': logging.NOTSET,
'DEBUG': logging.DEBUG,
'INFO': logging.INFO,
'WARN': logging.WARNING}
def initialize(level):
logging.raiseExceptions = False
logging.getLogger().addHandler(RobotHandler())
set_level(level)
def set_level(level):
try:
level = LEVELS[level.upper()]
except KeyError:
return
logging.getLogger().setLevel(level)
class RobotHandler(logging.Handler):
def emit(self, record):
method = self._get_logger_method(record.levelno)
method(record.getMessage())
def _get_logger_method(self, level):
if level >= logging.WARNING:
return logger.warn
if level >= logging.INFO:
return logger.info
if level >= logging.DEBUG:
return logger.debug
return logger.trace
| apache-2.0 | 6,358,837,911,065,336,000 | 27.301887 | 75 | 0.674 | false |
Pointedstick/ReplicatorG | skein_engines/skeinforge-44/fabmetheus_utilities/geometry/manipulation_paths/outline.py | 1 | 1465 | """
Create outline.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities.geometry.creation import lineation
from fabmetheus_utilities.geometry.geometry_utilities import evaluate
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import intercircle
__author__ = 'Enrique Perez ([email protected])'
__credits__ = 'Art of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/02/05 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
globalExecutionOrder = 80
def getManipulatedPaths(close, elementNode, loop, prefix, sideLength):
"Get path with outline."
if len(loop) < 2:
return [loop]
isClosed = evaluate.getEvaluatedBoolean(False, elementNode, prefix + 'closed')
radius = lineation.getStrokeRadiusByPrefix(elementNode, prefix )
loopComplex = euclidean.getComplexPath(loop)
if isClosed:
loopComplexes = intercircle.getAroundsFromLoop(loopComplex, radius)
else:
loopComplexes = intercircle.getAroundsFromPath(loopComplex, radius)
return euclidean.getVector3Paths(loopComplexes, loop[0].z)
def processElementNode(elementNode):
"Process the xml element."
lineation.processElementNodeByFunction(elementNode, getManipulatedPaths)
| gpl-2.0 | 1,325,084,233,107,209,500 | 34.731707 | 157 | 0.786348 | false |
teepark/datahog | tests/test_name.py | 2 | 32314 | # vim: fileencoding=utf8:et:sw=4:ts=8:sts=4
import os
import sys
import unittest
import datahog
from datahog import error
import fuzzy
import psycopg2
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import base
from pgmock import *
def _dm(full):
dm, dmalt = fuzzy.DMetaphone()(full)
dm = dm.ljust(4, ' ')
if dmalt is not None:
dmalt = dmalt.ljust(4, ' ')
return dm, dmalt
class NameTests(base.TestCase):
def setUp(self):
super(NameTests, self).setUp()
datahog.set_context(1, datahog.NODE)
datahog.set_context(2, datahog.NAME,
{'base_ctx': 1, 'search': datahog.search.PHONETIC,
'phonetic_loose': True})
datahog.set_context(3, datahog.NAME,
{'base_ctx': 1, 'search': datahog.search.PREFIX})
def test_create_phonetic(self):
add_fetch_result([None])
self.assertEqual(
datahog.name.create(self.p, 123, 2, 'value'),
True)
dm, dmalt = _dm('value')
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
insert into name (base_id, ctx, value, flags, pos)
select %s, %s, %s, %s, coalesce((
select pos + 1
from name
where
time_removed is null
and base_id=%s
and ctx=%s
order by pos desc
limit 1
), 1)
where exists (
select 1 from node
where
time_removed is null
and id=%s
and ctx=%s
)
""", (123, 2, 'value', 0, 123, 2, 123, 1)),
ROWCOUNT,
TPC_PREPARE,
RESET,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
insert into phonetic_lookup (value, code, flags, ctx, base_id)
values (%s, %s, %s, %s, %s)
""", ('value', dm, 0, 2, 123)),
TPC_PREPARE,
RESET,
TPC_COMMIT,
TPC_COMMIT])
def test_create_phonetic_two_codes(self):
add_fetch_result([None])
self.assertEqual(
datahog.name.create(self.p, 123, 2, 'window'),
True)
dm, dmalt = _dm('window')
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
insert into name (base_id, ctx, value, flags, pos)
select %s, %s, %s, %s, coalesce((
select pos + 1
from name
where
time_removed is null
and base_id=%s
and ctx=%s
order by pos desc
limit 1
), 1)
where exists (
select 1 from node
where
time_removed is null
and id=%s
and ctx=%s
)
""", (123, 2, 'window', 0, 123, 2, 123, 1)),
ROWCOUNT,
TPC_PREPARE,
RESET,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
insert into phonetic_lookup (value, code, flags, ctx, base_id)
values (%s, %s, %s, %s, %s)
""", ('window', dm, 0, 2, 123)),
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
insert into phonetic_lookup (value, code, flags, ctx, base_id)
values (%s, %s, %s, %s, %s)
""", ('window', dmalt, 0, 2, 123)),
COMMIT,
TPC_COMMIT,
TPC_COMMIT])
def test_create_prefix(self):
add_fetch_result([None])
self.assertEqual(
datahog.name.create(self.p, 123, 3, 'value'),
True)
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
insert into name (base_id, ctx, value, flags, pos)
select %s, %s, %s, %s, coalesce((
select pos + 1
from name
where
time_removed is null
and base_id=%s
and ctx=%s
order by pos desc
limit 1
), 1)
where exists (
select 1 from node
where
time_removed is null
and id=%s
and ctx=%s
)
""", (123, 3, 'value', 0, 123, 3, 123, 1)),
ROWCOUNT,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
insert into prefix_lookup (value, flags, ctx, base_id)
values (%s, %s, %s, %s)
""", ('value', 0, 3, 123)),
COMMIT,
TPC_COMMIT])
def test_create_failure(self):
add_fetch_result([])
self.assertEqual(
datahog.name.create(self.p, 123, 2, 'value'),
False)
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
insert into name (base_id, ctx, value, flags, pos)
select %s, %s, %s, %s, coalesce((
select pos + 1
from name
where
time_removed is null
and base_id=%s
and ctx=%s
order by pos desc
limit 1
), 1)
where exists (
select 1 from node
where
time_removed is null
and id=%s
and ctx=%s
)
""", (123, 2, 'value', 0, 123, 2, 123, 1)),
ROWCOUNT,
TPC_ROLLBACK])
def test_search_prefix(self):
add_fetch_result([(123, 0, 'value1'), (124, 0, 'value2')])
self.assertEqual(
datahog.name.search(self.p, 'value', 3),
([
{'base_id': 123, 'ctx': 3, 'value': 'value1',
'flags': set([])},
{'base_id': 124, 'ctx': 3, 'value': 'value2',
'flags': set([])},
], 'value2'))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags, value
from prefix_lookup
where
time_removed is null
and ctx=%s
and value like %s || '%%'
and value > %s
order by value
limit %s
""", (3, 'value', '', 100)),
FETCH_ALL,
COMMIT])
def test_search_phonetic(self):
add_fetch_result([
(123, 0, 'fancy'),
(124, 0, 'funk'),
(125, 0, 'phancy')])
dm, dmalt = _dm('fancy')
self.assertEqual(
datahog.name.search(self.p, 'fancy', 2),
([
{'base_id': 123, 'ctx': 2, 'value': 'fancy',
'flags': set([])},
{'base_id': 124, 'ctx': 2, 'value': 'funk',
'flags': set([])},
{'base_id': 125, 'ctx': 2, 'value': 'phancy',
'flags': set([])},
], {dm: 125}))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags, value
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and base_id > %s
order by base_id
limit %s
""", (2, dm, 0, 100)),
FETCH_ALL,
COMMIT])
def test_search_phonetic_page_2(self):
add_fetch_result([
(126, 0, 'fancy'),
(127, 0, 'funk'),
(128, 0, 'phancy')])
dm, dmalt = _dm('fancy')
self.assertEqual(
datahog.name.search(self.p, 'fancy', 2, start={dm: 125}),
([
{'base_id': 126, 'ctx': 2, 'value': 'fancy',
'flags': set([])},
{'base_id': 127, 'ctx': 2, 'value': 'funk',
'flags': set([])},
{'base_id': 128, 'ctx': 2, 'value': 'phancy',
'flags': set([])},
], {dm: 128}))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags, value
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and base_id > %s
order by base_id
limit %s
""", (2, dm, 125, 100)),
FETCH_ALL,
COMMIT])
def test_search_phonetic_both(self):
# not the greatest results, but they would match
add_fetch_result([(126, 0, 'ant')])
add_fetch_result([(127, 0, 'fntf')])
dm, dmalt = _dm('window')
self.assertEqual(
datahog.name.search(self.p, 'window', 2),
([
{'base_id': 126, 'ctx': 2, 'value': 'ant',
'flags': set([])},
{'base_id': 127, 'ctx': 2, 'value': 'fntf',
'flags': set([])},
], {dm: 126, dmalt: 127}))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags, value
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and base_id > %s
order by base_id
limit %s
""", (2, dm, 0, 100)),
FETCH_ALL,
COMMIT,
GET_CURSOR,
EXECUTE("""
select base_id, flags, value
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and base_id > %s
order by base_id
limit %s
""", (2, dmalt, 0, 100)),
FETCH_ALL,
COMMIT])
def test_list(self):
add_fetch_result([
(0, 'foo', 0),
(0, 'bar', 1),
(0, 'baz', 2)])
self.assertEqual(
datahog.name.list(self.p, 123, 2),
([
{'base_id': 123, 'ctx': 2, 'flags': set([]),
'value': 'foo'},
{'base_id': 123, 'ctx': 2, 'flags': set([]),
'value': 'bar'},
{'base_id': 123, 'ctx': 2, 'flags': set([]),
'value': 'baz'},
], 3))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select flags, value, pos
from name
where
time_removed is null
and base_id=%s
and ctx=%s
and pos >= %s
order by pos asc
limit %s
""", (123, 2, 0, 100)),
FETCH_ALL,
COMMIT])
def test_add_flags_prefix(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(123, 0)])
add_fetch_result([(6,)])
add_fetch_result([(6,)])
self.assertEqual(
datahog.name.set_flags(self.p, 123, 3, 'value', [2, 3], []),
set([2, 3]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags
from prefix_lookup
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
""", (3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=flags | %s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (6, 3, 'value', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update prefix_lookup
set flags=flags | %s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (6, 3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_add_flags_phonetic_one(self):
datahog.set_flag(1, 2)
datahog.set_flag(2, 2)
datahog.set_flag(3, 2)
add_fetch_result([(123, 0)])
add_fetch_result([(6,)])
add_fetch_result([(6,)])
dm, dmalt = _dm('value')
self.assertEqual(
datahog.name.set_flags(self.p, 123, 2, 'value', [2, 3], []),
set([2, 3]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'value', 123)),
ROWCOUNT,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=flags | %s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (6, 2, 'value', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set flags=flags | %s
where time_removed is null and code=%s and ctx=%s and base_id=%s and value=%s
returning flags
""", (6, dm, 2, 123, 'value')),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_add_flags_phonetic_two(self):
datahog.set_flag(1, 2)
datahog.set_flag(2, 2)
datahog.set_flag(3, 2)
add_fetch_result([(123, 0)])
add_fetch_result([(123, 0)])
add_fetch_result([(6,)])
add_fetch_result([(6,)])
add_fetch_result([(6,)])
dm, dmalt = _dm('window')
self.assertEqual(
datahog.name.set_flags(self.p, 123, 2, 'window', [2, 3], []),
set([2, 3]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'window', 123)),
ROWCOUNT,
COMMIT,
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dmalt, 'window', 123)),
ROWCOUNT,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=flags | %s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (6, 2, 'window', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set flags=flags | %s
where time_removed is null and code=%s and ctx=%s and base_id=%s and value=%s
returning flags
""", (6, dm, 2, 123, 'window')),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set flags=flags | %s
where time_removed is null and code=%s and ctx=%s and base_id=%s and value=%s
returning flags
""", (6, dmalt, 2, 123, 'window')),
FETCH_ALL,
COMMIT,
TPC_COMMIT,
TPC_COMMIT])
def test_add_flags_no_name(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(123, 0)])
add_fetch_result([])
self.assertEqual(
datahog.name.set_flags(self.p, 123, 3, 'value', [2, 3], []),
None)
def test_clear_flags_prefix(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(123, 0)])
add_fetch_result([(1,)])
add_fetch_result([(1,)])
self.assertEqual(
datahog.name.set_flags(self.p, 123, 3, 'value', [], [2, 3]),
set([1]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags
from prefix_lookup
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
""", (3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=flags & ~%s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (6, 3, 'value', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update prefix_lookup
set flags=flags & ~%s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (6, 3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_clear_flags_phonetic_one(self):
datahog.set_flag(1, 2)
datahog.set_flag(2, 2)
datahog.set_flag(3, 2)
add_fetch_result([(123, 0)])
add_fetch_result([(1,)])
add_fetch_result([(1,)])
dm, dmalt = _dm('value')
self.assertEqual(
datahog.name.set_flags(self.p, 123, 2, 'value', [], [2, 3]),
set([1]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'value', 123)),
ROWCOUNT,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=flags & ~%s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (6, 2, 'value', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set flags=flags & ~%s
where time_removed is null and code=%s and ctx=%s and base_id=%s and value=%s
returning flags
""", (6, dm, 2, 123, 'value')),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_clear_flags_phonetic_two(self):
datahog.set_flag(1, 2)
datahog.set_flag(2, 2)
datahog.set_flag(3, 2)
add_fetch_result([(123, 0)])
add_fetch_result([(123, 0)])
add_fetch_result([(1,)])
add_fetch_result([(1,)])
add_fetch_result([(1,)])
dm, dmalt = _dm('window')
self.assertEqual(
datahog.name.set_flags(self.p, 123, 2, 'window', [], [2, 3]),
set([1]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'window', 123)),
ROWCOUNT,
COMMIT,
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dmalt, 'window', 123)),
ROWCOUNT,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=flags & ~%s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (6, 2, 'window', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set flags=flags & ~%s
where time_removed is null and code=%s and ctx=%s and base_id=%s and value=%s
returning flags
""", (6, dm, 2, 123, 'window')),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set flags=flags & ~%s
where time_removed is null and code=%s and ctx=%s and base_id=%s and value=%s
returning flags
""", (6, dmalt, 2, 123, 'window')),
FETCH_ALL,
COMMIT,
TPC_COMMIT,
TPC_COMMIT])
def test_clear_flags_no_name(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(123, 0)])
add_fetch_result([])
self.assertEqual(
datahog.name.set_flags(self.p, 123, 3, 'value', [], [2, 3]),
None)
def test_set_flags_add(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(123, 0)])
add_fetch_result([(5,)])
add_fetch_result([(5,)])
self.assertEqual(
datahog.name.set_flags(self.p, 123, 3, 'value', [1, 3], []),
set([1, 3]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags
from prefix_lookup
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
""", (3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=flags | %s
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
returning flags
""", (5, 3, 'value', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update prefix_lookup
set flags=flags | %s
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
returning flags
""", (5, 3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_set_flags_clear(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(123, 0)])
add_fetch_result([(4,)])
add_fetch_result([(4,)])
self.assertEqual(
datahog.name.set_flags(self.p, 123, 3, 'value', [], [1, 2]),
set([3]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags
from prefix_lookup
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
""", (3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=flags & ~%s
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
returning flags
""", (3, 3, 'value', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update prefix_lookup
set flags=flags & ~%s
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
returning flags
""", (3, 3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_set_flags_both(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(123, 0)])
add_fetch_result([(5,)])
add_fetch_result([(5,)])
self.assertEqual(
datahog.name.set_flags(self.p, 123, 3, 'value', [1, 3], [2]),
set([1, 3]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags
from prefix_lookup
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
""", (3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=(flags & ~%s) | %s
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
returning flags
""", (2, 5, 3, 'value', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update prefix_lookup
set flags=(flags & ~%s) | %s
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
returning flags
""", (2, 5, 3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_set_flags_phonetic_both(self):
datahog.set_flag(1, 2)
datahog.set_flag(2, 2)
datahog.set_flag(3, 2)
add_fetch_result([(123, 0)])
add_fetch_result([(123, 0)])
add_fetch_result([(6,)])
add_fetch_result([(6,)])
add_fetch_result([(6,)])
dm, dmalt = _dm('window')
self.assertEqual(
datahog.name.set_flags(self.p, 123, 2, 'window', [2, 3], [1]),
set([2, 3]))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'window', 123)),
ROWCOUNT,
COMMIT,
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dmalt, 'window', 123)),
ROWCOUNT,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update name
set flags=(flags & ~%s) | %s
where time_removed is null and ctx=%s and value=%s and base_id=%s
returning flags
""", (1, 6, 2, 'window', 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set flags=(flags & ~%s) | %s
where time_removed is null and code=%s and ctx=%s and base_id=%s and value=%s
returning flags
""", (1, 6, dm, 2, 123, 'window')),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set flags=(flags & ~%s) | %s
where time_removed is null and code=%s and ctx=%s and base_id=%s and value=%s
returning flags
""", (1, 6, dmalt, 2, 123, 'window')),
FETCH_ALL,
COMMIT,
TPC_COMMIT,
TPC_COMMIT])
def test_shift(self):
add_fetch_result([None])
self.assertEqual(
datahog.name.shift(self.p, 123, 2, 'value', 7),
True)
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
with oldpos as (
select pos
from name
where
time_removed is null
and base_id=%s
and ctx=%s
and value=%s
), bump as (
update name
set pos=pos + (case
when (select pos from oldpos) < pos
then -1
else 1
end)
where
exists (select 1 from oldpos)
and time_removed is null
and base_id=%s
and ctx=%s
and pos between symmetric (select pos from oldpos) and %s
), maxpos(n) as (
select pos
from name
where
time_removed is null
and base_id=%s
and ctx=%s
order by pos desc
limit 1
), move as (
update name
set pos=(case
when %s > (select n from maxpos)
then (select n from maxpos)
else %s
end)
where
time_removed is null
and base_id=%s
and ctx=%s
and value=%s
returning 1
)
select 1 from move
""", (123, 2, 'value', 123, 2, 7, 123, 2, 7, 7, 123, 2, 'value')),
ROWCOUNT,
COMMIT])
def test_shift_failure(self):
add_fetch_result([])
self.assertEqual(
datahog.name.shift(self.p, 123, 2, 'value', 7),
False)
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
with oldpos as (
select pos
from name
where
time_removed is null
and base_id=%s
and ctx=%s
and value=%s
), bump as (
update name
set pos=pos + (case
when (select pos from oldpos) < pos
then -1
else 1
end)
where
exists (select 1 from oldpos)
and time_removed is null
and base_id=%s
and ctx=%s
and pos between symmetric (select pos from oldpos) and %s
), maxpos(n) as (
select pos
from name
where
time_removed is null
and base_id=%s
and ctx=%s
order by pos desc
limit 1
), move as (
update name
set pos=(case
when %s > (select n from maxpos)
then (select n from maxpos)
else %s
end)
where
time_removed is null
and base_id=%s
and ctx=%s
and value=%s
returning 1
)
select 1 from move
""", (123, 2, 'value', 123, 2, 7, 123, 2, 7, 7, 123, 2, 'value')),
ROWCOUNT,
ROLLBACK])
def test_remove_prefix(self):
add_fetch_result([(123, 0)])
add_fetch_result([(1,)])
add_fetch_result([None])
self.assertEqual(
datahog.name.remove(self.p, 123, 3, 'value'),
True)
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags
from prefix_lookup
where
time_removed is null
and ctx=%s
and value=%s
and base_id=%s
""", (3, 'value', 123)),
FETCH_ALL,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
with removal as (
update name
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and value=%s
returning pos
), bump as (
update name
set pos = pos - 1
where
exists (select 1 from removal)
and time_removed is null
and base_id=%s
and ctx=%s
and pos > (select pos from removal)
)
select 1 from removal
""", (123, 3, 'value', 123, 3)),
ROWCOUNT,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update prefix_lookup
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and value=%s
""", (123, 3, 'value')),
ROWCOUNT,
COMMIT,
TPC_COMMIT])
def test_remove_phonetic_one(self):
add_fetch_result([(123, 0)])
add_fetch_result([(1,)])
add_fetch_result([None])
dm, dmalt = _dm('value')
self.assertEqual(
datahog.name.remove(self.p, 123, 2, 'value'),
True)
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'value', 123)),
ROWCOUNT,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
with removal as (
update name
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and value=%s
returning pos
), bump as (
update name
set pos = pos - 1
where
exists (select 1 from removal)
and time_removed is null
and base_id=%s
and ctx=%s
and pos > (select pos from removal)
)
select 1 from removal
""", (123, 2, 'value', 123, 2)),
ROWCOUNT,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set time_removed=now()
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'value', 123)),
ROWCOUNT,
COMMIT,
TPC_COMMIT])
def test_remove_phonetic_two(self):
add_fetch_result([(123, 0)])
add_fetch_result([(123, 0)])
add_fetch_result([(1,)])
add_fetch_result([None])
add_fetch_result([None])
dm, dma = _dm('window')
self.assertEqual(
datahog.name.remove(self.p, 123, 2, 'window'),
True)
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'window', 123)),
ROWCOUNT,
COMMIT,
GET_CURSOR,
EXECUTE("""
select 1
from phonetic_lookup
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dma, 'window', 123)),
ROWCOUNT,
COMMIT,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
with removal as (
update name
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and value=%s
returning pos
), bump as (
update name
set pos = pos - 1
where
exists (select 1 from removal)
and time_removed is null
and base_id=%s
and ctx=%s
and pos > (select pos from removal)
)
select 1 from removal
""", (123, 2, 'window', 123, 2)),
ROWCOUNT,
TPC_PREPARE,
RESET,
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set time_removed=now()
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dm, 'window', 123)),
ROWCOUNT,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update phonetic_lookup
set time_removed=now()
where
time_removed is null
and ctx=%s
and code=%s
and value=%s
and base_id=%s
""", (2, dma, 'window', 123)),
ROWCOUNT,
COMMIT,
TPC_COMMIT,
TPC_COMMIT])
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 4,612,566,825,079,213,000 | 22.812822 | 78 | 0.490995 | false |
rockychen-dpaw/borgcollector | tablemanager/models.py | 1 | 122687 | # coding=utf8
from __future__ import absolute_import, unicode_literals, division
import os
import re
import pytz
import logging
import tempfile
import subprocess
import threading
import shutil
import time
import signal
import sys
import json
import StringIO
import codecs
import traceback
import xml.etree.ElementTree as ET
from functools import wraps
from datetime import datetime, timedelta
from xml.dom import minidom
import requests
from django.db import models, connection,transaction,connections
from django.db.utils import load_backend, DEFAULT_DB_ALIAS
from django.conf import settings
from django.utils import timezone
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.db.models.signals import pre_save, pre_delete,post_save,post_delete
from django.dispatch import receiver
from django.core.exceptions import ValidationError,ObjectDoesNotExist
from django.core.validators import RegexValidator
from django.template import Context, Template
from django.contrib import messages
from django.conf import settings
from django.utils.safestring import SafeText
from django.template.loader import render_to_string
import hglib
from codemirror import CodeMirrorTextarea
from sqlalchemy import create_engine
from borg_utils.gdal import detect_epsg
from borg_utils.spatial_table import SpatialTableMixin
from borg_utils.borg_config import BorgConfiguration
from borg_utils.jobintervals import JobInterval
from borg_utils.resource_status import ResourceStatus,ResourceStatusMixin
from borg_utils.db_util import defaultDbUtil
from borg_utils.hg_batch_push import try_set_push_owner, try_clear_push_owner, increase_committed_changes, try_push_to_repository
from borg_utils.signals import refresh_select_choices
from borg_utils.models import BorgModel,SQLField
from borg_utils.utils import file_md5
logger = logging.getLogger(__name__)
def close_cursor(cursor):
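    """Close a cursor or connection quietly, ignoring any error raised while closing."""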
try:
if not cursor:
return
if hasattr(cursor,"close"): cursor.close()
except:
pass
slug_re = re.compile(r'^[a-z_][a-z0-9_]+$')
validate_slug = RegexValidator(slug_re, "Slug can only start with lowercase letters or underscore, and contain lowercase letters, numbers and underscore", "invalid")
def in_schema(search, db_url=None,input_schema=None,trans_schema=None,normal_schema=None):
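    """
    Decorator factory: create the required schemas if missing, then call the wrapped
    function with a cursor whose search_path is set to `search` (connected via `db_url`
    if given, otherwise the default Django database). The cursor, schema and any extra
    schema names are injected as keyword arguments, and search_path is reset to the
    borg schema afterwards.
    """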
    cursor = None
    try:
if db_url:
cursor = create_engine(db_url).connect()
else:
cursor = connection.cursor()
schema = search.split(",")[0]
schemas = {schema}
if input_schema: schemas.add(input_schema)
if trans_schema: schemas.add(trans_schema)
if normal_schema: schemas.add(normal_schema)
#import ipdb; ipdb.set_trace()
sql = ";".join(["CREATE SCHEMA IF NOT EXISTS \"{}\"".format(s) for s in schemas])
cursor.execute(sql)
finally:
close_cursor(cursor)
sql = None
schemas = None
cursor = None
def schema_decorator(func):
@wraps(func)
def func_wrapper(*args, **kwargs):
try:
cursor = None
if db_url:
cursor = create_engine(db_url).connect()
else:
db = connections.databases[DEFAULT_DB_ALIAS]
backend = load_backend(db['ENGINE'])
conn = backend.DatabaseWrapper(db, DEFAULT_DB_ALIAS)
cursor = conn.cursor()
cursor.execute(("SET search_path TO {};").format(search))
kwargs["cursor"] = cursor
kwargs["schema"] = schema
if input_schema: kwargs["input_schema"] = input_schema
if trans_schema: kwargs["trans_schema"] = trans_schema
if normal_schema: kwargs["normal_schema"] = normal_schema
result = func(*args, **kwargs)
finally:
if cursor:
cursor.execute("SET search_path TO {0};".format(BorgConfiguration.BORG_SCHEMA))
close_cursor(cursor)
cursor = None
return result
return func_wrapper
return schema_decorator
def switch_searchpath(cursor_pos=1,searchpath="{2}," + BorgConfiguration.BORG_SCHEMA):
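    """
    Decorator factory that temporarily switches the cursor's search_path.
    The cursor is taken from positional argument `cursor_pos`; `searchpath` is a format
    template rendered with the call's arguments (the default "{2},<borg schema>" resolves
    to the decorated method's `schema` argument, e.g. DataSource.create(self, cursor,
    schema, name) below). The previous search_path is restored when the call returns.
    """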
def switch_searchpath_decorator(func):
@wraps(func)
def func_wrapper(*args,**kwargs):
previous_searchpath = None
cursor = args[cursor_pos]
searchpath_switched = False
new_searchpath = searchpath.format(*args,**kwargs)
try:
#import ipdb; ipdb.set_trace()
#get the current search path
sql_result = cursor.execute("show search_path;")
row = None
if sql_result:
row = sql_result.fetchone()
else:
row = cursor.fetchone()
previous_searchpath = row[0]
if previous_searchpath != new_searchpath:
searchpath_switched = True
cursor.execute("SET search_path TO {0}".format(new_searchpath))
result = func(*args, **kwargs)
finally:
#reset to the original search path
if searchpath_switched:
cursor.execute("SET search_path TO {0};".format(previous_searchpath))
return result
return func_wrapper
return switch_searchpath_decorator
class XMLField(models.TextField):
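    """TextField rendered in forms with a CodeMirror XML editor."""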
def formfield(self, **kwargs):
field = super(XMLField, self).formfield(**kwargs)
field.widget = CodeMirrorTextarea(mode="xml", theme="mdn-like",config={"lineWrapping":True})
return field
class DatasourceWidget(CodeMirrorTextarea):
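    """CodeMirror textarea that also renders an 'Insert Fields' submit button."""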
def render(self,name,value,attrs=None):
html = super(DatasourceWidget,self).render(name,value,attrs)
html = SafeText('<input type="submit" name="_insert_fields" value="Insert Fields" onclick="this.value=\'processing\';">' + str(html))
return html
class DatasourceField(models.TextField):
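    """TextField rendered in forms with a DatasourceWidget (CodeMirror editor plus 'Insert Fields' button)."""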
def formfield(self, **kwargs):
field = super(DatasourceField, self).formfield(**kwargs)
field.widget = DatasourceWidget(mode="xml", theme="mdn-like",js_var_format="editor_%s")
return field
class JobFields(BorgModel):
"""
Abstract model to group job related fields
"""
job_batch_id = models.CharField(max_length=64,null=True,editable=False)
job_id = models.IntegerField(null=True,editable=False,db_index=True)
job_state = models.CharField(max_length=64,null=True, editable=False)
job_status = models.NullBooleanField(null=True, editable=False)
job_message = models.TextField(null=True, editable=False)
job_run_time = models.DateTimeField(editable=False,null=True)
class Meta:
abstract = True
class DatasourceType(object):
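    """Enumeration of the supported data source types."""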
FILE_SYSTEM = "FileSystem"
DATABASE = "Database"
MUDMAP = "Mudmap"
options = (
(FILE_SYSTEM,FILE_SYSTEM),
(DATABASE,DATABASE),
(MUDMAP,MUDMAP)
)
@python_2_unicode_compatible
class DataSource(BorgModel):
"""
    Represents a data source which an input belongs to
"""
name = models.SlugField(max_length=255, unique=True, help_text="The name of data source", validators=[validate_slug])
type = models.CharField(max_length=32, choices=DatasourceType.options,default="FileSystem", help_text="The type of data source")
description = models.CharField(max_length=255, null=True,blank=True)
user = models.CharField(max_length=320,null=True,blank=True)
password = models.CharField(max_length=320,null=True,blank=True)
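    # Django template rendered with {"self": <this datasource>}; for Database sources it is
    # expected to produce the CREATE SERVER statement for a server named after this record
    # (e.g. using postgres_fdw; the exact foreign data wrapper is an assumption and is not enforced here).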
sql = SQLField(null=True,blank=True)
vrt = XMLField(help_text="GDAL VRT template in xml", default="")
last_modify_time = models.DateTimeField(auto_now=False,auto_now_add=True,editable=False,null=False)
def drop(self,cursor,schema,name):
"""
        drop the foreign server (identified by name) if it exists, cascading to dependent objects
"""
if self.type != DatasourceType.DATABASE:
return
cursor.execute("DROP SERVER IF EXISTS {0} CASCADE;".format(name))
@switch_searchpath()
def create(self,cursor,schema,name):
"""
create the foreign server in specified schema
"""
if self.type != DatasourceType.DATABASE:
return
if self.name == name:
#not in validation mode
context = Context({"self": self})
connect_sql = Template(self.sql).render(context)
else:
#in validation mode, use the testing name replace the regular name
origname = self.name
self.name = name
context = Context({"self": self})
connect_sql = Template(self.sql).render(context)
#reset the name from testing name to regular name
self.name = origname
cursor.execute(connect_sql)
cursor.execute("CREATE USER MAPPING FOR {} SERVER {} OPTIONS (user '{}', password '{}');".format(cursor.engine.url.username, name, self.user, self.password))
@in_schema(BorgConfiguration.TEST_SCHEMA, db_url=settings.FDW_URL)
def clean(self, cursor,schema):
if self.type == DatasourceType.DATABASE :
if not self.user:
raise ValidationError("User can't be empty.")
if not self.password:
raise ValidationError("Password can't be empty.")
if not self.sql:
raise ValidationError("Sql can't be empty.")
#check whether sql is ascii string
try:
self.sql = codecs.encode(self.sql,'ascii')
except :
raise ValidationError("Sql contains non ascii character.")
name = "test_" + self.name
try:
self.drop(cursor,schema,name)
self.create(cursor,schema,name)
#after validation, clear testing server and testing foreign table
self.drop(cursor,schema,name)
except ValidationError as e:
raise e
except Exception as e:
raise ValidationError(e)
self.last_modify_time = timezone.now()
@in_schema("public", db_url=settings.FDW_URL)
def execute(self, cursor,schema):
"""
        drop and recreate the foreign server in the FDW database
"""
self.drop(cursor,schema,self.name)
self.create(cursor,schema,self.name)
def delete(self,using=None):
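        """Delete the data source and, when this call owns the 'datasource' push batch, try to push the change to the repository."""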
logger.info('Delete {0}:{1}'.format(type(self),self.name))
if try_set_push_owner("datasource"):
try:
with transaction.atomic():
super(DataSource,self).delete(using)
try_push_to_repository('datasource')
finally:
try_clear_push_owner("datasource")
else:
super(DataSource,self).delete(using)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.data_changed: return
with transaction.atomic():
super(DataSource,self).save(force_insert,force_update,using,update_fields)
def __str__(self):
return self.name or ""
class Meta:
ordering = ['name']
class DataSourceEventListener(object):
"""
Event listener for DataSource.
    The event listeners are encapsulated in a class to resolve the issue "Exception TypeError: "'NoneType' object is not callable" in <function <lambda> at 0x7f45abef8aa0> ignored"
"""
@staticmethod
@receiver(pre_save, sender=DataSource)
def _pre_save(sender, instance,**kwargs):
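        """Flag brand-new records and, in editing mode, (re)create the foreign server on the FDW database before saving."""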
if not instance.pk:
instance.new_object = True
if not instance.editing_mode:
return
instance.execute()
@staticmethod
@receiver(post_save, sender=DataSource)
def _post_save(sender, instance, **args):
if (hasattr(instance,"new_object") and getattr(instance,"new_object")):
delattr(instance,"new_object")
refresh_select_choices.send(instance,choice_family="datasource")
@staticmethod
@receiver(pre_delete, sender=DataSource)
def _pre_delete(sender, instance, **args):
        # drop server and foreign tables.
        # testing table and server have been dropped immediately after validation.
cursor=None
try:
cursor=create_engine(settings.FDW_URL).connect()
instance.drop(cursor, "public",instance.name)
finally:
close_cursor(cursor)
@staticmethod
@receiver(post_delete, sender=DataSource)
def _post_delete(sender, instance, **args):
refresh_select_choices.send(instance,choice_family="datasource")
@python_2_unicode_compatible
class ForeignTable(BorgModel):
"""
Represents a table to be harvested via a foreign data wrapper. Data will be
proxied by adding a server and foreign table record to the Postgres
database located at FDW_URL.
    The server has the same name as the foreign table.
    In the validation phase, the name ("test_" + name) is used for testing.
"""
name = models.SlugField(max_length=255, unique=True, help_text="The name of foreign table", validators=[validate_slug])
server = models.ForeignKey(DataSource,limit_choices_to={"type":DatasourceType.DATABASE})
sql = SQLField(default="CREATE FOREIGN TABLE \"{{schema}}\".\"{{self.name}}\" (<columns>) SERVER {{self.server.name}} OPTIONS (schema '<schema>', table '<table>');")
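    # Illustrative rendering of the default template above for a hypothetical
    # foreign table "roads" on a server named "gisdb":
    #   CREATE FOREIGN TABLE "test_schema"."roads" (<columns>) SERVER gisdb
    #       OPTIONS (schema '<schema>', table '<table>');
    # "test_schema", "roads" and "gisdb" are example names; the <columns>,
    # <schema> and <table> placeholders are left for the admin to fill in.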
table_md5_support = models.BooleanField(null=False, default=True, help_text="If true, table md5 is used to check whether the data source is up to date.")
last_modify_time = models.DateTimeField(auto_now=False,auto_now_add=True,editable=False,null=False)
ROW_COUNT_SQL = "SELECT COUNT(*) FROM \"{0}\".\"{1}\";"
TABLE_MD5_SQL = "SELECT md5(string_agg(md5(CAST(t.* as text)),',')) FROM (SELECT * from \"{0}\".\"{1}\") as t;"
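    # Change-detection note: ROW_COUNT_SQL and TABLE_MD5_SQL are run against the
    # foreign table (see table_row_count()/table_md5() below). Input.is_up_to_date
    # compares their results with the values recorded at the last import, so a
    # changed row count or table fingerprint marks the input as out of date.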
def drop(self,cursor,schema,name):
"""
        drop the foreign table from the specified schema
"""
cursor.execute("DROP FOREIGN TABLE IF EXISTS \"{0}\".\"{1}\" CASCADE;".format(schema,name))
@switch_searchpath()
def create(self,cursor,schema,name):
"""
        create the foreign table in the specified schema
"""
if self.name == name:
#not in validation mode
context = Context({"schema":schema,"self": self})
create_sql = Template(self.sql).render(context)
else:
            #in validation mode, use the testing name to replace the regular name
origname = self.name
self.name = name
context = Context({"schema":schema,"self": self})
create_sql = Template(self.sql).render(context)
#reset the name from testing name to regular name
self.name = origname
cursor.execute(create_sql)
cursor.execute("SELECT COUNT(*) FROM \"{}\";".format(name))
@in_schema(BorgConfiguration.TEST_SCHEMA, db_url=settings.FDW_URL)
def clean(self, cursor,schema):
#generate the testing name
if not self.sql:
raise ValidationError("Sql can't be empty.")
#check whether sql is ascii string
try:
self.sql = codecs.encode(self.sql,'ascii')
except :
raise ValidationError("Sql contains non ascii character.")
name = "test_" + self.name
try:
self.drop(cursor,schema,name)
self.create(cursor,schema,name)
#after validation, clear testing server and testing foreign table
self.drop(cursor,schema,name)
except ValidationError as e:
raise e
except Exception as e:
raise ValidationError(e)
self.last_modify_time = timezone.now()
@in_schema("public", db_url=settings.FDW_URL)
def execute(self, cursor,schema):
"""
        Bind the foreign table to the FDW database. Invoked from the pre-save hook for ForeignTable.
"""
self.drop(cursor,schema,self.name)
self.create(cursor,schema,self.name)
@in_schema("public", db_url=settings.FDW_URL)
def table_row_count(self,cursor,schema):
sql_result = cursor.execute(self.ROW_COUNT_SQL.format(schema,self.name))
if sql_result:
return sql_result.fetchone()[0]
else:
return cursor.fetchone()[0]
@in_schema("public", db_url=settings.FDW_URL)
def table_md5(self,cursor,schema):
sql_result = cursor.execute(self.TABLE_MD5_SQL.format(schema,self.name))
if sql_result:
return sql_result.fetchone()[0]
else:
return cursor.fetchone()[0]
def delete(self,using=None):
logger.info('Delete {0}:{1}'.format(type(self),self.name))
if try_set_push_owner("foreign_table"):
try:
with transaction.atomic():
super(ForeignTable,self).delete(using)
try_push_to_repository('foreign_table')
finally:
try_clear_push_owner("foreign_table")
else:
super(ForeignTable,self).delete(using)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.data_changed: return
with transaction.atomic():
super(ForeignTable,self).save(force_insert,force_update,using,update_fields)
def __str__(self):
return self.name
class Meta:
ordering = ['name']
class ForeignTableEventListener(object):
"""
Event listener for foreign table.
    Encapsulating the event listener in a class resolves the issue "Exception TypeError: "'NoneType' object is not callable" in <function <lambda> at 0x7f45abef8aa0> ignored"
"""
@staticmethod
@receiver(pre_save, sender=ForeignTable)
def _pre_save(sender, instance,**kwargs):
"""
Bind a foreign table to the FDW database. Pre-save hook for ForeignTable.
"""
if not instance.pk:
instance.new_object = True
if not instance.editing_mode:
return
try:
instance.execute()
except Exception as e:
raise ValidationError(e)
@staticmethod
@receiver(post_save, sender=ForeignTable)
def _post_save(sender, instance, **args):
if (hasattr(instance,"new_object") and getattr(instance,"new_object")):
delattr(instance,"new_object")
refresh_select_choices.send(instance,choice_family="foreigntable")
@staticmethod
@receiver(pre_delete, sender=ForeignTable)
def _pre_delete(sender, instance, **args):
        # drop server and foreign tables.
        # testing table and server have been dropped immediately after validation.
cursor = None
try:
cursor=create_engine(settings.FDW_URL).connect()
instance.drop(cursor, "public",instance.name)
finally:
close_cursor(cursor)
@staticmethod
@receiver(post_delete, sender=ForeignTable)
def _post_delete(sender, instance, **args):
refresh_select_choices.send(instance,choice_family="foreigntable")
class Input(JobFields,SpatialTableMixin):
"""
Represents an input table in the harvest DB. Also contains source info
(as a GDAL VRT definition) so it can be loaded using the OGR toolset.
"""
name = models.SlugField(max_length=255, unique=True, help_text="Name of table in harvest DB", validators=[validate_slug])
data_source = models.ForeignKey(DataSource,limit_choices_to={"type__in":[DatasourceType.FILE_SYSTEM,DatasourceType.DATABASE]})
foreign_table = models.ForeignKey(ForeignTable, null=True, blank=True, help_text="Foreign table to update VRT from")
generate_rowid = models.BooleanField(null=False, default=False, help_text="If true, a _rowid column will be added and filled with row data's hash value")
source = DatasourceField(help_text="GDAL VRT definition in xml", unique=True)
advanced_options = models.CharField(max_length=128, null=True, editable=False,blank=True,help_text="Advanced ogr2ogr options")
info = models.TextField(editable=False)
spatial_info = models.TextField(max_length=512,editable=False,null=True,blank=True)
create_table_sql = models.TextField(null=True, editable=False)
importing_info = models.TextField(max_length=255, null=True, editable=False)
last_modify_time = models.DateTimeField(auto_now=False,auto_now_add=True,editable=False,null=False)
ds_modify_time = models.DateTimeField(editable=False,null=True)
ABSTRACT_TEMPLATE = """{% if info_dict.abstract %}{{ info_dict.abstract }}
{% endif %}{% if info_dict.mdDateSt %}Date: {{ info_dict.mdDateSt }}
{% endif %}{% if info_dict.lineage %}Lineage: {{ info_dict.lineage }}
{% endif %}{% if info_dict.complete %}Completeness: {{ info_dict.complete }}
{% endif %}{% if info_dict.posacc %}Positional accuracy: {{ info_dict.posacc }}
{% endif %}{% if info_dict.attracc %}Attribute accuracy: {{ info_dict.attracc }}
{% endif %}"""
_field_re = re.compile("[ \t]*(?P<type>[a-zA-Z0-9]+)[ \t]*(\([ \t]*(?P<width>[0-9]+)\.(?P<precision>[0-9]+)\))?[ \t]*")
_datasource_info_re = re.compile("[(\n)|(\r\n)](?P<key>[a-zA-Z0-9_\-][a-zA-Z0-9_\- ]*[a-zA-Z0-9_\-]?)[ \t]*:(?P<value>[^\r\n]*([(\r\n)|(\n)](([ \t]+[^\r\n]*)|(GEOGCS[^\r\n]*)))*)")
DB_TEMPLATE_CONTEXT = {'NAME':'{{db.NAME}}','HOST':'{{db.HOST}}',"PORT":'{{db.PORT}}','USER':'{{db.USER}}','PASSWORD':'{{db.PASSWORD}}'}
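    # Two-stage templating note: clean() first renders the VRT source with
    # DB_TEMPLATE_CONTEXT, which simply re-emits the {{db.*}} placeholders;
    # the real FDW connection settings are only substituted when the vrt
    # property renders the stored source with settings.FDW_URL_SETTINGS, so
    # credentials are not baked into the stored source.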
@property
def rowid_column(self):
return BorgConfiguration.ROWID_COLUMN
@property
def table_name(self):
return self.name
@property
def table_schema(self):
return BorgConfiguration.TEST_INPUT_SCHEMA
@property
def db_util(self):
return defaultDbUtil
_datasource = None
_datasource_re = re.compile("<SrcDataSource>(?P<data_source>.*)</SrcDataSource>")
@property
def datasource(self):
"""
        The data source.
        If the data source is a file, the value is the file path.
        If the data source is a foreign table, the value is the connection parameters.
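        Example (hypothetical path): for a shapefile-backed VRT containing
        <SrcDataSource>/data/roads/roads.shp</SrcDataSource> this returns
        ['/data/roads/roads.shp'], i.e. a list of every SrcDataSource value found.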
"""
if self.source:
if not self._datasource:
self._datasource = self._datasource_re.findall(self.source)
else:
self._datasource = None
return self._datasource
def style_file(self,style_format="sld"):
"""
        Return the style file.
        If the data source is not a shape file, or the style file does not exist, return None.
"""
#import ipdb;ipdb.set_trace()
if not hasattr(self,"_style_file"):
self._style_file = {"sld":'N/A',"qml":"N/A","lyr":"N/A"}
if self._style_file[style_format] == 'N/A':
if self.datasource:
#datasource has valid value
if self.datasource[0].lower().endswith(".shp"):
#datasource is a shape file
f = "{}.{}".format(self.datasource[0][:-4],style_format)
if os.path.exists(f):
#sld file exists
self._style_file[style_format] = f
else:
#sld file not exists
self._style_file[style_format] = None
if self._style_file[style_format] != "N/A":
return self._style_file[style_format]
else:
return None
@property
def vrt(self):
"""
A temporary vrt format file which contains the data source information.
"""
if hasattr(self, "_vrt"): return self._vrt
self._vrt = tempfile.NamedTemporaryFile()
self._vrt.write(Template(self.source).render(Context({"self": self,"db":settings.FDW_URL_SETTINGS})))
self._vrt.flush()
return self._vrt
@property
def info_dict(self):
"""
        A dictionary containing the key information about a data source.
"""
now = datetime.now()
if hasattr(self,"_info_dict"):
if (now - self._info_dict.get("_last_update", now)) < timedelta(hours=1):
return self._info_dict
#import ipdb;ipdb.set_trace()
search , info = "(Layer name: .*)\n+(Geometry: .*)\n+(Feature Count: .*)\n+(Extent: .*)\n+", self.info
if not info.find("Extent: ") > -1:
info = info.replace("\nLayer SRS", "\nExtent: Non Spatial\nLayer SRS")
data = re.findall(search, info, re.M)
if data and len(data) >= 1:
self._info_dict = dict([(r.split(": ")[0].replace(" ", "_").lower(), r.split(": ")[1])
for r in data[0]])
else:
self._info_dict = {"geometry": "Unknown", "feature_count": "Unknown", "extent": "Unknown"}
self._info_dict["_last_update"] = now
return self._info_dict
@property
def layer(self):
"""
the layer name of the data source.
"""
try:
return self.info_dict["layer_name"]
except:
return self.get_layer_name()
@property
def kmi_info_dict(self):
if not self.datasource:
return
info = self.info_dict
if info.get("kmi_info_populated",False):
return info
if os.path.isfile(self.datasource[0] + ".xml"):
xml_data = ET.parse(self.datasource[0] + ".xml")
def tag_to_dict(tag):
for i in [x.text for x in xml_data.iter(tag) if x.text]:
info[tag] = ' '.join(i.split())
return
tag_to_dict("abstract")
tag_to_dict("title")
tag_to_dict("lineage")
tag_to_dict("posacc")
tag_to_dict("attracc")
tag_to_dict("complete")
tag_to_dict("mdDateSt")
info["kmi_abstract"] = Template(self.ABSTRACT_TEMPLATE).render(Context({"info_dict": info}))
info["kmi_info_populated"] = True
return info
@property
def kmi_abstract(self):
return self.kmi_info_dict.get("kmi_abstract","")
@property
def abstract(self):
return self.kmi_info_dict.get("abstract","")
@property
def title(self):
return self.kmi_info_dict.get("title", "")
@property
def geometry(self): return self.info_dict["geometry"]
@property
def count(self): return self.info_dict["feature_count"]
@property
def extent(self): return self.info_dict["extent"]
@property
def importing_dict(self):
if not hasattr(self,"_importing_dict"):
if self.importing_info:
self._importing_dict = json.loads(self.importing_info)
else:
self._importing_dict = {}
return self._importing_dict
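    # Keys used in importing_dict elsewhere in this model (see is_up_to_date and
    # _post_execute): "row_count", "table_md5", "check_job_id" and
    # "check_batch_id"; the dictionary is persisted as JSON in importing_info.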
def is_up_to_date(self,job=None,enforce=False):
"""
        Returns True if up to date, False if not, and None if it cannot be determined.
"""
#import ipdb;ipdb.set_trace()
from harvest.harveststates import Importing
if (self.job_status or self.job_state != Importing.instance().name) and self.job_id and self.job_batch_id and self.datasource and self.job_run_time:
if not enforce and job and job.batch_id:
if job.batch_id == self.job_batch_id:
#last importing job has the same batch_id as current job
return True
elif self.importing_dict and self.importing_dict.get("check_batch_id") == job.batch_id:
#last checking job has the same batch_id as current job
return True
try:
if self.job_run_time <= self.last_modify_time:
return False
if self.foreign_table:
if not job:
return None
elif job.job_type == JobInterval.Triggered.name:
return False
elif job.batch_id:
if "row_count" in self.importing_dict:
if self.foreign_table.table_row_count() != self.importing_dict["row_count"]:
                                #the foreign table's row count differs from the row count recorded at the last import
self.importing_info = None
self.save(update_fields=['importing_info'])
return False
if self.foreign_table.table_md5_support and "table_md5" in self.importing_dict :
if self.foreign_table.table_md5() == self.importing_dict["table_md5"]:
self.importing_dict["check_job_id"] = job.id
self.importing_dict["check_batch_id"] = job.batch_id
self.importing_info = json.dumps(self.importing_dict)
self.save(update_fields=['importing_info'])
return True
else:
self.importing_info = None
self.save(update_fields=['importing_info'])
return False
else:
return False
else:
return False
else:
mod_time = None
result = True
if job and job.batch_id:
#check for harvest, should always check.
for ds in self.datasource:
if os.path.exists(ds):
#data source is a file
if self.job_run_time <= datetime.utcfromtimestamp(os.path.getmtime(ds)).replace(tzinfo=pytz.UTC):
return False
else:
result = None
else:
#check for web app. check against "ds_modify_time" which is harvested by harvest job.
if self.ds_modify_time:
result = self.job_run_time > self.ds_modify_time
else:
result = None
return result
except:
return False
return False
def _populate_rowid(self,cursor,schema):
"""
        generate the rowid for the input table.
        if the input table is not required to generate a rowid, return directly;
        otherwise, do the following things:
        1. add a rowid column and set it as a unique key
        2. construct the sql to update the rowid
        3. execute the sql
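
        Illustrative example (table and column names are hypothetical): for a
        table "public_roads" with columns (id, name), the generated update is
        roughly
            UPDATE "input_schema"."public_roads" SET _rowid = md5(CAST((id,name) AS text))
        followed by a UNIQUE constraint on the _rowid column.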
"""
if not self.generate_rowid:
return
#check whether rowid column exists or not
sql = "SELECT count(1) FROM pg_attribute a JOIN pg_class b ON a.attrelid = b.oid JOIN pg_namespace c ON b.relnamespace = c.oid WHERE a.attname='{2}' AND b.relname='{1}' AND c.nspname='{0}' ".format(schema,self.name,self.rowid_column)
sql_result = cursor.execute(sql)
column_exists = None
if sql_result:
column_exists = (sql_result.fetchone())[0]
else:
column_exists = (cursor.fetchone())[0]
#add rowid column if required
if not column_exists:
#add column
sql = "ALTER TABLE {0}.{1} ADD COLUMN {2} text".format(schema,self.name,self.rowid_column)
cursor.execute(sql)
#construct the update sql
sql = "SELECT a.attname FROM pg_attribute a JOIN pg_class b ON a.attrelid = b.oid JOIN pg_namespace c ON b.relnamespace = c.oid WHERE a.attnum > 0 AND a.attname != '{2}' AND b.relname='{1}' AND c.nspname='{0}' ".format(schema,self.name,self.rowid_column)
sql_result = cursor.execute(sql)
input_table_columns = None
if sql_result:
input_table_columns = ",".join([x[0] for x in sql_result.fetchall()])
else:
input_table_columns = ",".join([x[0] for x in cursor.fetchall()])
sql = "UPDATE \"{0}\".\"{1}\" set {2} = md5(CAST(({3}) AS text))".format(schema,self.name,self.rowid_column,input_table_columns)
cursor.execute(sql)
#set the rowid as the unique key
#first check whether the unique key exists or not
constraint_name = "{0}_index_{1}".format(self.name,self.rowid_column)
sql = "SELECT count(1) FROM pg_constraint a JOIN pg_class b ON a.conrelid = b.oid JOIN pg_namespace c ON b.relnamespace = c.oid WHERE a.conname='{2}' AND b.relname='{1}' AND c.nspname='{0}' ".format(schema,self.name,constraint_name)
sql_result = cursor.execute(sql)
constraint_exists = None
if sql_result:
constraint_exists = (sql_result.fetchone())[0]
else:
constraint_exists = (cursor.fetchone())[0]
if not constraint_exists:
#unique key does not exist
sql = "ALTER TABLE \"{0}\".\"{1}\" ADD CONSTRAINT {3} UNIQUE ({2})".format(schema,self.name,self.rowid_column,constraint_name)
cursor.execute(sql)
@in_schema(BorgConfiguration.INPUT_SCHEMA)
def populate_rowid(self,cursor,schema):
self._populate_rowid(cursor,schema)
@in_schema(BorgConfiguration.TEST_INPUT_SCHEMA + "," + BorgConfiguration.BORG_SCHEMA)
def clean(self,cursor,schema):
if self.foreign_table:
self.source = re.sub('(<OGRVRTLayer name=")[^"]+(">)', r'\1{}\2'.format(self.foreign_table.name), self.source)
self.source = re.sub('(<SrcDataSource>)[^<]+(</SrcDataSource>)', r"\1PG:dbname='{{db.NAME}}' host='{{db.HOST}}' user='{{db.USER}}' password='{{db.PASSWORD}}' port='{{db.PORT}}'\2", self.source)
self.source = Template(self.source).render(Context({"self": self,"db":Input.DB_TEMPLATE_CONTEXT}))
self.advanced_options = self.advanced_options or None
self.last_modify_time = timezone.now()
try:
logger.debug("Begin to get dataset spatial information")
self._set_info()
logger.debug("End to get dataset spatial information and begin to harvest data")
            #automatically add a "<GeometryType>wkbNone</GeometryType>" element if the data set is not a spatial data set
if self.source.find("GeometryType") == -1 and self.source.find("GeometryField") == -1 and self.source.find("LayerSRS") == -1:
#data source does not contain any spatial related properties.
if self.extent.lower().find("non spatial") >= 0:
                    #data source is not a spatial data set; try to insert an element <GeometryType>wkbNone</GeometryType>
self.source = self.source.replace("</SrcDataSource>","</SrcDataSource>\n <GeometryType>wkbNone</GeometryType>")
if hasattr(self, "_vrt"): delattr(self,"_vrt")
self._set_info()
self.invoke(cursor,schema)
logger.debug("End to harvest data and begin to generate row id")
self._populate_rowid(cursor,schema)
logger.debug("End to generate row id and begin to get table structure")
self.create_table_sql = self.get_create_table_sql()
logger.debug("End to get table structure and begin to get table's spatial info")
#import ipdb;ipdb.set_trace()
#check the table is spatial or non spatial
self.spatial_info = self.refresh_spatial_info(schema).get_spatial_info()
            logger.debug("Finished validating the data of table({})".format(self.table_name))
except ValidationError as e:
raise e
except Exception as e:
raise ValidationError(e)
def get_layer_name(self):
"""
return the data source's layer name
"""
if hasattr(self, "_layer_name"): return self._layer_name
output = subprocess.check_output(["ogrinfo", "-q", "-ro","-so","-al", self.vrt.name], stderr=subprocess.STDOUT)
if output.find("ERROR") > -1:
raise Exception(output)
else:
m = self._layer_name_re.search(output)
if m:
self._layer_name = m.group("layerName")
else:
raise Exception("Failed to find layer name")
return self._layer_name
def insert_fields(self):
origin_source = self.source
try:
root = None
#parse string to xml object
try:
root = ET.fromstring(self.source)
except:
raise ValidationError("Source is invalid xml.")
layer = list(root)[0]
#find the first non OGRVRTWarpedLayer layer
while layer.tag == "OGRVRTWarpedLayer":
layer = layer.find("OGRVRTLayer") or layer.find("OGRVRTUnionLayer") or layer.find("OGRVRTWarpedLayer")
union_layer = None
if layer.tag == "OGRVRTUnionLayer":
                #currently only supports unioning similar layers that have the same table structure; all fields will be configured in the first layer
union_layer = layer
layer = list(union_layer)[0]
while layer.tag == "OGRVRTWarpedLayer":
layer = layer.find("OGRVRTLayer") or layer.find("OGRVRTUnionLayer") or layer.find("OGRVRTWarpedLayer")
                #union layers that include another union layer are currently not supported
if layer.tag == "OGRVRTUnionLayer":
                    raise ValidationError("A union layer that includes another union layer is not supported.")
field_childs = layer.findall("Field") or []
#remove fields first
for f in field_childs:
layer.remove(f)
if union_layer is not None:
#remove all fields from union layer
for f in union_layer.findall("Field") or []:
union_layer.remove(f)
#remove all fields from included layers
for l in list(union_layer):
while l.tag == "OGRVRTWarpedLayer":
l = layer.find("OGRVRTLayer") or layer.find("OGRVRTUnionLayer") or layer.find("OGRVRTWarpedLayer")
                    #union layers that include another union layer are currently not supported
if l.tag == "OGRVRTUnionLayer":
                        raise ValidationError("A union layer that includes another union layer is not supported.")
for f in l.findall("Field") or []:
l.remove(f)
                #remove the field strategy from the union layer
field_strategy = union_layer.find("FieldStrategy")
if field_strategy is not None:
union_layer.remove(field_strategy)
#add first layer strategy into union layer
field_strategy = ET.Element("FieldStrategy")
setattr(field_strategy,"text","FirstLayer")
union_layer.append(field_strategy)
#assign the new source string to self.source to get datasource information
self.source = ET.tostring(root,"UTF-8")
#get datasource information based on new source string
self._set_info()
fields = []
            #use a regex to parse the output into datasource items
datasource_items = self._datasource_info_re.findall(self.info)
#get all fields from datasource items
info_items = [(item[0],item[1]) for item in datasource_items]
for k,v in info_items:
if k in ("INFO","Layer name","Geometry","Metadata","Feature Count","Extent","Layer SRS WKT"):
continue
if k.find(" ") >= 0:
                    #the key contains a space, so it can't be a column
continue
m = self._field_re.search(v)
if m:
#convert the column name to lower case
fields.append((k.lower(),m.group('type'),m.group('width'),m.group('precision')))
#convert the column name into lower case,
for f in field_childs:
f.set('name',f.get('name').lower())
field_child_dict = dict(zip([f.get('name') for f in field_childs],field_childs))
            #re-add all the fields: keep the customized fields and add the missing fields
element_attrs = {}
for f in fields:
if f[0] in field_child_dict:
#customized field, keep it
layer.append(field_child_dict[f[0]])
else:
#missing field, populate it
element_attrs['name'] = f[0]
element_attrs['type'] = f[1]
if f[2] and f[2] != "0":
element_attrs['width'] = f[2]
elif 'width' in element_attrs:
del element_attrs['width']
if f[3] and f[3] != "0":
element_attrs['precision'] = f[3]
elif 'precision' in element_attrs:
del element_attrs['precision']
layer.append(ET.Element("Field",attrib=element_attrs))
#convert xml to pretty string
self.source = ET.tostring(root,"UTF-8")
root = minidom.parseString(self.source)
self.source = root.toprettyxml(indent=" ")
self.source = "\n".join([line for line in self.source.splitlines() if line.strip()])
except Exception as e:
self.source = origin_source
logger.error(traceback.format_exc())
raise ValidationError(e)
_layer_name_re = re.compile('Layer name: (?P<layerName>[^\n]+)')
def _set_info(self,database=None,table=None):
"""
set the data source's information dictionary
if database is not None, read the information from table;
if database is None, read the information from data source;
"""
if database and table:
cmd = ["ogrinfo", "-ro", "-so", database, table]
else:
cmd = ["ogrinfo", "-ro","-al","-so", self.vrt.name]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,stderr=subprocess.PIPE)
output = p.communicate()
if p.returncode != 0:
error_msg = output[1].replace("ERROR 1: Invalid geometry field index : -1","")
if error_msg.strip():
raise Exception(error_msg)
else:
                raise Exception("ogrinfo failed with unknown exception")
try:
delattr(self,"_info_dict")
except:
pass
self.info = output[0]
if database and table:
#replace the layername with datasource's layer name
self.info = Input._layer_name_re.sub("Layer name: {0}\n".format(self.get_layer_name()),output[0],count=1)
else:
self.info = output[0]
def invoke(self ,cursor,schema,job_id=None):
"""
Use ogr2ogr to copy the VRT source defined in Input into the harvest DB.
        Can be invoked by a harvest job or a user maintenance action.
        Returns a tuple (success, error_output); success is False when the import process is cancelled or terminated.
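
        Sketch of the command assembled below (database and file names are hypothetical):
            ogr2ogr -overwrite -gt 20000 -preserve_fid -skipfailures \
                --config PG_USE_COPY YES -f PostgreSQL \
                "PG:dbname='borg' host='localhost' port='5432' user='u' password='p'" \
                /tmp/xxxxx.vrt -nln <schema>.<name> -nlt PROMOTE_TO_MULTI <layer>
        plus any advanced_options and an optional '-a_srs <epsg>' when an EPSG code is detected.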
"""
validation = not job_id
# Make sure DB is GIS enabled and then load using ogr2ogr
database = "PG:dbname='{NAME}' host='{HOST}' port='{PORT}' user='{USER}' password='{PASSWORD}'".format(**settings.DATABASES["default"])
table = "{0}.{1}".format(schema,self.name)
cursor.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
cmd = ["ogr2ogr", "-overwrite", "-gt", "20000", "-preserve_fid", "-skipfailures", "--config", "PG_USE_COPY", "YES",
"-f", "PostgreSQL", database, self.vrt.name, "-nln", table, "-nlt", "PROMOTE_TO_MULTI", self.layer]
if self.advanced_options:
cmd += self.advanced_options.split()
srid = detect_epsg(self.vrt.name)
if srid:
cmd += ['-a_srs', srid]
logger.info(" ".join(cmd))
cancelled = False
outputFile = None
errorFile = None
output = None
try:
outputFile = tempfile.NamedTemporaryFile(delete=False)
errorFile = tempfile.NamedTemporaryFile(delete=False)
logger.info("Importing data using ogr2ogr, name={},outputFile={},errorFile={}".format(self.name,outputFile.name,errorFile.name))
#p = subprocess.Popen(cmd,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
p = subprocess.Popen(cmd,stdout=outputFile,stderr=errorFile)
if validation:
sleep_time = 0
max_sleep_time = BorgConfiguration.MAX_TEST_IMPORT_TIME * 1000
finished = False
table_exist = False
while sleep_time < max_sleep_time or not table_exist:
if p.poll() is not None:
finished = True
break;
time.sleep(0.2)
sleep_time += 200
if not table_exist and sleep_time>= max_sleep_time:
sql_result = cursor.execute("SELECT count(1) FROM pg_class a JOIN pg_namespace b ON a.relnamespace=b.oid where a.relname='{1}' and b.nspname='{0}'".format(schema,self.name))
table_exist = bool(sql_result.fetchone()[0] if sql_result else cursor.fetchone()[0])
if not finished:
logger.info("The data set is too big, terminate the test importing process for '{0}'".format(self.name))
cancelled = True
try:
p.terminate()
except:
pass
returncode = p.wait()
output = (outputFile.read(),errorFile.read())
if returncode != signal.SIGTERM * -1 and output[1].strip():
raise Exception(output[1])
else:
sleep_time = 0
cancel_time = BorgConfiguration.IMPORT_CANCEL_TIME * 1000
from harvest.jobstates import JobStateOutcome
cancelled = False
while True:
if p.poll() is not None:
break;
time.sleep(0.2)
sleep_time += 200
if sleep_time >= cancel_time:
sleep_time = 0
job = self._get_job(cursor,job_id)
if job.user_action and job.user_action.lower() == JobStateOutcome.cancelled_by_custodian.lower():
#job cancelled
try:
p.terminate()
except:
pass
cancelled = True
logger.info("The job({1}) is cancelled, terminate the importing process for '{0}'".format(self.name,job_id))
break;
returncode = p.wait()
outputFile.seek(0)
errorFile.seek(0)
output = (outputFile.read(),errorFile.read())
if cancelled:
#clear the user action
job.user_action = None
self._save_job(cursor,job,["user_action"])
else:
if p.returncode != 0:
if output[1].strip() :
raise Exception(output[1])
else:
raise Exception("ogr2ogr failed with unknown exception")
elif output[1]:
logger.error(output[1])
self._set_info(database,table)
finally:
if outputFile:
try:
outputFile.close()
except:
pass
if errorFile:
try:
errorFile.close()
except:
pass
try:
os.remove(outputFile.name)
except:
pass
try:
os.remove(errorFile.name)
except:
pass
return (not cancelled,output[1] if output and output[1].strip() else None)
@switch_searchpath(searchpath=BorgConfiguration.BORG_SCHEMA)
def _get_job(self,cursor,job_id):
from harvest.models import Job
return Job.objects.get(pk=job_id)
@switch_searchpath(searchpath=BorgConfiguration.BORG_SCHEMA)
def _save_job(self,cursor,job,update_fields):
job.save(update_fields=update_fields)
@switch_searchpath(searchpath=BorgConfiguration.BORG_SCHEMA)
def _post_execute(self,cursor):
if self.foreign_table:
self.importing_dict["row_count"] = self.foreign_table.table_row_count()
if self.foreign_table.table_md5_support:
self.importing_dict["table_md5"] = self.foreign_table.table_md5()
if "check_job_id" in self.importing_dict: del self.importing_dict["check_job_id"]
if "check_batch_id" in self.importing_dict: del self.importing_dict["check_batch_id"]
#import ipdb;ipdb.set_trace()
self.importing_info = json.dumps(self.importing_dict)
self.save(update_fields=["importing_info","job_run_time","info"])
@in_schema(BorgConfiguration.INPUT_SCHEMA)
def execute(self,job_id ,cursor,schema):
from harvest.jobstates import JobStateOutcome
begin_time = timezone.now()
result = self.invoke(cursor,schema,job_id)
if result[0]:
# all data is imported
self.job_run_time = begin_time
#save the latest data source information to table
self._post_execute(cursor)
if result[1] and result[1].lower().find("error") >= 0 :
return (JobStateOutcome.warning,result[1])
else:
return (JobStateOutcome.succeed,result[1])
else:
#import process is cancelled
from harvest.jobstates import JobStateOutcome
return (JobStateOutcome.cancelled_by_custodian,JobStateOutcome.cancelled_by_custodian)
def drop(self,cursor,schema):
cursor.execute("DROP TABLE IF EXISTS \"{0}\".\"{1}\" CASCADE;".format(schema,self.name))
def create(self,cursor,schema):
pass
def delete(self,using=None):
logger.info('Delete {0}:{1}'.format(type(self),self.name))
if try_set_push_owner("input"):
try:
with transaction.atomic():
super(Input,self).delete(using)
try_push_to_repository('input')
finally:
try_clear_push_owner("input")
else:
super(Input,self).delete(using)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.data_changed: return
with transaction.atomic():
super(Input,self).save(force_insert,force_update,using,update_fields)
def __str__(self):
return self.name
class Meta:
ordering = ['data_source','name']
class InputEventListener(object):
@staticmethod
@receiver(pre_delete, sender=Input)
def _pre_delete(sender, instance, **args):
# drop tables in both schema
cursor = None
try:
cursor=connection.cursor()
instance.drop(cursor, BorgConfiguration.TEST_INPUT_SCHEMA)
instance.drop(cursor, BorgConfiguration.INPUT_SCHEMA)
finally:
close_cursor(cursor)
@staticmethod
@receiver(post_delete, sender=Input)
def _post_delete(sender, instance, **args):
refresh_select_choices.send(instance,choice_family="input")
@staticmethod
@receiver(pre_save, sender=Input)
def _pre_save(sender, instance,**kwargs):
if not instance.pk:
instance.new_object = True
@staticmethod
@receiver(post_save, sender=Input)
def _post_save(sender, instance, **args):
if (hasattr(instance,"new_object") and getattr(instance,"new_object")):
delattr(instance,"new_object")
refresh_select_choices.send(instance,choice_family="input")
@python_2_unicode_compatible
class Transform(JobFields):
"""
Base class for a generic transform to be performed on an Input table in
the harvest DB.
"""
last_modify_time = models.DateTimeField(auto_now=False,auto_now_add=True,editable=False,null=False)
def drop(self, cursor,schema):
"""
        drop the function from the specified schema
"""
cursor.execute("DROP FUNCTION IF EXISTS \"{0}\".\"{1}\"() CASCADE;".format(schema,self.func_name))
@switch_searchpath()
def create(self, cursor,schema,input_schema=None,normal_schema=None,input_table_schema=None,input_table_name=None):
"""
create the function in specified schema
"""
if input_table_schema:
sql = Template(self.sql).render(Context({"self": self,"trans_schema":schema,"input_schema":input_schema,"normal_schema":normal_schema,"input_table_schema":input_table_schema,"input_table_name":input_table_name}))
else:
sql = Template(self.sql).render(Context({"self": self,"trans_schema":schema,"input_schema":input_schema,"normal_schema":normal_schema}))
cursor.execute(sql)
def invoke(self, **kwargs):
"""
        invoke the function to populate the table data in the specified schema
"""
raise NotImplementedError("Not implemented.")
def execute(self):
"""
execute this function
"""
raise NotImplementedError("Not implemented.")
def __str__(self):
return self.name or ""
class Meta:
abstract = True
class Normalise(Transform):
"""
Represents a normalisation transform to be performed on an Input table
in the harvest DB.
"""
TRANSFORM = [
"CREATE FUNCTION \"{{trans_schema}}\".\"{{self.func_name}}\"() RETURNS SETOF \"{{normal_schema}}\".\"{{self.output_table.name}}\" as ",
"\nBEGIN\n RETURN QUERY SELECT * FROM \"{{input_schema}}\".\"{{self.input_table.name}}\";\nEND;\n",
" LANGUAGE plpgsql;"
]
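    # Illustrative rendering of the default TRANSFORM for a normalise named "n1"
    # with input table "in1" and output table "out1" (all names hypothetical):
    #   CREATE FUNCTION "<trans_schema>"."n_n1"() RETURNS SETOF "<normal_schema>"."out1" as $$
    #   BEGIN
    #       RETURN QUERY SELECT * FROM "<input_schema>"."in1";
    #   END;
    #   $$ LANGUAGE plpgsql;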
name = models.CharField(unique=True, max_length=255, validators=[validate_slug],editable=True)
input_table = models.ForeignKey(Input) # Referencing the schema which to introspect for the output of this transform
sql = SQLField(default="$$".join(TRANSFORM).strip())
relation_1 = models.OneToOneField('Normalise_NormalTable',blank=True,null=True,related_name="normalise_1",editable=False)
relation_2 = models.OneToOneField('Normalise_NormalTable',blank=True,null=True,related_name="normalise_2",editable=False)
relation_3 = models.OneToOneField('Normalise_NormalTable',blank=True,null=True,related_name="normalise_3",editable=False)
normal_table = None
def set_relation(self,pos,relation):
"""
        set the relation at the given position (0-based)
"""
if pos == 0:
self.relation_1 = relation
elif pos == 1:
self.relation_2 = relation
elif pos == 2:
self.relation_3 = relation
@property
def relations(self):
return [self.relation_1,self.relation_2,self.relation_3]
@property
def func_name(self):
"""
normalise function name
"""
return "n_{0}".format(self.name)
@property
def output_table(self):
"""
The output table
        The user-supplied value takes priority over the database value.
"""
if self.normal_table:
return self.normal_table
elif self.pk:
try:
return self.normaltable
except:
return None
else:
return None
def is_up_to_date(self,job=None,enforce=False):
"""
        Returns True if up to date, False if not, and None if it cannot be determined.
"""
#import ipdb;ipdb.set_trace()
if self.job_status and self.job_id and self.job_batch_id and self.job_run_time and self.normaltable and self.input_table:
if self.job_run_time <= self.last_modify_time or self.job_run_time <= self.normaltable.last_modify_time:
                #normalise or normal table has been modified after the last job run time
return False
up_to_date = self.input_table.is_up_to_date(job,enforce)
result = True
if up_to_date == False:
#input_table is not up to date
return False
elif up_to_date is None:
result = None
if self.job_run_time < self.input_table.job_run_time:
#input table is up to date but input table's last job run after normalise's last job run.
return False
for relation in self.relations:
if relation:
for normal_table in relation.normal_tables:
if normal_table:
up_to_date = normal_table.is_up_to_date(job,enforce)
#import ipdb;ipdb.set_trace()
if up_to_date == False:
#dependent normal table is not up to date
return False
elif up_to_date is None:
result = None
if self.job_run_time < normal_table.job_run_time:
#dependent normal table is up to date but its last job run after normalise's last job run.
return False
return result
else:
return False
@property
def inputs(self):
if not hasattr(self,"_inputs_cache"):
inputs = []
for n in self.normalises:
if not n.input_table:
                    raise ValidationError("Normalise({0}) does not connect to an input table.".format(n.name))
if n.input_table not in inputs:
inputs.append(n.input_table)
self._inputs_cache = inputs
return self._inputs_cache
@property
def normalises(self):
"""
        return the normalises (self and its dependencies) sorted by dependency order
"""
return self._normalises()
def _normalises(self,parents=None):
"""
        return the normalises (self and its dependencies) sorted by dependency order
"""
if not hasattr(self,"_normalises_cache"):
normalises = [self]
if parents:
parents = parents + [self]
else:
parents = [self]
for relation in self.relations:
if not relation:
continue
for normal_table in relation.normal_tables:
if not normal_table:
continue
try:
if not normal_table.normalise:
raise ValidationError("NormalTable({0}) does not connect to a normalise function.".format(normal_table.name))
except ObjectDoesNotExist:
raise ValidationError("NormalTable({0}) does not connect to a normalise function.".format(normal_table.name))
if normal_table.normalise in parents:
raise ValidationError("Found a circular dependency:{0}".format("=>".join([n.name for n in parents + [normal_table.normalise]])))
for n in normal_table.normalise._normalises(parents):
if n not in normalises:
normalises.append(n)
self._normalises_cache = list(reversed(normalises))
return self._normalises_cache
def invoke(self, cursor,trans_schema,input_schema,normal_schema):
"""
        invoke the function to populate the table data in the specified schema
"""
#populate the data
sql = "INSERT INTO \"{3}\".\"{0}\" SELECT * FROM \"{2}\".\"{1}\"();".format(self.output_table.name, self.func_name, trans_schema, normal_schema)
cursor.execute(sql)
@in_schema(BorgConfiguration.TEST_TRANSFORM_SCHEMA + "," + BorgConfiguration.BORG_SCHEMA,input_schema=BorgConfiguration.TEST_INPUT_SCHEMA,normal_schema=BorgConfiguration.TEST_NORMAL_SCHEMA)
def clean(self, cursor,schema,input_schema,normal_schema):
"""
        Check whether the normalise function is correct, by creating it in the test schema
"""
self.sql = None if not self.sql else self.sql.strip()
if not self.sql:
raise ValidationError("Sql can't be empty.")
#check whether sql is ascii string
try:
self.sql = codecs.encode(self.sql,'ascii')
except :
raise ValidationError("Sql contains non ascii character.")
try:
#import ipdb;ipdb.set_trace()
self.last_modify_time = timezone.now()
#import ipdb; ipdb.set_trace()
if self.normal_table:
                #check for circular dependency relationships
all_normalises = self.normalises
#drop the previous created testing function
self.drop(cursor,schema)
if self.normal_table:
#recreate the normal table
self.output_table.drop(cursor,normal_schema)
self.output_table.create(cursor,normal_schema)
            #specify an output normal table
self.create(cursor,schema,input_schema,normal_schema)
#invoke the normalise function to check whether it is correct or not.
self.invoke(cursor,schema,input_schema,normal_schema)
except ValidationError as e:
logger.error(traceback.format_exc())
raise e
except Exception as e:
logger.error(traceback.format_exc())
raise ValidationError(e)
@switch_searchpath(searchpath=BorgConfiguration.BORG_SCHEMA)
def _post_execute(self,cursor):
self.save(update_fields=['job_run_time'])
@in_schema(BorgConfiguration.TRANSFORM_SCHEMA + "," + BorgConfiguration.BORG_SCHEMA,input_schema=BorgConfiguration.INPUT_SCHEMA,normal_schema=BorgConfiguration.NORMAL_SCHEMA)
def execute(self,cursor,schema,input_schema,normal_schema):
"""
        recreate the normalised table and repopulate the table data
"""
begin_time = timezone.now()
self.drop(cursor,schema)
self.output_table.drop(cursor,normal_schema)
self.output_table.create(cursor,normal_schema)
self.create(cursor,schema,input_schema,normal_schema)
self.invoke(cursor,schema,input_schema,normal_schema)
self.job_run_time = begin_time
self._post_execute(cursor)
def delete(self,using=None):
logger.info('Delete {0}:{1}'.format(type(self),self.name))
if try_set_push_owner("normalise"):
try:
with transaction.atomic():
super(Normalise,self).delete(using)
try_push_to_repository('normalise')
finally:
try_clear_push_owner("normalise")
else:
super(Normalise,self).delete(using)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.data_changed: return
#import ipdb;ipdb.set_trace()
with transaction.atomic():
NormaliseEventListener.preprocess_relation(self)
super(Normalise,self).save(force_insert,force_update,using,update_fields)
def __str__(self):
return self.name
class Meta:
ordering = ['name']
class NormaliseEventListener(object):
@staticmethod
@receiver(pre_delete, sender=Normalise)
def _pre_delete(sender, instance, **args):
# drop tables in both schema
cursor = None
try:
cursor=connection.cursor()
instance.drop(cursor, BorgConfiguration.TRANSFORM_SCHEMA)
instance.drop(cursor, BorgConfiguration.TEST_TRANSFORM_SCHEMA)
finally:
close_cursor(cursor)
@staticmethod
def preprocess_relation(instance):
if not instance.editing_mode:
return
#import ipdb;ipdb.set_trace()
#save relationship first
instance._del_relations = []
#break the relationship between normalise and normalise_normaltable
pos = 0
for relation in instance.relations:
if relation and relation.is_empty:
if relation.pk:
instance._del_relations.append(relation)
instance.set_relation(pos, None)
pos += 1
#save the relationship row
pos = 0
for relation in instance.relations:
if relation:
relation.save()
instance.set_relation(pos,relation)
pos += 1
@staticmethod
@receiver(pre_save, sender=Normalise)
def _pre_save(sender, instance, **args):
pass
@staticmethod
@receiver(post_save, sender=Normalise)
def _post_save(sender, instance, **args):
#import ipdb;ipdb.set_trace()
if not instance.editing_mode:
return
#save normal table's foreign key
save = False
try:
save = instance.normaltable != instance.normal_table
except ObjectDoesNotExist:
save = True
#delete the empty relations
if hasattr(instance,"_del_relations"):
for relation in instance._del_relations:
relation.delete()
delattr(instance,"_del_relations")
if save:
try:
old_normal_table = instance.normaltable
except ObjectDoesNotExist:
old_normal_table = None
if old_normal_table:
old_normal_table.normalise = None
old_normal_table.save()
if instance.normal_table:
instance.normal_table.normalise = instance
instance.normal_table.save()
for relation in instance.relations:
if relation and not relation.normalise:
relation.normalise = instance
relation.save()
class NormalTable(BorgModel):
"""
Represents a table in the harvest DB generated by a Normalise operation on
an Input table, with associated constraints.
"""
name = models.CharField(unique=True, max_length=255, validators=[validate_slug])
normalise = models.OneToOneField(Normalise,null=True,editable=False)
create_sql = SQLField(default="CREATE TABLE \"{{self.name}}\" (name varchar(32) unique);")
last_modify_time = models.DateTimeField(auto_now=False,auto_now_add=True,editable=False,null=False)
def is_up_to_date(self,job=None,enforce=False):
"""
        Returns True if up to date, False if not, and None if it cannot be determined.
"""
#import ipdb;ipdb.set_trace()
if self.normalise:
return self.normalise.is_up_to_date(job,enforce)
else:
return False
@property
def job_run_time(self):
"""
return the last job's run time
"""
if self.normalise:
return self.normalise.job_run_time
else:
return None
def drop(self, cursor,schema):
"""
        Drop the table from the specified schema
"""
cursor.execute("DROP TABLE IF EXISTS \"{0}\".\"{1}\" CASCADE;".format(schema,self.name))
@switch_searchpath()
def create(self, cursor,schema):
"""
        Create the table in the specified schema
"""
sql = Template(self.create_sql)
sql = sql.render(Context({"self": self,"schema":schema}))
cursor.execute(sql)
@in_schema(BorgConfiguration.TEST_NORMAL_SCHEMA)
def clean(self, cursor,schema):
"""
check whether the NormalTable is correct, by recreating it in test schema
"""
self.create_sql = None if not self.create_sql else self.create_sql.strip()
if not self.create_sql:
raise ValidationError("Create sql can't be empty.")
#check whether create sql is ascii string
try:
self.create_sql = codecs.encode(self.create_sql,'ascii')
except :
raise ValidationError("Create sql contains non ascii character.")
orig = None
if self.pk:
orig = NormalTable.objects.get(pk=self.pk)
if orig and orig.create_sql == self.create_sql:
#create_sql not changed, no need to do the validation.
return
self.last_modify_time = timezone.now()
self.drop(cursor,schema)
try:
self.create(cursor,schema)
self.drop(cursor,schema)
except ValidationError as e:
raise e
except Exception as e:
raise ValidationError(e)
def delete(self,using=None):
logger.info('Delete {0}:{1}'.format(type(self),self.name))
if try_set_push_owner("normal_table"):
try:
with transaction.atomic():
super(NormalTable,self).delete(using)
try_push_to_repository('normal_table')
finally:
try_clear_push_owner("normal_table")
else:
super(NormalTable,self).delete(using)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.data_changed: return
with transaction.atomic():
super(NormalTable,self).save(force_insert,force_update,using,update_fields)
def __str__(self):
return self.name
class Meta:
ordering = ['name']
class NormalTableEventListener(object):
@staticmethod
@receiver(pre_delete, sender=NormalTable)
def _pre_delete(sender, instance, **args):
# import ipdb;ipdb.set_trace()
# drop tables in both schema
cursor = None
try:
cursor=connection.cursor()
instance.drop(cursor, BorgConfiguration.NORMAL_SCHEMA)
instance.drop(cursor, BorgConfiguration.TEST_NORMAL_SCHEMA)
finally:
close_cursor(cursor)
class Normalise_NormalTable(BorgModel):
"""
    Analogous to a many-to-many relationship between Normalise and NormalTable
"""
normalise = models.ForeignKey(Normalise,blank=True,null=True)
normal_table_1 = models.ForeignKey(NormalTable,blank=True,null=True,related_name="normalise_normaltable_1")
normal_table_2 = models.ForeignKey(NormalTable,blank=True,null=True,related_name="normalise_normaltable_2")
normal_table_3 = models.ForeignKey(NormalTable,blank=True,null=True,related_name="normalise_normaltable_3")
normal_table_4 = models.ForeignKey(NormalTable,blank=True,null=True,related_name="normalise_normaltable_4")
@property
def normal_tables(self):
return [self.normal_table_1,self.normal_table_2,self.normal_table_3,self.normal_table_4]
def set_normal_table(self,pos,normal_table):
"""
        set the normal_table at the given position (0-based)
"""
if pos == 0:
self.normal_table_1 = normal_table
elif pos == 1:
self.normal_table_2 = normal_table
elif pos == 2:
self.normal_table_3 = normal_table
elif pos == 3:
self.normal_table_4 = normal_table
@property
def is_empty(self):
return not any(self.normal_tables)
def __str__(self):
if self.normal_table_1 or self.normal_table_2 or self.normal_table_3 or self.normal_table_4:
            return "{0} depends on [{1} {2} {3} {4}]".format(self.normalise.name if self.normalise else "",
self.normal_table_1.name if self.normal_table_1 else "",
", " + self.normal_table_2.name if self.normal_table_2 else "",
", " + self.normal_table_3.name if self.normal_table_3 else "",
", " + self.normal_table_4.name if self.normal_table_4 else "",
)
else:
return self.normalise.name if self.normalise else ""
class PublishChannel(BorgModel):
"""
The publish channel
"""
name = models.SlugField(max_length=255, unique=True, help_text="Name of publish destination", validators=[validate_slug])
sync_postgres_data = models.BooleanField(default=True)
sync_geoserver_data = models.BooleanField(default=True)
wfs_version = models.CharField(max_length=32, null=True,blank=True)
wfs_endpoint = models.CharField(max_length=256, null=True,blank=True)
wms_version = models.CharField(max_length=32, null=True,blank=True)
wms_endpoint = models.CharField(max_length=256, null=True,blank=True)
gwc_endpoint = models.CharField(max_length=256, null=True,blank=True)
last_modify_time = models.DateTimeField(auto_now=False,auto_now_add=True,editable=False,null=False)
def delete(self,using=None):
logger.info('Delete {0}:{1}'.format(type(self),self.name))
if try_set_push_owner("publish_channel"):
try:
with transaction.atomic():
super(PublishChannel,self).delete(using)
try_push_to_repository('publish_channel')
finally:
try_clear_push_owner("publish_channel")
else:
super(PublishChannel,self).delete(using)
def clean(self):
if self.sync_geoserver_data:
if not self.wfs_version or not self.wfs_endpoint or not self.wms_version or not self.wms_endpoint or not self.gwc_endpoint:
raise ValidationError("Please input wfs, wms and gwc related information.")
self.last_modify_time = timezone.now()
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.data_changed: return
with transaction.atomic():
super(PublishChannel,self).save(force_insert,force_update,using,update_fields)
def __str__(self):
return self.name
class Meta:
ordering = ['name']
@python_2_unicode_compatible
class Workspace(BorgModel):
"""
Analogous to a workspace in GeoServer.
"""
name = models.SlugField(max_length=255, help_text="Name of workspace", validators=[validate_slug])
publish_channel = models.ForeignKey(PublishChannel)
AUTH_CHOICES = (
(0, 'Public access'),
(1, 'SSO access'),
(2, 'SSO restricted role access')
)
auth_level = models.PositiveSmallIntegerField(choices=AUTH_CHOICES, default=1)
default_schema = BorgConfiguration.PUBLISH_SCHEMA
default_view_schema = BorgConfiguration.PUBLISH_VIEW_SCHEMA
@property
def workspace_as_schema(self):
return BorgConfiguration.WORKSPACE_AS_SCHEMA
@property
def schema(self):
if self.workspace_as_schema:
return '{0}_{1}'.format(self.publish_channel.name,self.name)
else:
return '{0}_{1}'.format(self.publish_channel.name,self.default_schema)
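    # Example (hypothetical names): for publish_channel "public" and workspace
    # "roads", schema is "public_roads" when workspaces map to schemas,
    # otherwise "public_<default_schema>"; view_schema and the publish_*
    # schemas below follow the same naming pattern.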
@property
def publish_schema(self):
"""
        The schema used by the borg slave to let users access the table
"""
if self.workspace_as_schema:
return self.name
else:
return self.default_schema
@property
def publish_data_schema(self):
"""
The schema used by borg slave to save the table data.
"""
return "{0}_data".format(self.publish_schema)
@property
def publish_outdated_schema(self):
"""
        The schema used by the borg slave to temporarily store the outdated table data
"""
return "{0}_outdated".format(self.publish_schema)
@property
def view_schema(self):
if self.workspace_as_schema:
return '{0}_{1}_view'.format(self.publish_channel.name,self.name)
else:
return '{0}_{1}'.format(self.publish_channel.name,self.default_view_schema)
@property
def test_schema(self):
return BorgConfiguration.test_schema(self.schema)
@property
def test_view_schema(self):
return BorgConfiguration.test_schema(self.view_schema)
@in_schema(BorgConfiguration.BORG_SCHEMA)
def execute(self,validation_mode,cursor,schema):
if validation_mode:
sql = ";".join(["CREATE SCHEMA IF NOT EXISTS \"{}\"".format(s) for s in [self.test_schema,self.test_view_schema]])
else:
sql = ";".join(["CREATE SCHEMA IF NOT EXISTS \"{}\"".format(s) for s in [self.schema,self.view_schema,self.publish_data_schema]])
cursor.execute(sql)
def output_filename(self,action='publish'):
if action == 'publish':
return os.path.join(self.publish_channel.name,"workspaces", "{}.json".format(self.name))
else:
return os.path.join(self.publish_channel.name,"workspaces", "{}.{}.json".format(self.name,action))
def output_filename_abs(self,action='publish'):
return os.path.join(BorgConfiguration.BORG_STATE_REPOSITORY, self.output_filename(action))
def publish(self):
try_set_push_owner("workspace")
hg = None
try:
json_files = []
if self.publish_channel.sync_postgres_data:
json_file = self.output_filename_abs('publish')
# Write JSON output file
json_out = {}
json_out["schema"] = self.publish_schema
json_out["data_schema"] = self.publish_data_schema
json_out["outdated_schema"] = self.publish_outdated_schema
json_out["channel"] = self.publish_channel.name
json_out["sync_postgres_data"] = self.publish_channel.sync_postgres_data
json_out["sync_geoserver_data"] = self.publish_channel.sync_geoserver_data
json_out["action"] = 'publish'
json_out["auth_level"] = self.auth_level
json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
#create the dir if required
if not os.path.exists(os.path.dirname(json_file)):
os.makedirs(os.path.dirname(json_file))
with open(json_file, "wb") as output:
json.dump(json_out, output, indent=4)
json_files.append(json_file)
if self.publish_channel.sync_geoserver_data:
workspaces = Workspace.objects.filter(publish_channel=self.publish_channel).order_by('name')
                # Generate the layer access rules (layers.properties) through a template
latest_data = render_to_string("layers.properties", {"workspaces": workspaces})
old_data = None
access_rule_json_file = os.path.join(BorgConfiguration.BORG_STATE_REPOSITORY,self.publish_channel.name, "layers.properties")
#create dir if required
if os.path.exists(access_rule_json_file):
with open(access_rule_json_file,"rb") as output_file:
old_data = output_file.read()
elif not os.path.exists(os.path.dirname(access_rule_json_file)):
os.makedirs(os.path.dirname(access_rule_json_file))
if not old_data or old_data != latest_data:
# Write output layer access rule, commit + push
with open(access_rule_json_file, "wb") as output:
output.write(latest_data)
json_files.append(access_rule_json_file)
if json_files:
hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
hg.commit(include=json_files,addremove=True, user=BorgConfiguration.BORG_STATE_USER, message="Update workspace {}".format(self.name))
increase_committed_changes()
try_push_to_repository('workspace',hg)
finally:
if hg: hg.close()
try_clear_push_owner("workspace")
def delete(self,using=None):
logger.info('Delete {0}:{1}'.format(type(self),self.name))
if try_set_push_owner("workspace"):
try:
with transaction.atomic():
super(Workspace,self).delete(using)
try_push_to_repository('workspace')
finally:
try_clear_push_owner("workspace")
else:
super(Workspace,self).delete(using)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.data_changed: return
with transaction.atomic():
super(Workspace,self).save(force_insert,force_update,using,update_fields)
def __str__(self):
return '{0}.{1}'.format(self.publish_channel.name,self.name)
class Meta:
ordering = ['publish_channel','name']
unique_together=(('publish_channel','name'),)
class WorkspaceEventListener(object):
@staticmethod
@receiver(post_delete, sender=Workspace)
def _post_delete(sender, instance, **args):
refresh_select_choices.send(instance,choice_family="workspace")
@staticmethod
@receiver(pre_save, sender=Workspace)
def _pre_save(sender, instance,**kwargs):
if not instance.pk:
instance.new_object = True
@staticmethod
@receiver(post_save, sender=Workspace)
def _post_save(sender, instance, **args):
if (hasattr(instance,"new_object") and getattr(instance,"new_object")):
delattr(instance,"new_object")
refresh_select_choices.send(instance,choice_family="workspace")
STATUS_CHOICES = (
(0, "idle"),
(1, "harvesting"),
(2, "harvested"),
(3, "failed")
)
class Publish(Transform,ResourceStatusMixin,SpatialTableMixin):
"""
    A feature whose data is derived from input and normal tables; it is published to the slave server and can then be accessed through kmi.
"""
TRANSFORM = [
"CREATE FUNCTION \"{{trans_schema}}\".\"{{self.func_name}}\"() RETURNS SETOF \"{{input_table_schema}}\".\"{{input_table_name}}\" as ",
"\nBEGIN\n RETURN QUERY SELECT * FROM \"{{input_table_schema}}\".\"{{input_table_name}}\";\nEND;\n",
" LANGUAGE plpgsql;"
]
name = models.SlugField(max_length=255, unique=True, help_text="Name of Publish", validators=[validate_slug])
workspace = models.ForeignKey(Workspace)
interval = models.CharField(max_length=64, choices=JobInterval.publish_options(), default=JobInterval.Weekly.name)
status = models.CharField(max_length=32, choices=ResourceStatus.publish_status_options,default=ResourceStatus.Enabled.name)
input_table = models.ForeignKey(Input, blank=True,null=True) # Referencing the schema which to introspect for the output of this transform
sql = SQLField(default="$$".join(TRANSFORM).strip())
spatial_info = models.TextField(max_length=512,editable=False,null=True,blank=True)
create_extra_index_sql = SQLField(null=True, editable=True,blank=True)
priority = models.PositiveIntegerField(default=1000)
create_table_sql = SQLField(null=True, editable=False)
geoserver_setting = models.TextField(blank=True,null=True,editable=False)
pending_actions = models.IntegerField(blank=True,null=True,editable=False)
relation_1 = models.OneToOneField('Publish_NormalTable',blank=True,null=True,related_name="publish_1",editable=False)
relation_2 = models.OneToOneField('Publish_NormalTable',blank=True,null=True,related_name="publish_2",editable=False)
relation_3 = models.OneToOneField('Publish_NormalTable',blank=True,null=True,related_name="publish_3",editable=False)
running = models.PositiveIntegerField(default=0,editable=False)
completed = models.PositiveIntegerField(default=0,editable=False)
failed = models.PositiveIntegerField(default=0,editable=False)
waiting = models.PositiveIntegerField(default=0,editable=False)
job_create_time = models.DateTimeField(null=True, editable=False)
job_start_time = models.DateTimeField(null=True, editable=False)
job_end_time = models.DateTimeField(null=True, editable=False)
default_layer_setting = {}
def set_relation(self,pos,relation):
"""
set the relation at position, position is based 0
"""
if pos == 0:
self.relation_1 = relation
elif pos == 1:
self.relation_2 = relation
elif pos == 2:
self.relation_3 = relation
@property
def publish_action(self):
from tablemanager.publish_action import PublishAction
return PublishAction(self.pending_actions)
def builtin_style_file(self,style_format="sld"):
if self.is_normal:
#is a normal table, no style file
return None
elif self.input_table and self.input_table.spatial_type == self.spatial_type and self.input_table.style_file(style_format):
#publish's input_table has style file, use it
return self.input_table.style_file(style_format)
return None
@property
def relations(self):
return [self.relation_1,self.relation_2,self.relation_3]
@property
def func_name(self):
return "p_{0}".format(self.table_name)
@property
def table_name(self):
if self.workspace.workspace_as_schema:
return self.name
else:
return "{}_{}".format(self.workspace, self.name)
@property
def table_schema(self):
return self.workspace.test_schema
@property
def db_util(self):
return defaultDbUtil
@property
def normalises(self):
"""
the sorted related normalises
"""
if not hasattr(self,"_normalises_cache"):
normalises = []
for relation in self.relations:
if not relation:
continue
for normal_table in relation.normal_tables:
if not normal_table:
continue
try:
if not normal_table.normalise:
raise ValidationError("NormalTable({0}) does not connect to a normalise function.".format(normal_table.name))
except ObjectDoesNotExist:
raise ValidationError("NormalTable({0}) does not connect to a normalise function.".format(normal_table.name))
for n in normal_table.normalise.normalises:
if n not in normalises:
normalises.append(n)
self._normalises_cache = normalises
return self._normalises_cache
@property
def inputs(self):
"""
a set object contains all related inputs.
"""
#import ipdb;ipdb.set_trace()
if not hasattr(self,"_inputs_cache"):
inputs = []
try:
if self.input_table:
inputs.append(self.input_table)
except ObjectDoesNotExist:
pass
for n in self.normalises:
if not n.input_table:
raise ValidationError("Normalise({0}) does not connect to a input table.".format(self.name))
if n.input_table not in inputs:
inputs.append(n.input_table)
self._inputs_cache = inputs
return self._inputs_cache
def drop(self,cursor,transform_schema,publish_schema):
"""
drop related tables and transform functions
"""
cursor.execute("DROP TABLE IF EXISTS \"{0}\".\"{1}\" CASCADE;".format(publish_schema,self.table_name))
super(Publish,self).drop(cursor,transform_schema)
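    # SQL used to read and temporarily override the session's statement_timeout while the publish table is rebuilt.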
get_timeout_sql = "show statement_timeout"
set_timeout_sql = "set statement_timeout to '{}'"
def invoke(self, cursor,trans_schema,normal_schema,publish_view_schema,publish_schema):
"""
invoke the function to populate the table data in speicifed schema
"""
#import ipdb; ipdb.set_trace()
#drop auto generated spatial index
#SpatialTable.get_instance(publish_schema,self.table_name,True).drop_indexes()
#drop all indexes except primary key
#defaultDbUtil.drop_all_indexes(self.table_name,publish_schema,False)
cursor.execute(self.get_timeout_sql)
default_timeout = cursor.fetchone()[0]
try:
#clear the statement timeout
cursor.execute(self.set_timeout_sql.format("0"))
sql = "CREATE OR REPLACE VIEW \"{3}\".\"{0}\" AS SELECT *, md5(CAST(row.* AS text)) as md5_rowhash FROM \"{2}\".\"{1}\"() as row;".format(self.table_name,self.func_name,trans_schema,publish_view_schema)
cursor.execute(sql)
sql = (
"DROP TABLE IF EXISTS \"{4}\".\"{0}\" CASCADE;\n"
#"CREATE TABLE IF NOT EXISTS \"{4}\".\"{0}\" (LIKE \"{3}\".\"{0}\",\n"
"CREATE TABLE \"{4}\".\"{0}\" (LIKE \"{3}\".\"{0}\",\n"
"CONSTRAINT pk_{0} PRIMARY KEY (md5_rowhash));\n"
#"CREATE TABLE IF NOT EXISTS \"{0}_diff\" (\n"
#"difftime TIMESTAMP PRIMARY KEY\n,"
#"inserts VARCHAR(32)[], deletes VARCHAR(32)[]);\n"
#"INSERT INTO \"{0}_diff\" select now() as difftime, del.array_agg as deletes, ins.array_agg as inserts from\n"
#"(select array_agg(d.md5_rowhash) from (select md5_rowhash from \"{0}\" except (select md5_rowhash from publish_view.\"{0}\")) as d) as del,\n"
#"(select array_agg(i.md5_rowhash) from (select md5_rowhash from publish_view.\"{0}\" except (select md5_rowhash from \"{0}\")) as i) as ins;\n"
#"TRUNCATE \"{4}\".\"{0}\";" # For now don't actually use diff just truncate/full reinsert
"INSERT INTO \"{4}\".\"{0}\" SELECT * FROM \"{3}\".\"{0}\";"
).format(self.table_name, timezone.now(),trans_schema,publish_view_schema,publish_schema)
cursor.execute(sql)
#create extra index
if self.create_extra_index_sql and self.create_extra_index_sql.strip():
sql = Template(self.create_extra_index_sql).render(Context({"self": self,"publish_schema":publish_schema}))
cursor.execute(sql)
#create index
#print "refresh spatial info for table (id={}, name={})".format(self.id,self.table_name)
self.refresh_spatial_info(publish_schema).create_indexes(cursor=cursor)
finally:
#reset the default timeout
cursor.execute(self.set_timeout_sql.format(default_timeout))
def _create(self, cursor,schema,input_schema=None,normal_schema=None):
"""
This function is used to take care two different scenario:
1. when the publish dependent on an input_table.
2. when the publish does not dependent on an input table
"""
if self.input_table:
self.create(cursor,schema,input_schema,normal_schema,input_schema,self.input_table.name)
else:
first_normal_table = None
for relation in self.relations:
if relation:
for normal_table in relation.normal_tables:
if normal_table:
first_normal_table = normal_table
break;
if first_normal_table:
break;
if first_normal_table:
self.create(cursor,schema,input_schema,normal_schema,normal_schema,first_normal_table.name)
else:
raise ValidationError("Must specify input or dependencies or both.")
@in_schema(BorgConfiguration.TEST_TRANSFORM_SCHEMA + "," + BorgConfiguration.BORG_SCHEMA,input_schema=BorgConfiguration.TEST_INPUT_SCHEMA, normal_schema=BorgConfiguration.TEST_NORMAL_SCHEMA)
def clean(self,cursor,schema,input_schema,normal_schema):
"""
Check whether the publish function is correct, by creating in test schema
"""
if not self.sql :
raise ValidationError("Sql can't be empty.")
#check whether sql is ascii string
try:
self.sql = codecs.encode(self.sql,'ascii')
except :
raise ValidationError("Sql contains non ascii character.")
self.create_extra_index_sql = None if not self.create_extra_index_sql else self.create_extra_index_sql.strip()
if self.create_extra_index_sql:
try:
self.create_extra_index_sql = codecs.encode(self.create_extra_index_sql,'ascii')
except :
raise ValidationError("Sql contains non ascii character.")
else:
self.create_extra_index_sql = None
if not self.input_table and not any(self.relations):
raise ValidationError("Must specify input or dependencies or both.")
try:
#drop transform functions, but not drop related tables
super(Publish,self).drop(cursor,schema)
self.last_modify_time = timezone.now()
self._create(cursor,schema,input_schema,normal_schema)
self.workspace.execute(True)
            #invoke the publish function to check whether it is correct or not.
self.invoke(cursor,schema,normal_schema,self.workspace.test_view_schema,self.workspace.test_schema)
self.create_table_sql = self.get_create_table_sql()
#check the table is spatial or non spatial
self.spatial_info = self.get_spatial_info()
if self.pk and hasattr(self,"changed_fields") and "status" in self.changed_fields:
#publish status changed.
orig = None
if self.pk:
orig = Publish.objects.get(pk=self.pk)
if (not self.publish_status.publish_enabled) and (orig and orig.publish_status.publish_enabled):
#from publish enabled to publish disabled.
try:
self.unpublish()
self.pending_actions = None
self.job_id = None
self.job_batch_id = None
self.job_status = None
except:
error = sys.exc_info()
raise ValidationError(traceback.format_exception_only(error[0],error[1]))
self.job_id = None
self.job_batch_id = None
self.job_status = None
except ValidationError as e:
raise e
except Exception as e:
raise ValidationError(e)
def unpublish(self):
"""
sometimes, failed to remove the json file from repository to trigger the unpublish action if the slave side has some uncommitted transaction.
for example:
1. slave A is failed to commit the tranaction
2. User publish a layer 'test' by adding 'test.json' to repository
3. Adding file 'test.json' trigger a publish action to publish 'test' layer in slave A
4. User unpublish the layer 'test' by removing 'test.json' to repository
5. Slave a can't oberseve the removing action, because 'test.json' was added and removed after the last committed time, so slave A can't unpublish the layer
the solution is update the existing file instead of removing it.
"""
#use published meta file as the meta file for unpublish
publish_file = self.output_filename_abs('publish')
publish_json = None
if os.path.exists(publish_file):
with open(publish_file,"r") as f:
publish_json = json.loads(f.read())
else:
publish_json = {}
json_file = self.output_filename_abs('unpublish')
json_out = None
if publish_json.get("action","publish") != "remove":
#require the properties ("name", "workspace","schema","data_schema","outdated_schema","action","styles", "channel", "spatial_data", "sync_postgres_data", "sync_geoserver_data") to unpublish
json_out = {}
json_out["name"] = self.table_name
json_out["workspace"] = self.workspace.name
json_out["schema"] = self.workspace.publish_schema
json_out["data_schema"] = self.workspace.publish_data_schema
json_out["outdated_schema"] = self.workspace.publish_outdated_schema
json_out["channel"] = self.workspace.publish_channel.name
json_out["spatial_data"] = self.is_spatial
json_out["sync_postgres_data"] = self.workspace.publish_channel.sync_postgres_data
json_out["sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data
json_out["styles"] = {}
#retrieve meta data from the last publish task
meta_json = publish_json
if "meta" in publish_json and "file" in publish_json["meta"]:
meta_file = publish_json["meta"]["file"][len(BorgConfiguration.MASTER_PATH_PREFIX):]
if os.path.exists(meta_file):
with open(meta_file,"r") as f:
meta_json = json.loads(f.read())
else:
meta_json = {}
for key,value in (meta_json.get("styles") or {}).iteritems():
json_out["styles"][key] = {"default":value.get("default",False)}
for key in ["name","workspace","schema","data_schema","outdated_schema","channel","spatial_data","sync_postgres_data","sync_geoserver_data"]:
if key in meta_json:
json_out[key] = meta_json[key]
json_out["action"] = 'remove'
else:
json_out = publish_json
#remove it from catalogue service
res = requests.delete("{}/catalogue/api/records/{}:{}/".format(settings.CSW_URL,self.workspace.name,self.table_name),auth=(settings.CSW_USER,settings.CSW_PASSWORD),verify=settings.CSW_CERT_VERIFY)
if res.status_code != 404:
res.raise_for_status()
json_out["remove_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
with open(json_file, "wb") as output:
json.dump(json_out, output, indent=4)
try_set_push_owner("unpublish")
hg = None
try:
hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
#get all possible files
files =[self.output_filename_abs(action) for action in ['meta','empty_gwc'] ]
#get all existing files.
files =[ f for f in files if os.path.exists(f)]
if files:
hg.remove(files=files)
files.append(json_file)
hg.commit(include=files,addremove=True, user=BorgConfiguration.BORG_STATE_USER, message="Removed {}.{}".format(self.workspace.name, self.name))
increase_committed_changes()
try_push_to_repository("unpublish",hg)
finally:
if hg: hg.close()
try_clear_push_owner("unpublish")
@switch_searchpath(searchpath=BorgConfiguration.BORG_SCHEMA)
def _post_execute(self,cursor):
self.save(update_fields=['job_run_time'])
@in_schema(BorgConfiguration.TRANSFORM_SCHEMA + "," + BorgConfiguration.BORG_SCHEMA,input_schema=BorgConfiguration.INPUT_SCHEMA, normal_schema=BorgConfiguration.NORMAL_SCHEMA)
def execute(self,cursor,schema,input_schema,normal_schema):
"""
recreate the function;
recreate the latest data view
publish the data
"""
#drop transform functions, but not drop related tables
begin_time = timezone.now()
super(Publish,self).drop(cursor,schema)
self._create(cursor,schema,input_schema,normal_schema)
self.workspace.execute(False)
self.invoke(cursor,schema,normal_schema,self.workspace.view_schema,self.workspace.schema)
#save the latest spatial info
self.spatial_info = self.get_spatial_info()
#print "id={}, table_name={} , spatial_info = {}".format(self.id,self.table_name,self.spatial_info)
self.save(update_fields=["spatial_info"])
self.job_run_time = begin_time
self._post_execute(cursor)
def publish_meta_data(self):
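        """Publish only this layer's metadata (catalogue record and styles) to the state repository, without a full data publish."""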
if self.publish_status != ResourceStatus.Enabled:
raise ValidationError("The publish({0}) is disabled".format(self.name))
if not self.workspace.publish_channel.sync_geoserver_data:
raise ValidationError("The publish channel({1}) of publish({0}) does not support geoserver.".format(self.name,self.workspace.publish_channel.name))
publish_action = self.publish_action
if publish_action.publish_all:
raise ValidationError("Publish({0}) requires a full publish including data and metadata".format(self.name))
try_set_push_owner("publish")
hg = None
try:
if self.workspace.workspace_as_schema:
style_file_folder = os.path.join(BorgConfiguration.STYLE_FILE_DUMP_DIR,self.workspace.publish_channel.name, self.workspace.name)
else:
style_file_folder = os.path.join(BorgConfiguration.STYLE_FILE_DUMP_DIR,self.workspace.publish_channel.name)
meta_data = self.update_catalogue_service(style_dump_dir=style_file_folder,md5=True,extra_datas={"publication_date":datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})
#write meta data file
file_name = "{}.meta.json".format(self.table_name)
meta_file = os.path.join(style_file_folder,file_name)
with open(meta_file,"wb") as output:
json.dump(meta_data, output, indent=4)
json_out = {}
json_out["action"] = 'meta'
json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
json_out['meta'] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, meta_file),"md5":file_md5(meta_file)}
json_file = self.output_filename_abs('meta')
#create the dir if required
if not os.path.exists(os.path.dirname(json_file)):
os.makedirs(os.path.dirname(json_file))
with open(json_file, "wb") as output:
json.dump(json_out, output, indent=4)
hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
hg.commit(include=[json_file],addremove=True, user=BorgConfiguration.BORG_STATE_USER, message="Update feature's meta data {}.{}".format(self.workspace.name, self.name))
increase_committed_changes()
try_push_to_repository('publish',hg)
actions = publish_action.clear_feature_action().clear_gwc_action().actions
if self.pending_actions != actions:
self.pending_actions = actions
self.save(update_fields=['pending_actions'])
finally:
if hg: hg.close()
try_clear_push_owner("publish")
def empty_gwc(self):
"""
Empty gwc to the repository
"""
if self.publish_status not in [ResourceStatus.Enabled]:
#layer is not published, no need to empty gwc
raise ValidationError("The publish({0}) is disabled".format(self.name))
geo_settings = json.loads(self.geoserver_setting) if self.geoserver_setting else {}
if not geo_settings.get("create_cache_layer",False):
#layer does not enable gwc, no need to empty gwc
raise ValidationError("The publish({0}) doesn't enable gwc.".format(self.name))
#check whether this publish is published before or not
if not os.path.exists(self.output_filename_abs('publish')):
#not published before.
raise ValidationError("The publish({0}) is not published or already unpublished.".format(self.name))
json_file = self.output_filename_abs('empty_gwc');
try_set_push_owner("publish")
hg = None
try:
json_out = {}
json_out["name"] = self.table_name
json_out["workspace"] = self.workspace.name
json_out["action"] = "empty_gwc"
json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
json_out["auth_level"] = self.workspace.auth_level
#create the dir if required
if not os.path.exists(os.path.dirname(json_file)):
os.makedirs(os.path.dirname(json_file))
with open(json_file, "wb") as output:
json.dump(json_out, output, indent=4)
hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
hg.commit(include=[json_file],addremove=True, user="borgcollector", message="Empty GWC of publish {}.{}".format(self.workspace.name, self.name))
increase_committed_changes()
try_push_to_repository("publish",hg)
finally:
if hg: hg.close()
try_clear_push_owner("publish")
@property
def title(self):
return self.input_table.title if self.input_table else ""
@property
def abstract(self):
return self.input_table.kmi_abstract if self.input_table else ""
def output_filename(self,action='publish'):
if action in ['publish','unpublish']:
return os.path.join(self.workspace.publish_channel.name,"layers", "{}.{}.json".format(self.workspace.name, self.name))
else:
return os.path.join(self.workspace.publish_channel.name,"layers", "{}.{}.{}.json".format(self.workspace.name, self.name,action))
def output_filename_abs(self,action='publish'):
return os.path.join(BorgConfiguration.BORG_STATE_REPOSITORY, self.output_filename(action))
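    # Regexes used by format_sld_style: the first rewrites the SLD <se:Name> elements, the second matches referenced property names so they can be lower-cased.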
_style_name_re = re.compile("<se:Name>(?P<layer>.*?)</se:Name>")
_property_re = re.compile("<ogc:PropertyName>(?P<property>.*?)</ogc:PropertyName>")
def format_sld_style(self,sld):
"""
reset <se:Name> based on publish name.
"""
try:
sld = minidom.parseString(sld).toprettyxml(indent=" ")
sld = [line for line in sld.splitlines() if line.strip()]
except:
raise ValidationError("Incorrect xml format.{}".format(traceback.format_exc()))
sld = os.linesep.join(sld)
#do some transformation.
sld = self._style_name_re.sub("<se:Name>{}</se:Name>".format(self.table_name),sld,2)
sld = self._property_re.sub((lambda m: "<ogc:PropertyName>{}</ogc:PropertyName>".format(m.group(1).lower())), sld)
return sld
@property
def builtin_metadata(self):
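        """Assemble the built-in metadata dict (service type, title, abstract, modification time, bbox, styles and OWS endpoints) for this layer."""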
meta_data = {}
meta_data["workspace"] = self.workspace.name
meta_data["name"] = self.table_name
if self.is_normal:
meta_data["service_type"] = "WFS"
elif self.is_raster:
meta_data["service_type"] = "WMS"
meta_data["service_type_version"] = self.workspace.publish_channel.wms_version
else:
meta_data["service_type"] = "WFS"
meta_data["service_type_version"] = self.workspace.publish_channel.wfs_version
meta_data["title"] = self.title
meta_data["abstract"] = self.abstract
modify_time = None
if self.input_table:
for ds in self.input_table.datasource:
if os.path.exists(ds):
input_modify_time = datetime.utcfromtimestamp(os.path.getmtime(ds)).replace(tzinfo=pytz.UTC)
if modify_time:
if modify_time < input_modify_time:
modify_time = input_modify_time
else:
modify_time = input_modify_time
else:
modify_time = self.last_modify_time
else:
modify_time = self.last_modify_time
meta_data["modified"] = modify_time.astimezone(timezone.get_default_timezone()).strftime("%Y-%m-%d %H:%M:%S.%f") if modify_time else None
#bbox
if self.is_spatial:
meta_data["bounding_box"] = json.dumps(self.bbox)
meta_data["crs"] = self.crs
meta_data["styles"] = []
if self.workspace.publish_channel.sync_geoserver_data:
for style_format in ["sld","qml","lyr"]:
f = self.builtin_style_file(style_format)
if f:
with open(f,"r") as r:
meta_data["styles"].append({"content":(self.format_sld_style(r.read()) if style_format == "sld" else r.read()).encode("base64"),"format":style_format.upper()})
#OWS info
meta_data["ows_resource"] = {}
if meta_data["service_type"] == "WFS" and self.workspace.publish_channel.wfs_endpoint:
meta_data["ows_resource"]["wfs"] = True
meta_data["ows_resource"]["wfs_version"] = self.workspace.publish_channel.wfs_version
meta_data["ows_resource"]["wfs_endpoint"] = self.workspace.publish_channel.wfs_endpoint
if meta_data["service_type"] in ("WFS","WMS") and self.workspace.publish_channel.wfs_endpoint and self.is_spatial:
meta_data["ows_resource"]["wms"] = True
meta_data["ows_resource"]["wms_version"] = self.workspace.publish_channel.wms_version
meta_data["ows_resource"]["wms_endpoint"] = self.workspace.publish_channel.wms_endpoint
geo_settings = json.loads(self.geoserver_setting) if self.geoserver_setting else {}
if geo_settings.get("create_cache_layer",False) and self.workspace.publish_channel.gwc_endpoint:
meta_data["ows_resource"]["gwc"] = True
meta_data["ows_resource"]["gwc_endpoint"] = self.workspace.publish_channel.gwc_endpoint
return meta_data
def update_catalogue_service(self,style_dump_dir=None,md5=False,extra_datas=None):
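        """Send the layer metadata to the CSW catalogue service, dump any returned SLD styles to style_dump_dir, and return the merged metadata used in the publish json."""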
meta_data = self.builtin_metadata
if extra_datas:
meta_data.update(extra_datas)
bbox = meta_data.get("bounding_box",None)
crs = meta_data.get("crs",None)
#update catalog service
if self.workspace.publish_channel.sync_geoserver_data:
res = requests.post("{}/catalogue/api/records/?style_content=true".format(settings.CSW_URL),json=meta_data,auth=(settings.CSW_USER,settings.CSW_PASSWORD),verify=settings.CSW_CERT_VERIFY)
if 400 <= res.status_code < 600 and res.content:
res.reason = "{}({})".format(res.reason,res.content)
res.raise_for_status()
try:
meta_data = res.json()
except:
res.status_code = 400
if res.content.find("microsoft") >= 0:
res.status_code = 401
res.reason = "Please login"
else:
res.status_code = 400
res.reason = "Unknown reason"
res.raise_for_status()
#process styles
styles = meta_data.get("styles",[])
#filter out qml and lyr styles
sld_styles = [s for s in meta_data.get("styles",[]) if s["format"].lower() == "sld" and s.get("raw_content")]
meta_data["styles"] = {}
if style_dump_dir:
if not os.path.exists(style_dump_dir):
os.makedirs(style_dump_dir)
for style in sld_styles:
if style["default"]:
#default sld file
meta_data["default_style"] = style["name"]
if style_dump_dir:
#write the style into file system
style_file = os.path.join(style_dump_dir,"{}.{}.sld".format(self.table_name,style["name"]))
with open(style_file,"wb") as f:
f.write(style["raw_content"].decode("base64"))
if md5:
meta_data["styles"][style["name"]] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, style_file),"default":style["default"],"md5":file_md5(style_file)}
else:
meta_data["styles"][style["name"]] = {"file":"{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, style_file),"default":style["default"]}
else:
meta_data["styles"][style["name"]] = {"content":style["raw_content"].decode("base64")}
else:
meta_data["styles"] = None
#add extra data to meta data
meta_data["workspace"] = self.workspace.name
meta_data["name"] = self.table_name
meta_data["schema"] = self.workspace.publish_schema
meta_data["data_schema"] = self.workspace.publish_data_schema
meta_data["outdated_schema"] = self.workspace.publish_outdated_schema
meta_data["channel"] = self.workspace.publish_channel.name
meta_data["spatial_data"] = self.is_spatial
meta_data["sync_postgres_data"] = self.workspace.publish_channel.sync_postgres_data
meta_data["sync_geoserver_data"] = self.workspace.publish_channel.sync_geoserver_data
meta_data["preview_path"] = "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, BorgConfiguration.PREVIEW_DIR)
meta_data["auth_level"] = self.workspace.auth_level
if self.geoserver_setting:
meta_data["geoserver_setting"] = json.loads(self.geoserver_setting)
#bbox
if "bounding_box" in meta_data:
del meta_data["bounding_box"]
if self.is_spatial:
meta_data["bbox"] = bbox
meta_data["crs"] = crs
meta_data["spatial_type"] = self.spatial_type
meta_data["spatial_column"] = self.spatial_column
return meta_data
def is_up_to_date(self,job=None,enforce=False):
"""
Returns PublishAction object.
"""
#import ipdb;ipdb.set_trace();
if self.publish_status != ResourceStatus.Enabled:
return None
publish_action = self.publish_action
if publish_action.publish_all or publish_action.publish_data:
return publish_action
if self.input_table:
up_to_date = self.input_table.is_up_to_date(job,enforce)
if up_to_date == False:
#input_table is not up to date
return publish_action.column_changed("input_table")
elif up_to_date is None:
publish_action.possible_data_changed = True
if not self.job_run_time or self.job_run_time < self.input_table.job_run_time:
                #input table is up to date, but its last job ran after this publish's last job run.
return publish_action.column_changed("input_table")
for relation in self.relations:
if relation:
for normal_table in relation.normal_tables:
if normal_table:
up_to_date = normal_table.is_up_to_date(job,enforce)
if up_to_date == False:
#dependent normal table is not up to date
return publish_action.column_changed("normal_tables")
elif up_to_date is None:
publish_action.possible_data_changed = True
if self.job_run_time < normal_table.job_run_time:
                            #dependent normal table is up to date, but its last job ran after this publish's last job run.
return publish_action.column_changed("normal_tables")
return publish_action
def delete(self,using=None):
logger.info('Delete {0}:{1}'.format(type(self),self.name))
if try_set_push_owner("publish"):
try:
with transaction.atomic():
super(Publish,self).delete(using)
try_push_to_repository('publish')
finally:
try_clear_push_owner("publish")
else:
super(Publish,self).delete(using)
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
if not self.data_changed: return
with transaction.atomic():
#import ipdb;ipdb.set_trace()
PublishEventListener.preprocess_relation(self)
super(Publish,self).save(force_insert,force_update,using,update_fields)
def __str__(self):
if self.workspace.workspace_as_schema:
return "{}.{}".format(self.workspace, self.name)
else:
return "{}_{}".format(self.workspace, self.name)
class Meta(Transform.Meta):
unique_together = [['workspace','name']]
ordering = ["workspace","name"]
class PublishEventListener(object):
@staticmethod
@receiver(pre_delete, sender=Publish)
def _pre_delete(sender, instance, **args):
#import ipdb;ipdb.set_trace()
# drop tables in both schema
if instance.waiting + instance.running > 0:
raise Exception("Can not delete publish which has some waiting or running jobs.")
cursor = None
try:
cursor=connection.cursor()
instance.drop(cursor, BorgConfiguration.TRANSFORM_SCHEMA,instance.workspace.schema)
instance.drop(cursor, BorgConfiguration.TEST_TRANSFORM_SCHEMA,instance.workspace.test_schema)
instance.unpublish()
finally:
close_cursor(cursor)
@staticmethod
@receiver(post_delete, sender=Publish)
def _post_delete(sender, instance, **args):
refresh_select_choices.send(instance,choice_family="publish")
@staticmethod
def preprocess_relation(instance):
if not instance.editing_mode:
return
#save relationship first
instance._del_relations = []
#break the relationship between publish and publish_normaltable
pos = 0
for relation in instance.relations:
if relation and relation.is_empty:
if relation.pk:
instance._del_relations.append(relation)
instance.set_relation(pos, None)
pos += 1
#save the relationship row
pos = 0
for relation in instance.relations:
if relation:
relation.save()
instance.set_relation(pos,relation)
pos += 1
@staticmethod
@receiver(pre_save, sender=Publish)
def _pre_save(sender, instance, **args):
if not instance.pk:
instance.new_object = True
@staticmethod
@receiver(post_save, sender=Publish)
def _post_save(sender, instance, **args):
#import ipdb;ipdb.set_trace()
if not instance.editing_mode:
return
#delete the empty relations
if hasattr(instance,"_del_relations"):
for relation in instance._del_relations:
relation.delete()
delattr(instance,"_del_relations")
for relation in instance.relations:
if relation and not relation.publish:
relation.publish = instance
relation.save()
if (hasattr(instance,"new_object") and getattr(instance,"new_object")):
delattr(instance,"new_object")
refresh_select_choices.send(instance,choice_family="publish")
class Publish_NormalTable(BorgModel):
"""
Analogous a many to many relationship between Publish and NormalTable
"""
publish = models.ForeignKey(Publish,blank=True,null=True)
normal_table_1 = models.ForeignKey(NormalTable,blank=True,null=True,related_name="publish_normaltable_1")
normal_table_2 = models.ForeignKey(NormalTable,blank=True,null=True,related_name="publish_normaltable_2")
normal_table_3 = models.ForeignKey(NormalTable,blank=True,null=True,related_name="publish_normaltable_3")
normal_table_4 = models.ForeignKey(NormalTable,blank=True,null=True,related_name="publish_normaltable_4")
@property
def normal_tables(self):
return [self.normal_table_1,self.normal_table_2,self.normal_table_3,self.normal_table_4]
def set_normal_table(self,pos,normal_table):
"""
set the normal_table at position pos, position is based 0
"""
if pos == 0:
self.normal_table_1 = normal_table
elif pos == 1:
self.normal_table_2 = normal_table
elif pos == 2:
self.normal_table_3 = normal_table
elif pos == 3:
self.normal_table_4 = normal_table
@property
def is_empty(self):
return not any(self.normal_tables)
def __nonzero__(self):
return any(self.normal_tables)
def __str__(self):
if self.normal_table_1 or self.normal_table_2 or self.normal_table_3 or self.normal_table_4:
return "{0} depedents on {1} {2} {3} {4}".format(self.publish.name if self.publish else "",
self.normal_table_1.name if self.normal_table_1 else "",
", " + self.normal_table_2.name if self.normal_table_2 else "",
", " + self.normal_table_3.name if self.normal_table_3 else "",
", " + self.normal_table_4.name if self.normal_table_4 else "",
)
else:
return self.publish.name if self.publish else ""
| bsd-3-clause | -1,017,023,698,118,988,300 | 41.073731 | 262 | 0.587462 | false |
tboyce021/home-assistant | script/hassfest/__main__.py | 6 | 5278 | """Validate manifests."""
import argparse
import pathlib
import sys
from time import monotonic
from . import (
codeowners,
config_flow,
coverage,
dependencies,
json,
manifest,
mqtt,
requirements,
services,
ssdp,
translations,
zeroconf,
)
from .model import Config, Integration
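# Each plugin module exposes validate(integrations, config) and, optionally, generate(integrations, config).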
INTEGRATION_PLUGINS = [
json,
codeowners,
config_flow,
dependencies,
manifest,
mqtt,
services,
ssdp,
translations,
zeroconf,
]
HASS_PLUGINS = [
coverage,
]
def valid_integration_path(integration_path):
"""Test if it's a valid integration."""
path = pathlib.Path(integration_path)
if not path.is_dir():
raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.")
return path
def get_config() -> Config:
"""Return config."""
parser = argparse.ArgumentParser(description="Hassfest")
parser.add_argument(
"--action", type=str, choices=["validate", "generate"], default=None
)
parser.add_argument(
"--integration-path",
action="append",
type=valid_integration_path,
help="Validate a single integration",
)
parser.add_argument(
"--requirements",
action="store_true",
help="Validate requirements",
)
parsed = parser.parse_args()
if parsed.action is None:
parsed.action = "validate" if parsed.integration_path else "generate"
if parsed.action == "generate" and parsed.integration_path:
raise RuntimeError(
"Generate is not allowed when limiting to specific integrations"
)
if (
not parsed.integration_path
and not pathlib.Path("requirements_all.txt").is_file()
):
raise RuntimeError("Run from Home Assistant root")
return Config(
root=pathlib.Path(".").absolute(),
specific_integrations=parsed.integration_path,
action=parsed.action,
requirements=parsed.requirements,
)
def main():
"""Validate manifests."""
try:
config = get_config()
except RuntimeError as err:
print(err)
return 1
plugins = [*INTEGRATION_PLUGINS]
if config.requirements:
plugins.append(requirements)
if config.specific_integrations:
integrations = {}
for int_path in config.specific_integrations:
integration = Integration(int_path)
integration.load_manifest()
integrations[integration.domain] = integration
else:
integrations = Integration.load_dir(pathlib.Path("homeassistant/components"))
plugins += HASS_PLUGINS
for plugin in plugins:
try:
start = monotonic()
print(f"Validating {plugin.__name__.split('.')[-1]}...", end="", flush=True)
if plugin is requirements and not config.specific_integrations:
print()
plugin.validate(integrations, config)
print(" done in {:.2f}s".format(monotonic() - start))
except RuntimeError as err:
print()
print()
print("Error!")
print(err)
return 1
# When we generate, all errors that are fixable will be ignored,
# as generating them will be fixed.
if config.action == "generate":
general_errors = [err for err in config.errors if not err.fixable]
invalid_itg = [
itg
for itg in integrations.values()
if any(not error.fixable for error in itg.errors)
]
else:
# action == validate
general_errors = config.errors
invalid_itg = [itg for itg in integrations.values() if itg.errors]
warnings_itg = [itg for itg in integrations.values() if itg.warnings]
print()
print("Integrations:", len(integrations))
print("Invalid integrations:", len(invalid_itg))
print()
if not invalid_itg and not general_errors:
print_integrations_status(config, warnings_itg, show_fixable_errors=False)
if config.action == "generate":
for plugin in plugins:
if hasattr(plugin, "generate"):
plugin.generate(integrations, config)
return 0
if config.action == "generate":
print("Found errors. Generating files canceled.")
print()
if general_errors:
print("General errors:")
for error in general_errors:
print("*", error)
print()
invalid_itg.extend(itg for itg in warnings_itg if itg not in invalid_itg)
print_integrations_status(config, invalid_itg, show_fixable_errors=False)
return 1
def print_integrations_status(config, integrations, *, show_fixable_errors=True):
"""Print integration status."""
for integration in sorted(integrations, key=lambda itg: itg.domain):
extra = f" - {integration.path}" if config.specific_integrations else ""
print(f"Integration {integration.domain}{extra}:")
for error in integration.errors:
if show_fixable_errors or not error.fixable:
print("*", error)
for warning in integration.warnings:
print("*", "[WARNING]", warning)
print()
if __name__ == "__main__":
sys.exit(main())
| apache-2.0 | -2,689,471,858,138,553,300 | 26.633508 | 88 | 0.612164 | false |
lorensen/VTKExamples | src/Python/VisualizationAlgorithms/MarchingCases.py | 1 | 19479 | #!/usr/bin/env python
import vtk
def main():
mc_cases, rotation, label = get_program_parameters()
if not mc_cases:
mc_cases = [7]
else:
# Ensure that they are unique.
mc_cases = list(set(mc_cases))
# Check that they lie in the correct range.
badCases = []
for item in mc_cases:
            if abs(int(item)) > 14:
badCases.append(item)
if badCases:
print('Bad case number(s)', ','.join(map(str, badCases)))
for item in badCases:
mc_cases.remove(item)
if not mc_cases:
print('No cases.')
return
marching_cubes(mc_cases, rotation, label)
def get_program_parameters():
import argparse
description = 'Marching cubes cases for 3D isosurface generation.'
epilogue = '''
Marching cubes cases for 3D isosurface generation.
The 256 possible cases have been reduced to 15 cases using symmetry.
Dark vertices are greater than the selected isosurface value.
For the cases, enter them as integers separated by a space e.g: 1 2 3
'''
parser = argparse.ArgumentParser(description=description, epilog=epilogue,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('cases', nargs='*', type=int, default=[],
                        help='A list of integers i such that 0 <= abs(i) <= 14, corresponding to the cases desired.')
parser.add_argument('-r', '--rotation', type=int, default=0,
help='Rotate camera around the cube, for i such that 0 <= abs(i) < 4,\
corresponding to 0, 90, 180, 270 degrees.')
# Use a mutually exclusive group.
label_parser = parser.add_mutually_exclusive_group(required=False)
label_parser.add_argument('-l', '--label', action='store_true', dest='label',
help='Display a label, true by default.')
label_parser.add_argument('-n', '--no_label', action='store_false', dest='label',
                              help='Suppress displaying a label.')
parser.set_defaults(label=True)
args = parser.parse_args()
return args.cases, args.rotation, args.label
def marching_cubes(mcCases, rotation=0, label=True):
color = vtk.vtkNamedColors()
# Rotate the final figure 0, 90, 180, 270 degrees.
rotation = abs(int(rotation))
if rotation > 3:
rotation = 0
if len(mcCases) > 1:
print('Cases', ', '.join(map(str, mcCases)))
else:
print('Cases', ','.join(map(str, mcCases)))
print('Rotated', rotation * 90, 'degrees.')
renWin = vtk.vtkRenderWindow()
renWin.SetSize(640, 480)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Always use a grid of four columns unless number of cases < 4.
renderers = list()
gridSize = ((len(mcCases) + 3) // 4) * 4
if len(mcCases) < 4:
gridSize = len(mcCases)
for i in range(0, gridSize):
# Create the Renderer
renderer = vtk.vtkRenderer()
renderers.append(renderer)
# Set the background color.
renderers[i].SetBackground(color.GetColor3d("slate_grey"))
renWin.AddRenderer(renderer)
for i in range(0, len(mcCases)):
# Define a Single Cube
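        # One scalar per cube vertex (same order as the points below); these defaults are overwritten by the selected case function.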
Scalars = vtk.vtkFloatArray()
Scalars.InsertNextValue(1.0)
Scalars.InsertNextValue(0.0)
Scalars.InsertNextValue(0.0)
Scalars.InsertNextValue(1.0)
Scalars.InsertNextValue(0.0)
Scalars.InsertNextValue(0.0)
Scalars.InsertNextValue(0.0)
Scalars.InsertNextValue(0.0)
Points = vtk.vtkPoints()
Points.InsertNextPoint(0, 0, 0)
Points.InsertNextPoint(1, 0, 0)
Points.InsertNextPoint(1, 1, 0)
Points.InsertNextPoint(0, 1, 0)
Points.InsertNextPoint(0, 0, 1)
Points.InsertNextPoint(1, 0, 1)
Points.InsertNextPoint(1, 1, 1)
Points.InsertNextPoint(0, 1, 1)
Ids = vtk.vtkIdList()
Ids.InsertNextId(0)
Ids.InsertNextId(1)
Ids.InsertNextId(2)
Ids.InsertNextId(3)
Ids.InsertNextId(4)
Ids.InsertNextId(5)
Ids.InsertNextId(6)
Ids.InsertNextId(7)
Grid = vtk.vtkUnstructuredGrid()
Grid.Allocate(10, 10)
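        # Insert the cube as a single hexahedron cell (cell type 12 is VTK_HEXAHEDRON).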
Grid.InsertNextCell(12, Ids)
Grid.SetPoints(Points)
Grid.GetPointData().SetScalars(Scalars)
# Find the triangles that lie along the 0.5 contour in this cube.
Marching = vtk.vtkContourFilter()
Marching.SetInputData(Grid)
Marching.SetValue(0, 0.5)
Marching.Update()
# Extract the edges of the triangles just found.
triangleEdges = vtk.vtkExtractEdges()
triangleEdges.SetInputConnection(Marching.GetOutputPort())
# Draw the edges as tubes instead of lines. Also create the associated
# mapper and actor to display the tubes.
triangleEdgeTubes = vtk.vtkTubeFilter()
triangleEdgeTubes.SetInputConnection(triangleEdges.GetOutputPort())
triangleEdgeTubes.SetRadius(.005)
triangleEdgeTubes.SetNumberOfSides(6)
triangleEdgeTubes.UseDefaultNormalOn()
triangleEdgeTubes.SetDefaultNormal(.577, .577, .577)
triangleEdgeMapper = vtk.vtkPolyDataMapper()
triangleEdgeMapper.SetInputConnection(triangleEdgeTubes.GetOutputPort())
triangleEdgeMapper.ScalarVisibilityOff()
triangleEdgeActor = vtk.vtkActor()
triangleEdgeActor.SetMapper(triangleEdgeMapper)
triangleEdgeActor.GetProperty().SetDiffuseColor(
color.GetColor3d("lamp_black"))
triangleEdgeActor.GetProperty().SetSpecular(.4)
triangleEdgeActor.GetProperty().SetSpecularPower(10)
# Shrink the triangles we found earlier. Create the associated mapper
# and actor. Set the opacity of the shrunken triangles.
aShrinker = vtk.vtkShrinkPolyData()
aShrinker.SetShrinkFactor(1)
aShrinker.SetInputConnection(Marching.GetOutputPort())
aMapper = vtk.vtkPolyDataMapper()
aMapper.ScalarVisibilityOff()
aMapper.SetInputConnection(aShrinker.GetOutputPort())
Triangles = vtk.vtkActor()
Triangles.SetMapper(aMapper)
Triangles.GetProperty().SetDiffuseColor(
color.GetColor3d("banana"))
Triangles.GetProperty().SetOpacity(.6)
# Draw a cube the same size and at the same position as the one
# created previously. Extract the edges because we only want to see
# the outline of the cube. Pass the edges through a vtkTubeFilter so
# they are displayed as tubes rather than lines.
CubeModel = vtk.vtkCubeSource()
CubeModel.SetCenter(.5, .5, .5)
Edges = vtk.vtkExtractEdges()
Edges.SetInputConnection(CubeModel.GetOutputPort())
Tubes = vtk.vtkTubeFilter()
Tubes.SetInputConnection(Edges.GetOutputPort())
Tubes.SetRadius(.01)
Tubes.SetNumberOfSides(6)
Tubes.UseDefaultNormalOn()
Tubes.SetDefaultNormal(.577, .577, .577)
# Create the mapper and actor to display the cube edges.
TubeMapper = vtk.vtkPolyDataMapper()
TubeMapper.SetInputConnection(Tubes.GetOutputPort())
CubeEdges = vtk.vtkActor()
CubeEdges.SetMapper(TubeMapper)
CubeEdges.GetProperty().SetDiffuseColor(
color.GetColor3d("khaki"))
CubeEdges.GetProperty().SetSpecular(.4)
CubeEdges.GetProperty().SetSpecularPower(10)
# Create a sphere to use as a glyph source for vtkGlyph3D.
Sphere = vtk.vtkSphereSource()
Sphere.SetRadius(0.04)
Sphere.SetPhiResolution(20)
Sphere.SetThetaResolution(20)
# Remove the part of the cube with data values below 0.5.
ThresholdIn = vtk.vtkThresholdPoints()
ThresholdIn.SetInputData(Grid)
ThresholdIn.ThresholdByUpper(.5)
# Display spheres at the vertices remaining in the cube data set after
# it was passed through vtkThresholdPoints.
Vertices = vtk.vtkGlyph3D()
Vertices.SetInputConnection(ThresholdIn.GetOutputPort())
Vertices.SetSourceConnection(Sphere.GetOutputPort())
# Create a mapper and actor to display the glyphs.
SphereMapper = vtk.vtkPolyDataMapper()
SphereMapper.SetInputConnection(Vertices.GetOutputPort())
SphereMapper.ScalarVisibilityOff()
CubeVertices = vtk.vtkActor()
CubeVertices.SetMapper(SphereMapper)
CubeVertices.GetProperty().SetDiffuseColor(
color.GetColor3d("tomato"))
# Define the text for the label
caseLabel = vtk.vtkVectorText()
caseLabel.SetText("Case 1")
if label:
# Set up a transform to move the label to a new position.
aLabelTransform = vtk.vtkTransform()
aLabelTransform.Identity()
# Position the label according to the rotation of the figure.
if rotation == 0:
aLabelTransform.Translate(-0.2, 0, 1.25)
aLabelTransform.Scale(.05, .05, .05)
elif rotation == 1:
aLabelTransform.RotateY(90)
aLabelTransform.Translate(-1.25, 0, 1.25)
aLabelTransform.Scale(.05, .05, .05)
elif rotation == 2:
aLabelTransform.RotateY(180)
aLabelTransform.Translate(-1.25, 0, 0.2)
aLabelTransform.Scale(.05, .05, .05)
else:
aLabelTransform.RotateY(270)
aLabelTransform.Translate(-0.2, 0, 0.2)
aLabelTransform.Scale(.05, .05, .05)
# Move the label to a new position.
labelTransform = vtk.vtkTransformPolyDataFilter()
labelTransform.SetTransform(aLabelTransform)
labelTransform.SetInputConnection(caseLabel.GetOutputPort())
# Create a mapper and actor to display the text.
labelMapper = vtk.vtkPolyDataMapper()
labelMapper.SetInputConnection(labelTransform.GetOutputPort())
labelActor = vtk.vtkActor()
labelActor.SetMapper(labelMapper)
# Define the base that the cube sits on. Create its associated mapper
# and actor. Set the position of the actor.
baseModel = vtk.vtkCubeSource()
baseModel.SetXLength(1.5)
baseModel.SetYLength(.01)
baseModel.SetZLength(1.5)
baseMapper = vtk.vtkPolyDataMapper()
baseMapper.SetInputConnection(baseModel.GetOutputPort())
base = vtk.vtkActor()
base.SetMapper(baseMapper)
base.SetPosition(.5, -0.09, .5)
# Set the scalar values for this case of marching cubes.
# A negative case number will generate a complementary case
mcCase = mcCases[i]
if mcCase < 0:
cases[-mcCase](Scalars, caseLabel, 0, 1)
else:
cases[mcCase](Scalars, caseLabel, 1, 0)
# Force the grid to update.
Grid.Modified()
# Add the actors to the renderer
renderers[i].AddActor(triangleEdgeActor)
renderers[i].AddActor(base)
if label:
renderers[i].AddActor(labelActor)
renderers[i].AddActor(CubeEdges)
renderers[i].AddActor(CubeVertices)
renderers[i].AddActor(Triangles)
# Position the camera.
renderers[i].GetActiveCamera().Dolly(1.2)
# Rotate the camera an extra 30 degrees so the cube is not face on.
if rotation == 0:
renderers[i].GetActiveCamera().Azimuth(30)
elif rotation == 1:
renderers[i].GetActiveCamera().Azimuth(30 + 90)
elif rotation == 2:
renderers[i].GetActiveCamera().Azimuth(30 + 180)
else:
renderers[i].GetActiveCamera().Azimuth(30 + 270)
renderers[i].GetActiveCamera().Elevation(20)
renderers[i].ResetCamera()
renderers[i].ResetCameraClippingRange()
if i > 0:
renderers[i].SetActiveCamera(renderers[0].GetActiveCamera())
# Setup viewports for the renderers
rendererSize = 300
xGridDimensions = 4
if len(mcCases) < 4:
xGridDimensions = len(mcCases)
yGridDimensions = (len(mcCases) - 1) // 4 + 1
print("Grid dimensions, (x, y): ({:d}, {:d})".format(xGridDimensions, yGridDimensions))
renWin.SetSize(
rendererSize * xGridDimensions, rendererSize * yGridDimensions)
for row in range(0, yGridDimensions):
for col in range(0, xGridDimensions):
index = row * xGridDimensions + col
# (xmin, ymin, xmax, ymax)
viewport = [
float(col) / xGridDimensions,
float(yGridDimensions - (row + 1)) / yGridDimensions,
float(col + 1) / xGridDimensions,
float(yGridDimensions - row) / yGridDimensions]
renderers[index].SetViewport(viewport)
iren.Initialize()
renWin.Render()
iren.Start()
def case0(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, OUT)
scalars.InsertValue(1, OUT)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, OUT)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 0 - 00000000")
else:
caseLabel.SetText("Case 0c - 11111111")
def case1(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, OUT)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, OUT)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 1 - 00000001")
else:
caseLabel.SetText("Case 1c - 11111110")
def case2(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, IN)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, OUT)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 2 - 00000011")
else:
caseLabel.SetText("Case 2c - 11111100")
def case3(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, OUT)
scalars.InsertValue(2, IN)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, OUT)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 3 - 00000101")
else:
caseLabel.SetText("Case 3c - 11111010")
def case4(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, OUT)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, IN)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 4 - 01000001")
else:
caseLabel.SetText("Case 4c - 10111110")
def case5(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, OUT)
scalars.InsertValue(1, IN)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, IN)
scalars.InsertValue(5, IN)
scalars.InsertValue(6, OUT)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 5 - 00110010")
else:
caseLabel.SetText("Case 5c - 11001101")
def case6(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, OUT)
scalars.InsertValue(1, IN)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, IN)
scalars.InsertValue(4, IN)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, OUT)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 6 - 00011010")
else:
caseLabel.SetText("Case 6c - 11100101")
def case7(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, IN)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, IN)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 7 - 01000011")
else:
caseLabel.SetText("Case 7c - 10111100")
def case8(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, IN)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, IN)
scalars.InsertValue(5, IN)
scalars.InsertValue(6, OUT)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 8 - 00110011")
else:
caseLabel.SetText("Case 8c - 11001100")
def case9(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, OUT)
scalars.InsertValue(1, IN)
scalars.InsertValue(2, IN)
scalars.InsertValue(3, IN)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, IN)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 9 - 01001110")
else:
caseLabel.SetText("Case 9c - 10110001")
def case10(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, OUT)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, IN)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, IN)
scalars.InsertValue(6, IN)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 10 - 01101001")
else:
caseLabel.SetText("Case 10c - 10010110")
def case11(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, OUT)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, OUT)
scalars.InsertValue(4, IN)
scalars.InsertValue(5, IN)
scalars.InsertValue(6, IN)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 11 - 01110001")
else:
caseLabel.SetText("Case 11c - 10001110")
def case12(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, OUT)
scalars.InsertValue(1, IN)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, IN)
scalars.InsertValue(4, IN)
scalars.InsertValue(5, IN)
scalars.InsertValue(6, OUT)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 12 - 00111010")
else:
caseLabel.SetText("Case 12c - 11000101")
def case13(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, OUT)
scalars.InsertValue(1, IN)
scalars.InsertValue(2, OUT)
scalars.InsertValue(3, IN)
scalars.InsertValue(4, IN)
scalars.InsertValue(5, OUT)
scalars.InsertValue(6, IN)
scalars.InsertValue(7, OUT)
if IN == 1:
caseLabel.SetText("Case 13 - 01011010")
else:
caseLabel.SetText("Case 13c - 10100101")
def case14(scalars, caseLabel, IN, OUT):
scalars.InsertValue(0, IN)
scalars.InsertValue(1, OUT)
scalars.InsertValue(2, IN)
scalars.InsertValue(3, IN)
scalars.InsertValue(4, OUT)
scalars.InsertValue(5, IN)
scalars.InsertValue(6, IN)
scalars.InsertValue(7, IN)
if IN == 1:
caseLabel.SetText("Case 14 - 11101101")
else:
caseLabel.SetText("Case 14c - 00010010")
cases = [case0, case1, case2, case3, case4, case5, case6, case7, case8, case9, case10, case11, case12, case13, case14]
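# Index i in this list is marching cubes case i; a negative case number on the command line selects the complementary case.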
if __name__ == '__main__':
main()
| apache-2.0 | 6,219,997,447,713,518,000 | 33.476106 | 118 | 0.633195 | false |
public-ink/public-ink | server/appengine-staging/lib/graphql/type/schema.py | 2 | 3470 | from collections import Iterable
from .definition import GraphQLObjectType
from .directives import GraphQLDirective, specified_directives
from .introspection import IntrospectionSchema
from .typemap import GraphQLTypeMap
class GraphQLSchema(object):
"""Schema Definition
A Schema is created by supplying the root types of each type of operation, query and mutation (optional).
A schema definition is then supplied to the validator and executor.
Example:
MyAppSchema = GraphQLSchema(
query=MyAppQueryRootType,
mutation=MyAppMutationRootType,
)
Note: If an array of `directives` are provided to GraphQLSchema, that will be
the exact list of directives represented and allowed. If `directives` is not
provided then a default set of the specified directives (e.g. @include and
@skip) will be used. If you wish to provide *additional* directives to these
specified directives, you must explicitly declare them. Example:
MyAppSchema = GraphQLSchema(
...
directives=specified_directives.extend([MyCustomerDirective]),
)
"""
__slots__ = '_query', '_mutation', '_subscription', '_type_map', '_directives', '_implementations', '_possible_type_map'
def __init__(self, query, mutation=None, subscription=None, directives=None, types=None):
assert isinstance(query, GraphQLObjectType), 'Schema query must be Object Type but got: {}.'.format(query)
if mutation:
assert isinstance(mutation, GraphQLObjectType), \
'Schema mutation must be Object Type but got: {}.'.format(mutation)
if subscription:
assert isinstance(subscription, GraphQLObjectType), \
'Schema subscription must be Object Type but got: {}.'.format(subscription)
if types:
assert isinstance(types, Iterable), \
'Schema types must be iterable if provided but got: {}.'.format(types)
self._query = query
self._mutation = mutation
self._subscription = subscription
if directives is None:
directives = specified_directives
assert all(isinstance(d, GraphQLDirective) for d in directives), \
'Schema directives must be List[GraphQLDirective] if provided but got: {}.'.format(
directives
)
self._directives = directives
initial_types = [
query,
mutation,
subscription,
IntrospectionSchema
]
if types:
initial_types += types
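        # Build the type map: every named type reachable from the root types (plus the introspection schema and any extra types).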
self._type_map = GraphQLTypeMap(initial_types)
def get_query_type(self):
return self._query
def get_mutation_type(self):
return self._mutation
def get_subscription_type(self):
return self._subscription
def get_type_map(self):
return self._type_map
def get_type(self, name):
return self._type_map.get(name)
def get_directives(self):
return self._directives
def get_directive(self, name):
for directive in self.get_directives():
if directive.name == name:
return directive
return None
def get_possible_types(self, abstract_type):
return self._type_map.get_possible_types(abstract_type)
def is_possible_type(self, abstract_type, possible_type):
return self._type_map.is_possible_type(abstract_type, possible_type)
| gpl-3.0 | 7,295,277,902,535,537,000 | 33.7 | 124 | 0.649568 | false |
CZ-NIC/knot | tests-extra/tests/zone/reload/test.py | 1 | 1592 | #!/usr/bin/env python3
'''Test for reload of a changed zone (serial up, nochange, serial down).'''
from dnstest.test import Test
from dnstest.utils import set_err, detail_log
import random
t = Test()
master = t.server("knot")
# Zone setup
zone = t.zone("serial.", storage=".")
t.link(zone, master, ixfr=True)
t.start()
# Load zones
serial = master.zone_wait(zone)
def reload_zone(version, exp_serial, exp_version, msg):
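    """Switch the zone file to 'version', reload the server via a randomly chosen control method, then check the SOA serial and that the expected record version is served."""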
master.update_zonefile(zone, version)
opt = random.choice([1, 2, 3])
if opt == 1:
master.reload()
if opt == 2:
master.ctl("zone-reload " + zone[0].name) # the reload should fail -> no blocking mode, no error
t.sleep(2)
if opt == 3:
master.ctl("-f zone-reload " + zone[0].name, wait=True)
        t.sleep(1) # a forced zone-reload only *schedules* the LOAD event
new_serial = master.zone_wait(zone)
if new_serial != exp_serial:
set_err("SOA MISMATCH %s (%d)" % (msg, opt))
detail_log("!Zone '%s' SOA serial %s != %s" % (zone[0].name, new_serial, exp_serial))
return
resp = master.dig("new-record%d.%s" % (exp_version, zone[0].name), "A")
resp.check(rcode="NOERROR")
# Zone changes, serial increases (create changeset)
reload_zone(1, serial + 1, 1, "create changeset")
# Zone changes, serial doesn't change (create changeset, increment serial automatically)
reload_zone(2, serial + 2, 2, "auto incremented")
# Zone changes, serial jumps out-of-range (journal is not applicable)
reload_zone(3, serial + 2, 2, "journal not applied")
# Stop master.
master.stop()
t.end()
| gpl-3.0 | -6,542,197,897,110,202,000 | 27.428571 | 104 | 0.646357 | false |
hkariti/ansible | lib/ansible/modules/packaging/os/apk.py | 14 | 11310 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Kevin Brebanov <https://github.com/kbrebanov>
# Based on pacman (Afterburn <http://github.com/afterburn>, Aaron Bull Schaefer <[email protected]>)
# and apt (Matthew Williams <[email protected]>) modules.
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: apk
short_description: Manages apk packages
description:
- Manages I(apk) packages for Alpine Linux.
author: "Kevin Brebanov (@kbrebanov)"
version_added: "2.0"
options:
available:
description:
- During upgrade, reset versioned world dependencies and change logic to prefer replacing or downgrading packages (instead of holding them)
if the currently installed package is no longer available from any repository.
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.4"
name:
description:
- A package name, like C(foo), or multiple packages, like C(foo, bar).
required: false
default: null
repository:
description:
- A package repository or multiple repositories.
Unlike with the underlying apk command, this list will override the system repositories rather than supplement them.
required: false
default: null
version_added: "2.4"
state:
description:
- Indicates the desired package(s) state.
- C(present) ensures the package(s) is/are present.
- C(absent) ensures the package(s) is/are absent.
- C(latest) ensures the package(s) is/are present and the latest version(s).
required: false
default: present
choices: [ "present", "absent", "latest" ]
update_cache:
description:
      - Update repository indexes. Can be run with other steps or on its own.
required: false
default: no
choices: [ "yes", "no" ]
upgrade:
description:
- Upgrade all installed packages to their latest version.
required: false
default: no
choices: [ "yes", "no" ]
notes:
- '"name" and "upgrade" are mutually exclusive.'
  - When used with a `loop:` each package will be processed individually; it is much more efficient to pass the list directly to the `name` option.
'''
EXAMPLES = '''
# Update repositories and install "foo" package
- apk:
name: foo
update_cache: yes
# Update repositories and install "foo" and "bar" packages
- apk:
name: foo,bar
update_cache: yes
# Remove "foo" package
- apk:
name: foo
state: absent
# Remove "foo" and "bar" packages
- apk:
name: foo,bar
state: absent
# Install the package "foo"
- apk:
name: foo
state: present
# Install the packages "foo" and "bar"
- apk:
name: foo,bar
state: present
# Update repositories and update package "foo" to latest version
- apk:
name: foo
state: latest
update_cache: yes
# Update repositories and update packages "foo" and "bar" to latest versions
- apk:
name: foo,bar
state: latest
update_cache: yes
# Update all installed packages to the latest versions
- apk:
upgrade: yes
# Upgrade / replace / downgrade / uninstall all installed packages to the latest versions available
- apk:
available: yes
upgrade: yes
# Update repositories as a separate step
- apk:
update_cache: yes
# Install package from a specific repository
- apk:
name: foo
state: latest
update_cache: yes
repository: http://dl-3.alpinelinux.org/alpine/edge/main
'''
RETURN = '''
packages:
description: a list of packages that have been changed
returned: when packages have changed
type: list
sample: ['package', 'other-package']
'''
import re
# Import module snippets.
from ansible.module_utils.basic import AnsibleModule
def parse_for_packages(stdout):
packages = []
data = stdout.split('\n')
regex = re.compile(r'^\(\d+/\d+\)\s+\S+\s+(\S+)')
for l in data:
p = regex.search(l)
if p:
packages.append(p.group(1))
return packages
def update_package_db(module, exit):
cmd = "%s update" % (APK_PATH)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc != 0:
module.fail_json(msg="could not update package db", stdout=stdout, stderr=stderr)
elif exit:
module.exit_json(changed=True, msg='updated repository indexes', stdout=stdout, stderr=stderr)
else:
return True
def query_toplevel(module, name):
# /etc/apk/world contains a list of top-level packages separated by ' ' or \n
# packages may contain repository (@) or version (=<>~) separator characters or start with negation !
regex = re.compile(r'^' + re.escape(name) + r'([@=<>~].+)?$')
with open('/etc/apk/world') as f:
content = f.read().split()
for p in content:
if regex.search(p):
return True
return False
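# Illustrative note (not in the original module): for a name such as "nginx", the
# regex in query_toplevel() above matches world entries like "nginx", "nginx@edge",
# "nginx=1.14.2-r0" or "nginx>1.0", but not "nginx-doc" or a negated "!nginx"
# (hypothetical entries shown only for clarity).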
def query_package(module, name):
cmd = "%s -v info --installed %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
if rc == 0:
return True
else:
return False
def query_latest(module, name):
cmd = "%s version %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
search_pattern = r"(%s)-[\d\.\w]+-[\d\w]+\s+(.)\s+[\d\.\w]+-[\d\w]+\s+" % (re.escape(name))
match = re.search(search_pattern, stdout)
if match and match.group(2) == "<":
return False
return True
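# Illustrative note (not in the original module): `apk version <name>` prints lines
# roughly of the form "nginx-1.14.2-r0 < 1.16.1-r1" (hypothetical values). group(2)
# of the regex above is the comparison character; "<" means a newer version is
# available, so query_latest() returns False in that case.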
def query_virtual(module, name):
cmd = "%s -v info --description %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
search_pattern = r"^%s: virtual meta package" % (re.escape(name))
if re.search(search_pattern, stdout):
return True
return False
def get_dependencies(module, name):
cmd = "%s -v info --depends %s" % (APK_PATH, name)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
dependencies = stdout.split()
if len(dependencies) > 1:
return dependencies[1:]
else:
return []
def upgrade_packages(module, available):
if module.check_mode:
cmd = "%s upgrade --simulate" % (APK_PATH)
else:
cmd = "%s upgrade" % (APK_PATH)
if available:
cmd = "%s --available" % cmd
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
if rc != 0:
module.fail_json(msg="failed to upgrade packages", stdout=stdout, stderr=stderr, packages=packagelist)
if re.search(r'^OK', stdout):
module.exit_json(changed=False, msg="packages already upgraded", stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="upgraded packages", stdout=stdout, stderr=stderr, packages=packagelist)
def install_packages(module, names, state):
upgrade = False
to_install = []
to_upgrade = []
for name in names:
# Check if virtual package
if query_virtual(module, name):
# Get virtual package dependencies
dependencies = get_dependencies(module, name)
for dependency in dependencies:
if state == 'latest' and not query_latest(module, dependency):
to_upgrade.append(dependency)
else:
if not query_toplevel(module, name):
to_install.append(name)
elif state == 'latest' and not query_latest(module, name):
to_upgrade.append(name)
if to_upgrade:
upgrade = True
if not to_install and not upgrade:
module.exit_json(changed=False, msg="package(s) already installed")
packages = " ".join(to_install + to_upgrade)
if upgrade:
if module.check_mode:
cmd = "%s add --upgrade --simulate %s" % (APK_PATH, packages)
else:
cmd = "%s add --upgrade %s" % (APK_PATH, packages)
else:
if module.check_mode:
cmd = "%s add --simulate %s" % (APK_PATH, packages)
else:
cmd = "%s add %s" % (APK_PATH, packages)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
if rc != 0:
module.fail_json(msg="failed to install %s" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="installed %s package(s)" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
def remove_packages(module, names):
installed = []
for name in names:
if query_package(module, name):
installed.append(name)
if not installed:
module.exit_json(changed=False, msg="package(s) already removed")
names = " ".join(installed)
if module.check_mode:
cmd = "%s del --purge --simulate %s" % (APK_PATH, names)
else:
cmd = "%s del --purge %s" % (APK_PATH, names)
rc, stdout, stderr = module.run_command(cmd, check_rc=False)
packagelist = parse_for_packages(stdout)
# Check to see if packages are still present because of dependencies
for name in installed:
if query_package(module, name):
rc = 1
break
if rc != 0:
module.fail_json(msg="failed to remove %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
module.exit_json(changed=True, msg="removed %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
# ==========================================
# Main control flow.
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', choices=['present', 'installed', 'absent', 'removed', 'latest']),
name=dict(type='list'),
repository=dict(type='list'),
update_cache=dict(default='no', type='bool'),
upgrade=dict(default='no', type='bool'),
available=dict(default='no', type='bool'),
),
required_one_of=[['name', 'update_cache', 'upgrade']],
mutually_exclusive=[['name', 'upgrade']],
supports_check_mode=True
)
# Set LANG env since we parse stdout
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
global APK_PATH
APK_PATH = module.get_bin_path('apk', required=True)
p = module.params
# add repositories to the APK_PATH
if p['repository']:
for r in p['repository']:
APK_PATH = "%s --repository %s --repositories-file /dev/null" % (APK_PATH, r)
# normalize the state parameter
if p['state'] in ['present', 'installed']:
p['state'] = 'present'
if p['state'] in ['absent', 'removed']:
p['state'] = 'absent'
if p['update_cache']:
update_package_db(module, not p['name'] and not p['upgrade'])
if p['upgrade']:
upgrade_packages(module, p['available'])
if p['state'] in ['present', 'latest']:
install_packages(module, p['name'], p['state'])
elif p['state'] == 'absent':
remove_packages(module, p['name'])
if __name__ == '__main__':
main()
| gpl-3.0 | 602,523,452,841,156,400 | 31.130682 | 147 | 0.62405 | false |
Cog-Creators/Red-DiscordBot | redbot/cogs/audio/core/cog_utils.py | 3 | 4323 | from abc import ABC
from typing import Final
from base64 import b64decode
from io import BytesIO
import struct
from redbot import VersionInfo
from redbot.core import commands
from ..converters import get_lazy_converter, get_playlist_converter
__version__ = VersionInfo.from_json({"major": 2, "minor": 4, "micro": 0, "releaselevel": "final"})
__author__ = ["aikaterna", "Draper"]
_SCHEMA_VERSION: Final[int] = 3
_OWNER_NOTIFICATION: Final[int] = 1
LazyGreedyConverter = get_lazy_converter("--")
PlaylistConverter = get_playlist_converter()
HUMANIZED_PERM = {
"create_instant_invite": "Create Instant Invite",
"kick_members": "Kick Members",
"ban_members": "Ban Members",
"administrator": "Administrator",
"manage_channels": "Manage Channels",
"manage_guild": "Manage Server",
"add_reactions": "Add Reactions",
"view_audit_log": "View Audit Log",
"priority_speaker": "Priority Speaker",
"stream": "Go Live",
"read_messages": "Read Text Channels & See Voice Channels",
"send_messages": "Send Messages",
"send_tts_messages": "Send TTS Messages",
"manage_messages": "Manage Messages",
"embed_links": "Embed Links",
"attach_files": "Attach Files",
"read_message_history": "Read Message History",
"mention_everyone": "Mention @everyone, @here, and All Roles",
"external_emojis": "Use External Emojis",
"view_guild_insights": "View Server Insights",
"connect": "Connect",
"speak": "Speak",
"mute_members": "Mute Members",
"deafen_members": "Deafen Members",
"move_members": "Move Members",
"use_voice_activation": "Use Voice Activity",
"change_nickname": "Change Nickname",
"manage_nicknames": "Manage Nicknames",
"manage_roles": "Manage Roles",
"manage_webhooks": "Manage Webhooks",
"manage_emojis": "Manage Emojis",
}
class CompositeMetaClass(type(commands.Cog), type(ABC)):
"""
This allows the metaclass used for proper type detection to
coexist with discord.py's metaclass
"""
pass
# Both DataReader and DataWriter are taken from https://github.com/Devoxin/Lavalink.py/blob/master/lavalink/datarw.py
# These are licensed under MIT. Thanks, Devoxin, for putting these together!
# The license can be found in https://github.com/Devoxin/Lavalink.py/blob/master/LICENSE
class DataReader:
def __init__(self, ts):
self._buf = BytesIO(b64decode(ts))
def _read(self, n):
return self._buf.read(n)
def read_byte(self):
return self._read(1)
def read_boolean(self):
(result,) = struct.unpack("B", self.read_byte())
return result != 0
def read_unsigned_short(self):
(result,) = struct.unpack(">H", self._read(2))
return result
def read_int(self):
(result,) = struct.unpack(">i", self._read(4))
return result
def read_long(self):
(result,) = struct.unpack(">Q", self._read(8))
return result
def read_utf(self):
text_length = self.read_unsigned_short()
return self._read(text_length)
class DataWriter:
def __init__(self):
self._buf = BytesIO()
def _write(self, data):
self._buf.write(data)
def write_byte(self, byte):
self._buf.write(byte)
def write_boolean(self, b):
enc = struct.pack("B", 1 if b else 0)
self.write_byte(enc)
def write_unsigned_short(self, s):
enc = struct.pack(">H", s)
self._write(enc)
def write_int(self, i):
enc = struct.pack(">i", i)
self._write(enc)
def write_long(self, l):
enc = struct.pack(">Q", l)
self._write(enc)
def write_utf(self, s):
utf = s.encode("utf8")
byte_len = len(utf)
if byte_len > 65535:
raise OverflowError("UTF string may not exceed 65535 bytes!")
self.write_unsigned_short(byte_len)
self._write(utf)
def finish(self):
with BytesIO() as track_buf:
byte_len = self._buf.getbuffer().nbytes
flags = byte_len | (1 << 30)
enc_flags = struct.pack(">i", flags)
track_buf.write(enc_flags)
self._buf.seek(0)
track_buf.write(self._buf.read())
self._buf.close()
track_buf.seek(0)
return track_buf.read()
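# Illustrative round-trip sketch (not part of the original file; values are made up).
# DataWriter.finish() prefixes the payload with a 4-byte flags field, so a reader has
# to consume it with read_int() before reading the payload back:
#
#     from base64 import b64encode
#     writer = DataWriter()
#     writer.write_utf("track title")
#     blob = writer.finish()                 # raw bytes: flags + payload
#     reader = DataReader(b64encode(blob))   # DataReader expects base64 input
#     reader.read_int()                      # skip the flags field
#     assert reader.read_utf() == b"track title"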
| gpl-3.0 | -9,030,245,852,811,552,000 | 28.013423 | 117 | 0.617395 | false |
rackerlabs/django-DefectDojo | dojo/reports/views.py | 2 | 41612 | import logging
import mimetypes
import os
import urllib.parse
from datetime import datetime
from dateutil.relativedelta import relativedelta
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.core.exceptions import PermissionDenied
from django.urls import reverse
from django.http import Http404, HttpResponseRedirect, HttpResponseForbidden, JsonResponse
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from django.utils import timezone
from dojo.celery import app
from dojo.endpoint.views import get_endpoint_ids
from dojo.filters import ReportFindingFilter, ReportAuthedFindingFilter, EndpointReportFilter, ReportFilter, \
EndpointFilter
from dojo.forms import ReportOptionsForm, DeleteReportForm
from dojo.models import Product_Type, Finding, Product, Engagement, Test, \
Dojo_User, Endpoint, Report, Risk_Acceptance
from dojo.reports.widgets import CoverPage, PageBreak, TableOfContents, WYSIWYGContent, FindingList, EndpointList, \
CustomReportJsonForm, ReportOptions, report_widget_factory
from dojo.tasks import async_pdf_report, async_custom_pdf_report
from dojo.utils import get_page_items, add_breadcrumb, get_system_setting, get_period_counts_legacy, Product_Tab
logger = logging.getLogger(__name__)
def down(request):
return render(request, 'disabled.html')
def report_url_resolver(request):
try:
url_resolver = request.META['HTTP_X_FORWARDED_PROTO'] + "://" + request.META['HTTP_X_FORWARDED_FOR']
    except KeyError:  # the forwarded headers are not set by every proxy/webserver
hostname = request.META['HTTP_HOST']
port_index = hostname.find(":")
if port_index != -1:
url_resolver = request.scheme + "://" + hostname[:port_index]
else:
url_resolver = request.scheme + "://" + hostname
return url_resolver + ":" + request.META['SERVER_PORT']
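# Example of what report_url_resolver() returns (hypothetical header values): with
# HTTP_X_FORWARDED_PROTO='https', HTTP_X_FORWARDED_FOR='dojo.example.com' and
# SERVER_PORT='443', the result is 'https://dojo.example.com:443'.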
def report_builder(request):
add_breadcrumb(title="Report Builder", top_level=True, request=request)
findings = Finding.objects.all()
findings = ReportAuthedFindingFilter(request.GET, queryset=findings, user=request.user)
endpoints = Endpoint.objects.filter(finding__active=True,
finding__verified=True,
finding__false_p=False,
finding__duplicate=False,
finding__out_of_scope=False,
).distinct()
ids = get_endpoint_ids(endpoints)
endpoints = Endpoint.objects.filter(id__in=ids)
endpoints = EndpointFilter(request.GET, queryset=endpoints)
in_use_widgets = [ReportOptions(request=request)]
available_widgets = [CoverPage(request=request),
TableOfContents(request=request),
WYSIWYGContent(request=request),
FindingList(request=request, findings=findings),
EndpointList(request=request, endpoints=endpoints),
PageBreak()]
return render(request,
'dojo/report_builder.html',
{"available_widgets": available_widgets,
"in_use_widgets": in_use_widgets})
def custom_report(request):
# saving the report
form = CustomReportJsonForm(request.POST)
host = report_url_resolver(request)
if form.is_valid():
selected_widgets = report_widget_factory(json_data=request.POST['json'], request=request, user=request.user,
finding_notes=False, finding_images=False, host=host)
report_name = 'Custom PDF Report: ' + request.user.username
report_format = 'AsciiDoc'
finding_notes = True
finding_images = True
if 'report-options' in selected_widgets:
options = selected_widgets['report-options']
report_name = 'Custom PDF Report: ' + options.report_name
report_format = options.report_type
finding_notes = (options.include_finding_notes == '1')
finding_images = (options.include_finding_images == '1')
selected_widgets = report_widget_factory(json_data=request.POST['json'], request=request, user=request.user,
finding_notes=finding_notes, finding_images=finding_images, host=host)
if report_format == 'PDF':
report = Report(name=report_name,
type="Custom",
format=report_format,
requester=request.user,
task_id='tbd',
options=request.POST['json'])
report.save()
async_custom_pdf_report.delay(report=report,
template="dojo/custom_pdf_report.html",
filename="custom_pdf_report.pdf",
host=host,
user=request.user,
uri=request.build_absolute_uri(report.get_url()),
finding_notes=finding_notes,
finding_images=finding_images)
messages.add_message(request, messages.SUCCESS,
'Your report is building.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('reports'))
elif report_format == 'AsciiDoc':
widgets = list(selected_widgets.values())
return render(request,
'dojo/custom_asciidoc_report.html',
{"widgets": widgets,
"host": host,
"finding_notes": finding_notes,
"finding_images": finding_images,
"user_id": request.user.id})
else:
return HttpResponseForbidden()
else:
return HttpResponseForbidden()
def report_findings(request):
findings = Finding.objects.filter()
findings = ReportAuthedFindingFilter(request.GET, queryset=findings, user=request.user)
title_words = [word
for finding in findings.qs
for word in finding.title.split() if len(word) > 2]
title_words = sorted(set(title_words))
paged_findings = get_page_items(request, findings.qs.order_by('numerical_severity'), 25)
product_type = None
if 'test__engagement__product__prod_type' in request.GET:
p = request.GET.getlist('test__engagement__product__prod_type', [])
if len(p) == 1:
product_type = get_object_or_404(Product_Type, id=p[0])
return render(request,
'dojo/report_findings.html',
{"findings": paged_findings,
"filtered": findings,
"title_words": title_words,
"title": "finding-list",
})
def report_endpoints(request):
user = Dojo_User.objects.get(id=request.user.id)
endpoints = Endpoint.objects.filter(finding__active=True,
finding__verified=True,
finding__false_p=False,
finding__duplicate=False,
finding__out_of_scope=False,
).distinct()
ids = get_endpoint_ids(endpoints)
endpoints = Endpoint.objects.filter(id__in=ids)
endpoints = EndpointFilter(request.GET, queryset=endpoints)
paged_endpoints = get_page_items(request, endpoints.qs, 25)
return render(request,
'dojo/report_endpoints.html',
{"endpoints": paged_endpoints,
"filtered": endpoints,
"title": "endpoint-list",
})
def download_report(request, rid):
report = get_object_or_404(Report, id=rid)
original_filename = report.file.name
file_path = report.file.path
fp = open(file_path, 'rb')
response = HttpResponse(fp.read())
fp.close()
type, encoding = mimetypes.guess_type(original_filename)
if type is None:
type = 'application/octet-stream'
response['Content-Type'] = type
response['Content-Length'] = str(os.stat(file_path).st_size)
if encoding is not None:
response['Content-Encoding'] = encoding
# To inspect details for the below code, see http://greenbytes.de/tech/tc2231/
if 'WebKit' in request.META['HTTP_USER_AGENT']:
# Safari 3.0 and Chrome 2.0 accepts UTF-8 encoded string directly.
filename_header = 'filename=%s' % original_filename.encode('utf-8')
elif 'MSIE' in request.META['HTTP_USER_AGENT']:
# IE does not support internationalized filename at all.
# It can only recognize internationalized URL, so we do the trick via routing rules.
filename_header = ''
else:
# For others like Firefox, we follow RFC2231 (encoding extension in HTTP headers).
filename_header = 'filename*=UTF-8\'\'%s' % urllib.parse.quote(original_filename.encode('utf-8'))
response['Content-Disposition'] = 'attachment; ' + filename_header
report.status = 'downloaded'
report.save()
return response
@user_passes_test(lambda u: u.is_staff)
def delete_report(request, rid):
report = get_object_or_404(Report, id=rid)
form = DeleteReportForm(instance=report)
if request.method == 'POST':
form = DeleteReportForm(request.POST, instance=report)
if form.is_valid():
report.file.delete()
report.delete()
messages.add_message(request,
messages.SUCCESS,
'Report deleted successfully.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('reports'))
else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'Unable to delete Report, please try again.',
                                 extra_tags='alert-danger')
            return HttpResponseRedirect(reverse('reports'))
else:
return HttpResponseForbidden()
def report_status(request, rid):
report = get_object_or_404(Report, id=rid)
return JsonResponse({'status': report.status,
'id': report.id})
def report_cover_page(request):
report_title = request.GET.get('title', 'Report')
report_subtitle = request.GET.get('subtitle', '')
report_info = request.GET.get('info', '')
return render(request,
'dojo/report_cover_page.html',
{'report_title': report_title,
'report_subtitle': report_subtitle,
'report_info': report_info})
def revoke_report(request, rid):
report = get_object_or_404(Report, id=rid)
form = DeleteReportForm(instance=report)
if request.method == 'POST':
form = DeleteReportForm(request.POST, instance=report)
if form.is_valid():
app.control.revoke(report.task_id, terminate=True)
report.file.delete()
report.delete()
messages.add_message(request,
messages.SUCCESS,
'Report generation stopped and report deleted successfully.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('reports'))
else:
            messages.add_message(request,
                                 messages.ERROR,
                                 'Unable to stop Report, please try again.',
                                 extra_tags='alert-danger')
            return HttpResponseRedirect(reverse('reports'))
else:
return HttpResponseForbidden()
def reports(request):
if request.user.is_staff:
reports = Report.objects.all()
else:
reports = Report.objects.filter(requester=request.user)
reports = ReportFilter(request.GET, queryset=reports)
paged_reports = get_page_items(request, reports.qs, 25)
add_breadcrumb(title="Report List", top_level=True, request=request)
return render(request,
'dojo/reports.html',
{'report_list': reports,
'reports': paged_reports})
def regen_report(request, rid):
report = get_object_or_404(Report, id=rid)
if report.type != 'Custom':
        return HttpResponseRedirect(report.options + "&regen=" + rid)
else:
report.datetime = timezone.now()
report.status = 'requested'
if report.requester.username != request.user.username:
report.requester = request.user
report.save()
async_custom_pdf_report.delay(report=report,
template="dojo/custom_pdf_report.html",
filename="custom_pdf_report.pdf",
host=report_url_resolver(request),
user=request.user,
uri=request.build_absolute_uri(report.get_url()))
messages.add_message(request, messages.SUCCESS,
'Your report is building.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('reports'))
@user_passes_test(lambda u: u.is_staff)
def product_type_report(request, ptid):
product_type = get_object_or_404(Product_Type, id=ptid)
return generate_report(request, product_type)
def product_report(request, pid):
product = get_object_or_404(Product, id=pid)
if request.user.is_staff or request.user in product.authorized_users.all():
pass # user is authorized for this product
else:
raise PermissionDenied
return generate_report(request, product)
def product_findings_report(request):
if request.user.is_staff:
findings = Finding.objects.filter().distinct()
else:
findings = Finding.objects.filter(test__engagement__product__authorized_users__in=[request.user]).distinct()
return generate_report(request, findings)
@user_passes_test(lambda u: u.is_staff)
def engagement_report(request, eid):
engagement = get_object_or_404(Engagement, id=eid)
return generate_report(request, engagement)
@user_passes_test(lambda u: u.is_staff)
def test_report(request, tid):
test = get_object_or_404(Test, id=tid)
return generate_report(request, test)
def endpoint_report(request, eid):
endpoint = get_object_or_404(Endpoint, id=eid)
if request.user.is_staff or request.user in endpoint.product.authorized_users.all():
pass # user is authorized for this product
else:
raise PermissionDenied
return generate_report(request, endpoint)
def product_endpoint_report(request, pid):
user = Dojo_User.objects.get(id=request.user.id)
product = get_object_or_404(Product, id=pid)
endpoints = Endpoint.objects.filter(product=product,
finding__active=True,
finding__verified=True,
finding__false_p=False,
finding__duplicate=False,
finding__out_of_scope=False,
)
ids = get_endpoint_ids(endpoints)
endpoints = Endpoint.objects.filter(id__in=ids)
if request.user.is_staff or request.user in product.authorized_users.all():
pass # user is authorized for this product
else:
raise PermissionDenied
endpoints = EndpointReportFilter(request.GET, queryset=endpoints)
paged_endpoints = get_page_items(request, endpoints.qs, 25)
report_format = request.GET.get('report_type', 'AsciiDoc')
include_finding_notes = int(request.GET.get('include_finding_notes', 0))
include_finding_images = int(request.GET.get('include_finding_images', 0))
include_executive_summary = int(request.GET.get('include_executive_summary', 0))
include_table_of_contents = int(request.GET.get('include_table_of_contents', 0))
generate = "_generate" in request.GET
add_breadcrumb(parent=product, title="Vulnerable Product Endpoints Report", top_level=False, request=request)
report_form = ReportOptionsForm()
filename = "product_endpoint_report.pdf"
template = "dojo/product_endpoint_pdf_report.html"
report_name = "Product Endpoint Report: " + str(product)
report_title = "Product Endpoint Report"
report_subtitle = str(product)
report_info = "Generated By %s on %s" % (
user.get_full_name(), (timezone.now().strftime("%m/%d/%Y %I:%M%p %Z")))
try:
start_date = Finding.objects.filter(endpoints__in=endpoints.qs).order_by('date')[:1][0].date
    except IndexError:  # no findings on these endpoints yet
start_date = timezone.now()
end_date = timezone.now()
risk_acceptances = Risk_Acceptance.objects.filter(engagement__test__finding__endpoints__in=endpoints.qs)
accepted_findings = [finding for ra in risk_acceptances
for finding in ra.accepted_findings.filter(endpoints__in=endpoints.qs)]
verified_findings = Finding.objects.filter(endpoints__in=endpoints.qs,
date__range=[start_date, end_date],
false_p=False,
verified=True,
duplicate=False,
out_of_scope=False)
open_findings = Finding.objects.filter(endpoints__in=endpoints.qs,
false_p=False,
verified=True,
duplicate=False,
out_of_scope=False,
active=True,
mitigated__isnull=True)
closed_findings = Finding.objects.filter(endpoints__in=endpoints.qs,
false_p=False,
verified=True,
duplicate=False,
out_of_scope=False,
mitigated__isnull=False)
if generate:
report_form = ReportOptionsForm(request.GET)
if report_format == 'AsciiDoc':
return render(request,
'dojo/asciidoc_report.html',
{'product_type': None,
'product': product,
'accepted_findings': accepted_findings,
'open_findings': open_findings,
'closed_findings': closed_findings,
'verified_findings': verified_findings,
'engagement': None,
'test': None,
'endpoints': endpoints,
'endpoint': None,
'findings': None,
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': request.user,
'title': 'Generate Report',
})
elif report_format == 'PDF':
endpoints = endpoints.qs.order_by('finding__numerical_severity')
            # let's create the report object and send it to the celery task
if 'regen' in request.GET:
                # we should already have a report object, let's get and use it
report = get_object_or_404(Report, id=request.GET['regen'])
report.datetime = timezone.now()
report.status = 'requested'
if report.requester.username != request.user.username:
report.requester = request.user
else:
report = Report(name="Product Endpoints " + str(product),
type="Product Endpoint",
format='PDF',
requester=request.user,
task_id='tbd',
options=request.path + "?" + request.GET.urlencode())
report.save()
async_pdf_report.delay(report=report,
template=template,
filename=filename,
report_title=report_title,
report_subtitle=report_subtitle,
report_info=report_info,
context={'product': product,
'endpoints': endpoints,
'accepted_findings': accepted_findings,
'open_findings': open_findings,
'closed_findings': closed_findings,
'verified_findings': verified_findings,
'report_name': report_name,
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': get_system_setting('team_name'),
'title': 'Generate Report',
'host': report_url_resolver(request),
'user_id': request.user.id},
uri=request.build_absolute_uri(report.get_url()))
messages.add_message(request, messages.SUCCESS,
'Your report is building.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('reports'))
else:
raise Http404()
product_tab = Product_Tab(product.id, "Product Endpoint Report", tab="endpoints")
return render(request,
'dojo/request_endpoint_report.html',
{"endpoints": paged_endpoints,
"filtered": endpoints,
"product_tab": product_tab,
'report_form': report_form,
"name": "Vulnerable Product Endpoints",
})
def generate_report(request, obj):
user = Dojo_User.objects.get(id=request.user.id)
product_type = None
product = None
engagement = None
test = None
endpoint = None
endpoints = None
endpoint_all_findings = None
endpoint_monthly_counts = None
endpoint_active_findings = None
accepted_findings = None
open_findings = None
closed_findings = None
verified_findings = None
report_title = None
report_subtitle = None
report_info = "Generated By %s on %s" % (
user.get_full_name(), (timezone.now().strftime("%m/%d/%Y %I:%M%p %Z")))
if type(obj).__name__ == "Product":
if request.user.is_staff or request.user in obj.authorized_users.all():
pass # user is authorized for this product
else:
raise PermissionDenied
elif type(obj).__name__ == "Endpoint":
if request.user.is_staff or request.user in obj.product.authorized_users.all():
pass # user is authorized for this product
else:
raise PermissionDenied
elif type(obj).__name__ == "QuerySet":
# authorization taken care of by only selecting findings from product user is authed to see
pass
else:
if not request.user.is_staff:
raise PermissionDenied
report_format = request.GET.get('report_type', 'AsciiDoc')
include_finding_notes = int(request.GET.get('include_finding_notes', 0))
include_finding_images = int(request.GET.get('include_finding_images', 0))
include_executive_summary = int(request.GET.get('include_executive_summary', 0))
include_table_of_contents = int(request.GET.get('include_table_of_contents', 0))
generate = "_generate" in request.GET
report_name = str(obj)
report_type = type(obj).__name__
add_breadcrumb(title="Generate Report", top_level=False, request=request)
if type(obj).__name__ == "Product_Type":
product_type = obj
filename = "product_type_finding_report.pdf"
template = "dojo/product_type_pdf_report.html"
report_name = "Product Type Report: " + str(product_type)
report_title = "Product Type Report"
report_subtitle = str(product_type)
findings = ReportFindingFilter(request.GET, queryset=Finding.objects.filter(
test__engagement__product__prod_type=product_type).distinct().prefetch_related('test',
'test__engagement__product',
'test__engagement__product__prod_type'))
products = Product.objects.filter(prod_type=product_type,
engagement__test__finding__in=findings.qs).distinct()
engagements = Engagement.objects.filter(product__prod_type=product_type,
test__finding__in=findings.qs).distinct()
tests = Test.objects.filter(engagement__product__prod_type=product_type,
finding__in=findings.qs).distinct()
if findings:
start_date = timezone.make_aware(datetime.combine(findings.qs.last().date, datetime.min.time()))
else:
start_date = timezone.now()
end_date = timezone.now()
r = relativedelta(end_date, start_date)
months_between = (r.years * 12) + r.months
# include current month
months_between += 1
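        # Worked example (illustrative dates, not from the data): for findings spanning
        # 2018-01-15 to 2018-03-02, relativedelta gives years=0, months=1, so
        # months_between becomes 2 once the current month is included.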
endpoint_monthly_counts = get_period_counts_legacy(findings.qs.order_by('numerical_severity'), findings.qs.order_by('numerical_severity'), None,
months_between, start_date,
relative_delta='months')
context = {'product_type': product_type,
'products': products,
'engagements': engagements,
'tests': tests,
'report_name': report_name,
'endpoint_opened_per_month': endpoint_monthly_counts[
'opened_per_period'] if endpoint_monthly_counts is not None else [],
'endpoint_active_findings': findings.qs.order_by('numerical_severity'),
'findings': findings.qs.order_by('numerical_severity'),
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': settings.TEAM_NAME,
'title': 'Generate Report',
'host': report_url_resolver(request),
'user_id': request.user.id}
elif type(obj).__name__ == "Product":
product = obj
filename = "product_finding_report.pdf"
template = "dojo/product_pdf_report.html"
report_name = "Product Report: " + str(product)
report_title = "Product Report"
report_subtitle = str(product)
findings = ReportFindingFilter(request.GET, queryset=Finding.objects.filter(
test__engagement__product=product).distinct().prefetch_related('test',
'test__engagement__product',
'test__engagement__product__prod_type'))
ids = set(finding.id for finding in findings.qs)
engagements = Engagement.objects.filter(test__finding__id__in=ids).distinct()
tests = Test.objects.filter(finding__id__in=ids).distinct()
ids = get_endpoint_ids(Endpoint.objects.filter(product=product).distinct())
endpoints = Endpoint.objects.filter(id__in=ids)
context = {'product': product,
'engagements': engagements,
'tests': tests,
'report_name': report_name,
'findings': findings.qs.order_by('numerical_severity'),
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': settings.TEAM_NAME,
'title': 'Generate Report',
'endpoints': endpoints,
'host': report_url_resolver(request),
'user_id': request.user.id}
elif type(obj).__name__ == "Engagement":
engagement = obj
findings = ReportFindingFilter(request.GET,
queryset=Finding.objects.filter(test__engagement=engagement,
).prefetch_related('test',
'test__engagement__product',
'test__engagement__product__prod_type').distinct())
report_name = "Engagement Report: " + str(engagement)
filename = "engagement_finding_report.pdf"
template = 'dojo/engagement_pdf_report.html'
report_title = "Engagement Report"
report_subtitle = str(engagement)
ids = set(finding.id for finding in findings.qs)
tests = Test.objects.filter(finding__id__in=ids).distinct()
ids = get_endpoint_ids(Endpoint.objects.filter(product=engagement.product).distinct())
endpoints = Endpoint.objects.filter(id__in=ids)
context = {'engagement': engagement,
'tests': tests,
'report_name': report_name,
'findings': findings.qs.order_by('numerical_severity'),
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': settings.TEAM_NAME,
'title': 'Generate Report',
'host': report_url_resolver(request),
'user_id': request.user.id,
'endpoints': endpoints}
elif type(obj).__name__ == "Test":
test = obj
findings = ReportFindingFilter(request.GET,
queryset=Finding.objects.filter(test=test).prefetch_related('test',
'test__engagement__product',
'test__engagement__product__prod_type').distinct())
filename = "test_finding_report.pdf"
template = "dojo/test_pdf_report.html"
report_name = "Test Report: " + str(test)
report_title = "Test Report"
report_subtitle = str(test)
context = {'test': test,
'report_name': report_name,
'findings': findings.qs.order_by('numerical_severity'),
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': settings.TEAM_NAME,
'title': 'Generate Report',
'host': report_url_resolver(request),
'user_id': request.user.id}
elif type(obj).__name__ == "Endpoint":
endpoint = obj
host = endpoint.host_no_port
report_name = "Endpoint Report: " + host
report_type = "Endpoint"
endpoints = Endpoint.objects.filter(host__regex="^" + host + ":?",
product=endpoint.product).distinct()
filename = "endpoint_finding_report.pdf"
template = 'dojo/endpoint_pdf_report.html'
report_title = "Endpoint Report"
report_subtitle = host
findings = ReportFindingFilter(request.GET,
queryset=Finding.objects.filter(endpoints__in=endpoints,
).prefetch_related('test',
'test__engagement__product',
'test__engagement__product__prod_type').distinct())
context = {'endpoint': endpoint,
'endpoints': endpoints,
'report_name': report_name,
'findings': findings.qs.order_by('numerical_severity'),
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': get_system_setting('team_name'),
'title': 'Generate Report',
'host': report_url_resolver(request),
'user_id': request.user.id}
elif type(obj).__name__ == "QuerySet":
findings = ReportAuthedFindingFilter(request.GET,
queryset=obj.prefetch_related('test',
'test__engagement__product',
'test__engagement__product__prod_type').distinct(),
user=request.user)
filename = "finding_report.pdf"
report_name = 'Finding'
report_type = 'Finding'
template = 'dojo/finding_pdf_report.html'
report_title = "Finding Report"
report_subtitle = ''
context = {'findings': findings.qs.order_by('numerical_severity'),
'report_name': report_name,
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': settings.TEAM_NAME,
'title': 'Generate Report',
'host': report_url_resolver(request),
'user_id': request.user.id}
else:
raise Http404()
report_form = ReportOptionsForm()
if generate:
report_form = ReportOptionsForm(request.GET)
if report_format == 'AsciiDoc':
return render(request,
'dojo/asciidoc_report.html',
{'product_type': product_type,
'product': product,
'engagement': engagement,
'test': test,
'endpoint': endpoint,
'findings': findings.qs.order_by('numerical_severity'),
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': settings.TEAM_NAME,
'title': 'Generate Report',
'user_id': request.user.id,
'host': report_url_resolver(request),
})
elif report_format == 'PDF':
if 'regen' in request.GET:
                # we should already have a report object, let's get and use it
report = get_object_or_404(Report, id=request.GET['regen'])
report.datetime = timezone.now()
report.status = 'requested'
if report.requester.username != request.user.username:
report.requester = request.user
else:
                # let's create the report object and send it to the celery task
report = Report(name=report_name,
type=report_type,
format='PDF',
requester=request.user,
task_id='tbd',
options=request.path + "?" + request.GET.urlencode())
report.save()
async_pdf_report.delay(report=report,
template=template,
filename=filename,
report_title=report_title,
report_subtitle=report_subtitle,
report_info=report_info,
context=context,
uri=request.build_absolute_uri(report.get_url()))
messages.add_message(request, messages.SUCCESS,
'Your report is building.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('reports'))
elif report_format == 'HTML':
return render(request,
template,
{'product_type': product_type,
'product': product,
'engagement': engagement,
'report_name': report_name,
'test': test,
'endpoint': endpoint,
'endpoints': endpoints,
'findings': findings.qs.order_by('numerical_severity'),
'include_finding_notes': include_finding_notes,
'include_finding_images': include_finding_images,
'include_executive_summary': include_executive_summary,
'include_table_of_contents': include_table_of_contents,
'user': user,
'team_name': settings.TEAM_NAME,
'title': 'Generate Report',
'user_id': request.user.id,
'host': "",
})
else:
raise Http404()
paged_findings = get_page_items(request, findings.qs.order_by('numerical_severity'), 25)
product_tab = None
if engagement:
product_tab = Product_Tab(engagement.product.id, title="Engagement Report", tab="engagements")
product_tab.setEngagement(engagement)
elif test:
product_tab = Product_Tab(test.engagement.product.id, title="Test Report", tab="engagements")
product_tab.setEngagement(test.engagement)
elif product:
product_tab = Product_Tab(product.id, title="Product Report", tab="findings")
elif endpoints:
product_tab = Product_Tab(endpoint.product.id, title="Endpoint Report", tab="endpoints")
return render(request, 'dojo/request_report.html',
{'product_type': product_type,
'product': product,
'product_tab': product_tab,
'engagement': engagement,
'test': test,
'endpoint': endpoint,
'findings': findings,
'paged_findings': paged_findings,
'report_form': report_form,
})
| bsd-3-clause | -3,357,059,948,598,063,000 | 46.340159 | 152 | 0.526579 | false |
envhyf/wrftools | wrftools/fetch.py | 1 | 3678 | import os
import time
import datetime
import subprocess
import shared
def run_gribmaster(config):
"""Runs the gribmaster programme to download the most recent boundary conditions """
logger = shared.get_logger()
gm_dir = config['gm_dir']
gm_transfer = config['gm_transfer']
gm_dataset = config['gm_dataset']
start = config['init_time']
fcst_hours = config['fcst_hours']
gm_log = config['gm_log']
gm_sleep = config['gm_sleep'] # this is in minutes
gm_max_attempts = int(config['gm_max_attempts'])
log_dir = '/home/slha/forecasting'
cmd = '%s/gribmaster --verbose --%s --dset %s --date %s --cycle %s --length %s > %s' %(gm_dir, gm_transfer, gm_dataset, start.strftime('%Y%m%d'), start.strftime('%H'), fcst_hours, gm_log )
for attempt in range(gm_max_attempts):
logger.info('*** RUNNING GRIBMASTER, %s attempt ***' % (attempt+1))
shared.run(cmd, config)
cmd = 'grep "BUMMER" %s' % gm_log # check for failure
ret = subprocess.call(cmd, shell=True)
# if we positively find the string BUMMER, we know we have failed
if ret==0:
logger.error('*** FAIL GRIBMASTER: Attempt %d of %d ***' % (attempt+1, gm_max_attempts))
logger.info('Sleeping for %s minutes' % gm_sleep)
time.sleep(gm_sleep*60)
# else we check for definite sucess
else:
cmd = 'grep "ENJOY" %s' % gm_log # check for failure
ret = subprocess.call(cmd, shell=True)
if ret==0:
logger.info('*** SUCESS GRIBMASTER ***')
return
raise IOError('gribmaster did not find files after %d attempts' % gm_max_attempts)
def get_sst(config):
""" Downloads SST fields from an ftp server.
Whoever is running this must have the http_proxy environment variable set
correctly to allow them to download files through the proxy. Example:
http_proxy = http://slha:[email protected]:8080"""
logger = shared.get_logger()
# create an lftpscript in model run dir
logger.info('*** FETCHING SST ***')
working_dir = config['working_dir']
tmp_dir = config['tmp_dir']
http_proxy = os.environ['http_proxy']
home = os.environ['HOME']
sst_server = config['sst_server']
sst_server_dir = config['sst_server_dir']
sst_local_dir = config['sst_local_dir']
sst_time = shared.get_sst_time(config)
sst_filename = shared.sub_date(shared.get_sst_filename(config), init_time=config['init_time'])
if not os.path.exists(sst_local_dir):
os.makedirs(sst_local_dir)
if os.path.exists('%s/%s' %(sst_local_dir, sst_filename)):
logger.info('*** SST ALREADY EXISTS LOCALLY, NOT DOWNLOADED ***')
return
lftpfilename = '%s/lftpscript' % working_dir
logger.debug('Writing lftpscript to %s' % lftpfilename)
lftpscript = open(lftpfilename, 'w')
lftpscript.write('lcd %s\n' % sst_local_dir)
lftpscript.write('set ftp:proxy %s\n' % http_proxy)
lftpscript.write('set hftp:use-type no\n')
lftpscript.write('open %s\n' % sst_server)
lftpscript.write('get %s/%s\n' % (sst_server_dir,sst_filename))
lftpscript.write('bye')
lftpscript.close()
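    # The generated lftpscript looks roughly like this (hypothetical paths and hosts,
    # shown only to illustrate the commands written above):
    #
    #   lcd /data/sst
    #   set ftp:proxy http://user:[email protected]:8080
    #   set hftp:use-type no
    #   open polar.ncep.noaa.gov
    #   get /pub/history/sst/rtg_sst_grb_0.5.20120101
    #   bye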
cmd = '/usr/bin/lftp -f %s' % lftpfilename
shared.run_cmd(cmd, config)
# check if file downloaded
if not os.path.exists('%s/%s' %(sst_local_dir, sst_filename)):
raise IOError('SST file: %s not downloaded' % sst_filename)
logger.info('*** SUCCESS SST DOWNLOADED ***')
| gpl-3.0 | -8,387,826,132,980,787,000 | 38.978261 | 196 | 0.604133 | false |
mezz64/home-assistant | homeassistant/components/nad/media_player.py | 2 | 10484 | """Support for interfacing with NAD receivers through RS-232."""
from nad_receiver import NADReceiver, NADReceiverTCP, NADReceiverTelnet
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_SELECT_SOURCE,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import CONF_HOST, CONF_NAME, STATE_OFF, STATE_ON
import homeassistant.helpers.config_validation as cv
DEFAULT_TYPE = "RS232"
DEFAULT_SERIAL_PORT = "/dev/ttyUSB0"
DEFAULT_PORT = 53
DEFAULT_NAME = "NAD Receiver"
DEFAULT_MIN_VOLUME = -92
DEFAULT_MAX_VOLUME = -20
DEFAULT_VOLUME_STEP = 4
SUPPORT_NAD = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_VOLUME_STEP
| SUPPORT_SELECT_SOURCE
)
CONF_TYPE = "type"
CONF_SERIAL_PORT = "serial_port" # for NADReceiver
CONF_PORT = "port" # for NADReceiverTelnet
CONF_MIN_VOLUME = "min_volume"
CONF_MAX_VOLUME = "max_volume"
CONF_VOLUME_STEP = "volume_step" # for NADReceiverTCP
CONF_SOURCE_DICT = "sources" # for NADReceiver
SOURCE_DICT_SCHEMA = vol.Schema({vol.Range(min=1, max=10): cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_TYPE, default=DEFAULT_TYPE): vol.In(
["RS232", "Telnet", "TCP"]
),
vol.Optional(CONF_SERIAL_PORT, default=DEFAULT_SERIAL_PORT): cv.string,
vol.Optional(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MIN_VOLUME, default=DEFAULT_MIN_VOLUME): int,
vol.Optional(CONF_MAX_VOLUME, default=DEFAULT_MAX_VOLUME): int,
vol.Optional(CONF_SOURCE_DICT, default={}): SOURCE_DICT_SCHEMA,
vol.Optional(CONF_VOLUME_STEP, default=DEFAULT_VOLUME_STEP): int,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NAD platform."""
if config.get(CONF_TYPE) in ("RS232", "Telnet"):
add_entities(
[NAD(config)],
True,
)
else:
add_entities(
[NADtcp(config)],
True,
)
class NAD(MediaPlayerEntity):
"""Representation of a NAD Receiver."""
def __init__(self, config):
"""Initialize the NAD Receiver device."""
self.config = config
self._instantiate_nad_receiver()
self._min_volume = config[CONF_MIN_VOLUME]
self._max_volume = config[CONF_MAX_VOLUME]
self._source_dict = config[CONF_SOURCE_DICT]
self._reverse_mapping = {value: key for key, value in self._source_dict.items()}
self._volume = self._state = self._mute = self._source = None
def _instantiate_nad_receiver(self) -> NADReceiver:
if self.config[CONF_TYPE] == "RS232":
self._nad_receiver = NADReceiver(self.config[CONF_SERIAL_PORT])
else:
host = self.config.get(CONF_HOST)
port = self.config[CONF_PORT]
self._nad_receiver = NADReceiverTelnet(host, port)
@property
def name(self):
"""Return the name of the device."""
return self.config[CONF_NAME]
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def icon(self):
"""Return the icon for the device."""
return "mdi:speaker-multiple"
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._mute
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_NAD
def turn_off(self):
"""Turn the media player off."""
self._nad_receiver.main_power("=", "Off")
def turn_on(self):
"""Turn the media player on."""
self._nad_receiver.main_power("=", "On")
def volume_up(self):
"""Volume up the media player."""
self._nad_receiver.main_volume("+")
def volume_down(self):
"""Volume down the media player."""
self._nad_receiver.main_volume("-")
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._nad_receiver.main_volume("=", self.calc_db(volume))
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
if mute:
self._nad_receiver.main_mute("=", "On")
else:
self._nad_receiver.main_mute("=", "Off")
def select_source(self, source):
"""Select input source."""
self._nad_receiver.main_source("=", self._reverse_mapping.get(source))
@property
def source(self):
"""Name of the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return sorted(list(self._reverse_mapping.keys()))
@property
def available(self):
"""Return if device is available."""
return self._state is not None
def update(self) -> None:
"""Retrieve latest state."""
power_state = self._nad_receiver.main_power("?")
if not power_state:
self._state = None
return
self._state = (
STATE_ON if self._nad_receiver.main_power("?") == "On" else STATE_OFF
)
if self._state == STATE_ON:
self._mute = self._nad_receiver.main_mute("?") == "On"
volume = self._nad_receiver.main_volume("?")
# Some receivers cannot report the volume, e.g. C 356BEE,
# instead they only support stepping the volume up or down
self._volume = self.calc_volume(volume) if volume is not None else None
self._source = self._source_dict.get(self._nad_receiver.main_source("?"))
def calc_volume(self, decibel):
"""
Calculate the volume given the decibel.
Return the volume (0..1).
"""
return abs(self._min_volume - decibel) / abs(
self._min_volume - self._max_volume
)
def calc_db(self, volume):
"""
Calculate the decibel given the volume.
Return the dB.
"""
return self._min_volume + round(
abs(self._min_volume - self._max_volume) * volume
)
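    # Worked example (assuming the default -92 dB min and -20 dB max, for illustration
    # only): calc_volume(-56) == 0.5 and calc_db(0.5) == -56, so the two helpers above
    # are inverses of each other up to rounding.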
class NADtcp(MediaPlayerEntity):
"""Representation of a NAD Digital amplifier."""
def __init__(self, config):
"""Initialize the amplifier."""
self._name = config[CONF_NAME]
self._nad_receiver = NADReceiverTCP(config.get(CONF_HOST))
self._min_vol = (config[CONF_MIN_VOLUME] + 90) * 2 # from dB to nad vol (0-200)
self._max_vol = (config[CONF_MAX_VOLUME] + 90) * 2 # from dB to nad vol (0-200)
self._volume_step = config[CONF_VOLUME_STEP]
self._state = None
self._mute = None
self._nad_volume = None
self._volume = None
self._source = None
self._source_list = self._nad_receiver.available_sources()
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._mute
@property
def supported_features(self):
"""Flag media player features that are supported."""
return SUPPORT_NAD
def turn_off(self):
"""Turn the media player off."""
self._nad_receiver.power_off()
def turn_on(self):
"""Turn the media player on."""
self._nad_receiver.power_on()
def volume_up(self):
"""Step volume up in the configured increments."""
self._nad_receiver.set_volume(self._nad_volume + 2 * self._volume_step)
def volume_down(self):
"""Step volume down in the configured increments."""
self._nad_receiver.set_volume(self._nad_volume - 2 * self._volume_step)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
nad_volume_to_set = int(
round(volume * (self._max_vol - self._min_vol) + self._min_vol)
)
self._nad_receiver.set_volume(nad_volume_to_set)
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
if mute:
self._nad_receiver.mute()
else:
self._nad_receiver.unmute()
def select_source(self, source):
"""Select input source."""
self._nad_receiver.select_source(source)
@property
def source(self):
"""Name of the current input source."""
return self._source
@property
def source_list(self):
"""List of available input sources."""
return self._nad_receiver.available_sources()
def update(self):
"""Get the latest details from the device."""
try:
nad_status = self._nad_receiver.status()
except OSError:
return
if nad_status is None:
return
# Update on/off state
if nad_status["power"]:
self._state = STATE_ON
else:
self._state = STATE_OFF
# Update current volume
self._volume = self.nad_vol_to_internal_vol(nad_status["volume"])
self._nad_volume = nad_status["volume"]
# Update muted state
self._mute = nad_status["muted"]
# Update current source
self._source = nad_status["source"]
def nad_vol_to_internal_vol(self, nad_volume):
"""Convert nad volume range (0-200) to internal volume range.
Takes into account configured min and max volume.
"""
if nad_volume < self._min_vol:
volume_internal = 0.0
elif nad_volume > self._max_vol:
volume_internal = 1.0
else:
volume_internal = (nad_volume - self._min_vol) / (
self._max_vol - self._min_vol
)
return volume_internal
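    # Worked example (default config assumed for illustration): min -92 dB gives
    # _min_vol == -4 and max -20 dB gives _max_vol == 140, so a NAD volume of 68
    # maps to (68 - -4) / (140 - -4) == 0.5.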
| apache-2.0 | -8,724,611,313,933,065,000 | 30.202381 | 88 | 0.592617 | false |
shadda/AutobahnPython | examples/websocket/streaming/frame_based_server.py | 15 | 1958 | ###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import hashlib
from twisted.internet import reactor
from autobahn.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS
class FrameBasedHashServerProtocol(WebSocketServerProtocol):
"""
Frame-based WebSockets server that computes a running SHA-256 for message
data received. It will respond after every frame received with the digest
computed up to that point. It can receive messages of unlimited number
of frames. Digest is reset upon new message.
"""
def onMessageBegin(self, opcode):
WebSocketServerProtocol.onMessageBegin(self, opcode)
self.sha256 = hashlib.sha256()
def onMessageFrame(self, payload, reserved):
data = ''.join(payload)
self.sha256.update(data)
digest = self.sha256.hexdigest()
self.sendMessage(digest)
print "Sent digest for frame: %s" % digest
def onMessageEnd(self):
pass
if __name__ == '__main__':
factory = WebSocketServerFactory("ws://localhost:9000")
factory.protocol = FrameBasedHashServerProtocol
listenWS(factory)
reactor.run()
| apache-2.0 | -8,476,707,464,821,172,000 | 34.943396 | 79 | 0.626149 | false |
chmberl/django-cms | cms/models/placeholderpluginmodel.py | 49 | 1244 | # -*- coding: utf-8 -*-
from cms.models import CMSPlugin
from cms.models.fields import PlaceholderField
from cms.utils.copy_plugins import copy_plugins_to
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class PlaceholderReference(CMSPlugin):
name = models.CharField(max_length=255)
placeholder_ref = PlaceholderField(slotname='clipboard')
class Meta:
app_label = 'cms'
def __str__(self):
return self.name
def copy_to(self, placeholder, language):
copy_plugins_to(self.placeholder_ref.get_plugins(), placeholder, to_language=language)
def copy_from(self, placeholder, language):
copy_plugins_to(placeholder.get_plugins(language), self.placeholder_ref, to_language=self.language)
def move_to(self, placeholder, language):
for plugin in self.placeholder_ref.get_plugins():
plugin.placeholder = placeholder
plugin.language = language
plugin.save()
def move_from(self, placeholder, language):
for plugin in placeholder.get_plugins():
plugin.placeholder = self.placeholder_ref
plugin.language = language
plugin.save()
| bsd-3-clause | -7,869,210,827,518,331,000 | 33.555556 | 107 | 0.691318 | false |
dimkal/mne-python | examples/inverse/plot_label_source_activations.py | 32 | 2269 | """
====================================================
Extracting the time series of activations in a label
====================================================
We first apply a dSPM inverse operator to get signed activations
in a label (with positive and negative values) and we then
compare different strategies to average the time series
in a label. We compare a simple average, an average
using the dipoles' normals (flip mode), and a PCA,
also using a sign flip.
"""
# Author: Alexandre Gramfort <[email protected]>
#
# License: BSD (3-clause)
import matplotlib.pyplot as plt
import mne
from mne.datasets import sample
from mne.minimum_norm import read_inverse_operator, apply_inverse
print(__doc__)
data_path = sample.data_path()
label = 'Aud-lh'
label_fname = data_path + '/MEG/sample/labels/%s.label' % label
fname_inv = data_path + '/MEG/sample/sample_audvis-meg-oct-6-meg-inv.fif'
fname_evoked = data_path + '/MEG/sample/sample_audvis-ave.fif'
snr = 3.0
lambda2 = 1.0 / snr ** 2
method = "dSPM" # use dSPM method (could also be MNE or sLORETA)
# Load data
evoked = mne.read_evokeds(fname_evoked, condition=0, baseline=(None, 0))
inverse_operator = read_inverse_operator(fname_inv)
src = inverse_operator['src']
# Compute inverse solution
pick_ori = "normal" # Get signed values to see the effect of sign filp
stc = apply_inverse(evoked, inverse_operator, lambda2, method,
pick_ori=pick_ori)
label = mne.read_label(label_fname)
stc_label = stc.in_label(label)
mean = stc.extract_label_time_course(label, src, mode='mean')
mean_flip = stc.extract_label_time_course(label, src, mode='mean_flip')
pca = stc.extract_label_time_course(label, src, mode='pca_flip')
print("Number of vertices : %d" % len(stc_label.data))
# View source activations
plt.figure()
plt.plot(1e3 * stc_label.times, stc_label.data.T, 'k', linewidth=0.5)
h0, = plt.plot(1e3 * stc_label.times, mean.T, 'r', linewidth=3)
h1, = plt.plot(1e3 * stc_label.times, mean_flip.T, 'g', linewidth=3)
h2, = plt.plot(1e3 * stc_label.times, pca.T, 'b', linewidth=3)
plt.legend([h0, h1, h2], ['mean', 'mean flip', 'PCA flip'])
plt.xlabel('Time (ms)')
plt.ylabel('Source amplitude')
plt.title('Activations in Label : %s' % label)
plt.show()
| bsd-3-clause | -7,708,012,445,618,289,000 | 34.453125 | 73 | 0.681798 | false |
SebastianBoyd/sebastianboyd.com | tree/stocks/stocks_chart.py | 2 | 2233 | import urllib2
import json
import time
def get_data(tickers):
tickers = ','.join(map(str,tickers))
response = urllib2.urlopen("http://www.google.com/finance/info?infotype=infoquoteall&q=" + tickers)
string = response.read()
string = string.strip('\n//')
data = json.loads(string, object_hook=_decode_dict)
return data
def update_chart(values):
file = open('test.json', 'w')
file.write(values)
file.close()
def _decode_list(data):
rv = []
for item in data:
if isinstance(item, unicode):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
rv = {}
for key, value in data.iteritems():
if isinstance(key, unicode):
key = key.encode('utf-8')
if isinstance(value, unicode):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
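# Effect of the decoder hooks above (illustrative): on Python 2,
#   json.loads('{"t": "GOOG"}', object_hook=_decode_dict)
# returns {'t': 'GOOG'} with plain byte strings instead of unicode objects.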
def check_transactions():
t = open('transactions.json', 'r')
data = json.loads(t.read(), object_hook=_decode_dict)
t.close()
return data
def clean():
values = "[null" + ", null" * 420 + "]"
update_chart(values)
while True:
now = time.localtime(time.time())
minutes = (now[3] - 6) * 60 + now[4]
print minutes
if minutes < 0:
clean()
elif minutes < 421:
file = open('test.json', 'r')
old_chart = file.read()
file.close()
list_chart = json.loads(old_chart)
transaction_data = check_transactions()
data = get_data(transaction_data['owned_stocks'].keys())
total_price = transaction_data['cash']
for stocks in data:
price = float(stocks['l'])
shares = transaction_data['owned_stocks'][stocks['t']]
total_price = total_price + price * shares
print total_price
file = open('current_price.json', 'w')
file.write('[' + str(total_price) + ']')
file.close()
list_chart[minutes] = float(total_price)
list_chart = str(list_chart)
list_chart = list_chart.replace("None", 'null')
update_chart(list_chart)
time.sleep(60)
| lgpl-3.0 | 4,611,814,163,427,318,000 | 26.9125 | 100 | 0.622481 | false |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/sympy/printing/tableform.py | 5 | 11745 | from __future__ import print_function, division
from sympy.core.containers import Tuple
from types import FunctionType
class TableForm(object):
"""
Create a nice table representation of data.
Example::
>>> from sympy import TableForm
>>> t = TableForm([[5, 7], [4, 2], [10, 3]])
>>> print(t)
5 7
4 2
10 3
You can use the SymPy's printing system to produce tables in any
format (ascii, latex, html, ...).
>>> print(t.as_latex())
\\begin{tabular}{l l}
$5$ & $7$ \\\\
$4$ & $2$ \\\\
$10$ & $3$ \\\\
\end{tabular}
"""
def __init__(self, data, **kwarg):
"""
Creates a TableForm.
Parameters:
data ...
2D data to be put into the table; data can be
given as a Matrix
headings ...
gives the labels for rows and columns:
Can be a single argument that applies to both
dimensions:
- None ... no labels
- "automatic" ... labels are 1, 2, 3, ...
Can be a list of labels for rows and columns:
                    The labels for each dimension can be given
as None, "automatic", or [l1, l2, ...] e.g.
["automatic", None] will number the rows
[default: None]
alignments ...
alignment of the columns with:
- "left" or "<"
- "center" or "^"
- "right" or ">"
When given as a single value, the value is used for
all columns. The row headings (if given) will be
right justified unless an explicit alignment is
given for it and all other columns.
[default: "left"]
formats ...
a list of format strings or functions that accept
3 arguments (entry, row number, col number) and
return a string for the table entry. (If a function
returns None then the _print method will be used.)
wipe_zeros ...
Don't show zeros in the table.
[default: True]
pad ...
the string to use to indicate a missing value (e.g.
elements that are None or those that are missing
from the end of a row (i.e. any row that is shorter
than the rest is assumed to have missing values).
When None, nothing will be shown for values that
are missing from the end of a row; values that are
None, however, will be shown.
[default: None]
Examples
========
>>> from sympy import TableForm, Matrix
>>> TableForm([[5, 7], [4, 2], [10, 3]])
5 7
4 2
10 3
>>> TableForm([list('.'*i) for i in range(1, 4)], headings='automatic')
| 1 2 3
---------
1 | .
2 | . .
3 | . . .
>>> TableForm([['.'*(j if not i%2 else 1) for i in range(3)]
... for j in range(4)], alignments='rcl')
.
. . .
.. . ..
... . ...
"""
from sympy import Symbol, S, Matrix
from sympy.core.sympify import SympifyError
# We only support 2D data. Check the consistency:
if isinstance(data, Matrix):
data = data.tolist()
_w = len(data[0])
_h = len(data)
# fill out any short lines
pad = kwarg.get('pad', None)
ok_None = False
if pad is None:
pad = " "
ok_None = True
pad = Symbol(pad)
_w = max(len(line) for line in data)
for i, line in enumerate(data):
if len(line) != _w:
line.extend([pad]*(_w - len(line)))
for j, lj in enumerate(line):
if lj is None:
if not ok_None:
lj = pad
else:
try:
lj = S(lj)
except SympifyError:
lj = Symbol(str(lj))
line[j] = lj
data[i] = line
_lines = Tuple(*data)
headings = kwarg.get("headings", [None, None])
if headings == "automatic":
_headings = [range(1, _h + 1), range(1, _w + 1)]
else:
h1, h2 = headings
if h1 == "automatic":
h1 = range(1, _h + 1)
if h2 == "automatic":
h2 = range(1, _w + 1)
_headings = [h1, h2]
allow = ('l', 'r', 'c')
alignments = kwarg.get("alignments", "l")
def _std_align(a):
a = a.strip().lower()
if len(a) > 1:
return {'left': 'l', 'right': 'r', 'center': 'c'}.get(a, a)
else:
return {'<': 'l', '>': 'r', '^': 'c'}.get(a, a)
std_align = _std_align(alignments)
if std_align in allow:
_alignments = [std_align]*_w
else:
_alignments = []
for a in alignments:
std_align = _std_align(a)
_alignments.append(std_align)
if std_align not in ('l', 'r', 'c'):
raise ValueError('alignment "%s" unrecognized' %
alignments)
if _headings[0] and len(_alignments) == _w + 1:
_head_align = _alignments[0]
_alignments = _alignments[1:]
else:
_head_align = 'r'
if len(_alignments) != _w:
raise ValueError(
'wrong number of alignments: expected %s but got %s' %
(_w, len(_alignments)))
_column_formats = kwarg.get("formats", [None]*_w)
_wipe_zeros = kwarg.get("wipe_zeros", True)
self._w = _w
self._h = _h
self._lines = _lines
self._headings = _headings
self._head_align = _head_align
self._alignments = _alignments
self._column_formats = _column_formats
self._wipe_zeros = _wipe_zeros
def __repr__(self):
from .str import sstr
return sstr(self, order=None)
def __str__(self):
from .str import sstr
return sstr(self, order=None)
def as_matrix(self):
"""Returns the data of the table in Matrix form.
Examples
========
>>> from sympy import TableForm
>>> t = TableForm([[5, 7], [4, 2], [10, 3]], headings='automatic')
>>> t
| 1 2
--------
1 | 5 7
2 | 4 2
3 | 10 3
>>> t.as_matrix()
Matrix([
[ 5, 7],
[ 4, 2],
[10, 3]])
"""
from sympy import Matrix
return Matrix(self._lines)
def as_str(self):
# XXX obsolete ?
return str(self)
def as_latex(self):
from .latex import latex
return latex(self)
def _sympystr(self, p):
"""
Returns the string representation of 'self'.
Example:
>>> from sympy import TableForm
>>> t = TableForm([[5, 7], [4, 2], [10, 3]])
>>> s = t.as_str()
"""
column_widths = [0] * self._w
lines = []
for line in self._lines:
new_line = []
for i in range(self._w):
# Format the item somehow if needed:
s = str(line[i])
if self._wipe_zeros and (s == "0"):
s = " "
w = len(s)
if w > column_widths[i]:
column_widths[i] = w
new_line.append(s)
lines.append(new_line)
# Check heading:
if self._headings[0]:
self._headings[0] = [str(x) for x in self._headings[0]]
_head_width = max([len(x) for x in self._headings[0]])
if self._headings[1]:
new_line = []
for i in range(self._w):
# Format the item somehow if needed:
s = str(self._headings[1][i])
w = len(s)
if w > column_widths[i]:
column_widths[i] = w
new_line.append(s)
self._headings[1] = new_line
format_str = []
def _align(align, w):
return '%%%s%ss' % (
("-" if align == "l" else ""),
str(w))
format_str = [_align(align, w) for align, w in
zip(self._alignments, column_widths)]
if self._headings[0]:
format_str.insert(0, _align(self._head_align, _head_width))
format_str.insert(1, '|')
format_str = ' '.join(format_str) + '\n'
s = []
if self._headings[1]:
d = self._headings[1]
if self._headings[0]:
d = [""] + d
first_line = format_str % tuple(d)
s.append(first_line)
s.append("-" * (len(first_line) - 1) + "\n")
for i, line in enumerate(lines):
d = [l if self._alignments[j] != 'c' else
l.center(column_widths[j]) for j, l in enumerate(line)]
if self._headings[0]:
l = self._headings[0][i]
l = (l if self._head_align != 'c' else
l.center(_head_width))
d = [l] + d
s.append(format_str % tuple(d))
return ''.join(s)[:-1] # don't include trailing newline
def _latex(self, printer):
"""
Returns the string representation of 'self'.
"""
# Check heading:
if self._headings[1]:
new_line = []
for i in range(self._w):
# Format the item somehow if needed:
new_line.append(str(self._headings[1][i]))
self._headings[1] = new_line
alignments = []
if self._headings[0]:
self._headings[0] = [str(x) for x in self._headings[0]]
alignments = [self._head_align]
alignments.extend(self._alignments)
s = r"\begin{tabular}{" + " ".join(alignments) + "}\n"
if self._headings[1]:
d = self._headings[1]
if self._headings[0]:
d = [""] + d
first_line = " & ".join(d) + r" \\" + "\n"
s += first_line
s += r"\hline" + "\n"
for i, line in enumerate(self._lines):
d = []
for j, x in enumerate(line):
if self._wipe_zeros and (x in (0, "0")):
d.append(" ")
continue
f = self._column_formats[j]
if f:
if isinstance(f, FunctionType):
v = f(x, i, j)
if v is None:
v = printer._print(x)
else:
v = f % x
d.append(v)
else:
v = printer._print(x)
d.append("$%s$" % v)
if self._headings[0]:
d = [self._headings[0][i]] + d
s += " & ".join(d) + r" \\" + "\n"
s += r"\end{tabular}"
return s
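# Illustrative use of the ``formats`` option documented above (not a doctest):
#
#   TableForm([[1.23456, 2]], formats=['%.2f', None])
#
# when rendered with as_latex(), the first column is passed through the
# '%.2f' format string while the second column falls back to the printer.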
| gpl-3.0 | -2,497,701,696,475,045,400 | 31.266484 | 79 | 0.42103 | false |
jcftang/ansible | lib/ansible/utils/module_docs_fragments/files.py | 29 | 3217 | # (c) 2014, Matt Martz <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
mode:
required: false
default: null
description:
- Mode the file or directory should be. For those used to I(/usr/bin/chmod) remember that modes are actually octal numbers (like 0644). Leaving off the leading zero will likely have unexpected results. As of version 1.8, the mode may be specified as a symbolic mode (for example, C(u+rwx) or C(u=rw,g=r,o=r)).
owner:
required: false
default: null
description:
- Name of the user that should own the file/directory, as would be fed to I(chown).
group:
required: false
default: null
description:
- Name of the group that should own the file/directory, as would be fed to I(chown).
seuser:
required: false
default: null
description:
- User part of SELinux file context. Will default to system policy, if
applicable. If set to C(_default), it will use the C(user) portion of the
policy if available.
serole:
required: false
default: null
description:
- Role part of SELinux file context, C(_default) feature works as for I(seuser).
setype:
required: false
default: null
description:
- Type part of SELinux file context, C(_default) feature works as for I(seuser).
selevel:
required: false
default: "s0"
description:
- Level part of the SELinux file context. This is the MLS/MCS attribute,
sometimes known as the C(range). C(_default) feature works as for
I(seuser).
unsafe_writes:
description:
- Normally this module uses atomic operations to prevent data corruption or inconsistent reads from the target files,
        sometimes systems are configured or just broken in ways that prevent this. One example is docker mounted files,
        which cannot be updated atomically and can only be written in an unsafe manner.
- This boolean option allows ansible to fall back to unsafe methods of updating files for those cases in which you do
not have any other choice. Be aware that this is subject to race conditions and can lead to data corruption.
required: false
default: false
version_added: "2.2"
attributes:
description:
      - Attributes the file or directory should have. To get supported flags look at the man page for I(chattr) on the target system.
required: false
default: None
aliases: ['attr']
version_added: "2.3"
"""
| gpl-3.0 | 5,859,240,094,766,090,000 | 39.2125 | 315 | 0.705316 | false |
ClearCorp/odoo-clearcorp | TODO-9.0/stock_requisition_group/procurement_order.py | 3 | 2922 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class ProcurementOrder(models.Model):
_inherit = 'procurement.order'
@api.multi
def name_get(self):
result = []
for procurement in self:
result.append((procurement.id, '%s: %s' % (
procurement.product_id.display_name or '',
procurement.origin or '')))
return result
@api.model
def _run(self, procurement):
if procurement.rule_id and procurement.rule_id.action == 'buy' and \
procurement.product_id.purchase_requisition:
res = super(ProcurementOrder, self)._run(procurement)
            # Check whether a draft requisition was already created for this group
requisition_obj = self.env['purchase.requisition']
requisition = requisition_obj.search(
[('group_id', '=', procurement.group_id.id),
('state', '=', 'draft'),
('id', '!=', procurement.requisition_id.id),
('company_id', '=', procurement.company_id.id)], limit=1)
if requisition:
for line in procurement.requisition_id.line_ids:
line.procurement_id = procurement.id
line.requisition_id = requisition.id
procurement.requisition_id.unlink()
procurement.requisition_id = requisition.id
else:
# Assign the procurement group and
# assign the sequence to the requisition
procurement.requisition_id.write({
'group_id': procurement.group_id.id,
'name': procurement.origin,
})
for line in procurement.requisition_id.line_ids:
line.procurement_id = procurement.id
return res
else:
return super(ProcurementOrder, self)._run(procurement)
| agpl-3.0 | -6,953,665,887,141,594,000 | 41.970588 | 78 | 0.568789 | false |
josrolgil/exjobbCalvin | calvin/runtime/south/plugins/async/twistedimpl/serialport.py | 4 | 1874 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet.serialport import SerialPort
from twisted.internet import reactor
from twisted.internet.protocol import Protocol
from twisted.internet import fdesc
class RawProtocol(Protocol):
def __init__(self, sp):
self.data = b""
self.sp = sp
def dataReceived(self, data):
self.data += data
self.sp.trigger()
class SP(object):
"""A Calvin serialport object"""
def __init__(self, devicename, baudrate, bytesize, parity, stopbits, timeout, xonxoff, rtscts, trigger, actor_id):
self._trigger = trigger
self._actor_id = actor_id
self._port = SerialPort(
RawProtocol(self),
devicename,
reactor,
baudrate,
bytesize,
parity,
stopbits,
timeout,
xonxoff,
rtscts)
def trigger(self):
self._trigger(actor_ids=[self._actor_id])
def write(self, data):
fdesc.writeToFD(self._port.fileno(), data)
def read(self):
data = self._port.protocol.data
self._port.protocol.data = b""
return data
def hasData(self):
return len(self._port.protocol.data)
def close(self):
self._port.loseConnection()
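# Rough construction sketch (all values are placeholders; the byte-size,
# parity and stop-bit constants would come from pyserial, e.g.
# serial.EIGHTBITS, serial.PARITY_NONE, serial.STOPBITS_ONE):
#
#   sp = SP("/dev/ttyUSB0", 115200, EIGHTBITS, PARITY_NONE, STOPBITS_ONE,
#           0, False, False, trigger_callback, actor_id)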
| apache-2.0 | -7,057,764,002,521,376,000 | 26.558824 | 118 | 0.636073 | false |
sagge-miky/Super-HUGS-Revolution-98 | score.py | 2 | 8127 | ###########################################
# Created on 1-7-2013. Miguel Angel Astor #
###########################################
import string
import pygame
try:
import android
except ImportError:
android = None
try:
import pygame.mixer as mixer
except ImportError:
import android.mixer as mixer
import player
import database
import audio
from constants import DEBUG
from imloader import cached_image_loader
from actor import BaseActor
from state import BaseState, VALID_STATES
class ScoreState(BaseState):
def __init__(self):
BaseState.__init__(self)
self.background_color = (125, 158, 192)
self.next_transition = VALID_STATES['MENU']
self.cursor_x = 0
self.cursor_y = 0
self.letter_index = 0 # Tells how many letters the user has clicked.
self.player_init = [] # Holds the player initials.
image = cached_image_loader.get_image_to_screen_percent('gfx/iniciales.png')
self.banner = BaseActor(0, image, "Banner", False, True, False)
self.banner.set_position([pygame.display.Info().current_w // 2, (image.get_height() // 2) + 20])
image2 = cached_image_loader.get_image_to_screen_percent('gfx/Fuente/_.png')
self.underscore_c = BaseActor(1, image2, "Underscore center", False, True, False)
self.underscore_c.set_position([pygame.display.Info().current_w // 2,
self.banner.get_position()[1] + image.get_height() + 25])
self.underscore_l = BaseActor(2, image2, "Underscore left", False, True, False)
self.underscore_l.set_position([self.underscore_c.get_position()[0] - image2.get_width(),
self.underscore_c.get_position()[1]])
self.underscore_r = BaseActor(3, image2, "Underscore right", False, True, False)
self.underscore_r.set_position([self.underscore_c.get_position()[0] + image2.get_width(),
self.underscore_c.get_position()[1]])
image = cached_image_loader.get_image_to_screen_percent('gfx/del.png')
self.del_button = BaseActor(4, image, "Delete button", False, True, False)
self.del_button.set_position([self.underscore_c.get_position()[0] + (2 * image2.get_width()),
self.underscore_c.get_position()[1]])
image = cached_image_loader.get_image_to_screen_percent('gfx/listo.png')
self.done_button = BaseActor(5, image, "Done button", False, False, False)
self.done_button.set_position([self.underscore_c.get_position()[0] + (3 * image2.get_width()),
self.underscore_c.get_position()[1]])
self.letters = {}
letter_list = ['q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p', 'a', 's', 'd', 'f', 'g', 'h',
'j', 'k', 'l', 'z', 'x', 'c', 'v', 'b', 'n', 'm']
image = cached_image_loader.get_image_to_screen_percent('gfx/Fuente/a.png')
q_x_position = self.banner.rect.left + (image.get_width() // 2)
q_y_position = int((float(pygame.display.Info().current_h) * 438.0 ) / 768.0)
letter_sep = int((float(pygame.display.Info().current_w) * 10.0 ) / 1024.0)
for l in letter_list:
image = cached_image_loader.get_image_to_screen_percent('gfx/Fuente/' + l + '.png')
letter_actor = BaseActor(89, image, string.upper(l), False, True, False)
if l == 'a':
q_x_position = self.banner.rect.left + image.get_width()
q_y_position = int((float(pygame.display.Info().current_h) * 543.0) / 768.0)
elif l == 'z':
q_x_position = self.banner.rect.left + int(1.5 * image.get_width())
q_y_position = int((float(pygame.display.Info().current_h) * 649.0) / 768.0)
letter_actor.set_position([q_x_position, q_y_position])
self.letters[l] = letter_actor
if l == 'i':
q_x_position += image.get_width() + (2 * letter_sep)
else:
q_x_position += image.get_width() + letter_sep
self.letter_y = int((float(pygame.display.Info().current_h) * 265.0 ) / 768.0)
def input(self):
for event in pygame.event.get():
if android is not None:
if android.check_pause():
android.wait_for_resume()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
self.next_transition = VALID_STATES['QUIT']
if event.type == pygame.QUIT:
self.next_transition = VALID_STATES['QUIT']
# Catch the position of a mouse click (or tap).
if event.type == pygame.MOUSEBUTTONDOWN:
(self.cursor_x, self.cursor_y) = event.pos
def update(self):
if self.next_transition != VALID_STATES['QUIT']:
if self.next_transition != VALID_STATES['STAY']:
# Set next_transition to STAY if the game gets to this state from GameState a second or third time, etc.
self.next_transition = VALID_STATES['STAY']
mixer.music.stop()
audio.cached_audio_manager.play_sound('sfx/Game_Over_2.wav')
if self.letter_index < 3:
# If not all initials are set, check taps on every letter.
for key in self.letters.keys():
if self.letters[key].test_collision_with_point((self.cursor_x, self.cursor_y)):
self.player_init.append(self.letters[key].get_name())
self.letter_index += 1
if self.letter_index > 0 and self.del_button.test_collision_with_point((self.cursor_x, self.cursor_y)):
# If the player clicked on the delete button and there are initials set,
# remove the last one.
self.player_init.pop()
self.letter_index -= 1
if self.letter_index == 3:
# If all initials have been set, make the done button visible.
self.done_button.make_visible()
else:
self.done_button.make_invisible()
if self.done_button.is_visible() and self.done_button.test_collision_with_point((self.cursor_x, self.cursor_y)):
# If the user clicked on the done button, insert the score in the database and go to the main menu.
database.cursor.execute('SELECT * FROM score ORDER BY score ASC')
row = database.cursor.fetchone()
score = (str(self.player_init[0] + self.player_init[1] + self.player_init[2]),
player.PLAYERS[1].get_score(),
row[0])
database.cursor.execute('UPDATE score SET player_name = ?, score = ? WHERE _id = ?', score)
database.scores.commit()
# Don't forget to reset the initials list.
self.player_init = []
self.letter_index = 0
self.next_transition = VALID_STATES['MENU']
# Reset the mouse pointer.
self.cursor_x = 0
self.cursor_y = 0
return self.next_transition
def render(self, canvas):
canvas.fill(self.background_color)
self.banner.draw(canvas)
if self.letter_index < 1:
self.underscore_l.draw(canvas)
if self.letter_index < 2:
self.underscore_c.draw(canvas)
if self.letter_index < 3:
self.underscore_r.draw(canvas)
self.del_button.draw(canvas)
if self.done_button.is_visible():
self.done_button.draw(canvas)
for key in self.letters.keys():
self.letters[key].draw(canvas)
for i in range(self.letter_index):
initial = self.letters[string.lower(self.player_init[i])].image
position = None
if i == 0:
position = (self.underscore_l.rect.left, self.letter_y - (initial.get_height() // 2))
elif i == 1:
position = (self.underscore_c.rect.left, self.letter_y - (initial.get_height() // 2))
else:
position = (self.underscore_r.rect.left, self.letter_y - (initial.get_height() // 2))
canvas.blit(initial, position)
| bsd-2-clause | -5,601,474,165,439,807,000 | 43.653846 | 122 | 0.574874 | false |
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/sympy/core/compatibility.py | 3 | 5401 | """
Reimplementations of constructs introduced in later versions of Python than we
support.
"""
# These are in here because telling if something is an iterable just by calling
# hasattr(obj, "__iter__") behaves differently in Python 2 and Python 3. In
# particular, hasattr(str, "__iter__") is False in Python 2 and True in Python 3.
# I think putting them here also makes it easier to use them in the core.
def iterable(i, exclude=(basestring, dict)):
"""
Return a boolean indicating whether i is an iterable in the sympy sense.
When sympy is working with iterables, it is almost always assuming
that the iterable is not a string or a mapping, so those are excluded
by default. If you want a pure python definition, make exclude=None. To
exclude multiple items, pass them as a tuple.
See also: is_sequence
Examples:
>>> from sympy.utilities.iterables import iterable
>>> from sympy import Tuple
>>> things = [[1], (1,), set([1]), Tuple(1), (j for j in [1, 2]), {1:2}, '1', 1]
>>> for i in things:
... print iterable(i), type(i)
True <type 'list'>
True <type 'tuple'>
True <type 'set'>
True <class 'sympy.core.containers.Tuple'>
True <type 'generator'>
False <type 'dict'>
False <type 'str'>
False <type 'int'>
>>> iterable({}, exclude=None)
True
>>> iterable({}, exclude=str)
True
>>> iterable("no", exclude=str)
False
"""
try:
iter(i)
except TypeError:
return False
if exclude:
return not isinstance(i, exclude)
return True
def is_sequence(i, include=None):
"""
Return a boolean indicating whether i is a sequence in the sympy
sense. If anything that fails the test below should be included as
being a sequence for your application, set 'include' to that object's
type; multiple types should be passed as a tuple of types.
Note: although generators can generate a sequence, they often need special
handling to make sure their elements are captured before the generator is
exhausted, so these are not included by default in the definition of a
sequence.
See also: iterable
Examples:
>>> from sympy.utilities.iterables import is_sequence
>>> from types import GeneratorType
>>> is_sequence([])
True
>>> is_sequence(set())
False
>>> is_sequence('abc')
False
>>> is_sequence('abc', include=str)
True
>>> generator = (c for c in 'abc')
>>> is_sequence(generator)
False
>>> is_sequence(generator, include=(str, GeneratorType))
True
"""
return (hasattr(i, '__getitem__') and
iterable(i) or
bool(include) and
isinstance(i, include))
"""
Wrapping some imports in try/except statements to allow the same code to
be used in Python 3+ as well.
"""
try:
callable = callable
except NameError:
import collections
def callable(obj):
return isinstance(obj, collections.Callable)
try:
from functools import reduce
except ImportError:
reduce = reduce
def cmp_to_key(mycmp):
"""
Convert a cmp= function into a key= function
This code is included in Python 2.7 and 3.2 in functools.
"""
class K(object):
def __init__(self, obj, *args):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
def __gt__(self, other):
return mycmp(self.obj, other.obj) > 0
def __eq__(self, other):
return mycmp(self.obj, other.obj) == 0
def __le__(self, other):
return mycmp(self.obj, other.obj) <= 0
def __ge__(self, other):
return mycmp(self.obj, other.obj) >= 0
def __ne__(self, other):
return mycmp(self.obj, other.obj) != 0
return K
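# Typical use of cmp_to_key (mirrors the functools version): pass the wrapped
# comparison function as the key argument of sorted/min/max, e.g.
#   sorted(values, key=cmp_to_key(cmp))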
try:
import __builtin__
cmp = __builtin__.cmp
except AttributeError:
def cmp(a,b):
return (a > b) - (a < b)
try:
from itertools import product
except ImportError: # Python 2.5
def product(*args, **kwds):
# product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
# product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
pools = map(tuple, args) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x+[y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
try:
from itertools import permutations
except ImportError: # Python 2.5
def permutations(iterable, r=None):
# permutations('ABCD', 2) --> AB AC AD BA BC BD CA CB CD DA DB DC
# permutations(range(3)) --> 012 021 102 120 201 210
pool = tuple(iterable)
n = len(pool)
r = n if r is None else r
if r > n:
return
indices = range(n)
cycles = range(n, n-r, -1)
yield tuple(pool[i] for i in indices[:r])
while n:
for i in reversed(range(r)):
cycles[i] -= 1
if cycles[i] == 0:
indices[i:] = indices[i+1:] + indices[i:i+1]
cycles[i] = n - i
else:
j = cycles[i]
indices[i], indices[-j] = indices[-j], indices[i]
yield tuple(pool[i] for i in indices[:r])
break
else:
return
| agpl-3.0 | 1,321,565,830,904,298,200 | 29.342697 | 84 | 0.584892 | false |
pbchou/trafficserver | tests/gold_tests/headers/syntax.test.py | 6 | 4109 | '''
Test whitespace between field name and colon in the header
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Test.Summary = '''
Test whitespace between field name and colon in the header
'''
Test.ContinueOnFail = True
# Define default ATS
ts = Test.MakeATSProcess("ts")
server = Test.MakeOriginServer("server")
# **testname is required**
testName = ""
request_header = {"headers": "GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
response_header = {"headers": "HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", "timestamp": "1469733493.993", "body": ""}
server.addResponse("sessionlog.json", request_header, response_header)
ts.Disk.remap_config.AddLine(
'map http://www.example.com http://127.0.0.1:{0}'.format(server.Variables.Port)
)
# Test 0 - 200 Response
tr = Test.AddTestRun()
tr.Processes.Default.StartBefore(server, ready=When.PortOpen(server.Variables.Port))
tr.Processes.Default.StartBefore(Test.Processes.ts)
tr.Processes.Default.Command = 'curl -s -D - -v --ipv4 --http1.1 -H " foo: bar" -H "Host: www.example.com" http://localhost:{0}/'.format(
ts.Variables.port)
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Streams.stdout = "syntax.200.gold"
tr.StillRunningAfter = ts
# Test 1 - 400 Response - Single space after field name
tr = Test.AddTestRun()
tr.Processes.Default.Command = 'curl -s -D - -v --ipv4 --http1.1 -H "foo : bar" -H "Host: www.example.com" http://localhost:{0}/'.format(
ts.Variables.port)
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Streams.stdout = "syntax.400.gold"
tr.StillRunningAfter = ts
# Test 2 - 400 Response - Double space after field name
tr = Test.AddTestRun()
tr.Processes.Default.Command = 'curl -s -D - -v --ipv4 --http1.1 -H "foo : bar" -H "Host: www.example.com" http://localhost:{0}/'.format(
ts.Variables.port)
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Streams.stdout = "syntax.400.gold"
tr.StillRunningAfter = ts
# Test 3 - 400 Response - Three different Content-Length headers
tr = Test.AddTestRun()
tr.Processes.Default.Command = 'curl -s -D - -v --ipv4 --http1.1 -d "hello world" -H "Content-Length: 11" -H "Content-Length: 10" -H "Content-Length: 9" -H "Host: www.example.com" http://localhost:{0}/'.format(
ts.Variables.port)
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Streams.stdout = "syntax.400.gold"
tr.StillRunningAfter = ts
# Test 4 - 200 Response - Three same Content-Length headers
tr = Test.AddTestRun()
tr.Processes.Default.Command = 'curl -s -D - -v --ipv4 --http1.1 -d "hello world" -H "Content-Length: 11" -H "Content-Length: 11" -H "Content-Length: 11" -H "Host: www.example.com" http://localhost:{0}/'.format(
ts.Variables.port)
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Streams.stdout = "syntax.200.gold"
tr.StillRunningAfter = ts
# Test 5 - 200 Response - Three different Content-Length headers with a Transfer encoding header
tr = Test.AddTestRun()
tr.Processes.Default.Command = 'curl -s -D - -v --ipv4 --http1.1 -d "hello world" -H "Transfer-Encoding: chunked" -H "Content-Length: 11" -H "Content-Length: 10" -H "Content-Length: 9" -H "Host: www.example.com" http://localhost:{0}/'.format(
ts.Variables.port)
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Streams.stdout = "syntax.200.gold"
tr.StillRunningAfter = ts
| apache-2.0 | 1,375,132,695,238,180,600 | 45.693182 | 242 | 0.721343 | false |
amitsela/beam | sdks/python/apache_beam/examples/complete/juliaset/setup.py | 18 | 4322 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Setup.py module for the workflow's worker utilities.
All the workflow related code is gathered in a package that will be built as a
source distribution, staged in the staging area for the workflow being run and
then installed in the workers when they start running.
This behavior is triggered by specifying the --setup_file command line option
when running the workflow for remote execution.
"""
from distutils.command.build import build as _build
import subprocess
import setuptools
# This class handles the pip install mechanism.
class build(_build): # pylint: disable=invalid-name
"""A build command class that will be invoked during package install.
The package built using the current setup.py will be staged and later
installed in the worker using `pip install package'. This class will be
instantiated during install for this specific scenario and will trigger
running the custom commands specified.
"""
sub_commands = _build.sub_commands + [('CustomCommands', None)]
# Some custom command to run during setup. The command is not essential for this
# workflow. It is used here as an example. Each command will spawn a child
# process. Typically, these commands will include steps to install non-Python
# packages. For instance, to install a C++-based library libjpeg62 the following
# two commands will have to be added:
#
# ['apt-get', 'update'],
# ['apt-get', '--assume-yes', 'install', 'libjpeg62'],
#
# First, note that there is no need to use the sudo command because the setup
# script runs with appropriate access.
# Second, if apt-get tool is used then the first command needs to be 'apt-get
# update' so the tool refreshes itself and initializes links to download
# repositories. Without this initial step the other apt-get install commands
# will fail with package not found errors. Note also --assume-yes option which
# shortcuts the interactive confirmation.
#
# The output of custom commands (including failures) will be logged in the
# worker-startup log.
CUSTOM_COMMANDS = [
['echo', 'Custom command worked!']]
class CustomCommands(setuptools.Command):
"""A setuptools Command class able to run arbitrary commands."""
def initialize_options(self):
pass
def finalize_options(self):
pass
def RunCustomCommand(self, command_list):
print 'Running command: %s' % command_list
p = subprocess.Popen(
command_list,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
# Can use communicate(input='y\n'.encode()) if the command run requires
# some confirmation.
stdout_data, _ = p.communicate()
print 'Command output: %s' % stdout_data
if p.returncode != 0:
raise RuntimeError(
'Command %s failed: exit code: %s' % (command_list, p.returncode))
def run(self):
for command in CUSTOM_COMMANDS:
self.RunCustomCommand(command)
# Configure the required packages and scripts to install.
# Note that the Python Dataflow containers come with numpy already installed
# so this dependency will not trigger anything to be installed unless a version
# restriction is specified.
REQUIRED_PACKAGES = [
'numpy',
]
setuptools.setup(
name='juliaset',
version='0.0.1',
description='Julia set workflow package.',
install_requires=REQUIRED_PACKAGES,
packages=setuptools.find_packages(),
cmdclass={
# Command class instantiated and run during pip install scenarios.
'build': build,
'CustomCommands': CustomCommands,
}
)
| apache-2.0 | -1,823,187,815,389,771,500 | 36.258621 | 80 | 0.737621 | false |
linyehui/migrating-from-wikidot-to-jekyll | wikidot.py | 1 | 4660 | #!/usr/bin/env python
# -*- encoding: UTF8 -*-
# Copyright 2012 Philipp Klaus
# Part of https://github.com/vLj2/wikidot-to-markdown
import re ## The most important module here!
import string ## for string.join()
#import markdown
import uuid ## to generate random UUIDs using uuid.uuid4()
class WikidotToMarkdown(object):
def __init__(self):
# regex for URL found on http://regexlib.com/REDetails.aspx?regex_id=501
self.url_regex = r"(http|https|ftp)\://([a-zA-Z0-9\.\-]+(\:[a-zA-Z0-9\.&%\$\-]+)*@)*((25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])|localhost|([a-zA-Z0-9\-]+\.)*[a-zA-Z0-9\-]+\.(com|edu|gov|int|mil|net|org|biz|arpa|info|name|pro|aero|coop|museum|[a-zA-Z]{2}))(\:[0-9]+)*(/($|[a-zA-Z0-9\.\,\?\'\\\+&%\$#\=~_\-]+))*[/]?"
self.static_replacements = { '[[toc]]': '', # no equivalent for table of contents in Markdown
}
self.regex_replacements = { r'^\+ ([^\n]*)$': r"# \1\n", # headings
r'^\+\+ ([^\n]*)$': r"## \1\n",
r'^\+\+\+ ([^\n]*)$': r"### \1\n",
r'^\+\+\+\+ ([^\n]*)$': r"#### \1\n",
r'^\+\+\+\+\+ ([^\n]*)$': r"##### \1\n",
r'([^:])//([\s\S ]*?)//': r'\1*\2*', # italics
r'([^:])__([\s\S ]*?)__': r'\1**\2**', # underlining → bold
r'([^:]){{([\s\S ]*?)}}': r'\1`\2`', # inline monospaced text
}
self.regex_split_condition = r"^\+ ([^\n]*)$"
def convert(self, text):
text = '\n'+text+'\n'# add embed in newlines (makes regex replaces work better)
# first we search for [[code]] statements as we don't want any replacement to happen inside those code blocks!
code_blocks = dict()
code_blocks_found = re.findall(re.compile(r'(\[\[code( type="([\S]+)")?\]\]([\s\S ]*?)\[\[/code\]\])',re.MULTILINE), text)
for code_block_found in code_blocks_found:
tmp_hash = str(uuid.uuid4())
text = text.replace(code_block_found[0],tmp_hash,1) # replace code block with a hash - to fill it in later
            # indent with four spaces so Markdown renders the protected block as code
            code_blocks[tmp_hash] = "\n"+string.join(["    " + l for l in code_block_found[-1].strip().split("\n") ],"\n")+"\n"
for search, replacement in self.static_replacements.items():
text = text.replace(search,replacement,1)
# search for any of the simpler replacements in the dictionary regex_replacements
for s_reg, r_reg in self.regex_replacements.items():
text = re.sub(re.compile(s_reg,re.MULTILINE),r_reg,text)
# search for image of the form [[image https://linyehui.com/test.png]]
for link in re.finditer(r"\[\[image ("+self.url_regex+r")\]\]", text):
            #print link.group(0), "![%s](%s)" % (link.groups()[-1],link.group(1))
            text = text.replace(link.group(0),"![%s](%s)" % (link.groups()[-1],link.group(1)),1)
# search for simple http://www.google.com links:
for link in re.finditer(r"[\s\S\n ]("+self.url_regex+r")", text):
print link.group(0)
if link.group(0)[0] == "[" : continue
elif link.group(0)[0] == "(" : continue
text = text.replace(link.group(1),"<%s> " % link.group(1),1)
# search for links of the form [http://www.google.com Google Website]
for link in re.finditer(r"\[("+self.url_regex+r") ([^\]]*)\]", text):
#print link.group(0), "[%s](%s)" % (link.groups()[-1],link.group(1))
text = text.replace(link.group(0),"[%s](%s)" % (link.groups()[-1],link.group(1)),1)
# search for unhandled tags and state them
for unhandled_tag in re.finditer(r"\[\[/([\s\S ]*?)\]\]", text):
print("Found an unhandled tag: %s" % unhandled_tag.group(1))
# now we substitute back our code blocks
for tmp_hash, code in code_blocks.items():
text = text.replace(tmp_hash, code, 1)
return text[1:-1]
def split_text(self, text):
output_parts = []
split_regex = re.compile(self.regex_split_condition)
for line in text.split("\n"):
line += "\n"
if len(output_parts) > 0 and (re.match(split_regex,line) == None): output_parts[-1] += line
else: output_parts.append(line)
return output_parts
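# Minimal usage sketch (the file handling is illustrative, not part of this
# module):
#
#   converter = WikidotToMarkdown()
#   for i, part in enumerate(converter.split_text(wikidot_source)):
#       with open("part_%02d.md" % i, "w") as out:
#           out.write(converter.convert(part))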
| mit | -4,660,743,538,455,500,000 | 62.808219 | 542 | 0.500644 | false |
proxysh/Safejumper-for-Desktop | buildmac/Resources/env/lib/python2.7/site-packages/twisted/python/components.py | 14 | 14295 | # -*- test-case-name: twisted.python.test.test_components -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Component architecture for Twisted, based on Zope3 components.
Using the Zope3 API directly is strongly recommended. Everything
you need is in the top-level of the zope.interface package, e.g.::
from zope.interface import Interface, implementer
class IFoo(Interface):
pass
@implementer(IFoo)
class Foo:
print(IFoo.implementedBy(Foo)) # True
print(IFoo.providedBy(Foo())) # True
L{twisted.python.components.registerAdapter} from this module may be used to
add to Twisted's global adapter registry.
L{twisted.python.components.proxyForInterface} is a factory for classes
which allow access to only the parts of another class defined by a specified
interface.
"""
from __future__ import division, absolute_import, print_function
# zope3 imports
from zope.interface import interface, declarations
from zope.interface.adapter import AdapterRegistry
# twisted imports
from twisted.python.compat import NativeStringIO
from twisted.python import reflect
from twisted.python._oldstyle import _oldStyle
# Twisted's global adapter registry
globalRegistry = AdapterRegistry()
# Attribute that registerAdapter looks at. Is this supposed to be public?
ALLOW_DUPLICATES = 0
def registerAdapter(adapterFactory, origInterface, *interfaceClasses):
"""Register an adapter class.
An adapter class is expected to implement the given interface, by
adapting instances implementing 'origInterface'. An adapter class's
__init__ method should accept one parameter, an instance implementing
'origInterface'.
"""
self = globalRegistry
assert interfaceClasses, "You need to pass an Interface"
global ALLOW_DUPLICATES
# deal with class->interface adapters:
if not isinstance(origInterface, interface.InterfaceClass):
origInterface = declarations.implementedBy(origInterface)
for interfaceClass in interfaceClasses:
factory = self.registered([origInterface], interfaceClass)
if factory is not None and not ALLOW_DUPLICATES:
raise ValueError("an adapter (%s) was already registered." % (factory, ))
for interfaceClass in interfaceClasses:
self.register([origInterface], interfaceClass, '', adapterFactory)
def getAdapterFactory(fromInterface, toInterface, default):
"""Return registered adapter for a given class and interface.
Note that is tied to the *Twisted* global registry, and will
thus not find adapters registered elsewhere.
"""
self = globalRegistry
if not isinstance(fromInterface, interface.InterfaceClass):
fromInterface = declarations.implementedBy(fromInterface)
factory = self.lookup1(fromInterface, toInterface)
if factory is None:
factory = default
return factory
def _addHook(registry):
"""
Add an adapter hook which will attempt to look up adapters in the given
registry.
@type registry: L{zope.interface.adapter.AdapterRegistry}
@return: The hook which was added, for later use with L{_removeHook}.
"""
lookup = registry.lookup1
def _hook(iface, ob):
factory = lookup(declarations.providedBy(ob), iface)
if factory is None:
return None
else:
return factory(ob)
interface.adapter_hooks.append(_hook)
return _hook
def _removeHook(hook):
"""
Remove a previously added adapter hook.
@param hook: An object previously returned by a call to L{_addHook}. This
will be removed from the list of adapter hooks.
"""
interface.adapter_hooks.remove(hook)
# add global adapter lookup hook for our newly created registry
_addHook(globalRegistry)
def getRegistry():
"""Returns the Twisted global
C{zope.interface.adapter.AdapterRegistry} instance.
"""
return globalRegistry
# FIXME: deprecate attribute somehow?
CannotAdapt = TypeError
@_oldStyle
class Adapter:
"""I am the default implementation of an Adapter for some interface.
This docstring contains a limerick, by popular demand::
Subclassing made Zope and TR
much harder to work with by far.
So before you inherit,
be sure to declare it
Adapter, not PyObject*
@cvar temporaryAdapter: If this is True, the adapter will not be
persisted on the Componentized.
@cvar multiComponent: If this adapter is persistent, should it be
automatically registered for all appropriate interfaces.
"""
# These attributes are used with Componentized.
temporaryAdapter = 0
multiComponent = 1
def __init__(self, original):
"""Set my 'original' attribute to be the object I am adapting.
"""
self.original = original
def __conform__(self, interface):
"""
I forward __conform__ to self.original if it has it, otherwise I
simply return None.
"""
if hasattr(self.original, "__conform__"):
return self.original.__conform__(interface)
return None
def isuper(self, iface, adapter):
"""
Forward isuper to self.original
"""
return self.original.isuper(iface, adapter)
@_oldStyle
class Componentized:
"""I am a mixin to allow you to be adapted in various ways persistently.
I define a list of persistent adapters. This is to allow adapter classes
to store system-specific state, and initialized on demand. The
getComponent method implements this. You must also register adapters for
this class for the interfaces that you wish to pass to getComponent.
Many other classes and utilities listed here are present in Zope3; this one
is specific to Twisted.
"""
persistenceVersion = 1
def __init__(self):
self._adapterCache = {}
def locateAdapterClass(self, klass, interfaceClass, default):
return getAdapterFactory(klass, interfaceClass, default)
def setAdapter(self, interfaceClass, adapterClass):
"""
Cache a provider for the given interface, by adapting C{self} using
the given adapter class.
"""
self.setComponent(interfaceClass, adapterClass(self))
def addAdapter(self, adapterClass, ignoreClass=0):
"""Utility method that calls addComponent. I take an adapter class and
instantiate it with myself as the first argument.
@return: The adapter instantiated.
"""
adapt = adapterClass(self)
self.addComponent(adapt, ignoreClass)
return adapt
def setComponent(self, interfaceClass, component):
"""
Cache a provider of the given interface.
"""
self._adapterCache[reflect.qual(interfaceClass)] = component
def addComponent(self, component, ignoreClass=0):
"""
Add a component to me, for all appropriate interfaces.
In order to determine which interfaces are appropriate, the component's
provided interfaces will be scanned.
If the argument 'ignoreClass' is True, then all interfaces are
considered appropriate.
Otherwise, an 'appropriate' interface is one for which its class has
been registered as an adapter for my class according to the rules of
getComponent.
@return: the list of appropriate interfaces
"""
for iface in declarations.providedBy(component):
if (ignoreClass or
(self.locateAdapterClass(self.__class__, iface, None)
== component.__class__)):
self._adapterCache[reflect.qual(iface)] = component
def unsetComponent(self, interfaceClass):
"""Remove my component specified by the given interface class."""
del self._adapterCache[reflect.qual(interfaceClass)]
def removeComponent(self, component):
"""
Remove the given component from me entirely, for all interfaces for which
it has been registered.
@return: a list of the interfaces that were removed.
"""
l = []
for k, v in list(self._adapterCache.items()):
if v is component:
del self._adapterCache[k]
l.append(reflect.namedObject(k))
return l
def getComponent(self, interface, default=None):
"""Create or retrieve an adapter for the given interface.
If such an adapter has already been created, retrieve it from the cache
that this instance keeps of all its adapters. Adapters created through
this mechanism may safely store system-specific state.
If you want to register an adapter that will be created through
getComponent, but you don't require (or don't want) your adapter to be
cached and kept alive for the lifetime of this Componentized object,
set the attribute 'temporaryAdapter' to True on your adapter class.
If you want to automatically register an adapter for all appropriate
interfaces (with addComponent), set the attribute 'multiComponent' to
True on your adapter class.
"""
k = reflect.qual(interface)
if k in self._adapterCache:
return self._adapterCache[k]
else:
adapter = interface.__adapt__(self)
if adapter is not None and not (
hasattr(adapter, "temporaryAdapter") and
adapter.temporaryAdapter):
self._adapterCache[k] = adapter
if (hasattr(adapter, "multiComponent") and
adapter.multiComponent):
self.addComponent(adapter)
if adapter is None:
return default
return adapter
def __conform__(self, interface):
return self.getComponent(interface)
class ReprableComponentized(Componentized):
def __init__(self):
Componentized.__init__(self)
def __repr__(self):
from pprint import pprint
sio = NativeStringIO()
pprint(self._adapterCache, sio)
return sio.getvalue()
def proxyForInterface(iface, originalAttribute='original'):
"""
Create a class which proxies all method calls which adhere to an interface
to another provider of that interface.
This function is intended for creating specialized proxies. The typical way
to use it is by subclassing the result::
class MySpecializedProxy(proxyForInterface(IFoo)):
def someInterfaceMethod(self, arg):
if arg == 3:
return 3
return self.original.someInterfaceMethod(arg)
@param iface: The Interface to which the resulting object will conform, and
which the wrapped object must provide.
@param originalAttribute: name of the attribute used to save the original
object in the resulting class. Default to C{original}.
@type originalAttribute: C{str}
@return: A class whose constructor takes the original object as its only
argument. Constructing the class creates the proxy.
"""
def __init__(self, original):
setattr(self, originalAttribute, original)
contents = {"__init__": __init__}
for name in iface:
contents[name] = _ProxyDescriptor(name, originalAttribute)
proxy = type("(Proxy for %s)"
% (reflect.qual(iface),), (object,), contents)
declarations.classImplements(proxy, iface)
return proxy
class _ProxiedClassMethod(object):
"""
A proxied class method.
@ivar methodName: the name of the method which this should invoke when
called.
@type methodName: L{str}
@ivar __name__: The name of the method being proxied (the same as
C{methodName}).
@type __name__: L{str}
@ivar originalAttribute: name of the attribute of the proxy where the
original object is stored.
@type originalAttribute: L{str}
"""
def __init__(self, methodName, originalAttribute):
self.methodName = self.__name__ = methodName
self.originalAttribute = originalAttribute
def __call__(self, oself, *args, **kw):
"""
Invoke the specified L{methodName} method of the C{original} attribute
for proxyForInterface.
@param oself: an instance of a L{proxyForInterface} object.
@return: the result of the underlying method.
"""
original = getattr(oself, self.originalAttribute)
actualMethod = getattr(original, self.methodName)
return actualMethod(*args, **kw)
class _ProxyDescriptor(object):
"""
A descriptor which will proxy attribute access, mutation, and
deletion to the L{_ProxyDescriptor.originalAttribute} of the
object it is being accessed from.
@ivar attributeName: the name of the attribute which this descriptor will
retrieve from instances' C{original} attribute.
@type attributeName: C{str}
@ivar originalAttribute: name of the attribute of the proxy where the
original object is stored.
@type originalAttribute: C{str}
"""
def __init__(self, attributeName, originalAttribute):
self.attributeName = attributeName
self.originalAttribute = originalAttribute
def __get__(self, oself, type=None):
"""
Retrieve the C{self.attributeName} property from I{oself}.
"""
if oself is None:
return _ProxiedClassMethod(self.attributeName,
self.originalAttribute)
original = getattr(oself, self.originalAttribute)
return getattr(original, self.attributeName)
def __set__(self, oself, value):
"""
Set the C{self.attributeName} property of I{oself}.
"""
original = getattr(oself, self.originalAttribute)
setattr(original, self.attributeName, value)
def __delete__(self, oself):
"""
Delete the C{self.attributeName} property of I{oself}.
"""
original = getattr(oself, self.originalAttribute)
delattr(original, self.attributeName)
__all__ = [
"registerAdapter", "getAdapterFactory",
"Adapter", "Componentized", "ReprableComponentized", "getRegistry",
"proxyForInterface",
]
| gpl-2.0 | -8,735,629,557,001,990,000 | 32.167053 | 85 | 0.668346 | false |
nytlabs/hotpants | hotpants-merge.py | 1 | 5237 | # oh boy oh boy!
# It's project HOT_PANTS
from __future__ import print_function
import Adafruit_BBIO.UART as uart
from Adafruit_Thermal import *
# import TMP102 as tmp
import VCNL4000 as vcnl
import time
from serial import Serial
import random
import atexit
import sentence_generator as sg
theObj = 'BLOCKS'
v = vcnl.VCNL4000()
readings = []
sensor_pin = 'P9_40'
# extreme_lo = ['dark','inky','shadowed','midnight''black','sinister','dour','glowering','glum','moody','morose','saturnine','sour','sullen','benighted','obscure','blue','dingy','disconsolate','dismal','gloomy','grim','sorry','drab','drear','dreary','colored','coloured','dark-skinned','non-white','depressing','dispiriting']
extreme_lo = ['stale','cold','dusty','moth-eaten','frigid','arctic','gelid','glacial','icy','polar','frosty','frozen','wintry','cold-blooded','inhuman','insensate','insentient']
# mid_lo = ['shady','dim','grey','faint','weak','dim','shadowy','vague','wispy','feeble','light','swooning','light-headed','lightheaded','fainthearted','timid','faint-hearted','cloudy','muddy','murky','turbid']
# mid_hi = ['light','shiny','clear','lustrous','diaphanous','filmy','gauze-like','gossamer','see-through','sheer','transparent','vaporous','vapourous','cobwebby']
# extreme_hi = ['blinding','superbright','brilliant','vivid','brilliant','vivid','smart','burnished','lustrous','shining','shiny','undimmed','promising','sunny','sunshiny']
extreme_hi = ['raging','hot','angry','furious','tempestuous','wild','blistering','acerbic','acid','acrid','bitter','caustic','sulfurous','sulphurous','virulent','vitriolic','blistery','red-hot','scalding','scathing','venomous','vituperative','juicy','luscious','toothsome','voluptuous','sizzling','live','unrecorded','bouncy','lively','resilient','springy','alive']
preamble = ['Now it is hella ','Oh, just a bit ','It is quite ','Gosh it is ','Well looky here, it is ','Suddenly: ','Call the police, it is ','After awhile: ','Things have changed; now it\'s more ','Hey now! It is very ']
dream = ['i am falling', 'i am walking and falling', 'i had to take a test', 'i have eaten an embarrassing amount of gum']
def parseLen(text):
L = []
# add newlines to cause text to print properly
# we need this because we're printing upside-down text
# "call the police, it is faint-hearted" should be printed as
# "rted" then linebreak then "Call the police, it is faint-hea"
# which is "rted\nCall the police, it is faint-hea"
    if len(text) > Adafruit_Thermal.maxColumn: # 32 is defined by the printer; max chars per line
        r = len(text)%32
        if r: # skip when text is an exact multiple of 32; text[-0:] would duplicate the whole string
            L.append(text[-r:]+'\n')
        for i in reversed(range(len(text)/32)):
            L.append(text[i*32:(i+1)*32]+'\n')
    else:
        L.append(text)
    return ''.join(L)
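# Illustrative example of the splitting above (string chosen arbitrarily):
# with a 32-column printer, parseLen('A'*32 + 'B'*8) yields
# 'BBBBBBBB\n' + 'A'*32 + '\n', i.e. the 8-character tail is emitted first so
# the upside-down printout reads in the right order.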
def slowPrint(text):
for i in text.splitlines():
printer.print(i+'\n')
time.sleep(0.1)
def checkSensor():
global rPast
global rMin
global rMax
global noop
# change this to whatever get-readings call we need
r = v.getProximity()
readings.append(r)
if len(readings)>WINDOW_SIZE:
del readings[:-WINDOW_SIZE]
avg = 0
for i in readings[-WINDOW_SIZE:]:
avg += (i/float(WINDOW_SIZE))
delta = r-avg
print(r, delta, avg)
if r > rMax:
rMax = r
# does this merit an emission? Or should delta have to be > threshold?
if r < rMin:
rMin = r
# does this merit an emission? Or should delta have to be > threshold?
if abs(delta) > emission_threshold:
if len(readings)==WINDOW_SIZE:
print('emitting remark')
noop = 0
emit_remark(r, delta, avg)
else:
pass
else:
noop += 1
if noop > noop_threshold:
noop = 0
print('emitting dream')
emit_dream(r, delta, avg)
rPast = r
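# Rough numeric sketch of the trigger logic above (values are made up): with
# WINDOW_SIZE = 30 and emission_threshold = 0.7, a reading r = 12 against a
# moving average avg = 11.0 gives delta = 1.0 > 0.7, so emit_remark fires
# (once the window is full); if instead |delta| stays under the threshold for
# noop_threshold (480) consecutive checks, emit_dream fires.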
def emit_dream(r, delta, avg):
norm = mapVals(r,rMin, rMax, 0.0, 0.999)
sen = sg.generate(theObj, norm, delta, True)
slowPrint(parseLen(sen))
printer.feed(1)
slowPrint(parseLen('A DREAM: '+str(time.ctime())))
printer.feed(1)
# slowPrint(parseLen('A DREAM: '+random.choice(dream)))
# printer.feed(1)
def emit_remark(r, delta, avg):
norm = mapVals(r,rMin, rMax, 0.0, 0.999)
sen = sg.generate(theObj, norm, delta, False)
slowPrint(parseLen(sen))
printer.feed(1)
# slowPrint(parseLen(random.choice(preamble)+random.choice(extreme_hi)))
# printer.feed(1)
def exit_handler():
pass
# print 'exiting'
# adc.cleanup()
# uart.cleanup() # not yet supported?
def mapVals(val, inMin, inMax, outMin, outMax):
toRet = outMin + (outMax - outMin) * ((val - inMin) / (inMax - inMin))
return toRet
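# mapVals is a plain linear rescale; e.g. mapVals(5.0, 0.0, 10.0, 0.0, 1.0)
# returns 0.5 (floats shown because integer inputs would floor-divide under
# Python 2).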
uart.setup("UART2")
printer = Adafruit_Thermal("/dev/ttyO2", 19200, timeout=5)
printer.begin()
printer.upsideDownOn()
printer.feed(3)
printer.print('i am awake and I have a TMP102')
printer.feed(1)
rPast = 0
rMax = 0 # all-time max sensor reading
rMin = 0 # all-time min sensor reading
WINDOW_SIZE = 30 # size of moving-window avg
noop = 0 # number of intervals passed without a trigger
noop_threshold = 480
emission_threshold = 0.7
while True:
checkSensor()
time.sleep(0.5) | apache-2.0 | 1,027,970,702,916,533,400 | 37.514706 | 365 | 0.642162 | false |
varses/awsch | lantz/drivers/tektronix/tds1012.py | 3 | 6856 | # -*- coding: utf-8 -*-
"""
lantz.drivers.tektronix.tds1012
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Implements the drivers to control an oscilloscope.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
Source: Tektronix Manual
"""
import numpy as np
from lantz.feat import Feat
from lantz.action import Action
from lantz.messagebased import MessageBasedDriver
from lantz.errors import InvalidCommand
class TDS1012(MessageBasedDriver):
"""Tektronix TDS1012 100MHz 2 Channel Digital Storage Oscilloscope
"""
MANUFACTURER_ID = '0x699'
@Action()
def initiate(self):
""" Initiates the acquisition in the osciloscope.
"""
self.send(':ACQ:STATE ON')
@Action()
def idn(self):
""" Identify the Osciloscope
"""
return self.query('*IDN?')
@Action()
def autoset(self):
""" Adjust the vertical, horizontal and trigger controls to display a
stable waveform.
"""
self.send('AUTOS EXEC')
@Action()
def autocal(self):
""" Autocalibration of osciloscope. It may take several minutes to
complete
"""
return self.send('*CAL')
@Feat(limits=(1,2))
def datasource(self):
""" Retrieves the data source from which data is going to be taken.
TDS1012 has 2 channels
"""
return self.query('DAT:SOU?')
@datasource.setter
def datasource(self,value):
""" Sets the data source for the acquisition of data.
"""
self.send('DAT:SOU CH{}'.format(value))
@Action()
def acquire_parameters(self):
""" Acquire parameters of the osciloscope.
It is intended for adjusting the values obtained in acquire_curve
"""
values = 'XZE?;XIN?;PT_OF?;YZE?;YMU?;YOF?;'
answer = self.query('WFMP:{}'.format(values))
parameters = {}
for v, j in zip(values.split('?;'),answer.split(';')):
parameters[v] = float(j)
return parameters
@Action()
def data_setup(self):
""" Sets the way data is going to be encoded for sending.
"""
self.send('DAT:ENC ASCI;WID 2') #ASCII is the least efficient way, but
        # couldn't make the binary mode work
@Action()
def acquire_curve(self,start=1,stop=2500):
""" Gets data from the oscilloscope. It accepts setting the start and
stop points of the acquisition (by default the entire range).
"""
parameters = self.acquire_parameters()
self.data_setup()
self.send('DAT:STAR {}'.format(start))
self.send('DAT:STOP {}'.format(stop))
data = self.query('CURV?')
data = data.split(',')
data = np.array(list(map(float,data)))
ydata = (data - parameters['YOF']) * parameters['YMU']\
+ parameters['YZE']
xdata = np.arange(len(data))*parameters['XIN'] + parameters['XZE']
return list(xdata), list(ydata)
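    # Scaling sketch for acquire_curve (parameter values are illustrative, not
    # from a real instrument): if WFMP reports YOF=0, YMU=0.04 and YZE=0, a raw
    # sample of 100 becomes (100 - 0) * 0.04 + 0 = 4.0 V, and with XIN=1e-6 and
    # XZE=0 the k-th sample sits at time k * 1e-6 s.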
@Action()
def forcetrigger(self):
""" Creates a trigger event.
"""
self.send('TRIG:FORC')
return
@Action()
def triggerlevel(self):
""" Sets the trigger level to 50% of the minimum and maximum values of
the signal.
"""
self.send('TRIG:MAI SETL')
@Feat(values={'AUTO', 'NORMAL'})
def trigger(self):
""" Retrieves trigger state.
"""
return self.query('TRIG:MAIN:MODE?')
@trigger.setter
def trigger(self,state):
""" Sets the trigger state.
"""
self.send('TRIG:MAI:MOD {}'.format(state))
return
@Feat()
def horizontal_division(self):
""" Horizontal time base division.
"""
return float(self.query('HOR:MAI:SCA?'))
@horizontal_division.setter
def horizontal_division(self,value):
""" Sets the horizontal time base division.
"""
self.send('HOR:MAI:SCA {}'.format(value))
return
@Feat(values={0, 4, 16, 64, 128})
def number_averages(self):
""" Number of averages
"""
answer = self.query('ACQ?')
answer = answer.split(';')
if answer[0] == 'SAMPLE':
return 0
elif answer[0] == 'AVERAGE':
return int(self.query('ACQ:NUMAV?'))
else:
raise InvalidCommand
@number_averages.setter
def number_averages(self,value):
""" Sets the number of averages. If 0, the it is a continous sample.
"""
if value == 0:
self.send('ACQ:MOD SAMPLE')
else:
self.send('ACQ:MOD AVE;NUMAV {}'.format(value))
@Action(values={'FREQ', 'MINI', 'MAXI', 'MEAN'})
def _measure(self, mode):
""" Measures the Frequency, Minimum, Maximum or Mean of a signal.
"""
self.send('MEASU:IMM:TYP {}'.format(mode))
return float(self.query('MEASU:IMM:VAL?'))
def measure_mean(self):
""" Gets the mean of the signal.
"""
answer = self._measure('MEAN')
return answer
def measure_frequency(self):
""" Gets the frequency of the signal.
"""
answer = self._measure('FREQ')
return answer
def measure_minimum(self):
""" Gets the minimum of the signal.
"""
answer = self._measure('MINI')
return answer
def measure_maximum(self):
""" Gets the mean of the signal.
"""
answer = self._measure('MAXI')
return answer
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='Measure using TDS1012 and dump to screen')
parser.add_argument('-p', '--port', default='/dev/ttyS0',
help='Serial port')
parser.add_argument('-v', '--view', action='store_true', default=True,
help='View ')
parser.add_argument('-c', '--channel', default=1, type=int,
help='Channel to use')
args = parser.parse_args()
osc = TDS1012(args.port)
osc.initiate()
    print('Oscilloscope Identification: {}'.format(osc.idn))
print(osc.trigger)
osc.forcetrigger()
osc.triggerlevel()
osc.trigger = "AUTO"
print(osc.trigger)
params = osc.acquire_parameters()
if args.view:
import matplotlib.pyplot as plt
import numpy as np
if args.view:
osc.datasource = args.channel
x, y = osc.acquire_curve()
x = np.array(x)
x = x - x.min()
y = np.array(y)
plt.plot(x, y)
plt.show()
| bsd-3-clause | 4,648,393,467,898,027,000 | 28.424893 | 92 | 0.547258 | false |
unapiedra/BBChop | BBChop/entropy.py | 1 | 1059 | # Copyright 2008 Ealdwulf Wuffinga
# This file is part of BBChop.
#
# BBChop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# BBChop is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BBChop. If not, see <http://www.gnu.org/licenses/>.
from . import numberType
def shannon(probs):
e=0
for p in probs:
if(p>0):
e-=p*numberType.log(p)
return e
alpha=numberType.const('1.2')
def renyi(probs):
e=0
one=numberType.const(1.0)
d=one/(one-alpha)
for p in probs:
e=e+numberType.pow(p,alpha)
return numberType.log(e)*d
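# Worked example (assuming numberType.log is the natural logarithm): for the
# uniform distribution probs = [0.5, 0.5], shannon gives
# -(0.5*log(0.5) + 0.5*log(0.5)) = log(2) ~= 0.693, and renyi with alpha = 1.2
# gives (1/(1 - 1.2)) * log(2 * 0.5**1.2) = log(2) as well, since Renyi entropy
# of any order coincides with Shannon entropy on uniform distributions.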
| gpl-2.0 | 8,434,994,482,874,837,000 | 28.416667 | 73 | 0.678942 | false |
gmarkall/numba | numba/tests/test_hashing.py | 2 | 11890 | # -*- coding: utf-8 -*-
"""
Test hashing of various supported types.
"""
import unittest
import sys
import subprocess
from collections import defaultdict
from textwrap import dedent
import numpy as np
from numba import jit
from numba.core import types, utils
import unittest
from numba.tests.support import TestCase, tag, CompilationCache
from numba.cpython.unicode import compile_time_get_string_data
from numba.cpython import hashing
def hash_usecase(x):
return hash(x)
class TestHashingSetup(TestCase):
def test_warn_on_fnv(self):
# FNV hash alg variant is not supported, check Numba warns
work = """
import sys
import warnings
from collections import namedtuple
# hash_info is a StructSequence, mock as a named tuple
fields = ["width", "modulus", "inf", "nan", "imag", "algorithm",
"hash_bits", "seed_bits", "cutoff"]
hinfo = sys.hash_info
FAKE_HASHINFO = namedtuple('FAKE_HASHINFO', fields)
fd = dict()
for f in fields:
fd[f] = getattr(hinfo, f)
fd['algorithm'] = 'fnv'
fake_hashinfo = FAKE_HASHINFO(**fd)
# replace the hashinfo with the fnv version
sys.hash_info = fake_hashinfo
with warnings.catch_warnings(record=True) as warns:
# Cause all warnings to always be triggered.
warnings.simplefilter("always")
import numba
assert len(warns) > 0
expect = "FNV hashing is not implemented in Numba. See PEP 456"
for w in warns:
if expect in str(w.message):
break
else:
raise RuntimeError("Expected warning not found")
"""
subprocess.check_call([sys.executable, '-c', dedent(work)])
class BaseTest(TestCase):
def setUp(self):
self.cfunc = jit(nopython=True)(hash_usecase)
def check_hash_values(self, values):
cfunc = self.cfunc
for val in list(values):
nb_hash = cfunc(val)
self.assertIsInstance(nb_hash, int)
try:
self.assertEqual(nb_hash, hash(val))
except AssertionError as e:
print("val, nb_hash, hash(val)")
print(val, nb_hash, hash(val))
print("abs(val), hashing._PyHASH_MODULUS - 1")
print(abs(val), hashing._PyHASH_MODULUS - 1)
raise e
def int_samples(self, typ=np.int64):
for start in (0, -50, 60000, 1 << 32):
info = np.iinfo(typ)
if not info.min <= start <= info.max:
continue
n = 100
yield range(start, start + n)
yield range(start, start + 100 * n, 100)
yield range(start, start + 128 * n, 128)
yield [-1]
def float_samples(self, typ):
info = np.finfo(typ)
for start in (0, 10, info.max ** 0.5, info.max / 1000.0):
n = 100
min_step = max(info.tiny, start * info.resolution)
for step in (1.2, min_step ** 0.5, min_step):
if step < min_step:
continue
a = np.linspace(start, start + n * step, n)
a = a.astype(typ)
yield a
yield -a
yield a + a.mean()
# Infs, nans, zeros, magic -1
a = typ([0.0, 0.5, -0.0, -1.0, float('inf'), -float('inf'),
float('nan')])
yield a
def complex_samples(self, typ, float_ty):
for real in self.float_samples(float_ty):
for imag in self.float_samples(float_ty):
# Ensure equal sizes
real = real[:len(imag)]
imag = imag[:len(real)]
a = real + typ(1j) * imag
yield a
class TestNumberHashing(BaseTest):
"""
Test hashing of number types.
"""
def check_floats(self, typ):
for a in self.float_samples(typ):
self.assertEqual(a.dtype, np.dtype(typ))
self.check_hash_values(a)
def check_complex(self, typ, float_ty):
for a in self.complex_samples(typ, float_ty):
self.assertEqual(a.dtype, np.dtype(typ))
self.check_hash_values(a)
def test_floats(self):
self.check_floats(np.float32)
self.check_floats(np.float64)
def test_complex(self):
self.check_complex(np.complex64, np.float32)
self.check_complex(np.complex128, np.float64)
def test_bool(self):
self.check_hash_values([False, True])
def test_ints(self):
minmax = []
for ty in [np.int8, np.uint8, np.int16, np.uint16,
np.int32, np.uint32, np.int64, np.uint64]:
for a in self.int_samples(ty):
self.check_hash_values(a)
info = np.iinfo(ty)
# check hash(-1) = -2
# check hash(0) = 0
self.check_hash_values([ty(-1)])
self.check_hash_values([ty(0)])
signed = 'uint' not in str(ty)
# check bit shifting patterns from min through to max
sz = ty().itemsize
for x in [info.min, info.max]:
shifts = 8 * sz
# x is a python int, do shifts etc as a python int and init
# numpy type from that to avoid numpy type rules
y = x
for i in range(shifts):
twiddle1 = 0xaaaaaaaaaaaaaaaa
twiddle2 = 0x5555555555555555
vals = [y]
for tw in [twiddle1, twiddle2]:
val = y & twiddle1
if val < sys.maxsize:
vals.append(val)
for v in vals:
self.check_hash_values([ty(v)])
if signed: # try the same with flipped signs
# negated signed INT_MIN will overflow
for v in vals:
if v != info.min:
self.check_hash_values([ty(-v)])
if x == 0: # unsigned min is 0, shift up
y = (y | 1) << 1
else: # everything else shift down
y = y >> 1
# these straddle the branch between returning the int as the hash and
# doing the PyLong hash alg
self.check_hash_values([np.int64(0x1ffffffffffffffe)])
self.check_hash_values([np.int64(0x1fffffffffffffff)])
self.check_hash_values([np.uint64(0x1ffffffffffffffe)])
self.check_hash_values([np.uint64(0x1fffffffffffffff)])
# check some values near sys int mins
self.check_hash_values([np.int64(-0x7fffffffffffffff)])
self.check_hash_values([np.int64(-0x7ffffffffffffff6)])
self.check_hash_values([np.int64(-0x7fffffffffffff9c)])
self.check_hash_values([np.int32(-0x7fffffff)])
self.check_hash_values([np.int32(-0x7ffffff6)])
self.check_hash_values([np.int32(-0x7fffff9c)])
class TestTupleHashing(BaseTest):
"""
Test hashing of tuples.
"""
def check_tuples(self, value_generator, split):
for values in value_generator:
tuples = [split(a) for a in values]
self.check_hash_values(tuples)
def test_homogeneous_tuples(self):
typ = np.uint64
def split2(i):
"""
Split i's bits into 2 integers.
"""
i = typ(i)
return (i & typ(0x5555555555555555),
i & typ(0xaaaaaaaaaaaaaaaa),
)
def split3(i):
"""
Split i's bits into 3 integers.
"""
i = typ(i)
return (i & typ(0x2492492492492492),
i & typ(0x4924924924924924),
i & typ(0x9249249249249249),
)
self.check_tuples(self.int_samples(), split2)
self.check_tuples(self.int_samples(), split3)
# Check exact. Sample values from:
# https://github.com/python/cpython/blob/b738237d6792acba85b1f6e6c8993a812c7fd815/Lib/test/test_tuple.py#L80-L93
# Untypable empty tuples are replaced with (7,).
self.check_hash_values([(7,), (0,), (0, 0), (0.5,),
(0.5, (7,), (-2, 3, (4, 6)))])
def test_heterogeneous_tuples(self):
modulo = 2**63
def split(i):
a = i & 0x5555555555555555
b = (i & 0xaaaaaaaa) ^ ((i >> 32) & 0xaaaaaaaa)
return np.int64(a), np.float64(b * 0.0001)
self.check_tuples(self.int_samples(), split)
class TestUnicodeHashing(BaseTest):
def test_basic_unicode(self):
kind1_string = "abcdefghijklmnopqrstuvwxyz"
for i in range(len(kind1_string)):
self.check_hash_values([kind1_string[:i]])
sep = "眼"
kind2_string = sep.join(list(kind1_string))
for i in range(len(kind2_string)):
self.check_hash_values([kind2_string[:i]])
sep = "🐍⚡"
kind4_string = sep.join(list(kind1_string))
for i in range(len(kind4_string)):
self.check_hash_values([kind4_string[:i]])
empty_string = ""
self.check_hash_values(empty_string)
def test_hash_passthrough(self):
# no `hash` call made, this just checks that `._hash` is correctly
# passed through from an already existing string
kind1_string = "abcdefghijklmnopqrstuvwxyz"
@jit(nopython=True)
def fn(x):
return x._hash
hash_value = compile_time_get_string_data(kind1_string)[-1]
self.assertTrue(hash_value != -1)
self.assertEqual(fn(kind1_string), hash_value)
def test_hash_passthrough_call(self):
# check `x._hash` and hash(x) are the same
kind1_string = "abcdefghijklmnopqrstuvwxyz"
@jit(nopython=True)
def fn(x):
return x._hash, hash(x)
hash_value = compile_time_get_string_data(kind1_string)[-1]
self.assertTrue(hash_value != -1)
self.assertEqual(fn(kind1_string), (hash_value, hash_value))
@unittest.skip("Needs hash computation at const unpickling time")
def test_hash_literal(self):
# a strconst always seem to have an associated hash value so the hash
# member of the returned value should contain the correct hash
@jit(nopython=True)
def fn():
x = "abcdefghijklmnopqrstuvwxyz"
return x
val = fn()
tmp = hash("abcdefghijklmnopqrstuvwxyz")
self.assertEqual(tmp, (compile_time_get_string_data(val)[-1]))
def test_hash_on_str_creation(self):
# In cPython some? new strings do not have a cached hash until hash() is
# called
def impl(do_hash):
const1 = "aaaa"
const2 = "眼眼眼眼"
new = const1 + const2
if do_hash:
hash(new)
return new
jitted = jit(nopython=True)(impl)
# do not compute the hash, cPython will have no cached hash, but Numba
# will
compute_hash = False
expected = impl(compute_hash)
got = jitted(compute_hash)
a = (compile_time_get_string_data(expected))
b = (compile_time_get_string_data(got))
self.assertEqual(a[:-1], b[:-1])
self.assertTrue(a[-1] != b[-1])
# now with compute hash enabled, cPython will have a cached hash as will
# Numba
compute_hash = True
expected = impl(compute_hash)
got = jitted(compute_hash)
a = (compile_time_get_string_data(expected))
b = (compile_time_get_string_data(got))
self.assertEqual(a, b)
if __name__ == "__main__":
unittest.main()
| bsd-2-clause | 9,025,825,448,722,313,000 | 32.640227 | 120 | 0.543579 | false |
aparo/pyes | pyes/contrib/mappings.py | 6 | 1433 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
from pyes.es import ES
from pyes import mappings
def mappings_to_code(obj, doc_count=0):
result = []
odict = obj.as_dict()
if isinstance(obj, (mappings.DocumentObjectField, mappings.ObjectField, mappings.NestedObject)):
properties = odict.pop("properties", [])
doc_count += 1
kwargs = ["name=%r" % obj.name,
"type=%r" % odict.pop("type")] +\
["%s=%r" % (k, odict[k]) for k in sorted(odict.keys())]
result.append(
"doc%d=" % doc_count + str(type(obj)).split(".")[-1].strip("'>") + "(" + ', '.join(kwargs) + ")")
for k in sorted(obj.properties.keys()):
result.extend(mappings_to_code(obj.properties[k], doc_count))
else:
kwargs = ["name=%r" % obj.name,
"type=%r" % odict.pop("type"),
"store=%r" % obj.store,
"index=%r" % odict.pop("index")] +\
["%s=%r" % (k, odict[k]) for k in sorted(odict.keys())]
result.append("doc%d.add_property(" % doc_count +\
str(type(obj)).split(".")[-1].strip("'>") + "(" +\
', '.join(kwargs) + "))")
return result
if __name__ == '__main__':
es = ES("192.168.1.1:9200")
res = mappings_to_code(es.mappings.get_doctype("twitter", "twitter"))
print("\n".join(res))
| bsd-3-clause | -2,986,008,077,885,929,000 | 38.805556 | 109 | 0.508025 | false |
jjpsos/buddy-books | accounting/apps/books/models.py | 2 | 14831 | from decimal import Decimal as D
from datetime import date
from django.conf import settings
from django.db import models
from django.core.urlresolvers import reverse
from django.core.validators import MaxValueValidator, MinValueValidator
from django.contrib.contenttypes.fields import (
GenericForeignKey,
GenericRelation)
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from accounting.libs import prices
from accounting.libs.checks import CheckingModelMixin
from accounting.libs.templatetags.currency_filters import currency_formatter
from accounting.libs.templatetags.format_filters import percentage_formatter
from .managers import (
EstimateQuerySet,
InvoiceQuerySet,
BillQuerySet,
ExpenseClaimQuerySet)
TWO_PLACES = D(10) ** -2
class Organization(models.Model):
display_name = models.CharField(max_length=150,
help_text="Name that you communicate")
legal_name = models.CharField(max_length=150,
help_text="Official name to appear on your reports, sales "
"invoices and bills")
owner = models.ForeignKey(settings.AUTH_USER_MODEL,
related_name="owned_organizations")
members = models.ManyToManyField(settings.AUTH_USER_MODEL,
related_name="organizations",
blank=True, null=True)
class Meta:
pass
def __str__(self):
return self.legal_name
def get_absolute_url(self):
return reverse('books:organization-detail', args=[self.pk])
@property
def turnover_excl_tax(self):
return self.invoices.turnover_excl_tax() or D('0.00')
@property
def turnover_incl_tax(self):
return self.invoices.turnover_incl_tax() or D('0.00')
@property
def debts_excl_tax(self):
return self.bills.debts_excl_tax() or D('0.00')
@property
def debts_incl_tax(self):
return self.bills.debts_incl_tax() or D('0.00')
@property
def profits(self):
return self.turnover_excl_tax - self.debts_excl_tax
@property
def collected_tax(self):
return self.turnover_incl_tax - self.turnover_excl_tax
@property
def deductible_tax(self):
return self.debts_incl_tax - self.debts_excl_tax
@property
def tax_provisionning(self):
return self.collected_tax - self.deductible_tax
@property
def overdue_total(self):
due_invoices = self.invoices.dued()
due_turnonver = due_invoices.turnover_incl_tax()
total_paid = due_invoices.total_paid()
return due_turnonver - total_paid
class TaxRate(models.Model):
"""
Every transaction line item needs a Tax Rate.
Tax Rates can have multiple Tax Components.
For instance, you can have an item that is charged a Tax Rate
called "City Import Tax (8%)" that has two components:
- a city tax of 5%
- an import tax of 3%.
*inspired by Xero*
"""
organization = models.ForeignKey('books.Organization',
related_name="tax_rates",
verbose_name="Attached to Organization")
name = models.CharField(max_length=50)
rate = models.DecimalField(max_digits=6,
decimal_places=5,
validators=[MinValueValidator(D('0')),
MaxValueValidator(D('1'))])
class Meta:
pass
def __str__(self):
return "{} ({})".format(self.name, percentage_formatter(self.rate))
class AbstractSale(CheckingModelMixin, models.Model):
number = models.IntegerField(default=1,
db_index=True)
# Total price needs to be stored with and wihtout taxes
# because the tax percentage can vary depending on the associated lines
total_incl_tax = models.DecimalField("Total (inc. tax)",
decimal_places=2,
max_digits=12,
default=D('0'))
total_excl_tax = models.DecimalField("Total (excl. tax)",
decimal_places=2,
max_digits=12,
default=D('0'))
# tracking
date_issued = models.DateField(default=date.today)
date_dued = models.DateField("Due date",
blank=True, null=True,
help_text="The date when the total amount "
"should have been collected")
date_paid = models.DateField(blank=True, null=True)
class Meta:
abstract = True
class CheckingOptions:
fields = (
'total_incl_tax',
'total_excl_tax',
'date_dued',
)
def __str__(self):
return "#{} ({})".format(self.number, self.total_incl_tax)
def get_detail_url(self):
raise NotImplementedError
def get_edit_url(self):
raise NotImplementedError
def compute_totals(self):
self.total_excl_tax = self.get_total_excl_tax()
self.total_incl_tax = self.get_total_incl_tax()
def _get_total(self, prop):
"""
        Sum the named line property (e.g. 'line_price_excl_tax') over every
        line of this sale and return the total.
"""
total = D('0.00')
line_queryset = self.lines.all()
for line in line_queryset:
total = total + getattr(line, prop)
return total
@property
def total_tax(self):
return self.total_incl_tax - self.total_excl_tax
def get_total_excl_tax(self):
return self._get_total('line_price_excl_tax')
def get_total_incl_tax(self):
return self._get_total('line_price_incl_tax')
@property
def total_paid(self):
total = D('0')
for p in self.payments.all():
total += p.amount
return total
@property
def total_due_incl_tax(self):
due = self.total_incl_tax
due -= self.total_paid
return due
def is_fully_paid(self):
return self.total_paid.quantize(TWO_PLACES) >= self.total_incl_tax.quantize(TWO_PLACES)
def is_partially_paid(self):
paid = self.total_paid.quantize(TWO_PLACES)
return paid and paid > 0 and paid < self.total_incl_tax.quantize(TWO_PLACES)
@property
def payroll_taxes(self):
        # TODO implement collected/accrual
paid = self.total_paid
payroll = D('0')
for emp in self.organization.employees.all():
if not emp.salary_follows_profits:
continue
payroll += paid * emp.shares_percentage * emp.payroll_tax_rate
return payroll
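    # Rough arithmetic sketch for payroll_taxes (numbers are invented): with
    # total_paid = D('1000.00') and one profit-sharing employee holding a 50%
    # share at a 30% payroll tax rate, the property returns
    # 1000 * 0.5 * 0.3 = D('150.00').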
def _check_total(self, check, total, computed_total):
if total.quantize(TWO_PLACES) != computed_total.quantize(TWO_PLACES):
check.mark_fail(level=check.LEVEL_ERROR,
message="The computed amount isn't correct, it "
"should be {}, please edit and save the "
"{} to fix it.".format(
currency_formatter(total),
self._meta.verbose_name))
else:
check.mark_pass()
return check
def check_total_excl_tax(self, check):
total = self.get_total_excl_tax()
return self._check_total(check, total, self.total_excl_tax)
def check_total_incl_tax(self, check):
total = self.get_total_incl_tax()
return self._check_total(check, total, self.total_incl_tax)
def check_date_dued(self, check):
if self.date_dued is None:
check.mark_fail(message="No due date specified")
return check
if self.total_excl_tax == D('0'):
check.mark_fail(message="The invoice has no value")
return check
if self.is_fully_paid():
last_payment = self.payments.all().first()
formatted_date = last_payment.date_paid.strftime('%B %d, %Y')
check.mark_pass(message="Has been paid on the {}"
.format(formatted_date))
return check
if timezone.now().date() > self.date_dued:
check.mark_fail(message="The due date has been exceeded.")
else:
check.mark_pass()
return check
class AbstractSaleLine(models.Model):
label = models.CharField(max_length=255)
description = models.TextField(blank=True, null=True)
unit_price_excl_tax = models.DecimalField(max_digits=8,
decimal_places=2)
quantity = models.DecimalField(max_digits=8,
decimal_places=2,
default=1)
class Meta:
abstract = True
def __str__(self):
return self.label
@property
def unit_price(self):
"""Returns the `Price` instance representing the instance"""
unit = self.unit_price_excl_tax
tax = unit * self.tax_rate.rate
p = prices.Price(settings.ACCOUNTING_DEFAULT_CURRENCY, unit, tax=tax)
return p
@property
def line_price_excl_tax(self):
return self.quantity * self.unit_price.excl_tax
@property
def line_price_incl_tax(self):
return self.quantity * self.unit_price.incl_tax
@property
def taxes(self):
return self.line_price_incl_tax - self.line_price_excl_tax
def from_client(self):
raise NotImplementedError
def to_client(self):
raise NotImplementedError
class Estimate(AbstractSale):
organization = models.ForeignKey('books.Organization',
related_name="estimates",
verbose_name="From Organization")
client = models.ForeignKey('people.Client',
verbose_name="To Client")
objects = EstimateQuerySet.as_manager()
class Meta:
unique_together = (("number", "organization"),)
ordering = ('-number',)
def get_detail_url(self):
return reverse('books:estimate-detail', args=[self.pk])
def get_edit_url(self):
return reverse('books:estimate-edit', args=[self.pk])
def from_client(self):
return self.organization
def to_client(self):
return self.client
class EstimateLine(AbstractSaleLine):
invoice = models.ForeignKey('books.Estimate',
related_name="lines")
tax_rate = models.ForeignKey('books.TaxRate')
class Meta:
pass
class Invoice(AbstractSale):
organization = models.ForeignKey('books.Organization',
related_name="invoices",
verbose_name="From Organization")
client = models.ForeignKey('people.Client',
verbose_name="To Client")
payments = GenericRelation('books.Payment')
objects = InvoiceQuerySet.as_manager()
class Meta:
unique_together = (("number", "organization"),)
ordering = ('-number',)
def get_detail_url(self):
return reverse('books:invoice-detail', args=[self.pk])
def get_edit_url(self):
return reverse('books:invoice-edit', args=[self.pk])
def from_client(self):
return self.organization
def to_client(self):
return self.client
class InvoiceLine(AbstractSaleLine):
invoice = models.ForeignKey('books.Invoice',
related_name="lines")
tax_rate = models.ForeignKey('books.TaxRate')
class Meta:
pass
class Bill(AbstractSale):
organization = models.ForeignKey('books.Organization',
related_name="bills",
verbose_name="To Organization")
client = models.ForeignKey('people.Client',
verbose_name="From Client")
payments = GenericRelation('books.Payment')
objects = BillQuerySet.as_manager()
class Meta:
unique_together = (("number", "organization"),)
ordering = ('-number',)
def get_detail_url(self):
return reverse('books:bill-detail', args=[self.pk])
def get_edit_url(self):
return reverse('books:bill-edit', args=[self.pk])
def from_client(self):
return self.client
def to_client(self):
return self.organization
class BillLine(AbstractSaleLine):
bill = models.ForeignKey('books.Bill',
related_name="lines")
tax_rate = models.ForeignKey('books.TaxRate')
class Meta:
pass
class ExpenseClaim(AbstractSale):
organization = models.ForeignKey('books.Organization',
related_name="expense_claims",
verbose_name="From Organization")
employee = models.ForeignKey('people.Employee',
verbose_name="Paid by employee")
payments = GenericRelation('books.Payment')
objects = ExpenseClaimQuerySet.as_manager()
class Meta:
unique_together = (("number", "organization"),)
ordering = ('-number',)
def get_detail_url(self):
return reverse('books:expense_claim-detail', args=[self.pk])
def get_edit_url(self):
return reverse('books:expense_claim-edit', args=[self.pk])
def from_client(self):
return self.employee
def to_client(self):
return self.organization
class ExpenseClaimLine(AbstractSaleLine):
expense_claim = models.ForeignKey('books.ExpenseClaim',
related_name="lines")
tax_rate = models.ForeignKey('books.TaxRate')
class Meta:
pass
class Payment(models.Model):
amount = models.DecimalField("Amount",
decimal_places=2,
max_digits=12)
detail = models.CharField(max_length=255,
blank=True,
null=True)
date_paid = models.DateField(default=date.today)
reference = models.CharField(max_length=255,
blank=True,
null=True)
# relationship to an object
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
class Meta:
ordering = ('-date_paid',)
def __str__(self):
if self.detail:
return self.detail
return "Payment of {}".format(currency_formatter(self.amount))
| mit | 4,129,681,992,870,755,000 | 30.82618 | 95 | 0.579395 | false |
7sDream/qqqfome | qqqfome/test/test_db.py | 1 | 3247 | import os
import unittest
import shutil
from zhihu import ZhihuClient
from .. import db
file_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.join(file_dir, 'test')
json_path = os.path.join(file_dir, 'test.json')
author = ZhihuClient(json_path).me()
db_path = os.path.join(test_dir, db.author_to_db_filename(author))
class InitDBTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
os.makedirs(test_dir, exist_ok=True)
os.chdir(test_dir)
def tearDown(self):
self.db.close() if (hasattr(self, 'db') and self.db) else None
try:
os.remove(db_path)
except FileNotFoundError:
pass
@classmethod
def tearDownClass(cls):
shutil.rmtree(test_dir)
def test_create_db_truely_create_a_file(self):
self.db = db.create_db(author)
self.assertTrue(os.path.isfile(db_path))
def test_create_db_raise_error_when_file_exist(self):
with open(db_path, 'w') as f:
f.write('test file')
with self.assertRaises(FileExistsError):
self.db = db.create_db(author)
def test_connect_db_when_file_exist(self):
# create a db
self.db = db.create_db(author)
db.close_db(self.db)
try:
self.db = db.connect_db(db_path)
except FileNotFoundError:
self.fail("Raise error when try connect a exist database file.")
def test_connect_db_when_file_not_exist(self):
with self.assertRaises(FileNotFoundError):
self.db = db.connect_db(db_path)
def test_create_table(self):
self.db = db.create_db(author)
db.create_table(self.db)
cursor = self.db.execute(
"""
select name from sqlite_master where type = 'table';
"""
)
self.assertListEqual(list(cursor),
[('followers',), ('sqlite_sequence',),
('meta',), ('log',)])
cursor = self.db.execute(
"""
select * from followers LIMIT 1;
"""
)
row_names = list(map(lambda x: x[0], cursor.description))
self.assertListEqual(row_names, ['id', 'name', 'in_name'])
def test_add_one_user_to_db(self):
self.db = db.create_db(author)
db.create_table(self.db)
db.add_user_to_db(self.db, author)
cursor = self.db.execute(
"""
SELECT in_name FROM followers;
"""
)
for row in cursor:
self.assertEqual(row[0], author.id)
def test_is_db_closed_when_closed(self):
self.db = db.create_db(author)
self.db.close()
self.assertTrue(db.is_db_closed(self.db))
def test_is_db_closed_when_not_closed(self):
self.db = db.create_db(author)
self.assertFalse(db.is_db_closed(self.db))
def test_close_db_when_closed(self):
self.db = db.create_db(author)
self.db.close()
db.close_db(self.db)
self.assertTrue(db.is_db_closed(self.db))
def test_close_db_when_not_closed(self):
self.db = db.create_db(author)
db.close_db(self.db)
self.assertTrue(db.is_db_closed(self.db))
| mit | 2,914,056,600,617,124,000 | 27.482456 | 76 | 0.574684 | false |
knehez/edx-platform | common/test/acceptance/pages/lms/pay_and_verify.py | 28 | 6439 | """Payment and verification pages"""
import re
from urllib import urlencode
from bok_choy.page_object import PageObject, unguarded
from bok_choy.promise import Promise, EmptyPromise
from . import BASE_URL
from .dashboard import DashboardPage
class PaymentAndVerificationFlow(PageObject):
"""Interact with the split payment and verification flow.
The flow can be accessed at the following URLs:
`/verify_student/start-flow/{course}/`
`/verify_student/upgrade/{course}/`
`/verify_student/verify-now/{course}/`
`/verify_student/verify-later/{course}/`
`/verify_student/payment-confirmation/{course}/`
Users can reach the flow when attempting to enroll in a course's verified
mode, either directly from the track selection page, or by upgrading from
the honor mode. Users can also reach the flow when attempting to complete
a deferred verification, or when attempting to view a receipt corresponding
to an earlier payment.
"""
def __init__(self, browser, course_id, entry_point='start-flow'):
"""Initialize the page.
Arguments:
browser (Browser): The browser instance.
course_id (unicode): The course in which the user is enrolling.
Keyword Arguments:
entry_point (str): Where to begin the flow; must be one of 'start-flow',
                'upgrade', 'verify-now', 'verify-later', or 'payment-confirmation'.
Raises:
ValueError
"""
super(PaymentAndVerificationFlow, self).__init__(browser)
self._course_id = course_id
if entry_point not in ['start-flow', 'upgrade', 'verify-now', 'verify-later', 'payment-confirmation']:
raise ValueError(
"Entry point must be either 'start-flow', 'upgrade', 'verify-now', 'verify-later', or 'payment-confirmation'."
)
self._entry_point = entry_point
@property
def url(self):
"""Return the URL corresponding to the initial position in the flow."""
url = "{base}/verify_student/{entry_point}/{course}/".format(
base=BASE_URL,
entry_point=self._entry_point,
course=self._course_id
)
return url
def is_browser_on_page(self):
"""Check if a step in the payment and verification flow has loaded."""
return (
self.q(css="div .make-payment-step").is_present() or
self.q(css="div .payment-confirmation-step").is_present() or
self.q(css="div .face-photo-step").is_present() or
self.q(css="div .id-photo-step").is_present() or
self.q(css="div .review-photos-step").is_present() or
self.q(css="div .enrollment-confirmation-step").is_present()
)
def indicate_contribution(self):
"""Interact with the radio buttons appearing on the first page of the upgrade flow."""
self.q(css=".contribution-option > input").first.click()
def proceed_to_payment(self):
"""Interact with the payment button."""
self.q(css=".payment-button").click()
FakePaymentPage(self.browser, self._course_id).wait_for_page()
def immediate_verification(self):
"""Interact with the immediate verification button."""
self.q(css="#verify_now_button").click()
PaymentAndVerificationFlow(self.browser, self._course_id, entry_point='verify-now').wait_for_page()
def defer_verification(self):
"""Interact with the link allowing the user to defer their verification."""
self.q(css="#verify_later_button").click()
DashboardPage(self.browser).wait_for_page()
def webcam_capture(self):
"""Interact with a webcam capture button."""
self.q(css="#webcam_capture_button").click()
def _check_func():
next_step_button_classes = self.q(css="#next_step_button").attrs('class')
next_step_button_enabled = 'is-disabled' not in next_step_button_classes
return (next_step_button_enabled, next_step_button_classes)
# Check that the #next_step_button is enabled before returning control to the caller
Promise(_check_func, "The 'Next Step' button is enabled.").fulfill()
def next_verification_step(self, next_page_object):
"""Interact with the 'Next' step button found in the verification flow."""
self.q(css="#next_step_button").click()
next_page_object.wait_for_page()
def go_to_dashboard(self):
"""Interact with the link to the dashboard appearing on the enrollment confirmation page."""
if self.q(css="div .enrollment-confirmation-step").is_present():
self.q(css=".action-primary").click()
else:
raise Exception("The dashboard can only be accessed from the enrollment confirmation.")
DashboardPage(self.browser).wait_for_page()
class FakePaymentPage(PageObject):
"""Interact with the fake payment endpoint.
This page is hidden behind the feature flag `ENABLE_PAYMENT_FAKE`,
which is enabled in the Bok Choy env settings.
Configuring this payment endpoint also requires configuring the Bok Choy
auth settings with the following:
"CC_PROCESSOR_NAME": "CyberSource2",
"CC_PROCESSOR": {
"CyberSource2": {
"SECRET_KEY": <string>,
"ACCESS_KEY": <string>,
"PROFILE_ID": "edx",
"PURCHASE_ENDPOINT": "/shoppingcart/payment_fake"
}
}
"""
def __init__(self, browser, course_id):
"""Initialize the page.
Arguments:
browser (Browser): The browser instance.
course_id (unicode): The course in which the user is enrolling.
"""
super(FakePaymentPage, self).__init__(browser)
self._course_id = course_id
url = BASE_URL + "/shoppingcart/payment_fake/"
def is_browser_on_page(self):
"""Check if a step in the payment and verification flow has loaded."""
message = self.q(css='BODY').text[0]
match = re.search('Payment page', message)
return True if match else False
def submit_payment(self):
"""Interact with the payment submission button."""
self.q(css="input[value='Submit']").click()
return PaymentAndVerificationFlow(self.browser, self._course_id, entry_point='payment-confirmation').wait_for_page()
| agpl-3.0 | -2,131,324,766,209,848,600 | 38.262195 | 126 | 0.633639 | false |
knowsis/pika | examples/tmp.py | 12 | 14140 | # -*- coding: utf-8 -*-
import logging
import pika
import json
LOG_FORMAT = ('%(levelname) -10s %(asctime)s %(name) -30s %(funcName) '
'-35s %(lineno) -5d: %(message)s')
LOGGER = logging.getLogger(__name__)
class ExamplePublisher(object):
"""This is an example publisher that will handle unexpected interactions
with RabbitMQ such as channel and connection closures.
If RabbitMQ closes the connection, it will reopen it. You should
look at the output, as there are limited reasons why the connection may
be closed, which usually are tied to permission related issues or
socket timeouts.
It uses delivery confirmations and illustrates one way to keep track of
messages that have been sent and if they've been confirmed by RabbitMQ.
"""
EXCHANGE = 'message'
EXCHANGE_TYPE = 'topic'
PUBLISH_INTERVAL = 1
QUEUE = 'text'
ROUTING_KEY = 'example.text'
URLS = ['amqp://test:test@localhost:5672/%2F',
'amqp://guest:guest@localhost:5672/%2F']
def __init__(self):
"""Setup the example publisher object, passing in the URL we will use
to connect to RabbitMQ.
"""
self._connection = None
self._channel = None
self._deliveries = []
self._acked = 0
self._nacked = 0
self._message_number = 0
self._stopping = False
self._closing = False
self._url_offset = 0
def connect(self):
"""This method connects to RabbitMQ, returning the connection handle.
When the connection is established, the on_connection_open method
will be invoked by pika.
:rtype: pika.SelectConnection
"""
url = self.URLS[self._url_offset]
self._url_offset += 1
if self._url_offset == len(self.URLS):
self._url_offset = 0
LOGGER.info('Connecting to %s', url)
return pika.SelectConnection(pika.URLParameters(url),
self.on_connection_open,
False)
def close_connection(self):
"""This method closes the connection to RabbitMQ."""
LOGGER.info('Closing connection')
self._closing = True
self._connection.close()
def add_on_connection_close_callback(self):
"""This method adds an on close callback that will be invoked by pika
when RabbitMQ closes the connection to the publisher unexpectedly.
"""
LOGGER.info('Adding connection close callback')
self._connection.add_on_close_callback(self.on_connection_closed)
def on_connection_closed(self, connection, reply_code, reply_text):
"""This method is invoked by pika when the connection to RabbitMQ is
closed unexpectedly. Since it is unexpected, we will reconnect to
RabbitMQ if it disconnects.
:param pika.connection.Connection connection: The closed connection obj
:param int reply_code: The server provided reply_code if given
:param str reply_text: The server provided reply_text if given
"""
self._channel = None
if self._closing:
self._connection.ioloop.stop()
else:
LOGGER.warning('Connection closed, reopening in 5 seconds: (%s) %s',
reply_code, reply_text)
self._connection.add_timeout(5, self.reconnect)
def on_connection_open(self, unused_connection):
"""This method is called by pika once the connection to RabbitMQ has
been established. It passes the handle to the connection object in
case we need it, but in this case, we'll just mark it unused.
:type unused_connection: pika.SelectConnection
"""
LOGGER.info('Connection opened')
self.add_on_connection_close_callback()
self.open_channel()
def reconnect(self):
"""Will be invoked by the IOLoop timer if the connection is
closed. See the on_connection_closed method.
"""
# This is the old connection IOLoop instance, stop its ioloop
self._connection.ioloop.stop()
# Create a new connection
self._connection = self.connect()
# There is now a new connection, needs a new ioloop to run
self._connection.ioloop.start()
def add_on_channel_close_callback(self):
"""This method tells pika to call the on_channel_closed method if
RabbitMQ unexpectedly closes the channel.
"""
LOGGER.info('Adding channel close callback')
self._channel.add_on_close_callback(self.on_channel_closed)
def on_channel_closed(self, channel, reply_code, reply_text):
"""Invoked by pika when RabbitMQ unexpectedly closes the channel.
Channels are usually closed if you attempt to do something that
violates the protocol, such as re-declare an exchange or queue with
different parameters. In this case, we'll close the connection
to shutdown the object.
:param pika.channel.Channel: The closed channel
:param int reply_code: The numeric reason the channel was closed
:param str reply_text: The text reason the channel was closed
"""
LOGGER.warning('Channel was closed: (%s) %s', reply_code, reply_text)
self._deliveries = []
self._message_number = 0
if not self._closing:
self._connection.close()
def on_channel_open(self, channel):
"""This method is invoked by pika when the channel has been opened.
The channel object is passed in so we can make use of it.
Since the channel is now open, we'll declare the exchange to use.
:param pika.channel.Channel channel: The channel object
"""
LOGGER.info('Channel opened')
self._channel = channel
self.add_on_channel_close_callback()
self.setup_exchange(self.EXCHANGE)
def setup_exchange(self, exchange_name):
"""Setup the exchange on RabbitMQ by invoking the Exchange.Declare RPC
command. When it is complete, the on_exchange_declareok method will
be invoked by pika.
:param str|unicode exchange_name: The name of the exchange to declare
"""
LOGGER.info('Declaring exchange %s', exchange_name)
self._channel.exchange_declare(self.on_exchange_declareok,
exchange_name,
self.EXCHANGE_TYPE)
def on_exchange_declareok(self, unused_frame):
"""Invoked by pika when RabbitMQ has finished the Exchange.Declare RPC
command.
:param pika.Frame.Method unused_frame: Exchange.DeclareOk response frame
"""
LOGGER.info('Exchange declared')
self.setup_queue(self.QUEUE)
def setup_queue(self, queue_name):
"""Setup the queue on RabbitMQ by invoking the Queue.Declare RPC
command. When it is complete, the on_queue_declareok method will
be invoked by pika.
:param str|unicode queue_name: The name of the queue to declare.
"""
LOGGER.info('Declaring queue %s', queue_name)
self._channel.queue_declare(self.on_queue_declareok, queue_name)
def on_queue_declareok(self, method_frame):
"""Method invoked by pika when the Queue.Declare RPC call made in
setup_queue has completed. In this method we will bind the queue
and exchange together with the routing key by issuing the Queue.Bind
RPC command. When this command is complete, the on_bindok method will
be invoked by pika.
:param pika.frame.Method method_frame: The Queue.DeclareOk frame
"""
LOGGER.info('Binding %s to %s with %s',
self.EXCHANGE, self.QUEUE, self.ROUTING_KEY)
self._channel.queue_bind(self.on_bindok, self.QUEUE,
self.EXCHANGE, self.ROUTING_KEY)
def on_delivery_confirmation(self, method_frame):
"""Invoked by pika when RabbitMQ responds to a Basic.Publish RPC
command, passing in either a Basic.Ack or Basic.Nack frame with
the delivery tag of the message that was published. The delivery tag
is an integer counter indicating the message number that was sent
        on the channel via Basic.Publish. Here we're just doing housekeeping
to keep track of stats and remove message numbers that we expect
a delivery confirmation of from the list used to keep track of messages
that are pending confirmation.
:param pika.frame.Method method_frame: Basic.Ack or Basic.Nack frame
"""
confirmation_type = method_frame.method.NAME.split('.')[1].lower()
LOGGER.info('Received %s for delivery tag: %i',
confirmation_type,
method_frame.method.delivery_tag)
if confirmation_type == 'ack':
self._acked += 1
elif confirmation_type == 'nack':
self._nacked += 1
self._deliveries.remove(method_frame.method.delivery_tag)
LOGGER.info('Published %i messages, %i have yet to be confirmed, '
'%i were acked and %i were nacked',
self._message_number, len(self._deliveries),
self._acked, self._nacked)
def enable_delivery_confirmations(self):
"""Send the Confirm.Select RPC method to RabbitMQ to enable delivery
confirmations on the channel. The only way to turn this off is to close
the channel and create a new one.
When the message is confirmed from RabbitMQ, the
on_delivery_confirmation method will be invoked passing in a Basic.Ack
or Basic.Nack method from RabbitMQ that will indicate which messages it
is confirming or rejecting.
"""
LOGGER.info('Issuing Confirm.Select RPC command')
self._channel.confirm_delivery(self.on_delivery_confirmation)
def publish_message(self):
"""If the class is not stopping, publish a message to RabbitMQ,
appending a list of deliveries with the message number that was sent.
This list will be used to check for delivery confirmations in the
on_delivery_confirmations method.
Once the message has been sent, schedule another message to be sent.
The main reason I put scheduling in was just so you can get a good idea
of how the process is flowing by slowing down and speeding up the
delivery intervals by changing the PUBLISH_INTERVAL constant in the
class.
"""
if self._stopping:
return
message = {u'مفتاح': u' قيمة',
u'键': u'值',
u'キー': u'値'}
properties = pika.BasicProperties(app_id='example-publisher',
content_type='text/plain',
headers=message)
self._channel.basic_publish(self.EXCHANGE, self.ROUTING_KEY,
json.dumps(message, ensure_ascii=False),
properties)
self._message_number += 1
self._deliveries.append(self._message_number)
LOGGER.info('Published message # %i', self._message_number)
self.schedule_next_message()
def schedule_next_message(self):
"""If we are not closing our connection to RabbitMQ, schedule another
message to be delivered in PUBLISH_INTERVAL seconds.
"""
if self._stopping:
return
LOGGER.info('Scheduling next message for %0.1f seconds',
self.PUBLISH_INTERVAL)
self._connection.add_timeout(self.PUBLISH_INTERVAL,
self.publish_message)
def start_publishing(self):
"""This method will enable delivery confirmations and schedule the
first message to be sent to RabbitMQ
"""
LOGGER.info('Issuing consumer related RPC commands')
self.enable_delivery_confirmations()
self.schedule_next_message()
def on_bindok(self, unused_frame):
"""This method is invoked by pika when it receives the Queue.BindOk
response from RabbitMQ. Since we know we're now setup and bound, it's
time to start publishing."""
LOGGER.info('Queue bound')
self.start_publishing()
def close_channel(self):
"""Invoke this command to close the channel with RabbitMQ by sending
the Channel.Close RPC command.
"""
LOGGER.info('Closing the channel')
if self._channel:
self._channel.close()
def open_channel(self):
"""This method will open a new channel with RabbitMQ by issuing the
Channel.Open RPC command. When RabbitMQ confirms the channel is open
by sending the Channel.OpenOK RPC reply, the on_channel_open method
will be invoked.
"""
LOGGER.info('Creating a new channel')
self._connection.channel(on_open_callback=self.on_channel_open)
def run(self):
"""Run the example code by connecting and then starting the IOLoop.
"""
self._connection = self.connect()
self._connection.ioloop.start()
def stop(self):
"""Stop the example by closing the channel and connection. We
set a flag here so that we stop scheduling new messages to be
published. The IOLoop is started because this method is
invoked by the Try/Catch below when KeyboardInterrupt is caught.
Starting the IOLoop again will allow the publisher to cleanly
disconnect from RabbitMQ.
"""
LOGGER.info('Stopping')
self._stopping = True
self.close_channel()
self.close_connection()
self._connection.ioloop.start()
LOGGER.info('Stopped')
def main():
logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
example = ExamplePublisher()
try:
example.run()
except KeyboardInterrupt:
example.stop()
if __name__ == '__main__':
main()
| bsd-3-clause | 5,614,882,860,885,336,000 | 37.900826 | 80 | 0.629063 | false |
groschovskiy/lerigos_music | Server/API/lib/protorpc/generate_test.py | 26 | 4728 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for protorpc.generate."""
__author__ = '[email protected] (Rafe Kaplan)'
import operator
import cStringIO
import sys
import unittest
from protorpc import generate
from protorpc import test_util
class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
test_util.TestCase):
MODULE = generate
class IndentWriterTest(test_util.TestCase):
def setUp(self):
self.out = cStringIO.StringIO()
self.indent_writer = generate.IndentWriter(self.out)
def testWriteLine(self):
self.indent_writer.write_line('This is a line')
self.indent_writer.write_line('This is another line')
self.assertEquals('This is a line\n'
'This is another line\n',
self.out.getvalue())
def testLeftShift(self):
self.run_count = 0
def mock_write_line(line):
self.run_count += 1
self.assertEquals('same as calling write_line', line)
self.indent_writer.write_line = mock_write_line
self.indent_writer << 'same as calling write_line'
self.assertEquals(1, self.run_count)
def testIndentation(self):
self.indent_writer << 'indent 0'
self.indent_writer.begin_indent()
self.indent_writer << 'indent 1'
self.indent_writer.begin_indent()
self.indent_writer << 'indent 2'
self.indent_writer.end_indent()
self.indent_writer << 'end 2'
self.indent_writer.end_indent()
self.indent_writer << 'end 1'
self.assertRaises(generate.IndentationError,
self.indent_writer.end_indent)
self.assertEquals('indent 0\n'
' indent 1\n'
' indent 2\n'
' end 2\n'
'end 1\n',
self.out.getvalue())
def testBlankLine(self):
self.indent_writer << ''
self.indent_writer.begin_indent()
self.indent_writer << ''
self.assertEquals('\n\n', self.out.getvalue())
def testNoneInvalid(self):
self.assertRaises(
TypeError, operator.lshift, self.indent_writer, None)
def testAltIndentation(self):
self.indent_writer = generate.IndentWriter(self.out, indent_space=3)
self.indent_writer << 'indent 0'
self.assertEquals(0, self.indent_writer.indent_level)
self.indent_writer.begin_indent()
self.indent_writer << 'indent 1'
self.assertEquals(1, self.indent_writer.indent_level)
self.indent_writer.begin_indent()
self.indent_writer << 'indent 2'
self.assertEquals(2, self.indent_writer.indent_level)
self.indent_writer.end_indent()
self.indent_writer << 'end 2'
self.assertEquals(1, self.indent_writer.indent_level)
self.indent_writer.end_indent()
self.indent_writer << 'end 1'
self.assertEquals(0, self.indent_writer.indent_level)
self.assertRaises(generate.IndentationError,
self.indent_writer.end_indent)
self.assertEquals(0, self.indent_writer.indent_level)
self.assertEquals('indent 0\n'
' indent 1\n'
' indent 2\n'
' end 2\n'
'end 1\n',
self.out.getvalue())
def testIndent(self):
self.indent_writer << 'indent 0'
self.assertEquals(0, self.indent_writer.indent_level)
def indent1():
self.indent_writer << 'indent 1'
self.assertEquals(1, self.indent_writer.indent_level)
def indent2():
self.indent_writer << 'indent 2'
self.assertEquals(2, self.indent_writer.indent_level)
test_util.do_with(self.indent_writer.indent(), indent2)
self.assertEquals(1, self.indent_writer.indent_level)
self.indent_writer << 'end 2'
test_util.do_with(self.indent_writer.indent(), indent1)
self.assertEquals(0, self.indent_writer.indent_level)
self.indent_writer << 'end 1'
self.assertEquals('indent 0\n'
' indent 1\n'
' indent 2\n'
' end 2\n'
'end 1\n',
self.out.getvalue())
def main():
unittest.main()
if __name__ == '__main__':
main()
| apache-2.0 | 2,934,610,787,526,945,000 | 30.105263 | 74 | 0.62923 | false |
overtherain/scriptfile | software/googleAppEngine/google/appengine/ext/go/__init__.py | 3 | 13117 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A bridge between dev_appserver.py and a Go app."""
import asyncore
import atexit
import datetime
import errno
import getpass
import logging
import os
import random
import re
import shutil
import signal
import socket
import subprocess
import stat
import sys
import tempfile
import threading
import time
from google.appengine.ext.remote_api import handler
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.runtime import apiproxy_errors
from google.appengine.tools import dev_appserver
GAB_WORK_DIR = None
GO_APP = None
GO_APP_NAME = '_go_app'
GO_HTTP_PORT = 0
GO_API_PORT = 0
RAPI_HANDLER = None
HEALTH_CHECK_PATH = '/_appengine_delegate_health_check'
INTERNAL_SERVER_ERROR = ('Status: 500 Internal Server Error\r\n' +
'Content-Type: text/plain\r\n\r\nInternal Server Error')
MAX_START_TIME = 10
HEADER_MAP = {
'APPLICATION_ID': 'X-AppEngine-Inbound-AppId',
'CONTENT_TYPE': 'Content-Type',
'CURRENT_VERSION_ID': 'X-AppEngine-Inbound-Version-Id',
'REMOTE_ADDR': 'X-AppEngine-Remote-Addr',
'REQUEST_LOG_ID': 'X-AppEngine-Request-Log-Id',
'USER_EMAIL': 'X-AppEngine-Inbound-User-Email',
'USER_ID': 'X-AppEngine-Inbound-User-Id',
'USER_IS_ADMIN': 'X-AppEngine-Inbound-User-Is-Admin',
}
ENV_PASSTHROUGH = re.compile(
r'^(BACKEND_PORT\..*|INSTANCE_ID|SERVER_SOFTWARE)$'
)
OS_ENV_PASSTHROUGH = (
'SYSTEMROOT',
'USER',
)
APP_CONFIG = None
def quiet_kill(pid):
"""Send a SIGTERM to pid; won't raise an exception if pid is not running."""
try:
os.kill(pid, signal.SIGTERM)
except OSError:
pass
def pick_unused_port():
for _ in range(10):
port = int(random.uniform(32768, 60000))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.bind(('127.0.0.1', port))
return port
except socket.error:
logging.info('could not bind to port %d', port)
finally:
s.close()
raise dev_appserver.ExecuteError('could not pick an unused port')
def gab_work_dir(config, user, port):
base = os.getenv('XDG_CACHE_HOME')
if not base:
if sys.platform == 'darwin':
base = os.path.join(os.getenv('HOME'), 'Library', 'Caches',
'com.google.GoAppEngine')
else:
base = os.path.join(os.path.expanduser('~'), '.cache')
if os.path.islink(base):
try:
os.makedirs(os.path.realpath(base))
except OSError, e:
if e.errno != errno.EEXIST:
raise
app = re.sub(r'[.:]', '_', config.application)
return os.path.join(base,
'dev_appserver_%s_%s_%s_go_app_work_dir' % (app, user, port))
def cleanup():
try:
shutil.rmtree(GAB_WORK_DIR)
except:
pass
class DelegateClient(asyncore.dispatcher):
def __init__(self, http_req):
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect(('127.0.0.1', GO_HTTP_PORT))
self.buffer = http_req
self.result = ''
self.closed = False
def handle_close(self):
self.close()
self.closed = True
def handle_connect(self):
pass
def handle_read(self):
self.result += self.recv(8192)
def handle_write(self):
sent = self.send(self.buffer)
self.buffer = self.buffer[sent:]
def writable(self):
return len(self.buffer) > 0
class DelegateServer(asyncore.dispatcher):
def __init__(self):
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.bind(('127.0.0.1', GO_API_PORT))
self.listen(5)
def handle_accept(self):
pair = self.accept()
if not pair:
return
sock, addr = pair
RemoteAPIHandler(sock)
def writable(self):
return False
class RemoteAPIHandler(asyncore.dispatcher_with_send):
def __init__(self, sock):
asyncore.dispatcher_with_send.__init__(self, sock)
self.n = -1
self.data = ''
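  # Wire format between the Go app and this handler: each message is an
  # ASCII decimal byte count, a '\n', then that many bytes of a serialized
  # remote_api_pb Request/Response.  handle_read() below parses that framing
  # for incoming requests and writes the response back with the same
  # '%d\n' + payload framing.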
def handle_read(self):
self.data += self.recv(8192)
if self.n == -1:
i = self.data.find('\n')
if i == -1:
return
try:
self.n = int(self.data[:i])
except:
self.n = -2
if self.n < 0:
self.n = -2
self.data = ''
return
self.data = self.data[i+1:]
elif self.n == -2:
self.data = ''
return
if len(self.data) < self.n:
return
req = remote_api_pb.Request()
req.ParseFromString(self.data[:self.n])
self.data, self.n = self.data[self.n:], -1
rapi_result = None
rapi_error = 'unknown error'
try:
rapi_result = RAPI_HANDLER.ExecuteRequest(req)
except apiproxy_errors.CallNotFoundError, e:
service_name = req.service_name()
method = req.method()
rapi_error = 'call not found for %s/%s' % (service_name, method)
except Exception, e:
rapi_error = str(e)
res = remote_api_pb.Response()
if rapi_result:
res.set_response(rapi_result.Encode())
else:
ae = res.mutable_application_error()
ae.set_code(1)
ae.set_detail(rapi_error)
res1 = res.Encode()
self.send('%d\n' % len(res1))
self.send(res1)
def find_app_files(basedir):
if not basedir.endswith(os.path.sep):
basedir = basedir + os.path.sep
files, dirs = {}, [basedir]
while dirs:
dname = dirs.pop()
for entry in os.listdir(dname):
ename = os.path.join(dname, entry)
if APP_CONFIG.skip_files.match(ename):
continue
try:
s = os.stat(ename)
except OSError, e:
logging.warn('%s', e)
continue
if stat.S_ISDIR(s[stat.ST_MODE]):
dirs.append(ename)
continue
files[ename[len(basedir):]] = s[stat.ST_MTIME]
return files
def find_go_files_mtime(app_files):
files, mtime = [], 0
for f, mt in app_files.items():
if not f.endswith('.go'):
continue
if APP_CONFIG.nobuild_files.match(f):
continue
files.append(f)
mtime = max(mtime, mt)
return files, mtime
def wait_until_go_app_ready(proc, tee):
deadline = (datetime.datetime.now() +
datetime.timedelta(seconds=MAX_START_TIME))
while datetime.datetime.now() < deadline:
if proc.poll():
raise dev_appserver.ExecuteError('Go app failed during init', tee.buf)
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', GO_HTTP_PORT))
s.send('HEAD %s HTTP/1.0\r\n\r\n' % HEALTH_CHECK_PATH)
s.close()
return
except:
time.sleep(0.1)
quiet_kill(proc.pid)
raise dev_appserver.ExecuteError('unable to start ' + GO_APP_NAME, tee.buf)
def up(path, n):
"""Return the nth parent directory of the given path."""
for _ in range(n):
path = os.path.dirname(path)
return path
class Tee(threading.Thread):
"""A simple line-oriented "tee".
This class connects two file-like objects, piping the output of one to the
input of the other, and buffering the last N lines.
"""
MAX_LINES = 100
def __init__(self, in_f, out_f):
threading.Thread.__init__(self, name='Tee')
self.__in = in_f
self.__out = out_f
self.buf = []
def run(self):
while True:
line = self.__in.readline()
if not line:
break
self.__out.write(line)
self.buf.append(line)
if len(self.buf) > Tee.MAX_LINES:
self.buf.pop(0)
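# Typical use of Tee (see GoApp.make_and_run below): wrap the Go app's stderr
# so its output still reaches our stderr while the last MAX_LINES lines stay
# available in tee.buf for error reporting, e.g.
#   tee = Tee(proc.stderr, sys.stderr)
#   tee.start()
#   wait_until_go_app_ready(proc, tee)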
class GoApp:
def __init__(self, root_path):
self.root_path = root_path
self.proc = None
self.proc_start = 0
self.goroot = os.path.join(
up(__file__, 5),
'goroot')
if not os.path.isdir(self.goroot):
raise Exception('no goroot found at ' + self.goroot)
self.arch = None
arch_map = {
'arm': '5',
'amd64': '6',
'386': '8',
}
for p in os.listdir(os.path.join(self.goroot, 'pkg', 'tool')):
if '_' not in p:
continue
arch = p.split('_', 1)[1]
if arch in arch_map:
self.arch = arch_map[arch]
break
if not self.arch:
raise Exception('bad goroot: no compiler found')
atexit.register(self.cleanup)
def cleanup(self):
if self.proc:
quiet_kill(self.proc.pid)
self.proc = None
def make_and_run(self, env):
app_files = find_app_files(self.root_path)
go_files, go_mtime = find_go_files_mtime(app_files)
if not go_files:
      raise Exception('no .go files in %s' % self.root_path)
app_mtime = max(app_files.values())
bin_name, bin_mtime = os.path.join(GAB_WORK_DIR, GO_APP_NAME), 0
try:
bin_mtime = os.stat(bin_name)[stat.ST_MTIME]
except:
pass
rebuild, restart = False, False
if go_mtime >= bin_mtime:
rebuild, restart = True, True
elif app_mtime > self.proc_start:
restart = True
if restart and self.proc:
quiet_kill(self.proc.pid)
self.proc.wait()
self.proc = None
if rebuild:
self.build(go_files)
if not self.proc or self.proc.poll() is not None:
logging.info('running %s, HTTP port = %d, API port = %d',
GO_APP_NAME, GO_HTTP_PORT, GO_API_PORT)
limited_env = {
'GOROOT': self.goroot,
'PWD': self.root_path,
'TZ': 'UTC',
}
for k, v in env.items():
if ENV_PASSTHROUGH.match(k):
limited_env[k] = v
for e in OS_ENV_PASSTHROUGH:
if e in os.environ:
limited_env[e] = os.environ[e]
self.proc_start = app_mtime
self.proc = subprocess.Popen([bin_name,
'-addr_http', 'tcp:127.0.0.1:%d' % GO_HTTP_PORT,
'-addr_api', 'tcp:127.0.0.1:%d' % GO_API_PORT],
stderr=subprocess.PIPE,
cwd=self.root_path, env=limited_env)
tee = Tee(self.proc.stderr, sys.stderr)
tee.start()
wait_until_go_app_ready(self.proc, tee)
def build(self, go_files):
logging.info('building ' + GO_APP_NAME)
if not os.path.exists(GAB_WORK_DIR):
os.makedirs(GAB_WORK_DIR)
gab_argv = [
os.path.join(self.goroot, 'bin', 'go-app-builder'),
'-app_base', self.root_path,
'-arch', self.arch,
'-binary_name', GO_APP_NAME,
'-dynamic',
'-goroot', self.goroot,
'-unsafe',
'-work_dir', GAB_WORK_DIR] + go_files
try:
p = subprocess.Popen(gab_argv, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, env={})
gab_retcode = p.wait()
except Exception, e:
raise Exception('cannot call go-app-builder', e)
if gab_retcode != 0:
raise dev_appserver.CompileError(p.stdout.read() + '\n' + p.stderr.read())
OldSigTermHandler = None
def SigTermHandler(signum, frame):
if GO_APP:
GO_APP.cleanup()
if OldSigTermHandler:
OldSigTermHandler(signum, frame)
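# execute_go_cgi translates a CGI-style request from dev_appserver into a raw
# HTTP request sent to the Go app on GO_HTTP_PORT (via DelegateClient), then
# rewrites the response's status line into the CGI 'Status:' form expected by
# dev_appserver.  On first use it also builds and starts the Go app and the
# DelegateServer that proxies API calls back into the Python runtime.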
def execute_go_cgi(root_path, config, handler_path, cgi_path,
env, infile, outfile):
global RAPI_HANDLER, GAB_WORK_DIR, GO_APP, GO_HTTP_PORT, GO_API_PORT
global OldSigTermHandler
if not RAPI_HANDLER:
GAB_WORK_DIR = gab_work_dir(config, getpass.getuser(), env['SERVER_PORT'])
GO_HTTP_PORT = pick_unused_port()
GO_API_PORT = pick_unused_port()
atexit.register(cleanup)
try:
OldSigTermHandler = signal.signal(signal.SIGTERM, SigTermHandler)
except ValueError:
pass
DelegateServer()
RAPI_HANDLER = handler.ApiCallHandler()
GO_APP = GoApp(root_path)
GO_APP.make_and_run(env)
request_method = env['REQUEST_METHOD']
server_protocol = env['SERVER_PROTOCOL']
request_uri = env['PATH_INFO']
if env.get('QUERY_STRING'):
request_uri += '?' + env['QUERY_STRING']
content = infile.getvalue()
headers = []
for k, v in env.items():
if k in HEADER_MAP:
headers.append('%s: %s' % (HEADER_MAP[k], v))
elif k.startswith('HTTP_'):
hk = k[5:].replace("_", "-")
if hk.title() == 'Connection':
continue
headers.append('%s: %s' % (hk, v))
headers.append('Content-Length: %d' % len(content))
headers.append('Connection: close')
http_req = (request_method + ' ' + request_uri + ' ' + server_protocol +
'\r\n' + '\r\n'.join(headers) + '\r\n\r\n' + content)
old_env = os.environ.copy()
try:
os.environ.clear()
os.environ.update(env)
x = DelegateClient(http_req)
while not x.closed:
asyncore.loop(30.0, False, None, 1)
res = x.result
finally:
os.environ.clear()
os.environ.update(old_env)
if res.startswith('HTTP/1.0 ') or res.startswith('HTTP/1.1 '):
res = 'Status:' + res[8:]
else:
res = INTERNAL_SERVER_ERROR
outfile.write(res)
| mit | 405,327,432,642,739,650 | 22.052724 | 80 | 0.613021 | false |
FireWRT/OpenWrt-Firefly-Libraries | staging_dir/host/lib/python3.4/test/test_urllib2_localnet.py | 19 | 26224 | import base64
import os
import email
import urllib.parse
import urllib.request
import http.server
import unittest
import hashlib
from test import support
threading = support.import_module('threading')
try:
import ssl
except ImportError:
ssl = None
here = os.path.dirname(__file__)
# Self-signed cert file for 'localhost'
CERT_localhost = os.path.join(here, 'keycert.pem')
# Self-signed cert file for 'fakehostname'
CERT_fakehostname = os.path.join(here, 'keycert2.pem')
# Loopback http server infrastructure
class LoopbackHttpServer(http.server.HTTPServer):
"""HTTP server w/ a few modifications that make it useful for
loopback testing purposes.
"""
def __init__(self, server_address, RequestHandlerClass):
http.server.HTTPServer.__init__(self,
server_address,
RequestHandlerClass)
# Set the timeout of our listening socket really low so
# that we can stop the server easily.
self.socket.settimeout(0.1)
def get_request(self):
"""HTTPServer method, overridden."""
request, client_address = self.socket.accept()
# It's a loopback connection, so setting the timeout
# really low shouldn't affect anything, but should make
# deadlocks less likely to occur.
request.settimeout(10.0)
return (request, client_address)
class LoopbackHttpServerThread(threading.Thread):
"""Stoppable thread that runs a loopback http server."""
def __init__(self, request_handler):
threading.Thread.__init__(self)
self._stop_server = False
self.ready = threading.Event()
request_handler.protocol_version = "HTTP/1.0"
self.httpd = LoopbackHttpServer(("127.0.0.1", 0),
request_handler)
self.port = self.httpd.server_port
def stop(self):
"""Stops the webserver if it's currently running."""
self._stop_server = True
self.join()
self.httpd.server_close()
def run(self):
self.ready.set()
while not self._stop_server:
self.httpd.handle_request()
# Authentication infrastructure
class DigestAuthHandler:
"""Handler for performing digest authentication."""
def __init__(self):
self._request_num = 0
self._nonces = []
self._users = {}
self._realm_name = "Test Realm"
self._qop = "auth"
def set_qop(self, qop):
self._qop = qop
def set_users(self, users):
assert isinstance(users, dict)
self._users = users
def set_realm(self, realm):
self._realm_name = realm
def _generate_nonce(self):
self._request_num += 1
nonce = hashlib.md5(str(self._request_num).encode("ascii")).hexdigest()
self._nonces.append(nonce)
return nonce
def _create_auth_dict(self, auth_str):
first_space_index = auth_str.find(" ")
auth_str = auth_str[first_space_index+1:]
parts = auth_str.split(",")
auth_dict = {}
for part in parts:
name, value = part.split("=")
name = name.strip()
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
else:
value = value.strip()
auth_dict[name] = value
return auth_dict
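    # _validate_auth below recomputes the RFC 2617 digest on the server side:
    #   HA1 = MD5(username:realm:password)
    #   HA2 = MD5(method:uri)
    #   response = MD5(HA1:nonce:nc:cnonce:qop:HA2)
    # and compares the result against the "response" value sent by the client.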
def _validate_auth(self, auth_dict, password, method, uri):
final_dict = {}
final_dict.update(auth_dict)
final_dict["password"] = password
final_dict["method"] = method
final_dict["uri"] = uri
HA1_str = "%(username)s:%(realm)s:%(password)s" % final_dict
HA1 = hashlib.md5(HA1_str.encode("ascii")).hexdigest()
HA2_str = "%(method)s:%(uri)s" % final_dict
HA2 = hashlib.md5(HA2_str.encode("ascii")).hexdigest()
final_dict["HA1"] = HA1
final_dict["HA2"] = HA2
response_str = "%(HA1)s:%(nonce)s:%(nc)s:" \
"%(cnonce)s:%(qop)s:%(HA2)s" % final_dict
response = hashlib.md5(response_str.encode("ascii")).hexdigest()
return response == auth_dict["response"]
def _return_auth_challenge(self, request_handler):
request_handler.send_response(407, "Proxy Authentication Required")
request_handler.send_header("Content-Type", "text/html")
request_handler.send_header(
'Proxy-Authenticate', 'Digest realm="%s", '
'qop="%s",'
'nonce="%s", ' % \
(self._realm_name, self._qop, self._generate_nonce()))
# XXX: Not sure if we're supposed to add this next header or
# not.
#request_handler.send_header('Connection', 'close')
request_handler.end_headers()
request_handler.wfile.write(b"Proxy Authentication Required.")
return False
def handle_request(self, request_handler):
"""Performs digest authentication on the given HTTP request
handler. Returns True if authentication was successful, False
otherwise.
If no users have been set, then digest auth is effectively
disabled and this method will always return True.
"""
if len(self._users) == 0:
return True
if "Proxy-Authorization" not in request_handler.headers:
return self._return_auth_challenge(request_handler)
else:
auth_dict = self._create_auth_dict(
request_handler.headers["Proxy-Authorization"]
)
if auth_dict["username"] in self._users:
                password = self._users[auth_dict["username"]]
            else:
                return self._return_auth_challenge(request_handler)
            if auth_dict.get("nonce") not in self._nonces:
return self._return_auth_challenge(request_handler)
else:
self._nonces.remove(auth_dict["nonce"])
auth_validated = False
# MSIE uses short_path in its validation, but Python's
# urllib.request uses the full path, so we're going to see if
# either of them works here.
for path in [request_handler.path, request_handler.short_path]:
if self._validate_auth(auth_dict,
password,
request_handler.command,
path):
auth_validated = True
if not auth_validated:
return self._return_auth_challenge(request_handler)
return True
class BasicAuthHandler(http.server.BaseHTTPRequestHandler):
"""Handler for performing basic authentication."""
# Server side values
USER = 'testUser'
PASSWD = 'testPass'
REALM = 'Test'
USER_PASSWD = "%s:%s" % (USER, PASSWD)
ENCODED_AUTH = base64.b64encode(USER_PASSWD.encode('ascii')).decode('ascii')
def __init__(self, *args, **kwargs):
http.server.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
def log_message(self, format, *args):
# Suppress console log message
pass
def do_HEAD(self):
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
def do_AUTHHEAD(self):
self.send_response(401)
self.send_header("WWW-Authenticate", "Basic realm=\"%s\"" % self.REALM)
self.send_header("Content-type", "text/html")
self.end_headers()
def do_GET(self):
if not self.headers.get("Authorization", ""):
self.do_AUTHHEAD()
self.wfile.write(b"No Auth header received")
elif self.headers.get(
"Authorization", "") == "Basic " + self.ENCODED_AUTH:
self.send_response(200)
self.end_headers()
self.wfile.write(b"It works")
else:
# Request Unauthorized
self.do_AUTHHEAD()
# Proxy test infrastructure
class FakeProxyHandler(http.server.BaseHTTPRequestHandler):
"""This is a 'fake proxy' that makes it look like the entire
    internet has gone down due to a sudden zombie invasion. Its main
utility is in providing us with authentication support for
testing.
"""
def __init__(self, digest_auth_handler, *args, **kwargs):
# This has to be set before calling our parent's __init__(), which will
# try to call do_GET().
self.digest_auth_handler = digest_auth_handler
http.server.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
def log_message(self, format, *args):
# Uncomment the next line for debugging.
# sys.stderr.write(format % args)
pass
def do_GET(self):
(scm, netloc, path, params, query, fragment) = urllib.parse.urlparse(
self.path, "http")
self.short_path = path
if self.digest_auth_handler.handle_request(self):
self.send_response(200, "OK")
self.send_header("Content-Type", "text/html")
self.end_headers()
self.wfile.write(bytes("You've reached %s!<BR>" % self.path,
"ascii"))
self.wfile.write(b"Our apologies, but our server is down due to "
b"a sudden zombie invasion.")
# Test cases
@unittest.skipUnless(threading, "Threading required for this test.")
class BasicAuthTests(unittest.TestCase):
USER = "testUser"
PASSWD = "testPass"
INCORRECT_PASSWD = "Incorrect"
REALM = "Test"
def setUp(self):
super(BasicAuthTests, self).setUp()
# With Basic Authentication
def http_server_with_basic_auth_handler(*args, **kwargs):
return BasicAuthHandler(*args, **kwargs)
self.server = LoopbackHttpServerThread(http_server_with_basic_auth_handler)
self.server_url = 'http://127.0.0.1:%s' % self.server.port
self.server.start()
self.server.ready.wait()
def tearDown(self):
self.server.stop()
super(BasicAuthTests, self).tearDown()
def test_basic_auth_success(self):
ah = urllib.request.HTTPBasicAuthHandler()
ah.add_password(self.REALM, self.server_url, self.USER, self.PASSWD)
urllib.request.install_opener(urllib.request.build_opener(ah))
try:
self.assertTrue(urllib.request.urlopen(self.server_url))
except urllib.error.HTTPError:
self.fail("Basic auth failed for the url: %s", self.server_url)
def test_basic_auth_httperror(self):
ah = urllib.request.HTTPBasicAuthHandler()
ah.add_password(self.REALM, self.server_url, self.USER, self.INCORRECT_PASSWD)
urllib.request.install_opener(urllib.request.build_opener(ah))
self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, self.server_url)
@unittest.skipUnless(threading, "Threading required for this test.")
class ProxyAuthTests(unittest.TestCase):
URL = "http://localhost"
USER = "tester"
PASSWD = "test123"
REALM = "TestRealm"
def setUp(self):
super(ProxyAuthTests, self).setUp()
self.digest_auth_handler = DigestAuthHandler()
self.digest_auth_handler.set_users({self.USER: self.PASSWD})
self.digest_auth_handler.set_realm(self.REALM)
# With Digest Authentication.
def create_fake_proxy_handler(*args, **kwargs):
return FakeProxyHandler(self.digest_auth_handler, *args, **kwargs)
self.server = LoopbackHttpServerThread(create_fake_proxy_handler)
self.server.start()
self.server.ready.wait()
proxy_url = "http://127.0.0.1:%d" % self.server.port
handler = urllib.request.ProxyHandler({"http" : proxy_url})
self.proxy_digest_handler = urllib.request.ProxyDigestAuthHandler()
self.opener = urllib.request.build_opener(
handler, self.proxy_digest_handler)
def tearDown(self):
self.server.stop()
super(ProxyAuthTests, self).tearDown()
def test_proxy_with_bad_password_raises_httperror(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD+"bad")
self.digest_auth_handler.set_qop("auth")
self.assertRaises(urllib.error.HTTPError,
self.opener.open,
self.URL)
def test_proxy_with_no_password_raises_httperror(self):
self.digest_auth_handler.set_qop("auth")
self.assertRaises(urllib.error.HTTPError,
self.opener.open,
self.URL)
def test_proxy_qop_auth_works(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD)
self.digest_auth_handler.set_qop("auth")
result = self.opener.open(self.URL)
while result.read():
pass
result.close()
def test_proxy_qop_auth_int_works_or_throws_urlerror(self):
self.proxy_digest_handler.add_password(self.REALM, self.URL,
self.USER, self.PASSWD)
self.digest_auth_handler.set_qop("auth-int")
try:
result = self.opener.open(self.URL)
except urllib.error.URLError:
# It's okay if we don't support auth-int, but we certainly
# shouldn't receive any kind of exception here other than
# a URLError.
result = None
if result:
while result.read():
pass
result.close()
def GetRequestHandler(responses):
class FakeHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
server_version = "TestHTTP/"
requests = []
headers_received = []
port = 80
def do_GET(self):
body = self.send_head()
while body:
done = self.wfile.write(body)
body = body[done:]
def do_POST(self):
content_length = self.headers["Content-Length"]
post_data = self.rfile.read(int(content_length))
self.do_GET()
self.requests.append(post_data)
def send_head(self):
FakeHTTPRequestHandler.headers_received = self.headers
self.requests.append(self.path)
response_code, headers, body = responses.pop(0)
self.send_response(response_code)
for (header, value) in headers:
self.send_header(header, value % {'port':self.port})
if body:
self.send_header("Content-type", "text/plain")
self.end_headers()
return body
self.end_headers()
def log_message(self, *args):
pass
return FakeHTTPRequestHandler
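# GetRequestHandler builds a one-off handler class with canned responses that
# are consumed in order; each entry is a (status_code, [(header, value), ...],
# body) tuple, e.g. (302, [("Location", "http://localhost:%(port)s/somewhere_else")], "").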
@unittest.skipUnless(threading, "Threading required for this test.")
class TestUrlopen(unittest.TestCase):
"""Tests urllib.request.urlopen using the network.
These tests are not exhaustive. Assuming that testing using files does a
good job overall of some of the basic interface features. There are no
tests exercising the optional 'data' and 'proxies' arguments. No tests
for transparent redirection have been written.
"""
def setUp(self):
super(TestUrlopen, self).setUp()
# Ignore proxies for localhost tests.
self.old_environ = os.environ.copy()
os.environ['NO_PROXY'] = '*'
self.server = None
def tearDown(self):
if self.server is not None:
self.server.stop()
os.environ.clear()
os.environ.update(self.old_environ)
super(TestUrlopen, self).tearDown()
def urlopen(self, url, data=None, **kwargs):
l = []
f = urllib.request.urlopen(url, data, **kwargs)
try:
# Exercise various methods
l.extend(f.readlines(200))
l.append(f.readline())
l.append(f.read(1024))
l.append(f.read())
finally:
f.close()
return b"".join(l)
def start_server(self, responses=None):
if responses is None:
responses = [(200, [], b"we don't care")]
handler = GetRequestHandler(responses)
self.server = LoopbackHttpServerThread(handler)
self.server.start()
self.server.ready.wait()
port = self.server.port
handler.port = port
return handler
def start_https_server(self, responses=None, **kwargs):
if not hasattr(urllib.request, 'HTTPSHandler'):
self.skipTest('ssl support required')
from test.ssl_servers import make_https_server
if responses is None:
responses = [(200, [], b"we care a bit")]
handler = GetRequestHandler(responses)
server = make_https_server(self, handler_class=handler, **kwargs)
handler.port = server.port
return handler
def test_redirection(self):
expected_response = b"We got here..."
responses = [
(302, [("Location", "http://localhost:%(port)s/somewhere_else")],
""),
(200, [], expected_response)
]
handler = self.start_server(responses)
data = self.urlopen("http://localhost:%s/" % handler.port)
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ["/", "/somewhere_else"])
def test_chunked(self):
expected_response = b"hello world"
chunked_start = (
b'a\r\n'
b'hello worl\r\n'
b'1\r\n'
b'd\r\n'
b'0\r\n'
)
response = [(200, [("Transfer-Encoding", "chunked")], chunked_start)]
handler = self.start_server(response)
data = self.urlopen("http://localhost:%s/" % handler.port)
self.assertEqual(data, expected_response)
def test_404(self):
expected_response = b"Bad bad bad..."
handler = self.start_server([(404, [], expected_response)])
try:
self.urlopen("http://localhost:%s/weeble" % handler.port)
except urllib.error.URLError as f:
data = f.read()
f.close()
else:
self.fail("404 should raise URLError")
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ["/weeble"])
def test_200(self):
expected_response = b"pycon 2008..."
handler = self.start_server([(200, [], expected_response)])
data = self.urlopen("http://localhost:%s/bizarre" % handler.port)
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ["/bizarre"])
def test_200_with_parameters(self):
expected_response = b"pycon 2008..."
handler = self.start_server([(200, [], expected_response)])
data = self.urlopen("http://localhost:%s/bizarre" % handler.port,
b"get=with_feeling")
self.assertEqual(data, expected_response)
self.assertEqual(handler.requests, ["/bizarre", b"get=with_feeling"])
def test_https(self):
handler = self.start_https_server()
context = ssl.create_default_context(cafile=CERT_localhost)
data = self.urlopen("https://localhost:%s/bizarre" % handler.port, context=context)
self.assertEqual(data, b"we care a bit")
def test_https_with_cafile(self):
handler = self.start_https_server(certfile=CERT_localhost)
# Good cert
data = self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_localhost)
self.assertEqual(data, b"we care a bit")
# Bad cert
with self.assertRaises(urllib.error.URLError) as cm:
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_fakehostname)
# Good cert, but mismatching hostname
handler = self.start_https_server(certfile=CERT_fakehostname)
with self.assertRaises(ssl.CertificateError) as cm:
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cafile=CERT_fakehostname)
def test_https_with_cadefault(self):
handler = self.start_https_server(certfile=CERT_localhost)
# Self-signed cert should fail verification with system certificate store
with self.assertRaises(urllib.error.URLError) as cm:
self.urlopen("https://localhost:%s/bizarre" % handler.port,
cadefault=True)
def test_https_sni(self):
if ssl is None:
self.skipTest("ssl module required")
if not ssl.HAS_SNI:
self.skipTest("SNI support required in OpenSSL")
sni_name = None
def cb_sni(ssl_sock, server_name, initial_context):
nonlocal sni_name
sni_name = server_name
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.set_servername_callback(cb_sni)
handler = self.start_https_server(context=context, certfile=CERT_localhost)
context = ssl.create_default_context(cafile=CERT_localhost)
self.urlopen("https://localhost:%s" % handler.port, context=context)
self.assertEqual(sni_name, "localhost")
def test_sending_headers(self):
handler = self.start_server()
req = urllib.request.Request("http://localhost:%s/" % handler.port,
headers={"Range": "bytes=20-39"})
urllib.request.urlopen(req)
self.assertEqual(handler.headers_received["Range"], "bytes=20-39")
def test_basic(self):
handler = self.start_server()
open_url = urllib.request.urlopen("http://localhost:%s" % handler.port)
for attr in ("read", "close", "info", "geturl"):
self.assertTrue(hasattr(open_url, attr), "object returned from "
"urlopen lacks the %s attribute" % attr)
try:
self.assertTrue(open_url.read(), "calling 'read' failed")
finally:
open_url.close()
def test_info(self):
handler = self.start_server()
try:
open_url = urllib.request.urlopen(
"http://localhost:%s" % handler.port)
info_obj = open_url.info()
self.assertIsInstance(info_obj, email.message.Message,
"object returned by 'info' is not an "
"instance of email.message.Message")
self.assertEqual(info_obj.get_content_subtype(), "plain")
finally:
self.server.stop()
def test_geturl(self):
# Make sure same URL as opened is returned by geturl.
handler = self.start_server()
open_url = urllib.request.urlopen("http://localhost:%s" % handler.port)
url = open_url.geturl()
self.assertEqual(url, "http://localhost:%s" % handler.port)
def test_bad_address(self):
# Make sure proper exception is raised when connecting to a bogus
# address.
# as indicated by the comment below, this might fail with some ISP,
# so we run the test only when -unetwork/-uall is specified to
# mitigate the problem a bit (see #17564)
support.requires('network')
self.assertRaises(OSError,
# Given that both VeriSign and various ISPs have in
# the past or are presently hijacking various invalid
# domain name requests in an attempt to boost traffic
# to their own sites, finding a domain name to use
# for this test is difficult. RFC2606 leads one to
# believe that '.invalid' should work, but experience
# seemed to indicate otherwise. Single character
# TLDs are likely to remain invalid, so this seems to
# be the best choice. The trailing '.' prevents a
# related problem: The normal DNS resolver appends
# the domain names from the search path if there is
                          # no '.' at the end, and if one of those domains
# implements a '*' rule a result is returned.
# However, none of this will prevent the test from
# failing if the ISP hijacks all invalid domain
# requests. The real solution would be to be able to
# parameterize the framework with a mock resolver.
urllib.request.urlopen,
"http://sadflkjsasf.i.nvali.d./")
def test_iteration(self):
expected_response = b"pycon 2008..."
handler = self.start_server([(200, [], expected_response)])
data = urllib.request.urlopen("http://localhost:%s" % handler.port)
for line in data:
self.assertEqual(line, expected_response)
def test_line_iteration(self):
lines = [b"We\n", b"got\n", b"here\n", b"verylong " * 8192 + b"\n"]
expected_response = b"".join(lines)
handler = self.start_server([(200, [], expected_response)])
data = urllib.request.urlopen("http://localhost:%s" % handler.port)
for index, line in enumerate(data):
self.assertEqual(line, lines[index],
"Fetched line number %s doesn't match expected:\n"
" Expected length was %s, got %s" %
(index, len(lines[index]), len(line)))
self.assertEqual(index + 1, len(lines))
threads_key = None
def setUpModule():
# Store the threading_setup in a key and ensure that it is cleaned up
# in the tearDown
global threads_key
threads_key = support.threading_setup()
def tearDownModule():
if threads_key:
support.threading_cleanup(threads_key)
if __name__ == "__main__":
unittest.main()
| gpl-2.0 | -3,460,005,752,569,179,000 | 36.950796 | 91 | 0.584045 | false |
kmike/scikit-learn | examples/decomposition/plot_ica_blind_source_separation.py | 4 | 1512 | """
=====================================
Blind source separation using FastICA
=====================================
:ref:`ICA` is used to estimate sources given noisy measurements.
Imagine 2 instruments playing simultaneously and 2 microphones
recording the mixed signals. ICA is used to recover the sources
ie. what is played by each instrument.
"""
print(__doc__)
import numpy as np
import pylab as pl
from sklearn.decomposition import FastICA
###############################################################################
# Generate sample data
np.random.seed(0)
n_samples = 2000
time = np.linspace(0, 10, n_samples)
s1 = np.sin(2 * time) # Signal 1 : sinusoidal signal
s2 = np.sign(np.sin(3 * time)) # Signal 2 : square signal
S = np.c_[s1, s2]
S += 0.2 * np.random.normal(size=S.shape) # Add noise
S /= S.std(axis=0) # Standardize data
# Mix data
A = np.array([[1, 1], [0.5, 2]]) # Mixing matrix
X = np.dot(S, A.T) # Generate observations
# Compute ICA
ica = FastICA()
S_ = ica.fit(X).transform(X) # Get the estimated sources
A_ = ica.get_mixing_matrix() # Get estimated mixing matrix
assert np.allclose(X, np.dot(S_, A_.T))
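# Sanity check above: FastICA models the observations as X = S_ A_.T, so the
# estimated sources multiplied by the estimated mixing matrix should
# reproduce the mixed signals (up to numerical tolerance).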
###############################################################################
# Plot results
pl.figure()
pl.subplot(3, 1, 1)
pl.plot(S)
pl.title('True Sources')
pl.subplot(3, 1, 2)
pl.plot(X)
pl.title('Observations (mixed signal)')
pl.subplot(3, 1, 3)
pl.plot(S_)
pl.title('ICA estimated sources')
pl.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.36)
pl.show()
| bsd-3-clause | -8,432,181,432,673,024,000 | 28.647059 | 79 | 0.583995 | false |
soarpenguin/ansible | lib/ansible/modules/cloud/google/gcpubsub.py | 8 | 11780 | #!/usr/bin/python
# Copyright 2016 Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcpubsub
version_added: "2.3"
short_description: Create and Delete Topics/Subscriptions, Publish and pull messages on PubSub.
description:
- Create and Delete Topics/Subscriptions, Publish and pull messages on PubSub.
See U(https://cloud.google.com/pubsub/docs) for an overview.
requirements:
- "python >= 2.6"
- "google-auth >= 0.5.0"
- "google-cloud-pubsub >= 0.22.0"
notes:
- Subscription pull happens before publish. You cannot publish and pull in the same task.
author:
- "Tom Melendez (@supertom) <[email protected]>"
options:
topic:
description:
- GCP pubsub topic name. Only the name, not the full path, is required.
required: True
subscription:
description:
      - Dictionary containing a subscription name associated with a topic (required), along with optional ack_deadline, push_endpoint and pull.
For pulling from a subscription, message_ack (bool), max_messages (int) and return_immediate are available as subfields.
See subfields name, push_endpoint and ack_deadline for more information.
required: False
name:
description: Subfield of subscription. Required if subscription is specified. See examples.
required: False
ack_deadline:
description: Subfield of subscription. Not required. Default deadline for subscriptions to ACK the message before it is resent. See examples.
required: False
pull:
description:
- Subfield of subscription. Not required. If specified, messages will be retrieved from topic via the provided subscription name.
max_messages (int; default None; max number of messages to pull), message_ack (bool; default False; acknowledge the message) and return_immediately
(bool; default True, don't wait for messages to appear). If the messages are acknowledged, changed is set to True, otherwise, changed is False.
push_endpoint:
description:
- Subfield of subscription. Not required. If specified, message will be sent to an endpoint.
See U(https://cloud.google.com/pubsub/docs/advanced#push_endpoints) for more information.
required: False
publish:
description:
- List of dictionaries describing messages and attributes to be published. Dictionary is in message(str):attributes(dict) format.
Only message is required.
required: False
state:
description:
      - State of the topic or queue (absent, present). Applies to the most granular resource. If a subscription is
        specified we remove it. If only the topic is specified, that is what is removed. Note that a topic can be removed without first removing the
        subscription.
required: False
default: "present"
'''
EXAMPLES = '''
# Create a topic and publish a message to it
# (Message will be pushed; there is no check to see if the message was pushed before.)
# Topics:
## Create Topic
gcpubsub:
topic: ansible-topic-example
state: present
## Delete Topic
### Subscriptions associated with topic are not deleted.
gcpubsub:
topic: ansible-topic-example
state: absent
## Messages: publish multiple messages, with attributes (key:value available with the message)
### setting absent will keep the messages from being sent
gcpubsub:
topic: "{{ topic_name }}"
state: present
publish:
- message: "this is message 1"
attributes:
mykey1: myvalue
mykey2: myvalu2
mykey3: myvalue3
- message: "this is message 2"
attributes:
server: prod
sla: "99.9999"
owner: fred
# Subscriptions
## Create Subscription (pull)
gcpubsub:
topic: ansible-topic-example
subscription:
- name: mysub
state: present
## Create Subscription with ack_deadline and push endpoint
### pull is default, ack_deadline is not required
gcpubsub:
topic: ansible-topic-example
subscription:
- name: mysub
ack_deadline: "60"
push_endpoint: http://pushendpoint.example.com
state: present
## Subscription change from push to pull
### setting push_endpoint to "None" converts subscription to pull.
gcpubsub:
topic: ansible-topic-example
subscription:
name: mysub
push_endpoint: "None"
## Delete subscription
### Topic will not be deleted
gcpubsub:
topic: ansible-topic-example
subscription:
- name: mysub
state: absent
## Pull messages from subscription
### only pull keyword is required.
gcpubsub:
topic: ansible-topic-example
subscription:
name: ansible-topic-example-sub
pull:
message_ack: yes
max_messages: "100"
'''
RETURN = '''
publish:
description: List of dictionaries describing messages and attributes to be published. Dictionary is in message(str):attributes(dict) format.
Only message is required.
returned: Only when specified
type: list
sample: "publish: ['message': 'my message', attributes: {'key1': 'value1'}]"
pulled_messages:
description: list of dictionaries containing message info. Fields are ack_id, attributes, data, message_id.
returned: Only when subscription.pull is specified
type: list
sample: [{ "ack_id": "XkASTCcYREl...","attributes": {"key1": "val1",...}, "data": "this is message 1", "message_id": "49107464153705"},..]
state:
description: The state of the topic or subscription. Value will be either 'absent' or 'present'.
returned: Always
type: str
sample: "present"
subscription:
description: Name of subscription.
returned: When subscription fields are specified
type: str
sample: "mysubscription"
topic:
description: Name of topic.
returned: Always
type: str
sample: "mytopic"
'''
try:
from ast import literal_eval
HAS_PYTHON26 = True
except ImportError:
HAS_PYTHON26 = False
try:
from google.cloud import pubsub
HAS_GOOGLE_CLOUD_PUBSUB = True
except ImportError as e:
HAS_GOOGLE_CLOUD_PUBSUB = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gcp import check_min_pkg_version, get_google_cloud_credentials
CLOUD_CLIENT = 'google-cloud-pubsub'
CLOUD_CLIENT_MINIMUM_VERSION = '0.22.0'
CLOUD_CLIENT_USER_AGENT = 'ansible-pubsub-0.1'
def publish_messages(message_list, topic):
with topic.batch() as batch:
for message in message_list:
msg = message['message']
attrs = {}
if 'attributes' in message:
attrs = message['attributes']
batch.publish(bytes(msg), **attrs)
return True
def pull_messages(pull_params, sub):
"""
:rtype: tuple (output, changed)
"""
changed = False
    max_messages = pull_params.get('max_messages', None)
    message_ack = pull_params.get('message_ack', 'no')
    return_immediately = pull_params.get('return_immediately', False)
    output = []
    pulled = sub.pull(return_immediately=return_immediately,
                      max_messages=max_messages)
    for ack_id, msg in pulled:
        msg_dict = {'message_id': msg.message_id,
                    'attributes': msg.attributes,
                    'data': msg.data,
                    'ack_id': ack_id}
output.append(msg_dict)
if message_ack:
ack_ids = [m['ack_id'] for m in output]
if ack_ids:
sub.acknowledge(ack_ids)
changed = True
return (output, changed)
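# Illustrative sketch (not part of the original module): driving the helpers
# above directly with a google-cloud-pubsub client.  The project id, topic and
# subscription names below are hypothetical.
def _example_publish_and_pull(creds):
    client = pubsub.Client(project='my-project', credentials=creds, use_gax=False)
    topic = client.topic('ansible-topic-example')
    if not topic.exists():
        topic.create()
    # Publish two messages, one with attributes attached.
    publish_messages([{'message': 'hello'},
                      {'message': 'world', 'attributes': {'key1': 'val1'}}], topic)
    sub = topic.subscription('ansible-topic-example-sub')
    if not sub.exists():
        sub.create()
    # Pull without blocking and acknowledge what was received.
    return pull_messages({'max_messages': 10,
                          'message_ack': True,
                          'return_immediately': True}, sub)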
def main():
module = AnsibleModule(argument_spec=dict(
topic=dict(required=True),
state=dict(choices=['absent', 'present'], default='present'),
publish=dict(type='list', default=None),
subscription=dict(type='dict', default=None),
service_account_email=dict(),
credentials_file=dict(),
project_id=dict(), ),)
if not HAS_PYTHON26:
module.fail_json(
msg="GCE module requires python's 'ast' module, python v2.6+")
if not HAS_GOOGLE_CLOUD_PUBSUB:
module.fail_json(msg="Please install google-cloud-pubsub library.")
if not check_min_pkg_version(CLOUD_CLIENT, CLOUD_CLIENT_MINIMUM_VERSION):
module.fail_json(msg="Please install %s client version %s" % (CLOUD_CLIENT, CLOUD_CLIENT_MINIMUM_VERSION))
mod_params = {}
mod_params['publish'] = module.params.get('publish')
mod_params['state'] = module.params.get('state')
mod_params['topic'] = module.params.get('topic')
mod_params['subscription'] = module.params.get('subscription')
creds, params = get_google_cloud_credentials(module)
pubsub_client = pubsub.Client(project=params['project_id'], credentials=creds, use_gax=False)
pubsub_client.user_agent = CLOUD_CLIENT_USER_AGENT
changed = False
json_output = {}
t = None
if mod_params['topic']:
t = pubsub_client.topic(mod_params['topic'])
s = None
if mod_params['subscription']:
# Note: default ack deadline cannot be changed without deleting/recreating subscription
s = t.subscription(mod_params['subscription']['name'],
ack_deadline=mod_params['subscription'].get('ack_deadline', None),
push_endpoint=mod_params['subscription'].get('push_endpoint', None))
if mod_params['state'] == 'absent':
        # Remove the most granular resource. If subscription is specified
# we remove it. If only topic is specified, that is what is removed.
# Note that a topic can be removed without first removing the subscription.
# TODO(supertom): Enhancement: Provide an option to only delete a topic
# if there are no subscriptions associated with it (which the API does not support).
if s is not None:
if s.exists():
s.delete()
changed = True
else:
if t.exists():
t.delete()
changed = True
elif mod_params['state'] == 'present':
if not t.exists():
t.create()
changed = True
if s:
if not s.exists():
s.create()
s.reload()
changed = True
else:
# Subscription operations
# TODO(supertom): if more 'update' operations arise, turn this into a function.
s.reload()
push_endpoint=mod_params['subscription'].get('push_endpoint', None)
if push_endpoint is not None:
if push_endpoint != s.push_endpoint:
if push_endpoint == 'None':
push_endpoint = None
s.modify_push_configuration(push_endpoint=push_endpoint)
s.reload()
changed = push_endpoint == s.push_endpoint
if 'pull' in mod_params['subscription']:
if s.push_endpoint is not None:
module.fail_json(msg="Cannot pull messages, push_endpoint is configured.")
(json_output['pulled_messages'], changed) = pull_messages(
mod_params['subscription']['pull'], s)
# publish messages to the topic
if mod_params['publish'] and len(mod_params['publish']) > 0:
changed = publish_messages(mod_params['publish'], t)
json_output['changed'] = changed
json_output.update(mod_params)
module.exit_json(**json_output)
if __name__ == '__main__':
main()
| gpl-3.0 | 6,279,037,117,966,139,000 | 34.059524 | 157 | 0.645501 | false |
partofthething/home-assistant | tests/components/flux/test_switch.py | 3 | 39615 | """The tests for the Flux switch platform."""
from unittest.mock import patch
import pytest
from homeassistant.components import light, switch
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_PLATFORM,
SERVICE_TURN_ON,
STATE_ON,
SUN_EVENT_SUNRISE,
)
from homeassistant.core import State
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
assert_setup_component,
async_fire_time_changed,
async_mock_service,
mock_restore_cache,
)
async def test_valid_config(hass):
"""Test configuration."""
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "flux",
"name": "flux",
"lights": ["light.desk", "light.lamp"],
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.flux")
assert state
assert state.state == "off"
async def test_restore_state_last_on(hass):
"""Test restoring state when the last state is on."""
mock_restore_cache(hass, [State("switch.flux", "on")])
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "flux",
"name": "flux",
"lights": ["light.desk", "light.lamp"],
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.flux")
assert state
assert state.state == "on"
async def test_restore_state_last_off(hass):
"""Test restoring state when the last state is off."""
mock_restore_cache(hass, [State("switch.flux", "off")])
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "flux",
"name": "flux",
"lights": ["light.desk", "light.lamp"],
}
},
)
await hass.async_block_till_done()
state = hass.states.get("switch.flux")
assert state
assert state.state == "off"
async def test_valid_config_with_info(hass):
"""Test configuration."""
assert await async_setup_component(
hass,
"switch",
{
"switch": {
"platform": "flux",
"name": "flux",
"lights": ["light.desk", "light.lamp"],
"stop_time": "22:59",
"start_time": "7:22",
"start_colortemp": "1000",
"sunset_colortemp": "2000",
"stop_colortemp": "4000",
}
},
)
await hass.async_block_till_done()
async def test_valid_config_no_name(hass):
"""Test configuration."""
with assert_setup_component(1, "switch"):
assert await async_setup_component(
hass,
"switch",
{"switch": {"platform": "flux", "lights": ["light.desk", "light.lamp"]}},
)
await hass.async_block_till_done()
async def test_invalid_config_no_lights(hass):
"""Test configuration."""
with assert_setup_component(0, "switch"):
assert await async_setup_component(
hass, "switch", {"switch": {"platform": "flux", "name": "flux"}}
)
await hass.async_block_till_done()
async def test_flux_when_switch_is_off(hass, legacy_patchable_time):
"""Test the flux switch when it is off."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=10, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
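    # event_date stands in for get_astral_event_date (patched in below) so the
    # test works against fixed, deterministic sunrise/sunset times instead of
    # real astral calculations.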
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
}
},
)
await hass.async_block_till_done()
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
assert not turn_on_calls
async def test_flux_before_sunrise(hass, legacy_patchable_time):
"""Test the flux switch before sunrise."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=2, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=5)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
await hass.async_block_till_done()
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 112
assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
async def test_flux_before_sunrise_known_location(hass, legacy_patchable_time):
"""Test the flux switch before sunrise."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
hass.config.latitude = 55.948372
hass.config.longitude = -3.199466
hass.config.elevation = 17
test_time = dt_util.utcnow().replace(
hour=2, minute=0, second=0, day=21, month=6, year=2019
)
await hass.async_block_till_done()
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
# 'brightness': 255,
# 'disable_brightness_adjust': True,
# 'mode': 'rgb',
# 'interval': 120
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 112
assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
# pylint: disable=invalid-name
async def test_flux_after_sunrise_before_sunset(hass, legacy_patchable_time):
"""Test the flux switch after sunrise and before sunset."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=8, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 173
assert call.data[light.ATTR_XY_COLOR] == [0.439, 0.37]
# pylint: disable=invalid-name
async def test_flux_after_sunset_before_stop(hass, legacy_patchable_time):
"""Test the flux switch after sunset and before stop."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=17, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"stop_time": "22:00",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 146
assert call.data[light.ATTR_XY_COLOR] == [0.506, 0.385]
# pylint: disable=invalid-name
async def test_flux_after_stop_before_sunrise(hass, legacy_patchable_time):
"""Test the flux switch after stop and before sunrise."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=23, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 112
assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
# pylint: disable=invalid-name
async def test_flux_with_custom_start_stop_times(hass, legacy_patchable_time):
"""Test the flux with custom start and stop times."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=17, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"start_time": "6:00",
"stop_time": "23:30",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 147
assert call.data[light.ATTR_XY_COLOR] == [0.504, 0.385]
async def test_flux_before_sunrise_stop_next_day(hass, legacy_patchable_time):
"""Test the flux switch before sunrise.
This test has the stop_time on the next day (after midnight).
"""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=2, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"stop_time": "01:00",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 112
assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
# pylint: disable=invalid-name
async def test_flux_after_sunrise_before_sunset_stop_next_day(
hass, legacy_patchable_time
):
"""
Test the flux switch after sunrise and before sunset.
This test has the stop_time on the next day (after midnight).
"""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=8, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"stop_time": "01:00",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 173
assert call.data[light.ATTR_XY_COLOR] == [0.439, 0.37]
# pylint: disable=invalid-name
@pytest.mark.parametrize("x", [0, 1])
async def test_flux_after_sunset_before_midnight_stop_next_day(
hass, legacy_patchable_time, x
):
"""Test the flux switch after sunset and before stop.
This test has the stop_time on the next day (after midnight).
"""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=23, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"stop_time": "01:00",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 119
assert call.data[light.ATTR_XY_COLOR] == [0.588, 0.386]
# pylint: disable=invalid-name
async def test_flux_after_sunset_after_midnight_stop_next_day(
hass, legacy_patchable_time
):
"""Test the flux switch after sunset and before stop.
This test has the stop_time on the next day (after midnight).
"""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=00, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"stop_time": "01:00",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 114
assert call.data[light.ATTR_XY_COLOR] == [0.601, 0.382]
# pylint: disable=invalid-name
async def test_flux_after_stop_before_sunrise_stop_next_day(
hass, legacy_patchable_time
):
"""Test the flux switch after stop and before sunrise.
This test has the stop_time on the next day (after midnight).
"""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=2, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"stop_time": "01:00",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 112
assert call.data[light.ATTR_XY_COLOR] == [0.606, 0.379]
# pylint: disable=invalid-name
async def test_flux_with_custom_colortemps(hass, legacy_patchable_time):
"""Test the flux with custom start and stop colortemps."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=17, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"start_colortemp": "1000",
"stop_colortemp": "6000",
"stop_time": "22:00",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 159
assert call.data[light.ATTR_XY_COLOR] == [0.469, 0.378]
# pylint: disable=invalid-name
async def test_flux_with_custom_brightness(hass, legacy_patchable_time):
"""Test the flux with custom start and stop colortemps."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=17, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"brightness": 255,
"stop_time": "22:00",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 255
assert call.data[light.ATTR_XY_COLOR] == [0.506, 0.385]
async def test_flux_with_multiple_lights(hass, legacy_patchable_time):
"""Test the flux switch with multiple light entities."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1, ent2, ent3 = platform.ENTITIES
await hass.services.async_call(
light.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ent2.entity_id}, blocking=True
)
await hass.services.async_call(
light.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ent3.entity_id}, blocking=True
)
await hass.async_block_till_done()
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
state = hass.states.get(ent2.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
state = hass.states.get(ent3.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("xy_color") is None
assert state.attributes.get("brightness") is None
test_time = dt_util.utcnow().replace(hour=12, minute=0, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
print(f"sunrise {sunrise_time}")
return sunrise_time
print(f"sunset {sunset_time}")
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id, ent2.entity_id, ent3.entity_id],
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_BRIGHTNESS] == 163
assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376]
call = turn_on_calls[-2]
assert call.data[light.ATTR_BRIGHTNESS] == 163
assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376]
call = turn_on_calls[-3]
assert call.data[light.ATTR_BRIGHTNESS] == 163
assert call.data[light.ATTR_XY_COLOR] == [0.46, 0.376]
async def test_flux_with_mired(hass, legacy_patchable_time):
"""Test the flux switch´s mode mired."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("color_temp") is None
test_time = dt_util.utcnow().replace(hour=8, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"mode": "mired",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
assert call.data[light.ATTR_COLOR_TEMP] == 269
async def test_flux_with_rgb(hass, legacy_patchable_time):
"""Test the flux switch´s mode rgb."""
platform = getattr(hass.components, "test.light")
platform.init()
assert await async_setup_component(
hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
)
await hass.async_block_till_done()
ent1 = platform.ENTITIES[0]
# Verify initial state of light
state = hass.states.get(ent1.entity_id)
assert STATE_ON == state.state
assert state.attributes.get("color_temp") is None
test_time = dt_util.utcnow().replace(hour=8, minute=30, second=0)
sunset_time = test_time.replace(hour=17, minute=0, second=0)
sunrise_time = test_time.replace(hour=5, minute=0, second=0)
def event_date(hass, event, now=None):
if event == SUN_EVENT_SUNRISE:
return sunrise_time
return sunset_time
with patch(
"homeassistant.components.flux.switch.dt_utcnow", return_value=test_time
), patch(
"homeassistant.components.flux.switch.get_astral_event_date",
side_effect=event_date,
):
assert await async_setup_component(
hass,
switch.DOMAIN,
{
switch.DOMAIN: {
"platform": "flux",
"name": "flux",
"lights": [ent1.entity_id],
"mode": "rgb",
}
},
)
await hass.async_block_till_done()
turn_on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
await hass.services.async_call(
switch.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "switch.flux"},
blocking=True,
)
async_fire_time_changed(hass, test_time)
await hass.async_block_till_done()
call = turn_on_calls[-1]
rgb = (255, 198, 152)
rounded_call = tuple(map(round, call.data[light.ATTR_RGB_COLOR]))
assert rounded_call == rgb
| mit | 2,528,409,233,684,775,400 | 32.570339 | 86 | 0.587534 | false |
fermiPy/fermipy | fermipy/roi_model.py | 1 | 83464 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function
import os
import copy
import re
import collections
import numpy as np
import xml.etree.cElementTree as ElementTree
from astropy import units as u
from astropy.coordinates import SkyCoord
from astropy.io import fits
from astropy.table import Table, Column
import fermipy
import fermipy.config
from fermipy import utils
from fermipy import wcs_utils
from fermipy import hpx_utils
from fermipy import catalog
from fermipy import defaults
from fermipy import model_utils
from fermipy import fits_utils
from fermipy.logger import Logger, log_level
from fermipy.model_utils import make_parameter_dict
from fermipy.model_utils import cast_pars_dict
from fermipy.model_utils import get_function_defaults
from fermipy.model_utils import get_spatial_type
from fermipy.model_utils import get_function_norm_par_name
from fermipy.model_utils import get_function_par_names
from fermipy.model_utils import extract_pars_from_dict
from fermipy.model_utils import create_pars_from_dict
def create_source_table(scan_shape):
"""Create an empty source table.
Returns
-------
tab : `~astropy.table.Table`
"""
cols_dict = collections.OrderedDict()
cols_dict['Source_Name'] = dict(dtype='S48', format='%s')
cols_dict['name'] = dict(dtype='S48', format='%s')
cols_dict['class'] = dict(dtype='S32', format='%s')
cols_dict['SpectrumType'] = dict(dtype='S32', format='%s')
cols_dict['SpatialType'] = dict(dtype='S32', format='%s')
cols_dict['SourceType'] = dict(dtype='S32', format='%s')
cols_dict['SpatialModel'] = dict(dtype='S32', format='%s')
cols_dict['RAJ2000'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['DEJ2000'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['GLON'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['GLAT'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['ts'] = dict(dtype='f8', format='%.3f')
cols_dict['loglike'] = dict(dtype='f8', format='%.3f')
cols_dict['npred'] = dict(dtype='f8', format='%.3f')
cols_dict['offset'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_ra'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_dec'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_glon'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_glat'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['offset_roi_edge'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['pivot_energy'] = dict(dtype='f8', format='%.3f', unit='MeV')
cols_dict['flux_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
cols_dict['eflux_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
cols_dict['norm_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
cols_dict['dloglike_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
cols_dict['loglike_scan'] = dict(dtype='f8', format='%.3f',
shape=scan_shape)
# Add source dictionary columns
for k, v in sorted(defaults.source_output.items()):
if not k in cols_dict.keys():
if v[2] == float:
cols_dict[k] = dict(dtype='f8', format='%f')
elif k == 'Spectrum_Filename' or k == 'Spatial_Filename':
cols_dict[k] = dict(dtype='S128', format='%s')
elif v[2] == str:
cols_dict[k] = dict(dtype='S32', format='%s')
cols_dict['param_names'] = dict(dtype='S32', format='%s', shape=(10,))
cols_dict['param_values'] = dict(dtype='f8', format='%f', shape=(10,))
cols_dict['param_errors'] = dict(dtype='f8', format='%f', shape=(10,))
# Catalog Parameters
cols_dict['Flux_Density'] = dict(
dtype='f8', format='%.5g', unit='1 / (MeV cm2 s)')
cols_dict['Spectral_Index'] = dict(dtype='f8', format='%.3f')
cols_dict['Pivot_Energy'] = dict(dtype='f8', format='%.3f', unit='MeV')
cols_dict['beta'] = dict(dtype='f8', format='%.3f')
cols_dict['Exp_Index'] = dict(dtype='f8', format='%.3f')
cols_dict['Cutoff'] = dict(dtype='f8', format='%.3f', unit='MeV')
cols_dict['Expfactor'] = dict(dtype='f8', format='%.3f')
cols_dict['Conf_68_PosAng'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_68_SemiMajor'] = dict(
dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_68_SemiMinor'] = dict(
dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_95_PosAng'] = dict(dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_95_SemiMajor'] = dict(
dtype='f8', format='%.3f', unit='deg')
cols_dict['Conf_95_SemiMinor'] = dict(
dtype='f8', format='%.3f', unit='deg')
for t in ['eflux', 'eflux100', 'eflux1000', 'eflux10000']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='MeV / (cm2 s)')
cols_dict[t + '_err'] = dict(dtype='f8',
format='%.3f', unit='MeV / (cm2 s)')
for t in ['eflux_ul95', 'eflux100_ul95', 'eflux1000_ul95', 'eflux10000_ul95']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='MeV / (cm2 s)')
for t in ['flux', 'flux100', 'flux1000', 'flux10000']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='1 / (cm2 s)')
cols_dict[t + '_err'] = dict(dtype='f8',
format='%.3f', unit='1 / (cm2 s)')
for t in ['flux_ul95', 'flux100_ul95', 'flux1000_ul95', 'flux10000_ul95']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='1 / (cm2 s)')
for t in ['dnde', 'dnde100', 'dnde1000', 'dnde10000']:
cols_dict[t] = dict(dtype='f8', format='%.3f', unit='1 / (MeV cm2 s)')
cols_dict[t + '_err'] = dict(dtype='f8',
format='%.3f', unit='1 / (MeV cm2 s)')
cols = [Column(name=k, **v) for k, v in cols_dict.items()]
tab = Table(cols)
return tab
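# Illustrative usage (not part of the original module): the scan_shape
# argument sets the per-row shape of the *_scan columns.
#   >>> tab = create_source_table((20,))
#   >>> len(tab)
#   0
#   >>> tab['dloglike_scan'].shape
#   (0, 20)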
def get_skydir_distance_mask(src_skydir, skydir, dist, min_dist=None,
square=False, coordsys='CEL'):
"""Retrieve sources within a certain angular distance of an
(ra,dec) coordinate. This function supports two types of
geometric selections: circular (square=False) and square
    (square=True). The circular selection finds all sources within a given
    angular distance of the target position. The square selection
finds sources within an ROI-like region of size R x R where R
= 2 x dist.
Parameters
----------
src_skydir : `~astropy.coordinates.SkyCoord`
Array of sky directions.
skydir : `~astropy.coordinates.SkyCoord`
Sky direction with respect to which the selection will be applied.
    dist : float
        Maximum distance in degrees from the sky coordinate.
    min_dist : float
        Minimum distance in degrees from the sky coordinate.  If None, no
        minimum-distance selection is applied.
square : bool
Choose whether to apply a circular or square selection.
coordsys : str
Coordinate system to use when applying a selection with square=True.
"""
if dist is None:
dist = 180.
if not square:
dtheta = src_skydir.separation(skydir).rad
elif coordsys == 'CEL':
dtheta = get_linear_dist(skydir,
src_skydir.ra.rad,
src_skydir.dec.rad,
coordsys=coordsys)
elif coordsys == 'GAL':
dtheta = get_linear_dist(skydir,
src_skydir.galactic.l.rad,
src_skydir.galactic.b.rad,
coordsys=coordsys)
else:
raise Exception('Unrecognized coordinate system: %s' % coordsys)
msk = (dtheta < np.radians(dist))
if min_dist is not None:
msk &= (dtheta > np.radians(min_dist))
return msk
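# Illustrative usage (not part of the original module): select the source
# positions that fall within 5 deg of a hypothetical ROI center.
#   >>> from astropy.coordinates import SkyCoord
#   >>> src_dirs = SkyCoord([83.0, 120.0], [22.0, 40.0], unit='deg')
#   >>> center = SkyCoord(83.6, 22.0, unit='deg')
#   >>> get_skydir_distance_mask(src_dirs, center, dist=5.0)
#   array([ True, False])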
def get_linear_dist(skydir, lon, lat, coordsys='CEL'):
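    # Project (lon, lat) into the tangent plane centered on skydir and take
    # the max-norm of the projected offsets (in radians), i.e. the half-width
    # of the smallest coordinate-aligned square centered on skydir that
    # contains the point.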
xy = wcs_utils.sky_to_offset(skydir, np.degrees(lon), np.degrees(lat),
coordsys=coordsys)
x = np.radians(xy[:, 0])
y = np.radians(xy[:, 1])
delta = np.array([np.abs(x), np.abs(y)])
dtheta = np.max(delta, axis=0)
return dtheta
def get_dist_to_edge(skydir, lon, lat, width, coordsys='CEL'):
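    # Same max-norm projection as get_linear_dist, but with the region
    # half-width subtracted: the offsets are converted to radians before
    # ``width`` is subtracted, and the result is negative for points inside
    # the square region and positive for points outside of it.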
xy = wcs_utils.sky_to_offset(skydir, np.degrees(lon), np.degrees(lat),
coordsys=coordsys)
x = np.radians(xy[:, 0])
y = np.radians(xy[:, 1])
delta_edge = np.array([np.abs(x) - width, np.abs(y) - width])
dtheta = np.max(delta_edge, axis=0)
return dtheta
def get_true_params_dict(pars_dict):
params = {}
for k, p in pars_dict.items():
val = float(p['value']) * float(p['scale'])
err = np.nan
if 'error' in p:
err = float(p['error']) * np.abs(float(p['scale']))
params[k] = {'value': val, 'error': err}
return params
def spatial_pars_from_catalog(cat):
if cat['Spatial_Function'] == 'RadialDisk':
rext = np.sqrt(cat['Model_SemiMajor'] * cat['Model_SemiMinor'])
return {'Radius': {'value': rext}}
elif cat['Spatial_Function'] == 'RadialGaussian':
sigma_to_r68 = np.sqrt(-2.0 * np.log(1.0 - 0.6827))
rext = np.sqrt(cat['Model_SemiMajor'] * cat['Model_SemiMinor'])
return {'Sigma': {'value': rext / sigma_to_r68}}
return {}
def spectral_pars_from_catalog(cat):
"""Create spectral parameters from 3FGL catalog columns."""
spectrum_type = cat['SpectrumType']
pars = get_function_defaults(cat['SpectrumType'])
par_idxs = {k: i for i, k in
enumerate(get_function_par_names(cat['SpectrumType']))}
for k in pars:
pars[k]['value'] = cat['param_values'][par_idxs[k]]
if spectrum_type == 'PowerLaw':
pars['Index']['value'] *= -1.0
pars['Index']['scale'] = -1.0
pars['Scale']['scale'] = 1.0
pars['Index']['max'] = max(5.0, pars['Index']['value'] + 1.0)
pars['Index']['min'] = min(0.0, pars['Index']['value'] - 1.0)
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index'] = make_parameter_dict(pars['Index'], False, False)
elif spectrum_type == 'LogParabola':
pars['norm'] = make_parameter_dict(pars['norm'], False, True)
pars['Eb'] = make_parameter_dict(pars['Eb'], True, False)
pars['alpha'] = make_parameter_dict(pars['alpha'], False, False)
pars['beta'] = make_parameter_dict(pars['beta'], False, False)
elif spectrum_type == 'PLSuperExpCutoff':
pars['Index1']['value'] *= -1.0
pars['Index1']['scale'] = -1.0
pars['Index2']['scale'] = 1.0
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index1'] = make_parameter_dict(pars['Index1'], False, False)
pars['Index2'] = make_parameter_dict(pars['Index2'], False, False)
pars['Cutoff'] = make_parameter_dict(pars['Cutoff'], False, True)
elif spectrum_type == 'PLSuperExpCutoff2':
pars['Index1']['value'] *= -1.0
pars['Index1']['scale'] = -1.0
pars['Index2']['scale'] = 1.0
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index1'] = make_parameter_dict(pars['Index1'], False, False)
pars['Index2'] = make_parameter_dict(pars['Index2'], False, False)
pars['Expfactor'] = make_parameter_dict(pars['Expfactor'], False, True)
elif spectrum_type == 'PLSuperExpCutoff3':
pars['IndexS']['value'] *= -1.0
pars['IndexS']['scale'] = -1.0
pars['Index2']['scale'] = 1.0
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['IndexS'] = make_parameter_dict(pars['IndexS'], False, False)
pars['Index2'] = make_parameter_dict(pars['Index2'], False, False)
pars['Expfactor2'] = make_parameter_dict(pars['Expfactor2'], False, True)
else:
        raise Exception('Unsupported spectral type: ' + spectrum_type)
return pars
class Model(object):
"""Base class for point-like and diffuse source components. This
class is a container for spectral and spatial parameters as well
as other source properties such as TS, Npred, and location within
the ROI.
"""
def __init__(self, name, data):
self._data = defaults.make_default_dict(defaults.source_output)
self._data['spectral_pars'] = get_function_defaults(data['SpectrumType'])
if 'spectral_pars' in data:
for k, v in data['spectral_pars'].items():
if k in self._data['spectral_pars']:
self._data['spectral_pars'][k].update(v)
else:
self._data['spectral_pars'][k] = v.copy()
try:
self._data['spatial_pars'] = get_function_defaults(data['SpatialType'])
        except Exception:
            print(data)
            raise KeyError("No spatial parameter defaults for SpatialType %r" %
                           data.get('SpatialType'))
self._data.setdefault('catalog', data.pop('catalog', {}))
self._data.setdefault('assoc', data.pop('assoc', {}))
self._data.setdefault('class', '')
self._data['name'] = name
self._data.setdefault('psf_scale_fn', None)
self._data = utils.merge_dict(self._data, data)
self._names = [name]
catalog = self._data['catalog']
if 'CLASS1' in catalog:
self['class'] = catalog['CLASS1'].strip()
elif 'CLASS' in catalog:
self['class'] = catalog['CLASS'].strip()
for k in ROIModel.src_name_cols:
if k not in catalog:
continue
name = catalog[k].strip()
if name != '' and name not in self._names:
self._names.append(name)
self._data['assoc'][k] = name
self._sync_params()
def __contains__(self, key):
return key in self._data
def __getitem__(self, key):
return self._data[key]
def __setitem__(self, key, value):
self._data[key] = value
def __eq__(self, other):
return self.name == other.name
def __str__(self):
data = copy.deepcopy(self.data)
data['names'] = self.names
output = []
output += ['{:15s}:'.format('Name') + ' {name:s}']
output += ['{:15s}:'.format('TS') + ' {ts:.2f}']
output += ['{:15s}:'.format('Npred') + ' {npred:.2f}']
output += ['{:15s}:'.format('SpatialModel') + ' {SpatialModel:s}']
output += ['{:15s}:'.format('SpectrumType') + ' {SpectrumType:s}']
output += ['Spectral Parameters']
for i, p in enumerate(self['param_names']):
if not p:
break
val = self['param_values'][i]
err = self['param_errors'][i]
output += ['{:15s}: {:10.4g} +/- {:10.4g}'.format(p, val, err)]
return '\n'.join(output).format(**data)
def items(self):
return self._data.items()
@property
def data(self):
return self._data
@property
def spectral_pars(self):
return self._data['spectral_pars']
@property
def spatial_pars(self):
return self._data['spatial_pars']
@property
def params(self):
return get_true_params_dict(self._data['spectral_pars'])
@property
def is_free(self):
""" returns True if any of the spectral model parameters is set to free, else False
"""
return bool(np.array([int(value.get("free", False)) for key, value in self.spectral_pars.items()]).sum())
@property
def name(self):
return self._data['name']
@property
def names(self):
return self._names
@property
def assoc(self):
return self._data['assoc']
@property
def psf_scale_fn(self):
        return self._data['psf_scale_fn']
@staticmethod
def create_from_dict(src_dict, roi_skydir=None, rescale=False):
src_dict = copy.deepcopy(src_dict)
src_dict.setdefault('SpatialModel', 'PointSource')
src_dict.setdefault('SpatialType',
get_spatial_type(src_dict['SpatialModel']))
# Need this to handle old conventions for
# MapCubeFunction/ConstantValue sources
if src_dict['SpatialModel'] == 'DiffuseSource':
src_dict['SpatialModel'] = src_dict['SpatialType']
if 'filefunction' in src_dict:
src_dict['Spectrum_Filename'] = src_dict.pop('filefunction', str(''))
if 'mapcube' in src_dict:
src_dict['Spatial_Filename'] = src_dict.pop('mapcube', str(''))
if 'radialprofile' in src_dict:
src_dict['Spatial_Filename'] = src_dict.pop('radialprofile', str(''))
if 'spectral_pars' in src_dict:
src_dict['spectral_pars'] = cast_pars_dict(
src_dict['spectral_pars'])
if 'spatial_pars' in src_dict:
src_dict['spatial_pars'] = cast_pars_dict(src_dict['spatial_pars'])
if src_dict['SpatialModel'] == 'ConstantValue':
return IsoSource(src_dict['name'], src_dict)
elif src_dict['SpatialModel'] == 'CompositeSource':
return CompositeSource(src_dict['name'], src_dict)
elif src_dict['SpatialModel'] == 'MapCubeFunction':
return MapCubeSource(src_dict['name'], src_dict)
else:
return Source.create_from_dict(src_dict, roi_skydir,
rescale=rescale)
def _sync_params(self):
pars = model_utils.pars_dict_to_vectors(self['SpectrumType'],
self.spectral_pars)
self._data.update(pars)
def get_norm(self):
par_name = get_function_norm_par_name(self['SpectrumType'])
val = self.spectral_pars[par_name]['value']
scale = self.spectral_pars[par_name]['scale']
return float(val) * float(scale)
def add_to_table(self, tab):
row_dict = {}
row_dict['Source_Name'] = self['name']
row_dict['RAJ2000'] = self['ra']
row_dict['DEJ2000'] = self['dec']
row_dict['GLON'] = self['glon']
row_dict['GLAT'] = self['glat']
if not 'param_names' in self.data:
pars = model_utils.pars_dict_to_vectors(self['SpectrumType'],
self.spectral_pars)
row_dict.update(pars)
r68_semimajor = self['pos_err_semimajor'] * \
self['pos_r68'] / self['pos_err']
r68_semiminor = self['pos_err_semiminor'] * \
self['pos_r68'] / self['pos_err']
r95_semimajor = self['pos_err_semimajor'] * \
self['pos_r95'] / self['pos_err']
r95_semiminor = self['pos_err_semiminor'] * \
self['pos_r95'] / self['pos_err']
row_dict['Conf_68_PosAng'] = self['pos_angle']
row_dict['Conf_68_SemiMajor'] = r68_semimajor
row_dict['Conf_68_SemiMinor'] = r68_semiminor
row_dict['Conf_95_PosAng'] = self['pos_angle']
row_dict['Conf_95_SemiMajor'] = r95_semimajor
row_dict['Conf_95_SemiMinor'] = r95_semiminor
row_dict.update(self.get_catalog_dict())
for t in self.data.keys():
if t == 'params':
continue
if t in tab.columns:
row_dict[t] = self[t]
row = [row_dict[k] for k in tab.columns]
tab.add_row(row)
def get_catalog_dict(self):
o = {'Spectral_Index': np.nan,
'Flux_Density': np.nan,
'Pivot_Energy': np.nan,
'beta': np.nan,
'Exp_Index': np.nan,
'Cutoff': np.nan,
'Expfactor': np.nan}
params = get_true_params_dict(self.spectral_pars)
if self['SpectrumType'] == 'PowerLaw':
o['Spectral_Index'] = -1.0 * params['Index']['value']
o['Flux_Density'] = params['Prefactor']['value']
o['Pivot_Energy'] = params['Scale']['value']
elif self['SpectrumType'] == 'LogParabola':
o['Spectral_Index'] = params['alpha']['value']
o['Flux_Density'] = params['norm']['value']
o['Pivot_Energy'] = params['Eb']['value']
o['beta'] = params['beta']['value']
elif self['SpectrumType'] == 'PLSuperExpCutoff':
o['Spectral_Index'] = -1.0 * params['Index1']['value']
o['Exp_Index'] = params['Index2']['value']
o['Flux_Density'] = params['Prefactor']['value']
o['Pivot_Energy'] = params['Scale']['value']
o['Cutoff'] = params['Cutoff']['value']
elif self['SpectrumType'] == 'PLSuperExpCutoff2':
o['Spectral_Index'] = -1.0 * params['Index1']['value']
o['Exp_Index'] = params['Index2']['value']
o['Flux_Density'] = params['Prefactor']['value']
o['Pivot_Energy'] = params['Scale']['value']
o['Expfactor'] = params['Expfactor']['value']
elif self['SpectrumType'] == 'PLSuperExpCutoff3':
o['Spectral_Index'] = -1.0 * params['IndexS']['value']
o['Exp_Index'] = params['Index2']['value']
o['Flux_Density'] = params['Prefactor']['value']
o['Pivot_Energy'] = params['Scale']['value']
o['Expfactor'] = params['Expfactor2']['value']
return o
def check_cuts(self, cuts):
if cuts is None:
return True
if isinstance(cuts, tuple):
cuts = {cuts[0]: (cuts[1], cuts[2])}
elif isinstance(cuts, list):
cuts = {c[0]: (c[1], c[2]) for c in cuts}
for k, v in cuts.items():
# if not isinstance(c,tuple) or len(c) != 3:
# raise Exception('Wrong format for cuts tuple.')
if k in self._data:
if not utils.apply_minmax_selection(self[k], v):
return False
elif 'catalog' in self._data and k in self._data['catalog']:
if not utils.apply_minmax_selection(self['catalog'][k], v):
return False
else:
return False
return True
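    # Illustrative usage of check_cuts (hypothetical values): a single cut may
    # be given as a (name, min, max) tuple, e.g. src.check_cuts(('ts', 25.0, None)),
    # which applies a min/max selection on the 'ts' property; a list of such
    # tuples applies all of the selections.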
def set_psf_scale_fn(self, fn):
self._data['psf_scale_fn'] = fn
def set_spectral_pars(self, spectral_pars):
self._data['spectral_pars'] = copy.deepcopy(spectral_pars)
self._sync_params()
def update_spectral_pars(self, spectral_pars):
self._data['spectral_pars'] = utils.merge_dict(
self.spectral_pars, spectral_pars)
self._sync_params()
def set_name(self, name, names=None):
self._data['name'] = name
if names is None:
self._names = [name]
else:
self._names = names
def add_name(self, name):
if name not in self._names:
self._names.append(name)
def update_data(self, d):
self._data = utils.merge_dict(self._data, d, add_new_keys=True)
def update_from_source(self, src):
self._data['spectral_pars'] = {}
self._data['spatial_pars'] = {}
self._data = utils.merge_dict(self.data, src.data, add_new_keys=True)
self._name = src.name
self._names = list(set(self._names + src.names))
class IsoSource(Model):
def __init__(self, name, data):
data['SpectrumType'] = 'FileFunction'
data['SpatialType'] = 'ConstantValue'
data['SpatialModel'] = 'ConstantValue'
data['SourceType'] = 'DiffuseSource'
if not 'spectral_pars' in data:
data['spectral_pars'] = {
'Normalization': {'name': 'Normalization', 'scale': 1.0,
'value': 1.0,
'min': 0.001, 'max': 1000.0,
'free': False}}
super(IsoSource, self).__init__(name, data)
self._init_spatial_pars()
@property
def filefunction(self):
return self._data['Spectrum_Filename']
@property
def diffuse(self):
return True
def _init_spatial_pars(self):
self['spatial_pars'] = {
'Value': {'name': 'Value', 'scale': '1',
'value': '1', 'min': '0', 'max': '10',
'free': '0'}}
def write_xml(self, root, **kwargs):
filename = kwargs.get('Spectrum_Filename', self.filefunction)
        filename = utils.path_to_xmlpath(filename)
source_element = utils.create_xml_element(root, 'source',
dict(name=self.name,
type='DiffuseSource'))
spec_el = utils.create_xml_element(source_element, 'spectrum',
dict(file=filename,
type='FileFunction',
ctype='-1'))
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(type='ConstantValue'))
for k, v in self.spectral_pars.items():
utils.create_xml_element(spec_el, 'parameter', v)
for k, v in self.spatial_pars.items():
utils.create_xml_element(spat_el, 'parameter', v)
class MapCubeSource(Model):
def __init__(self, name, data):
data.setdefault('SpectrumType', 'PowerLaw')
data['SpatialType'] = 'MapCubeFunction'
data['SpatialModel'] = 'MapCubeFunction'
data['SourceType'] = 'DiffuseSource'
if not 'spectral_pars' in data:
data['spectral_pars'] = {
'Prefactor': {'name': 'Prefactor', 'scale': 1.0,
'value': 1.0, 'min': 0.1, 'max': '10.0',
'free': False},
'Index': {'name': 'Index', 'scale': -1.0,
'value': 0.0, 'min': -1.0, 'max': 1.0,
'free': False},
'Scale': {'name': 'Scale', 'scale': 1.0,
'value': 1000.0,
'min': 1000.0, 'max': 1000.0,
'free': False},
}
super(MapCubeSource, self).__init__(name, data)
self._init_spatial_pars()
@property
def mapcube(self):
return self._data['Spatial_Filename']
@property
def diffuse(self):
return True
def _init_spatial_pars(self):
self['spatial_pars'] = {
'Normalization':
{'name': 'Normalization', 'scale': '1',
'value': '1', 'min': '0', 'max': '10',
'free': '0'}}
def write_xml(self, root, **kwargs):
filename = kwargs.get('Spatial_Filename', self.mapcube)
filename = utils.path_to_xmlpath(filename)
source_element = utils.create_xml_element(root, 'source',
dict(name=self.name,
type='DiffuseSource'))
spec_el = utils.create_xml_element(source_element, 'spectrum',
dict(type=self.data['SpectrumType']))
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(type='MapCubeFunction',
file=filename))
for k, v in self.spectral_pars.items():
utils.create_xml_element(spec_el, 'parameter', v)
for k, v in self.spatial_pars.items():
utils.create_xml_element(spat_el, 'parameter', v)
class Source(Model):
"""Class representation of a source (non-diffuse) model component.
A source object serves as a container for the properties of that
source (position, spatial/spectral parameters, TS, etc.) as
derived in the current analysis. Most properties of a source
object can be accessed with the bracket operator:
# Return the TS of this source
>>> src['ts']
# Get a skycoord representation of the source position
>>> src.skydir
"""
def __init__(self, name, data, radec=None):
data.setdefault('SpatialModel', 'PointSource')
data.setdefault('SpectrumType', 'PowerLaw')
data.setdefault(
'SpatialType', model_utils.get_spatial_type(data['SpatialModel']))
data.setdefault(
'SourceType', model_utils.get_source_type(data['SpatialType']))
super(Source, self).__init__(name, data)
catalog = self.data.get('catalog', {})
if radec is not None:
self._set_radec(radec)
elif 'ra' in self.data and 'dec' in self.data:
self._set_radec([self.data['ra'], self.data['dec']])
elif 'RAJ2000' in catalog and 'DEJ2000' in catalog:
self._set_radec([catalog['RAJ2000'], catalog['DEJ2000']])
else:
raise Exception('Failed to infer RADEC for source: %s' % name)
self._init_spatial_pars(SpatialWidth=self['SpatialWidth'])
def __str__(self):
data = copy.deepcopy(self.data)
data['names'] = self.names
output = []
output += ['{:15s}:'.format('Name') + ' {name:s}']
output += ['{:15s}:'.format('Associations') + ' {names}']
output += ['{:15s}:'.format('RA/DEC') + ' {ra:10.3f}/{dec:10.3f}']
output += ['{:15s}:'.format('GLON/GLAT') +
' {glon:10.3f}/{glat:10.3f}']
output += ['{:15s}:'.format('TS') + ' {ts:.2f}']
output += ['{:15s}:'.format('Npred') + ' {npred:.2f}']
output += ['{:15s}:'.format('Flux') +
' {flux:9.4g} +/- {flux_err:8.3g}']
output += ['{:15s}:'.format('EnergyFlux') +
' {eflux:9.4g} +/- {eflux_err:8.3g}']
output += ['{:15s}:'.format('SpatialModel') + ' {SpatialModel:s}']
output += ['{:15s}:'.format('SpectrumType') + ' {SpectrumType:s}']
output += ['Spectral Parameters']
for i, p in enumerate(self['param_names']):
if not p:
break
val = self['param_values'][i]
err = self['param_errors'][i]
output += ['{:15s}: {:10.4g} +/- {:10.4g}'.format(p, val, err)]
return '\n'.join(output).format(**data)
def _set_radec(self, radec):
self['radec'] = np.array(radec, ndmin=1)
self['RAJ2000'] = radec[0]
self['DEJ2000'] = radec[1]
self['ra'] = radec[0]
self['dec'] = radec[1]
glonlat = utils.eq2gal(radec[0], radec[1])
self['glon'], self['glat'] = glonlat[0][0], glonlat[1][0]
if 'RA' in self.spatial_pars:
self.spatial_pars['RA']['value'] = radec[0]
self.spatial_pars['DEC']['value'] = radec[1]
def _set_spatial_width(self, spatial_width):
self.data['SpatialWidth'] = spatial_width
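        # SpatialWidth is the 68% containment radius of the spatial model; the
        # constants below convert it to the native shape parameter,
        # r68/sigma ~ 1.5096 for RadialGaussian and r68/R = sqrt(0.68) ~ 0.8246
        # for RadialDisk.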
if self['SpatialType'] in ['RadialGaussian']:
self.spatial_pars['Sigma'][
'value'] = spatial_width / 1.5095921854516636
elif self['SpatialType'] in ['RadialDisk']:
self.spatial_pars['Radius'][
'value'] = spatial_width / 0.8246211251235321
def _init_spatial_pars(self, **kwargs):
spatial_pars = copy.deepcopy(kwargs)
spatial_width = spatial_pars.pop('SpatialWidth', None)
if self['SpatialType'] == 'SkyDirFunction':
self._extended = False
self._data['SourceType'] = 'PointSource'
else:
self._extended = True
self._data['SourceType'] = 'DiffuseSource'
spatial_pars.setdefault('RA', spatial_pars.pop('ra', self['ra']))
spatial_pars.setdefault('DEC', spatial_pars.pop('dec', self['dec']))
for k, v in spatial_pars.items():
if not isinstance(v, dict):
spatial_pars[k] = {'name': k, 'value': v}
if k in self.spatial_pars:
self.spatial_pars[k].update(spatial_pars[k])
if spatial_width is not None:
self._set_spatial_width(spatial_width)
elif self['SpatialType'] == 'RadialDisk':
self['SpatialWidth'] = self.spatial_pars[
'Radius']['value'] * 0.8246211251235321
elif self['SpatialType'] == 'RadialGaussian':
self['SpatialWidth'] = self.spatial_pars[
'Sigma']['value'] * 1.5095921854516636
if 'RA' in spatial_pars or 'DEC' in spatial_pars:
self._set_radec([spatial_pars['RA']['value'],
spatial_pars['DEC']['value']])
def update_data(self, d):
self._data = utils.merge_dict(self._data, d, add_new_keys=True)
if 'ra' in d and 'dec' in d:
self._set_radec([d['ra'], d['dec']])
def set_radec(self, ra, dec):
self._set_radec(np.array([ra, dec]))
def set_position(self, skydir):
"""
Set the position of the source.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
"""
if not isinstance(skydir, SkyCoord):
skydir = SkyCoord(ra=skydir[0], dec=skydir[1], unit=u.deg)
if not skydir.isscalar:
skydir = np.ravel(skydir)[0]
radec = np.array([skydir.icrs.ra.deg, skydir.icrs.dec.deg])
self._set_radec(radec)
def set_roi_direction(self, roidir):
offset = roidir.separation(self.skydir).deg
offset_cel = wcs_utils.sky_to_offset(
roidir, self['ra'], self['dec'], 'CEL')
offset_gal = wcs_utils.sky_to_offset(
roidir, self['glon'], self['glat'], 'GAL')
self['offset'] = offset
self['offset_ra'] = offset_cel[0, 0]
self['offset_dec'] = offset_cel[0, 1]
self['offset_glon'] = offset_gal[0, 0]
self['offset_glat'] = offset_gal[0, 1]
def set_roi_geom(self, geom):
if geom is None:
return
self['offset_roi_edge'] = float(
wcs_utils.distance_to_edge(geom, self.skydir))
def set_spatial_model(self, spatial_model, spatial_pars):
update_pars = False
if spatial_model != self['SpatialModel']:
update_pars = True
self._data['SpatialModel'] = spatial_model
self._data['SpatialType'] = get_spatial_type(self['SpatialModel'])
if update_pars:
self._data['spatial_pars'] = get_function_defaults(
self['SpatialType'])
if spatial_model == 'PointSource':
self._data['SpatialWidth'] = None
self._init_spatial_pars(**spatial_pars)
def separation(self, src):
if isinstance(src, Source):
return self.radec.separation(src.skydir)
else:
return self.radec.separation(src)
@property
def diffuse(self):
return False
@property
def extended(self):
return self._extended
@property
def associations(self):
return self._names
@property
def radec(self):
return self['radec']
@property
def skydir(self):
"""Return a SkyCoord representation of the source position.
Returns
-------
skydir : `~astropy.coordinates.SkyCoord`
"""
return SkyCoord(self.radec[0] * u.deg, self.radec[1] * u.deg)
@property
def data(self):
return self._data
@classmethod
def create_from_dict(cls, src_dict, roi_skydir=None, rescale=False):
"""Create a source object from a python dictionary.
Parameters
----------
src_dict : dict
Dictionary defining the properties of the source.
"""
src_dict = copy.deepcopy(src_dict)
src_dict.setdefault('SpatialModel', 'PointSource')
src_dict.setdefault('Spectrum_Filename', None)
src_dict.setdefault('SpectrumType', 'PowerLaw')
src_dict['SpatialType'] = get_spatial_type(src_dict['SpatialModel'])
spectrum_type = src_dict['SpectrumType']
spatial_type = src_dict['SpatialType']
spectral_pars = src_dict.pop('spectral_pars', {})
spatial_pars = src_dict.pop('spatial_pars', {})
if not spectral_pars:
spectral_pars = extract_pars_from_dict(spectrum_type, src_dict)
norm_par_name = get_function_norm_par_name(spectrum_type)
if norm_par_name is not None:
spectral_pars[norm_par_name].setdefault('free', True)
if not spatial_pars:
spatial_pars = extract_pars_from_dict(spatial_type, src_dict)
for k in ['RA', 'DEC', 'Prefactor']:
if k in spatial_pars:
del spatial_pars[k]
spectral_pars = create_pars_from_dict(spectrum_type, spectral_pars,
rescale)
#raise ValueError("%s %s" % (spatial_type, spatial_pars))
spatial_pars = create_pars_from_dict(spatial_type, spatial_pars,
False)
if 'file' in src_dict:
src_dict['Spectrum_Filename'] = src_dict.pop('file')
if spectrum_type == 'DMFitFunction' and src_dict['Spectrum_Filename'] is None:
src_dict['Spectrum_Filename'] = os.path.join('$FERMIPY_DATA_DIR',
'gammamc_dif.dat')
src_dict['spectral_pars'] = cast_pars_dict(spectral_pars)
src_dict['spatial_pars'] = cast_pars_dict(spatial_pars)
if 'name' in src_dict:
name = src_dict['name']
src_dict['Source_Name'] = src_dict.pop('name')
elif 'Source_Name' in src_dict:
name = src_dict['Source_Name']
else:
raise Exception('Source name undefined. %s' % src_dict)
skydir = wcs_utils.get_target_skydir(src_dict, roi_skydir)
src_dict['RAJ2000'] = skydir.ra.deg
src_dict['DEJ2000'] = skydir.dec.deg
radec = np.array([skydir.ra.deg, skydir.dec.deg])
return cls(name, src_dict, radec=radec)
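    # Illustrative input for create_from_dict (hypothetical source and values):
    #   src = Source.create_from_dict({'name': 'testsrc',
    #                                  'SpatialModel': 'PointSource',
    #                                  'SpectrumType': 'PowerLaw',
    #                                  'ra': 120.0, 'dec': -10.0,
    #                                  'Index': 2.0, 'Prefactor': 1e-12})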
@classmethod
def create_from_xmlfile(cls, xmlfile, extdir=None):
"""Create a Source object from an XML file.
Parameters
----------
xmlfile : str
Path to XML file.
extdir : str
Path to the extended source archive.
"""
root = ElementTree.ElementTree(file=xmlfile).getroot()
srcs = root.findall('source')
if len(srcs) == 0:
raise Exception('No sources found.')
return cls.create_from_xml(srcs[0], extdir=extdir)
@staticmethod
def create_from_xml(root, extdir=None):
"""Create a Source object from an XML node.
Parameters
----------
root : `~xml.etree.ElementTree.Element`
XML node containing the source.
extdir : str
Path to the extended source archive.
"""
src_type = root.attrib['type']
spec = utils.load_xml_elements(root, 'spectrum')
spectral_pars = utils.load_xml_elements(root, 'spectrum/parameter')
spectral_type = spec['type']
spectral_pars = cast_pars_dict(spectral_pars)
spat = {}
spatial_pars = {}
nested_sources = []
if src_type == 'CompositeSource':
spatial_type = 'CompositeSource'
source_library = root.findall('source_library')[0]
for node in source_library.findall('source'):
nested_sources += [Source.create_from_xml(node, extdir=extdir)]
else:
spat = utils.load_xml_elements(root, 'spatialModel')
spatial_pars = utils.load_xml_elements(
root, 'spatialModel/parameter')
spatial_pars = cast_pars_dict(spatial_pars)
spatial_type = spat['type']
xml_dict = copy.deepcopy(root.attrib)
src_dict = {'catalog': xml_dict}
src_dict['Source_Name'] = xml_dict['name']
src_dict['SpectrumType'] = spectral_type
src_dict['SpatialType'] = spatial_type
src_dict['SourceType'] = src_type
src_dict['Spatial_Filename'] = ''
src_dict['Spectrum_Filename'] = ''
if 'file' in spat:
src_dict['Spatial_Filename'] = utils.xmlpath_to_path(spat['file'])
if not os.path.isfile(src_dict['Spatial_Filename']) \
and extdir is not None:
src_dict['Spatial_Filename'] = \
os.path.join(extdir, 'Templates',
src_dict['Spatial_Filename'])
if 'file' in spec:
src_dict['Spectrum_Filename'] = utils.xmlpath_to_path(spec['file'])
if src_type == 'PointSource':
src_dict['SpatialModel'] = 'PointSource'
elif src_type == 'CompositeSource':
src_dict['SpatialModel'] = 'CompositeSource'
elif spatial_type == 'SpatialMap':
src_dict['SpatialModel'] = 'SpatialMap'
else:
src_dict['SpatialModel'] = spatial_type
if src_type == 'PointSource' or \
spatial_type in ['SpatialMap', 'RadialGaussian', 'RadialDisk']:
if 'RA' in xml_dict:
src_dict['RAJ2000'] = float(xml_dict['RA'])
src_dict['DEJ2000'] = float(xml_dict['DEC'])
elif 'RA' in spatial_pars:
src_dict['RAJ2000'] = float(spatial_pars['RA']['value'])
src_dict['DEJ2000'] = float(spatial_pars['DEC']['value'])
else:
try:
skydir = wcs_utils.get_map_skydir(os.path.expandvars(
src_dict['Spatial_Filename']))
except Exception:
skydir = hpx_utils.get_map_skydir(os.path.expandvars(
src_dict['Spatial_Filename']))
src_dict['RAJ2000'] = skydir.ra.deg
src_dict['DEJ2000'] = skydir.dec.deg
radec = np.array([src_dict['RAJ2000'], src_dict['DEJ2000']])
src_dict['spectral_pars'] = spectral_pars
src_dict['spatial_pars'] = spatial_pars
return Source(src_dict['Source_Name'],
src_dict, radec=radec)
elif src_type == 'DiffuseSource' and spatial_type == 'ConstantValue':
return IsoSource(src_dict['Source_Name'],
{'Spectrum_Filename': spec['file'],
'spectral_pars': spectral_pars,
'spatial_pars': spatial_pars})
elif src_type == 'DiffuseSource' and spatial_type == 'MapCubeFunction':
return MapCubeSource(src_dict['Source_Name'],
{'Spatial_Filename': spat['file'],
'SpectrumType': spectral_type,
'spectral_pars': spectral_pars,
'spatial_pars': spatial_pars})
elif src_type == 'CompositeSource':
return CompositeSource(src_dict['Source_Name'],
{'SpectrumType': spectral_type,
'nested_sources': nested_sources})
else:
raise Exception(
'Unrecognized type for source: %s %s' % (src_dict['Source_Name'], src_type))
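    # Illustrative XML node parsed by create_from_xml (hypothetical source):
    #   <source name="testsrc" type="PointSource">
    #     <spectrum type="PowerLaw">
    #       <parameter name="Prefactor" value="1.0" scale="1e-12" min="0.0"
    #                  max="1000.0" free="1"/>
    #       <parameter name="Index" value="2.0" scale="-1.0" min="0.0"
    #                  max="5.0" free="1"/>
    #       <parameter name="Scale" value="1000.0" scale="1.0" min="30.0"
    #                  max="500000.0" free="0"/>
    #     </spectrum>
    #     <spatialModel type="SkyDirFunction">
    #       <parameter name="RA" value="120.0" scale="1.0" min="-360.0"
    #                  max="360.0" free="0"/>
    #       <parameter name="DEC" value="-10.0" scale="1.0" min="-90.0"
    #                  max="90.0" free="0"/>
    #     </spatialModel>
    #   </source>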
def write_xml(self, root):
"""Write this source to an XML node."""
if not self.extended:
try:
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='PointSource'))
except TypeError as msg:
                print(self['Source_Name'], self)
raise TypeError(msg)
spat_el = ElementTree.SubElement(source_element, 'spatialModel')
spat_el.set('type', 'SkyDirFunction')
elif self['SpatialType'] == 'SpatialMap':
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='DiffuseSource'))
filename = utils.path_to_xmlpath(self['Spatial_Filename'])
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(map_based_integral='True',
type='SpatialMap',
file=filename))
else:
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='DiffuseSource'))
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(type=self['SpatialType']))
for k, v in self.spatial_pars.items():
utils.create_xml_element(spat_el, 'parameter', v)
el = ElementTree.SubElement(source_element, 'spectrum')
stype = self['SpectrumType'].strip()
el.set('type', stype)
if self['Spectrum_Filename'] is not None:
filename = utils.path_to_xmlpath(self['Spectrum_Filename'])
el.set('file', filename)
for k, v in self.spectral_pars.items():
utils.create_xml_element(el, 'parameter', v)
class CompositeSource(Model):
def __init__(self, name, data):
data.setdefault('SpectrumType', 'ConstantValue')
data['SpatialType'] = 'CompositeSource'
data['SpatialModel'] = 'CompositeSource'
data['SourceType'] = 'CompositeSource'
if not 'spectral_pars' in data:
data['spectral_pars'] = {
'Value': {'name': 'Value', 'scale': 1.0,
'value': 1.0, 'min': 0.1, 'max': '10.0',
'free': False},
}
super(CompositeSource, self).__init__(name, data)
self._build_nested_sources(data)
@property
def nested_sources(self):
return self._nested_sources
@property
def diffuse(self):
return True
def _build_nested_sources(self, data):
self._nested_sources = []
for nested_source in data.get('nested_sources', []):
if isinstance(nested_source, Model):
self._nested_sources.append(copy.deepcopy(nested_source))
elif isinstance(nested_source, dict):
self._nested_sources.append(
Source.create_from_dict(nested_source))
def write_xml(self, root):
source_element = utils.create_xml_element(root, 'source',
dict(name=self.name,
type='CompositeSource'))
spec_el = utils.create_xml_element(source_element, 'spectrum',
dict(type=self.data['SpectrumType']))
for k, v in self.spectral_pars.items():
utils.create_xml_element(spec_el, 'parameter', v)
spat_el = utils.create_xml_element(
source_element, 'source_library', dict(title=self.name))
for nested_source in self._nested_sources:
nested_source.write_xml(spat_el)
class ROIModel(fermipy.config.Configurable):
"""This class is responsible for managing the ROI model (both sources
and diffuse components). Source catalogs can be read
from either FITS or XML files. Individual components are
represented by instances of `~fermipy.roi_model.Model` and can be
accessed by name using the bracket operator.
* Create an ROI with all 3FGL sources and print a summary of its contents:
>>> skydir = astropy.coordinates.SkyCoord(0.0,0.0,unit='deg')
>>> roi = ROIModel({'catalogs' : ['3FGL'],'src_roiwidth' : 10.0},skydir=skydir)
>>> print(roi)
name SpatialModel SpectrumType offset ts npred
--------------------------------------------------------------------------------
3FGL J2357.3-0150 PointSource PowerLaw 1.956 nan 0.0
3FGL J0006.2+0135 PointSource PowerLaw 2.232 nan 0.0
3FGL J0016.3-0013 PointSource PowerLaw 4.084 nan 0.0
3FGL J0014.3-0455 PointSource PowerLaw 6.085 nan 0.0
* Print a summary of an individual source
>>> print(roi['3FGL J0006.2+0135'])
Name : 3FGL J0006.2+0135
Associations : ['3FGL J0006.2+0135']
RA/DEC : 1.572/ 1.585
GLON/GLAT : 100.400/ -59.297
TS : nan
Npred : nan
Flux : nan +/- nan
EnergyFlux : nan +/- nan
SpatialModel : PointSource
SpectrumType : PowerLaw
Spectral Parameters
Index : -2 +/- nan
Scale : 1000 +/- nan
Prefactor : 1e-12 +/- nan
* Get the SkyCoord for a source
>>> dir = roi['SourceA'].skydir
* Loop over all sources and print their names
>>> for s in roi.sources: print(s.name)
3FGL J2357.3-0150
3FGL J0006.2+0135
3FGL J0016.3-0013
3FGL J0014.3-0455
"""
defaults = dict(defaults.model.items(),
fileio=defaults.fileio)
src_name_cols = ['Source_Name',
'ASSOC', 'ASSOC1', 'ASSOC2', 'ASSOC_GAM',
'1FHL_Name', '2FGL_Name', '3FGL_Name',
'ASSOC_GAM1', 'ASSOC_GAM2', 'ASSOC_TEV']
def __init__(self, config=None, **kwargs):
# Coordinate for ROI center (defaults to 0,0)
self._skydir = kwargs.pop('skydir', SkyCoord(0.0, 0.0, unit=u.deg))
self._geom = kwargs.get('geom', None)
coordsys = kwargs.pop('coordsys', 'CEL')
srcname = kwargs.pop('srcname', None)
super(ROIModel, self).__init__(config, **kwargs)
self._src_radius = self.config['src_radius']
if self.config['src_roiwidth'] is not None:
self._config['src_radius_roi'] = self.config['src_roiwidth'] * 0.5
self._srcs = []
self._diffuse_srcs = []
self._src_dict = collections.defaultdict(list)
self._src_radius = []
self.load(coordsys=coordsys, srcname=srcname)
def __contains__(self, key):
key = key.replace(' ', '').lower()
return key in self._src_dict.keys()
def __getitem__(self, key):
return self.get_source_by_name(key)
def __iter__(self):
return iter(self._srcs + self._diffuse_srcs)
def __str__(self):
o = ''
o += '%-20s%-15s%-15s%8s%10s%12s\n' % (
'name', 'SpatialModel', 'SpectrumType', 'offset',
'ts', 'npred')
o += '-' * 80 + '\n'
for s in sorted(self.sources, key=lambda t: t['offset']):
if s.diffuse:
continue
o += '%-20.19s%-15.14s%-15.14s%8.3f%10.2f%12.1f\n' % (
s['name'], s['SpatialModel'],
s['SpectrumType'],
s['offset'], s['ts'], s['npred'])
for s in sorted(self.sources, key=lambda t: t['offset']):
if not s.diffuse:
continue
o += '%-20.19s%-15.14s%-15.14s%8s%10.2f%12.1f\n' % (
s['name'], s['SpatialModel'],
s['SpectrumType'],
'-----', s['ts'], s['npred'])
return o
@property
def skydir(self):
"""Return the sky direction corresponding to the center of the
ROI."""
return self._skydir
@property
def geom(self):
return self._geom
@property
def sources(self):
return self._srcs + self._diffuse_srcs
@property
def point_sources(self):
return self._srcs
@property
def diffuse_sources(self):
return self._diffuse_srcs
@property
def extdir(self):
extdir = self.config['extdir']
if extdir is not None and not os.path.isdir(os.path.expandvars(extdir)):
return os.path.join('$FERMIPY_DATA_DIR',
'catalogs', extdir)
else:
return extdir
def set_geom(self, geom):
self._geom = geom
for s in self._srcs:
s.set_roi_geom(geom)
def clear(self):
"""Clear the contents of the ROI."""
self._srcs = []
self._diffuse_srcs = []
self._src_dict = collections.defaultdict(list)
self._src_radius = []
def load_diffuse_srcs(self):
srcs = self.create_diffuse_srcs(self.config)
for src in srcs:
self.load_source(src, False, self.config['merge_sources'])
def create_diffuse_srcs(self, config):
srcs = []
srcs += self._create_diffuse_src('isodiff', config)
srcs += self._create_diffuse_src('galdiff', config)
srcs += self._create_diffuse_src('limbdiff', config)
srcs += self._create_diffuse_src('diffuse', config)
srcs += self._create_diffuse_src_from_xml(config)
return srcs
def _create_diffuse_src(self, name, config, src_type='FileFunction'):
if 'FERMI_DIR' in os.environ and 'FERMI_DIFFUSE_DIR' not in os.environ:
os.environ['FERMI_DIFFUSE_DIR'] = \
os.path.expandvars('$FERMI_DIR/refdata/fermi/galdiffuse')
search_dirs = []
if config.get('diffuse_dir', []):
search_dirs += config.get('diffuse_dir', [])
search_dirs += [self.config['fileio']['outdir'],
'$FERMI_DIFFUSE_DIR',
'$FERMIPY_DATA_DIR']
srcs = []
if config is not None:
srcs = config[name]
elif self.config[name] is not None:
srcs = self.config[name]
srcs_out = []
for i, t in enumerate(srcs):
if utils.isstr(t):
src_dict = {'file': t}
elif isinstance(t, dict):
src_dict = copy.deepcopy(t)
else:
raise Exception(
'Invalid type in diffuse mode list: %s' % str(type(t)))
src_dict['file'] = \
utils.resolve_file_path(src_dict['file'],
search_dirs=search_dirs)
if 'name' not in src_dict:
if len(srcs) == 1:
src_dict['name'] = name
else:
src_dict['name'] = name + '%02i' % i
if re.search(r'(\.txt$)', src_dict['file']):
src_type = 'FileFunction'
elif re.search(r'(\.fits$|\.fit$|\.fits.gz$|\.fit.gz$)',
src_dict['file']):
src_type = 'MapCubeFunction'
else:
raise Exception(
'Unrecognized file format for diffuse model: %s' % src_dict[
'file'])
# Extract here
if src_type == 'FileFunction':
src = IsoSource(src_dict['name'], {
'Spectrum_Filename': src_dict['file']})
altname = os.path.basename(src_dict['file'])
altname = re.sub(r'(\.txt$)', '', altname)
else:
# src = MapCubeSource(src_dict['name'], {
# 'Spatial_Filename': src_dict['file'],
sp_filename = src_dict.pop('file')
src_dict['Spatial_Filename'] = sp_filename
src = MapCubeSource(src_dict['name'], src_dict)
altname = os.path.basename(sp_filename)
altname = re.sub(r'(\.fits$|\.fit$|\.fits.gz$|\.fit.gz$)',
'', altname)
src.add_name(altname)
srcs_out += [src]
return srcs_out
def _create_diffuse_src_from_xml(self, config, src_type='FileFunction'):
"""Load sources from an XML file.
"""
diffuse_xmls = config.get('diffuse_xml')
srcs_out = []
for diffuse_xml in diffuse_xmls:
srcs_out += self.load_xml(diffuse_xml, coordsys=config.get('coordsys', 'CEL'))
return srcs_out
def create_source(self, name, src_dict, build_index=True,
merge_sources=True, rescale=True):
"""Add a new source to the ROI model from a dictionary or an
existing source object.
Parameters
----------
name : str
src_dict : dict or `~fermipy.roi_model.Source`
Returns
-------
src : `~fermipy.roi_model.Source`
"""
src_dict = copy.deepcopy(src_dict)
if isinstance(src_dict, dict):
src_dict['name'] = name
src = Model.create_from_dict(src_dict, self.skydir,
rescale=rescale)
else:
src = src_dict
src.set_name(name)
if isinstance(src, Source):
src.set_roi_direction(self.skydir)
src.set_roi_geom(self.geom)
self.load_source(src, build_index=build_index,
merge_sources=merge_sources)
return self.get_source_by_name(name)
def copy_source(self, name):
src = self.get_source_by_name(name)
return copy.deepcopy(src)
def load_sources(self, sources):
"""Delete all sources in the ROI and load the input source list."""
self.clear()
for s in sources:
if isinstance(s, dict):
s = Model.create_from_dict(s)
self.load_source(s, build_index=False)
self._build_src_index()
def _add_source_alias(self, name, src):
if src not in self._src_dict[name]:
self._src_dict[name] += [src]
def load_source(self, src, build_index=True, merge_sources=True,
**kwargs):
"""
Load a single source.
Parameters
----------
src : `~fermipy.roi_model.Source`
Source object that will be added to the ROI.
merge_sources : bool
When a source matches an existing source in the model
update that source with the properties of the new source.
build_index : bool
Re-make the source index after loading this source.
"""
src = copy.deepcopy(src)
name = src.name.replace(' ', '').lower()
min_sep = kwargs.get('min_separation', None)
if min_sep is not None:
sep = src.skydir.separation(self._src_skydir).deg
if len(sep) > 0 and np.min(sep) < min_sep:
return
match_srcs = self.match_source(src)
if len(match_srcs) == 1:
# self.logger.debug('Found matching source for %s : %s',
# src.name, match_srcs[0].name)
if merge_sources:
match_srcs[0].update_from_source(src)
else:
match_srcs[0].add_name(src.name)
self._add_source_alias(src.name.replace(' ', '').lower(),
match_srcs[0])
return
elif len(match_srcs) > 2:
raise Exception('Multiple sources with name %s' % name)
self._add_source_alias(src.name, src)
for name in src.names:
self._add_source_alias(name.replace(' ', '').lower(), src)
if isinstance(src, Source):
self._srcs.append(src)
else:
self._diffuse_srcs.append(src)
if build_index:
self._build_src_index()
def match_source(self, src):
"""Look for source or sources in the model that match the
given source. Sources are matched by name and any association
columns defined in the assoc_xmatch_columns parameter.
"""
srcs = []
names = [src.name]
for col in self.config['assoc_xmatch_columns']:
if col in src.assoc and src.assoc[col]:
names += [src.assoc[col]]
for name in names:
name = name.replace(' ', '').lower()
if name not in self._src_dict:
continue
srcs += [s for s in self._src_dict[name] if s not in srcs]
return srcs
def load(self, **kwargs):
"""Load both point source and diffuse components."""
coordsys = kwargs.get('coordsys', 'CEL')
extdir = kwargs.get('extdir', self.extdir)
srcname = kwargs.get('srcname', None)
self.clear()
self.load_diffuse_srcs()
for c in self.config['catalogs']:
if isinstance(c, catalog.Catalog):
self.load_existing_catalog(c)
continue
extname = os.path.splitext(c)[1]
if extname != '.xml':
self.load_fits_catalog(c, extdir=extdir, coordsys=coordsys,
srcname=srcname)
elif extname == '.xml':
self.load_xml(c, extdir=extdir, coordsys=coordsys)
else:
raise Exception('Unrecognized catalog file extension: %s' % c)
for c in self.config['sources']:
if 'name' not in c:
raise Exception(
'No name field in source dictionary:\n ' + str(c))
self.create_source(c['name'], c, build_index=False)
self._build_src_index()
def delete_sources(self, srcs):
to_del = []
for k, v in self._src_dict.items():
for s in srcs:
if s in v:
self._src_dict[k].remove(s)
if not v:
to_del.append(k)
while to_del:
ss = to_del.pop()
self._src_dict.pop(ss)
del ss
self._srcs = [s for s in self._srcs if s not in srcs]
self._diffuse_srcs = [s for s in self._diffuse_srcs if s not in srcs]
self._build_src_index()
@classmethod
def create_from_roi_data(cls, datafile):
"""Create an ROI model."""
data = np.load(datafile).flat[0]
roi = cls()
roi.load_sources(data['sources'].values())
return roi
@classmethod
def create(cls, selection, config, **kwargs):
"""Create an ROIModel instance."""
if selection['target'] is not None:
return cls.create_from_source(selection['target'],
config, **kwargs)
else:
target_skydir = wcs_utils.get_target_skydir(selection)
return cls.create_from_position(target_skydir, config, **kwargs)
@classmethod
def create_from_position(cls, skydir, config, **kwargs):
"""Create an ROIModel instance centered on a sky direction.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction on which the ROI will be centered.
config : dict
Model configuration dictionary.
"""
coordsys = kwargs.pop('coordsys', 'CEL')
roi = cls(config, skydir=skydir, coordsys=coordsys, **kwargs)
return roi
@classmethod
def create_from_source(cls, name, config, **kwargs):
"""Create an ROI centered on the given source."""
coordsys = kwargs.pop('coordsys', 'CEL')
roi = cls(config, src_radius=None, src_roiwidth=None,
srcname=name, **kwargs)
src = roi.get_source_by_name(name)
return cls.create_from_position(src.skydir, config,
coordsys=coordsys, **kwargs)
@classmethod
def create_roi_from_ft1(cls, ft1file, config):
"""Create an ROI model by extracting the sources coordinates
form an FT1 file."""
raise NotImplementedError
def has_source(self, name):
index_name = name.replace(' ', '').lower()
if index_name in self._src_dict:
return True
else:
return False
def get_source_by_name(self, name):
"""Return a single source in the ROI with the given name. The
input name string can match any of the strings in the names
property of the source object. Case and whitespace are
        ignored when matching name strings.  If no sources are found
        or multiple sources match, an exception is thrown.
Parameters
----------
name : str
Name string.
Returns
-------
srcs : `~fermipy.roi_model.Model`
A source object.
"""
srcs = self.get_sources_by_name(name)
if len(srcs) == 1:
return srcs[0]
elif len(srcs) == 0:
raise Exception('No source matching name: ' + name)
elif len(srcs) > 1:
raise Exception('Multiple sources matching name: ' + name)
def get_sources_by_name(self, name):
"""Return a list of sources in the ROI matching the given
name. The input name string can match any of the strings in
the names property of the source object. Case and whitespace
are ignored when matching name strings.
Parameters
----------
name : str
Returns
-------
srcs : list
A list of `~fermipy.roi_model.Model` objects.
"""
index_name = name.replace(' ', '').lower()
if index_name in self._src_dict:
return list(self._src_dict[index_name])
else:
raise Exception('No source matching name: ' + name)
def get_nearby_sources(self, name, distance, min_dist=None,
square=False):
src = self.get_source_by_name(name)
return self.get_sources_by_position(src.skydir,
distance, min_dist,
square)
def get_sources(self, skydir=None, distance=None, cuts=None,
minmax_ts=None, minmax_npred=None,
exclude=None, square=False, coordsys='CEL',
names=None):
"""Retrieve list of source objects satisfying the following
selections:
* Angular separation from ``skydir`` or ROI center (if
``skydir`` is None) less than ``distance``.
* Cuts on source properties defined in ``cuts`` list.
* TS and Npred in range specified by ``minmax_ts`` and ``minmax_npred``.
* Name matching a value in ``names``
Sources can be excluded from the selection by adding their
name to the ``exclude`` list.
Returns
-------
srcs : list
List of source objects.
"""
if skydir is None:
skydir = self.skydir
if exclude is None:
exclude = []
rsrc, srcs = self.get_sources_by_position(skydir,
distance,
square=square,
coordsys=coordsys)
o = []
for s in srcs + self.diffuse_sources:
if names and s.name not in names:
continue
if s.name in exclude:
continue
if not s.check_cuts(cuts):
continue
ts = s['ts']
npred = s['npred']
if not utils.apply_minmax_selection(ts, minmax_ts):
continue
if not utils.apply_minmax_selection(npred, minmax_npred):
continue
o.append(s)
return o
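    # Illustrative usage sketch (not from the original source; the distance,
    # TS threshold and excluded name below are made-up values): select sources
    # within 3 deg of the ROI center with TS above 25.
    #
    #   srcs = roi.get_sources(distance=3.0, minmax_ts=[25.0, None],
    #                          exclude=['SourceA'], square=False)
    #   names = [s.name for s in srcs]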
def get_sources_by_property(self, pname, pmin, pmax=None):
srcs = []
for i, s in enumerate(self._srcs):
if pname not in s:
continue
if pmin is not None and s[pname] < pmin:
continue
if pmax is not None and s[pname] > pmax:
continue
srcs.append(s)
return srcs
def get_sources_by_position(self, skydir, dist, min_dist=None,
square=False, coordsys='CEL'):
"""Retrieve sources within a certain angular distance of a sky
coordinate. This function supports two types of geometric
selections: circular (square=False) and square (square=True).
The circular selection finds all sources with a given angular
distance of the target position. The square selection finds
sources within an ROI-like region of size R x R where R = 2 x
dist.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction with respect to which the selection will be applied.
dist : float
Maximum distance in degrees from the sky coordinate.
square : bool
Choose whether to apply a circular or square selection.
coordsys : str
Coordinate system to use when applying a selection with square=True.
"""
msk = get_skydir_distance_mask(self._src_skydir, skydir, dist,
min_dist=min_dist, square=square,
coordsys=coordsys)
radius = self._src_skydir.separation(skydir).deg
radius = radius[msk]
srcs = [self._srcs[i] for i in np.nonzero(msk)[0]]
isort = np.argsort(radius)
radius = radius[isort]
srcs = [srcs[i] for i in isort]
return radius, srcs
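    # Illustrative usage sketch (not from the original source; the coordinates and
    # radius are arbitrary): retrieve all sources within 2 deg of a sky position,
    # returned sorted by angular offset.
    #
    #   target = SkyCoord(83.63, 22.01, unit='deg')
    #   radius, srcs = roi.get_sources_by_position(target, 2.0)
    #   for r, s in zip(radius, srcs):
    #       print(r, s.name)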
def load_fits_catalog(self, name, **kwargs):
"""Load sources from a FITS catalog file.
Parameters
----------
name : str
Catalog name or path to a catalog FITS file.
"""
# EAC split this function to make it easier to load an existing catalog
cat = catalog.Catalog.create(name)
self.load_existing_catalog(cat, **kwargs)
def load_existing_catalog(self, cat, **kwargs):
"""Load sources from an existing catalog object.
Parameters
----------
cat : `~fermipy.catalog.Catalog`
Catalog object.
"""
coordsys = kwargs.get('coordsys', 'CEL')
extdir = kwargs.get('extdir', self.extdir)
srcname = kwargs.get('srcname', None)
m0 = get_skydir_distance_mask(cat.skydir, self.skydir,
self.config['src_radius'])
m1 = get_skydir_distance_mask(cat.skydir, self.skydir,
self.config['src_radius_roi'],
square=True, coordsys=coordsys)
m = (m0 & m1)
if srcname is not None:
m &= utils.find_rows_by_string(cat.table, [srcname],
self.src_name_cols)
offset = self.skydir.separation(cat.skydir).deg
offset_cel = wcs_utils.sky_to_offset(self.skydir,
cat.radec[:, 0], cat.radec[:, 1],
'CEL')
offset_gal = wcs_utils.sky_to_offset(self.skydir,
cat.glonlat[
:, 0], cat.glonlat[:, 1],
'GAL')
for i, (row, radec) in enumerate(zip(cat.table[m],
cat.radec[m])):
catalog_dict = catalog.row_to_dict(row)
src_dict = {'catalog': catalog_dict}
src_dict['Source_Name'] = row['Source_Name']
src_dict['SpectrumType'] = row['SpectrumType']
if row['extended']:
src_dict['SourceType'] = 'DiffuseSource'
src_dict['SpatialType'] = str(row['Spatial_Function'])
src_dict['SpatialModel'] = str(row['Spatial_Function'])
search_dirs = []
if extdir is not None:
search_dirs += [extdir, os.path.join(extdir, 'Templates')]
search_dirs += [row['extdir'],
os.path.join(row['extdir'], 'Templates')]
if src_dict['SpatialType'] == 'SpatialMap':
try:
src_dict['Spatial_Filename'] = utils.resolve_file_path(
row['Spatial_Filename'],
search_dirs=search_dirs)
except:
print("Failed to find %s" % row['Spatial_Filename'])
else:
src_dict['SourceType'] = 'PointSource'
src_dict['SpatialType'] = 'SkyDirFunction'
src_dict['SpatialModel'] = 'PointSource'
src_dict['spectral_pars'] = spectral_pars_from_catalog(
catalog_dict)
src_dict['spatial_pars'] = spatial_pars_from_catalog(catalog_dict)
src = Source(src_dict['Source_Name'], src_dict, radec=radec)
src.data['offset'] = offset[m][i]
src.data['offset_ra'] = offset_cel[:, 0][m][i]
src.data['offset_dec'] = offset_cel[:, 1][m][i]
src.data['offset_glon'] = offset_gal[:, 0][m][i]
src.data['offset_glat'] = offset_gal[:, 1][m][i]
self.load_source(src, False,
merge_sources=self.config['merge_sources'])
self._build_src_index()
def load_xml(self, xmlfile, **kwargs):
"""Load sources from an XML file."""
extdir = kwargs.get('extdir', self.extdir)
coordsys = kwargs.get('coordsys', 'CEL')
if not os.path.isfile(xmlfile):
xmlfile = os.path.join(fermipy.PACKAGE_DATA, 'catalogs', xmlfile)
root = ElementTree.ElementTree(file=xmlfile).getroot()
diffuse_srcs = []
srcs = []
ra, dec = [], []
for s in root.findall('source'):
src = Source.create_from_xml(s, extdir=extdir)
if src.diffuse:
diffuse_srcs += [src]
else:
srcs += [src]
ra += [src['RAJ2000']]
dec += [src['DEJ2000']]
src_skydir = SkyCoord(ra=np.array(ra) * u.deg,
dec=np.array(dec) * u.deg)
radec = np.vstack((src_skydir.ra.deg, src_skydir.dec.deg)).T
glonlat = np.vstack((src_skydir.galactic.l.deg,
src_skydir.galactic.b.deg)).T
offset = self.skydir.separation(src_skydir).deg
offset_cel = wcs_utils.sky_to_offset(self.skydir,
radec[:, 0], radec[:, 1], 'CEL')
offset_gal = wcs_utils.sky_to_offset(self.skydir,
glonlat[:, 0], glonlat[:, 1], 'GAL')
m0 = get_skydir_distance_mask(src_skydir, self.skydir,
self.config['src_radius'])
m1 = get_skydir_distance_mask(src_skydir, self.skydir,
self.config['src_radius_roi'],
square=True, coordsys=coordsys)
m = (m0 & m1)
srcs = np.array(srcs)[m]
for i, s in enumerate(srcs):
s.data['offset'] = offset[m][i]
s.data['offset_ra'] = offset_cel[:, 0][m][i]
s.data['offset_dec'] = offset_cel[:, 1][m][i]
s.data['offset_glon'] = offset_gal[:, 0][m][i]
s.data['offset_glat'] = offset_gal[:, 1][m][i]
self.load_source(s, False,
merge_sources=self.config['merge_sources'])
for i, s in enumerate(diffuse_srcs):
self.load_source(s, False,
merge_sources=self.config['merge_sources'])
self._build_src_index()
return srcs
def _build_src_index(self):
"""Build an indices for fast lookup of a source given its name
or coordinates."""
self._srcs = sorted(self._srcs, key=lambda t: t['offset'])
nsrc = len(self._srcs)
radec = np.zeros((2, nsrc))
for i, src in enumerate(self._srcs):
radec[:, i] = src.radec
self._src_skydir = SkyCoord(ra=radec[0], dec=radec[1], unit=u.deg)
self._src_radius = self._src_skydir.separation(self.skydir)
def write_xml(self, xmlfile, config=None):
"""Save the ROI model as an XML file."""
root = ElementTree.Element('source_library')
root.set('title', 'source_library')
for s in self._srcs:
s.write_xml(root)
if config is not None:
srcs = self.create_diffuse_srcs(config)
diffuse_srcs = {s.name: s for s in srcs}
for s in self._diffuse_srcs:
src = copy.deepcopy(diffuse_srcs.get(s.name, s))
src.update_spectral_pars(s.spectral_pars)
src.write_xml(root)
else:
for s in self._diffuse_srcs:
s.write_xml(root)
output_file = open(xmlfile, 'w')
output_file.write(utils.prettify_xml(root))
def create_source_table(self):
cols_dict = collections.OrderedDict()
cols_dict['source_name'] = dict(dtype='S48', format='%s')
cols_dict['spectrum_type'] = dict(dtype='S48', format='%s')
cols_dict['spatialModel_type'] = dict(dtype='S48', format='%s')
cols_dict['spectrum_file'] = dict(dtype='S256', format='%s')
cols_dict['spatialModel_file'] = dict(dtype='S256', format='%s')
cols = [Column(name=k, **v) for k, v in cols_dict.items()]
tab = Table(cols)
row_dict = {}
for s in self.sources:
row_dict['source_name'] = s.name
row_dict['spectrum_type'] = s['SpectrumType']
row_dict['spatialModel_type'] = s['SpatialType']
row_dict['spectrum_file'] = s['Spectrum_Filename']
row_dict['spatialModel_file'] = s['Spatial_Filename']
tab.add_row([row_dict[k] for k in tab.columns])
return tab
def create_param_table(self):
cols_dict = collections.OrderedDict()
cols_dict['source_name'] = dict(dtype='S48', format='%s')
cols_dict['name'] = dict(dtype='S48', format='%s')
cols_dict['group'] = dict(dtype='S48', format='%s')
cols_dict['type'] = dict(dtype='S48', format='%s')
cols_dict['value'] = dict(dtype='f8', format='%.3f')
cols_dict['error'] = dict(dtype='f8', format='%.3f')
cols_dict['scale'] = dict(dtype='f8', format='%.3f')
cols_dict['min'] = dict(dtype='f8', format='%.3f')
cols_dict['max'] = dict(dtype='f8', format='%.3f')
cols_dict['free'] = dict(dtype='bool')
cols = [Column(name=k, **v) for k, v in cols_dict.items()]
tab = Table(cols)
row_dict = {}
for s in self.sources:
row_dict['source_name'] = s.name
row_dict['type'] = s['SpectrumType']
row_dict['group'] = 'spectrum'
for k, v in s.spectral_pars.items():
row_dict['name'] = k
row_dict.update(v)
tab.add_row([row_dict[k] for k in tab.columns])
row_dict['type'] = s['SpatialType']
row_dict['group'] = 'spatialModel'
for k, v in s.spatial_pars.items():
row_dict['name'] = k
row_dict.update(v)
tab.add_row([row_dict[k] for k in tab.columns])
return tab
def create_table(self, names=None):
"""Create an astropy Table object with the contents of the ROI model.
"""
scan_shape = (1,)
for src in self._srcs:
scan_shape = max(scan_shape, src['dloglike_scan'].shape)
tab = create_source_table(scan_shape)
for s in self._srcs:
if names is not None and s.name not in names:
continue
s.add_to_table(tab)
return tab
def write_fits(self, fitsfile):
"""Write the ROI model to a FITS file."""
tab = self.create_table()
hdu_data = fits.table_to_hdu(tab)
hdus = [fits.PrimaryHDU(), hdu_data]
fits_utils.write_hdus(hdus, fitsfile)
    def to_ds9(self, free='box', fixed='cross', frame='fk5', color='green', header=True):
"""Returns a list of ds9 region definitions
Parameters
----------
        free: str
            one of the supported ds9 point symbols, used for free sources, see here: http://ds9.si.edu/doc/ref/region.html
        fixed: str
            as free but for fixed sources
frame: str
typically fk5, more to be implemented
color: str
color used for symbols (only ds9 compatible colors)
header: bool
if True, will prepend a global header line.
Returns
-------
lines : list
list of regions (and header if requested)
"""
# todo: add support for extended sources?!
allowed_symbols = ['circle','box','diamond','cross','x','arrow','boxcircle']
# adding some checks.
assert free in allowed_symbols, "symbol %s not supported"%free
assert fixed in allowed_symbols, "symbol %s not supported"%fixed
lines = []
if header:
lines.append("global color=%s"%color)
for src in self.get_sources():
            # self.get_sources returns not only Source objects but also IsoSource and MapCubeSource
            # instances; the latter are skipped (they are by construction all-sky and have no corresponding ds9 region string)
if not isinstance(src, Source): continue
# otherwise get ra, dec
ra, dec = src.radec
line = "%s; point( %1.5f, %1.5f) # point=%s text={%s} color=%s"%(frame,ra, dec,
free if src.is_free else fixed,
src.name,
color)
lines.append(line)
return lines
def write_ds9region(self, region, *args, **kwargs):
"""Create a ds9 compatible region file from the ROI.
        It calls the `to_ds9` method and writes the result to the region file. Only the file name is required.
All other parameters will be forwarded to the `to_ds9` method, see the documentation of that method
for all accepted parameters and options.
Parameters
----------
region : str
name of the region file (string)
"""
        lines = self.to_ds9(*args, **kwargs)
        with open(region, 'w') as fo:
fo.write("\n".join(lines))
| bsd-3-clause | -9,097,744,642,414,983,000 | 35.367756 | 122 | 0.527305 | false |
hugobuddel/orange3 | Orange/widgets/utils/scaling.py | 2 | 24980 | import time
import random
import sys
import numpy as np
import Orange
from Orange.statistics.basic_stats import DomainBasicStats
from Orange.widgets.settings import Setting
from Orange.widgets.utils.datacaching import getCached, setCached
# noinspection PyBroadException
def checksum(x):
if x is None:
return None
try:
return x.checksum()
except:
return float('nan')
def get_variable_values_sorted(variable):
"""
    Return a list of sorted values for the given attribute if all its values can
    be cast to int; otherwise return the values in their original order.
"""
if variable.is_continuous:
return []
try:
return sorted(variable.values, key=int)
except ValueError:
return variable.values
def get_variable_value_indices(variable, sort_values=True):
"""
    Create a dictionary for the given variable. Keys are the variable's values,
    dictionary values are their indices; if sort_values is True and all values
    can be cast to integers, they are sorted numerically first.
"""
if variable.is_continuous:
return {}
if sort_values:
values = get_variable_values_sorted(variable)
else:
values = variable.values
return {value: i for i, value in enumerate(values)}
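# Worked example (added for clarity): for a discrete variable whose values are the
# strings '2', '10', '1', get_variable_values_sorted returns ['1', '2', '10']
# (numeric order), and get_variable_value_indices then maps them to
# {'1': 0, '2': 1, '10': 2}.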
class ScaleData:
jitter_size = Setting(10)
jitter_continuous = Setting(False)
def __init__(self):
self.raw_data = None # input data
self.attribute_names = [] # list of attribute names from self.raw_data
self.attribute_name_index = {} # dict with indices to attributes
self.attribute_flip_info = {} # dictionary with attrName: 0/1 attribute is flipped or not
self.data_has_class = False
self.data_has_continuous_class = False
self.data_has_discrete_class = False
self.data_class_name = None
self.data_domain = None
self.data_class_index = None
self.have_data = False
self.jitter_seed = 0
self.attr_values = {}
self.domain_data_stat = []
self.original_data = None # input (nonscaled) data in a numpy array
self.scaled_data = None # scaled data to the interval 0-1
self.no_jittering_scaled_data = None
self.valid_data_array = None
def rescale_data(self):
"""
Force the existing data to be rescaled due to changes like
jitter_continuous, jitter_size, ...
"""
self.set_data(self.raw_data, skipIfSame=0)
def set_data(self, data, **args):
if args.get("skipIfSame", 1):
if checksum(data) == checksum(self.raw_data):
return
self.domain_data_stat = []
self.attr_values = {}
self.original_data = None
self.scaled_data = None
self.no_jittering_scaled_data = None
self.valid_data_array = None
self.raw_data = None
self.have_data = False
self.data_has_class = False
self.data_has_continuous_class = False
self.data_has_discrete_class = False
self.data_class_name = None
self.data_domain = None
self.data_class_index = None
if data is None:
return
full_data = data
self.raw_data = data
len_data = data and len(data) or 0
self.attribute_names = [attr.name for attr in full_data.domain]
self.attribute_name_index = dict([(full_data.domain[i].name, i)
for i in range(len(full_data.domain))])
self.attribute_flip_info = {}
self.data_domain = full_data.domain
self.data_has_class = bool(full_data.domain.class_var)
self.data_has_continuous_class = full_data.domain.has_continuous_class
self.data_has_discrete_class = full_data.domain.has_discrete_class
self.data_class_name = self.data_has_class and full_data.domain.class_var.name
if self.data_has_class:
self.data_class_index = self.attribute_name_index[self.data_class_name]
self.have_data = bool(self.raw_data and len(self.raw_data) > 0)
self.domain_data_stat = getCached(full_data,
DomainBasicStats,
(full_data,))
sort_values_for_discrete_attrs = args.get("sort_values_for_discrete_attrs",
1)
for index in range(len(full_data.domain)):
attr = full_data.domain[index]
if attr.is_discrete:
self.attr_values[attr.name] = [0, len(attr.values)]
elif attr.is_continuous:
self.attr_values[attr.name] = [self.domain_data_stat[index].min,
self.domain_data_stat[index].max]
if 'no_data' in args:
return
# the original_data, no_jittering_scaled_data and validArray are arrays
# that we can cache so that other visualization widgets don't need to
# compute it. The scaled_data on the other hand has to be computed for
# each widget separately because of different
# jitter_continuous and jitter_size values
if getCached(data, "visualizationData"):
self.original_data, self.no_jittering_scaled_data, self.valid_data_array = getCached(data,
"visualizationData")
else:
no_jittering_data = np.c_[full_data.X, full_data.Y].T
valid_data_array = ~np.isnan(no_jittering_data)
original_data = no_jittering_data.copy()
for index in range(len(data.domain)):
attr = data.domain[index]
if attr.is_discrete:
# see if the values for discrete attributes have to be resorted
variable_value_indices = get_variable_value_indices(data.domain[index],
sort_values_for_discrete_attrs)
if 0 in [i == variable_value_indices[attr.values[i]]
for i in range(len(attr.values))]:
                        # make the array contiguous, otherwise the putmask
# function does not work
line = no_jittering_data[index].copy()
indices = [np.where(line == val, 1, 0)
for val in range(len(attr.values))]
for i in range(len(attr.values)):
np.putmask(line, indices[i],
variable_value_indices[attr.values[i]])
no_jittering_data[index] = line # save the changed array
original_data[index] = line # reorder also the values in the original data
no_jittering_data[index] = ((no_jittering_data[index] * 2.0 + 1.0)
/ float(2 * len(attr.values)))
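                    # Worked example (added for clarity): a discrete attribute with
                    # 3 values maps category indices 0, 1, 2 to the bin centres
                    # (2*i + 1) / 6, i.e. 1/6, 3/6 and 5/6, so every category lands
                    # strictly inside the [0, 1] interval.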
elif attr.is_continuous:
diff = self.domain_data_stat[index].max - self.domain_data_stat[
index].min or 1 # if all values are the same then prevent division by zero
no_jittering_data[index] = (no_jittering_data[index] -
self.domain_data_stat[index].min) / diff
self.original_data = original_data
self.no_jittering_scaled_data = no_jittering_data
self.valid_data_array = valid_data_array
if data:
setCached(data, "visualizationData",
(self.original_data, self.no_jittering_scaled_data,
self.valid_data_array))
# compute the scaled_data arrays
scaled_data = self.no_jittering_scaled_data
# Random generators for jittering
random = np.random.RandomState(seed=self.jitter_seed)
rand_seeds = random.random_integers(0, 2 ** 30 - 1,
size=len(data.domain))
for index, rseed in zip(list(range(len(data.domain))), rand_seeds):
# Need to use a different seed for each feature
random = np.random.RandomState(seed=rseed)
attr = data.domain[index]
if attr.is_discrete:
scaled_data[index] += (self.jitter_size / (50.0 * max(1, len(attr.values)))) * \
(random.rand(len(full_data)) - 0.5)
elif attr.is_continuous and self.jitter_continuous:
scaled_data[index] += self.jitter_size / 50.0 * (0.5 - random.rand(len(full_data)))
scaled_data[index] = np.absolute(scaled_data[index]) # fix values below zero
ind = np.where(scaled_data[index] > 1.0, 1, 0) # fix values above 1
np.putmask(scaled_data[index], ind, 2.0 - np.compress(ind, scaled_data[index]))
self.scaled_data = scaled_data[:, :len_data]
def scale_example_value(self, instance, index):
"""
        Scale the instance's value at the given index to a range between 0 and 1 with
respect to self.raw_data.
"""
if instance[index].isSpecial():
print("Warning: scaling instance with missing value")
return 0.5
if instance.domain[index].is_discrete:
d = get_variable_value_indices(instance.domain[index])
return (d[instance[index].value] * 2 + 1) / float(2 * len(d))
elif instance.domain[index].is_continuous:
diff = self.domain_data_stat[index].max - self.domain_data_stat[index].min
if diff == 0:
diff = 1 # if all values are the same then prevent division by zero
return (instance[index] - self.domain_data_stat[index].min) / diff
def get_attribute_label(self, attr_name):
if (self.attribute_flip_info.get(attr_name, 0) and
self.data_domain[attr_name].is_continuous):
return "-" + attr_name
return attr_name
def flip_attribute(self, attr_name):
if attr_name not in self.attribute_names:
return 0
if self.data_domain[attr_name].is_discrete:
return 0
index = self.attribute_name_index[attr_name]
self.attribute_flip_info[attr_name] = 1 - self.attribute_flip_info.get(attr_name, 0)
if self.data_domain[attr_name].is_continuous:
self.attr_values[attr_name] = [-self.attr_values[attr_name][1], -self.attr_values[attr_name][0]]
self.scaled_data[index] = 1 - self.scaled_data[index]
self.no_jittering_scaled_data[index] = 1 - self.no_jittering_scaled_data[index]
return 1
def get_min_max_val(self, attr):
if type(attr) == int:
attr = self.attribute_names[attr]
diff = self.attr_values[attr][1] - self.attr_values[attr][0]
return diff or 1.0
def get_valid_list(self, indices, also_class_if_exists=1):
"""
Get array of 0 and 1 of len = len(self.raw_data). If there is a missing
value at any attribute in indices return 0 for that instance.
"""
if self.valid_data_array is None or len(self.valid_data_array) == 0:
return np.array([], np.bool)
inds = indices[:]
if also_class_if_exists and self.data_has_class:
inds.append(self.data_class_index)
return np.all(self.valid_data_array[inds], axis=0)
def get_valid_indices(self, indices):
"""
Get array with numbers that represent the instance indices that have a
valid data value.
"""
valid_list = self.get_valid_list(indices)
return np.nonzero(valid_list)[0]
def rnd_correction(self, max):
"""
Return a number from -max to max.
"""
return (random.random() - 0.5) * 2 * max
class ScaleScatterPlotData(ScaleData):
def get_original_data(self, indices):
data = self.original_data.take(indices, axis = 0)
for i, ind in enumerate(indices):
[minVal, maxVal] = self.attr_values[self.data_domain[ind].name]
if self.data_domain[ind].is_discrete:
data[i] += (self.jitter_size/50.0)*(np.random.random(len(self.raw_data)) - 0.5)
elif self.data_domain[ind].is_continuous and self.jitter_continuous:
data[i] += (self.jitter_size/(50.0*(maxVal-minVal or 1)))*(np.random.random(len(self.raw_data)) - 0.5)
return data
getOriginalData = get_original_data
# @deprecated_keywords({"xAttr": "xattr", "yAttr": "yattr"})
def get_xy_data_positions(self, xattr, yattr, filter_valid=False,
copy=True):
"""
        Create the x-y projection for the two given attributes (xattr, yattr).
"""
xattr_index = self.attribute_name_index[xattr]
yattr_index = self.attribute_name_index[yattr]
if filter_valid is True:
filter_valid = self.get_valid_list([xattr_index, yattr_index])
if isinstance(filter_valid, np.ndarray):
xdata = self.scaled_data[xattr_index, filter_valid]
ydata = self.scaled_data[yattr_index, filter_valid]
elif copy:
xdata = self.scaled_data[xattr_index].copy()
ydata = self.scaled_data[yattr_index].copy()
else:
xdata = self.scaled_data[xattr_index]
ydata = self.scaled_data[yattr_index]
if self.data_domain[xattr_index].is_discrete:
xdata *= len(self.data_domain[xattr_index].values)
xdata -= 0.5
else:
xdata *= self.attr_values[xattr][1] - self.attr_values[xattr][0]
xdata += float(self.attr_values[xattr][0])
if self.data_domain[yattr_index].is_discrete:
ydata *= len(self.data_domain[yattr_index].values)
ydata -= 0.5
else:
ydata *= self.attr_values[yattr][1] - self.attr_values[yattr][0]
ydata += float(self.attr_values[yattr][0])
return xdata, ydata
getXYDataPositions = get_xy_data_positions
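    # Illustrative usage sketch (not from the original source; the attribute names
    # are arbitrary and must exist in the loaded data domain):
    #
    #   scaler = ScaleScatterPlotData()
    #   scaler.set_data(data)
    #   xdata, ydata = scaler.get_xy_data_positions('sepal length', 'sepal width',
    #                                               filter_valid=True)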
# @deprecated_keywords({"attrIndices": "attr_indices",
# "settingsDict": "settings_dict"})
def get_projected_point_position(self, attr_indices, values, **settings_dict):
"""
        For attributes in attr_indices and their corresponding values in values,
        compute the point positions.  This function makes more sense in the radviz
        and polyviz methods; settings_dict is accepted only because radviz and
        polyviz take this parameter.
"""
return values
getProjectedPointPosition = get_projected_point_position
# @deprecated_keywords({"attrIndices": "attr_indices",
# "settingsDict": "settings_dict"})
def create_projection_as_example_table(self, attr_indices, **settings_dict):
"""
Create the projection of attribute indices given in attr_indices and
create an example table with it.
"""
if self.data_has_class:
domain = settings_dict.get("domain") or \
Orange.data.Domain([Orange.feature.Continuous(self.data_domain[attr_indices[0]].name),
Orange.feature.Continuous(self.data_domain[attr_indices[1]].name),
Orange.feature.Discrete(self.data_domain.class_var.name,
values = get_variable_values_sorted(self.data_domain.class_var))])
else:
domain = settings_dict.get("domain") or \
Orange.data.Domain([Orange.feature.Continuous(self.data_domain[attr_indices[0]].name),
Orange.feature.Continuous(self.data_domain[attr_indices[1]].name)])
data = self.create_projection_as_numeric_array(attr_indices,
**settings_dict)
        if data is not None:
return Orange.data.Table(domain, data)
else:
return Orange.data.Table(domain)
createProjectionAsExampleTable = create_projection_as_example_table
# @deprecated_keywords({"attrIndices": "attr_indices",
# "settingsDict": "settings_dict"})
def create_projection_as_example_table_3D(self, attr_indices, **settings_dict):
"""
Create the projection of attribute indices given in attr_indices and
create an example table with it.
"""
if self.data_has_class:
domain = settings_dict.get("domain") or \
Orange.data.Domain([Orange.feature.Continuous(self.data_domain[attr_indices[0]].name),
Orange.feature.Continuous(self.data_domain[attr_indices[1]].name),
Orange.feature.Continuous(self.data_domain[attr_indices[2]].name),
Orange.feature.Discrete(self.data_domain.class_var.name,
values = get_variable_values_sorted(self.data_domain.class_var))])
else:
domain = settings_dict.get("domain") or \
Orange.data.Domain([Orange.feature.Continuous(self.data_domain[attr_indices[0]].name),
Orange.feature.Continuous(self.data_domain[attr_indices[1]].name),
Orange.feature.Continuous(self.data_domain[attr_indices[2]].name)])
data = self.create_projection_as_numeric_array_3D(attr_indices,
**settings_dict)
        if data is not None:
return Orange.data.Table(domain, data)
else:
return Orange.data.Table(domain)
createProjectionAsExampleTable3D = create_projection_as_example_table_3D
# @deprecated_keywords({"attrIndices": "attr_indices",
# "settingsDict": "settings_dict",
# "validData": "valid_data",
# "classList": "class_list",
# "jutterSize": "jitter_size"})
def create_projection_as_numeric_array(self, attr_indices, **settings_dict):
valid_data = settings_dict.get("valid_data")
class_list = settings_dict.get("class_list")
jitter_size = settings_dict.get("jitter_size", 0.0)
        if valid_data is None:
valid_data = self.get_valid_list(attr_indices)
if sum(valid_data) == 0:
return None
        if class_list is None and self.data_has_class:
class_list = self.original_data[self.data_class_index]
xarray = self.no_jittering_scaled_data[attr_indices[0]]
yarray = self.no_jittering_scaled_data[attr_indices[1]]
if jitter_size > 0.0:
xarray += (np.random.random(len(xarray))-0.5)*jitter_size
yarray += (np.random.random(len(yarray))-0.5)*jitter_size
        if class_list is not None:
data = np.compress(valid_data, np.array((xarray, yarray, class_list)), axis = 1)
else:
data = np.compress(valid_data, np.array((xarray, yarray)), axis = 1)
data = np.transpose(data)
return data
createProjectionAsNumericArray = create_projection_as_numeric_array
# @deprecated_keywords({"attrIndices": "attr_indices",
# "settingsDict": "settings_dict",
# "validData": "valid_data",
# "classList": "class_list",
# "jutterSize": "jitter_size"})
def create_projection_as_numeric_array_3D(self, attr_indices, **settings_dict):
valid_data = settings_dict.get("valid_data")
class_list = settings_dict.get("class_list")
jitter_size = settings_dict.get("jitter_size", 0.0)
        if valid_data is None:
valid_data = self.get_valid_list(attr_indices)
if sum(valid_data) == 0:
return None
        if class_list is None and self.data_has_class:
class_list = self.original_data[self.data_class_index]
xarray = self.no_jittering_scaled_data[attr_indices[0]]
yarray = self.no_jittering_scaled_data[attr_indices[1]]
zarray = self.no_jittering_scaled_data[attr_indices[2]]
if jitter_size > 0.0:
xarray += (np.random.random(len(xarray))-0.5)*jitter_size
yarray += (np.random.random(len(yarray))-0.5)*jitter_size
zarray += (np.random.random(len(zarray))-0.5)*jitter_size
        if class_list is not None:
data = np.compress(valid_data, np.array((xarray, yarray, zarray, class_list)), axis = 1)
else:
data = np.compress(valid_data, np.array((xarray, yarray, zarray)), axis = 1)
data = np.transpose(data)
return data
createProjectionAsNumericArray3D = create_projection_as_numeric_array_3D
# @deprecated_keywords({"attributeNameOrder": "attribute_name_order",
# "addResultFunct": "add_result_funct"})
def get_optimal_clusters(self, attribute_name_order, add_result_funct):
if not self.data_has_class or self.data_has_continuous_class:
return
jitter_size = 0.001 * self.clusterOptimization.jitterDataBeforeTriangulation
domain = Orange.data.Domain([Orange.feature.Continuous("xVar"),
Orange.feature.Continuous("yVar"),
self.data_domain.class_var])
# init again, in case that the attribute ordering took too much time
self.scatterWidget.progressBarInit()
start_time = time.time()
count = len(attribute_name_order)*(len(attribute_name_order)-1)/2
test_index = 0
for i in range(len(attribute_name_order)):
for j in range(i):
try:
attr1 = self.attribute_name_index[attribute_name_order[j]]
attr2 = self.attribute_name_index[attribute_name_order[i]]
test_index += 1
if self.clusterOptimization.isOptimizationCanceled():
secs = time.time() - start_time
self.clusterOptimization.setStatusBarText("Evaluation stopped (evaluated %d projections in %d min, %d sec)"
% (test_index, secs/60, secs%60))
self.scatterWidget.progressBarFinished()
return
data = self.create_projection_as_example_table([attr1, attr2],
domain = domain,
jitter_size = jitter_size)
graph, valuedict, closuredict, polygon_vertices_dict, enlarged_closure_dict, other_dict = self.clusterOptimization.evaluateClusters(data)
all_value = 0.0
classes_dict = {}
for key in valuedict.keys():
add_result_funct(valuedict[key], closuredict[key],
polygon_vertices_dict[key],
[attribute_name_order[i],
attribute_name_order[j]],
int(graph.objects[polygon_vertices_dict[key][0]].getclass()),
enlarged_closure_dict[key], other_dict[key])
classes_dict[key] = int(graph.objects[polygon_vertices_dict[key][0]].getclass())
all_value += valuedict[key]
add_result_funct(all_value, closuredict, polygon_vertices_dict,
[attribute_name_order[i], attribute_name_order[j]],
classes_dict, enlarged_closure_dict, other_dict) # add all the clusters
self.clusterOptimization.setStatusBarText("Evaluated %d projections..."
% (test_index))
self.scatterWidget.progressBarSet(100.0*test_index/float(count))
del data, graph, valuedict, closuredict, polygon_vertices_dict, enlarged_closure_dict, other_dict, classes_dict
except:
type, val, traceback = sys.exc_info()
sys.excepthook(type, val, traceback) # print the exception
secs = time.time() - start_time
self.clusterOptimization.setStatusBarText("Finished evaluation (evaluated %d projections in %d min, %d sec)" % (test_index, secs/60, secs%60))
self.scatterWidget.progressBarFinished()
getOptimalClusters = get_optimal_clusters
| gpl-3.0 | 5,134,210,712,570,955,000 | 45.345083 | 157 | 0.561489 | false |
jinyu121/Canteen | CanteenWebsite/views.py | 1 | 2840 | # -*- coding: utf-8 -*-
from django.shortcuts import get_object_or_404
from django.shortcuts import render
from CanteenWebsite.models import Category, Goods
from CanteenWebsite.utils.functions import get_goods_list
from CanteenWebsite.utils.functions import setting_get, setting_get_json
def index(request):
index_page = setting_get("index_page", "blank")
goods_list = None
context_extend = dict(
list_style_template="CanteenWebsite/inclusions/show_list_styles/{}.html".format(
setting_get("goods_list_style")
),
use_goods_detail_view=setting_get_json("use_goods_detail_view")
)
if index_page != "blank":
goods_list_all = Goods.objects.all()
goods_list = get_goods_list(goods_list_all, request)
context = {
"index_page": index_page,
"goods_list": goods_list
}
context.update(context_extend)
return render(request=request,
template_name="CanteenWebsite/index.html",
context=context
)
def category(request, category_id):
context_extend = dict(
list_style_template="CanteenWebsite/inclusions/show_list_styles/{}.html".format(
setting_get("goods_list_style")
),
use_goods_detail_view=setting_get_json("use_goods_detail_view")
)
    # Current category
current_category = get_object_or_404(Category, pk=category_id)
    # Fetch the goods belonging to this category
goods_list_all = current_category.goods_set.all()
goods_list = get_goods_list(goods_list_all, request)
context = {
"current_category": current_category,
"goods_list": goods_list
}
context.update(context_extend)
return render(request=request,
template_name="CanteenWebsite/category.html",
context=context
)
def detail(request, goods_id):
    # Current goods item
current_goods = get_object_or_404(Goods, pk=goods_id)
context = {
"current_goods": current_goods
}
return render(request=request,
template_name="CanteenWebsite/detail.html",
context=context
)
def search(request, key_word):
context_extend = dict(
list_style_template="CanteenWebsite/inclusions/show_list_styles/{}.html".format(
setting_get("goods_list_style")
),
use_goods_detail_view=setting_get_json("use_goods_detail_view")
)
goods_list_all = Goods.objects.filter(name__contains=key_word)
goods_list = get_goods_list(goods_list_all, request)
context = {
"keyword": key_word,
"goods_list": goods_list
}
context.update(context_extend)
return render(request=request,
template_name="CanteenWebsite/search.html",
context=context
)
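# Illustrative URL wiring sketch (not part of this file; the patterns and names
# below are assumptions, shown only to clarify how the view signatures are called):
#
#   url(r'^$', views.index, name='index'),
#   url(r'^category/(?P<category_id>\d+)/$', views.category, name='category'),
#   url(r'^goods/(?P<goods_id>\d+)/$', views.detail, name='detail'),
#   url(r'^search/(?P<key_word>.+)/$', views.search, name='search'),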
| gpl-3.0 | 9,032,665,304,725,189,000 | 29.945055 | 88 | 0.617898 | false |
rzarzynski/tempest | tempest/thirdparty/boto/test_ec2_volumes.py | 1 | 3038 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import config
from tempest.openstack.common import log as logging
from tempest import test
from tempest.thirdparty.boto import test as boto_test
CONF = config.CONF
LOG = logging.getLogger(__name__)
def compare_volumes(a, b):
return (a.id == b.id and
a.size == b.size)
class EC2VolumesTest(boto_test.BotoTestCase):
@classmethod
def skip_checks(cls):
super(EC2VolumesTest, cls).skip_checks()
if not CONF.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
@classmethod
def setup_clients(cls):
super(EC2VolumesTest, cls).setup_clients()
cls.client = cls.os.ec2api_client
@classmethod
def resource_setup(cls):
super(EC2VolumesTest, cls).resource_setup()
cls.zone = CONF.boto.aws_zone
@test.idempotent_id('663f0077-c743-48ad-8ae0-46821cbc0918')
def test_create_get_delete(self):
# EC2 Create, get, delete Volume
volume = self.client.create_volume(CONF.volume.volume_size, self.zone)
cuk = self.addResourceCleanUp(self.client.delete_volume, volume.id)
self.assertIn(volume.status, self.valid_volume_status)
retrieved = self.client.get_all_volumes((volume.id,))
self.assertEqual(1, len(retrieved))
self.assertTrue(compare_volumes(volume, retrieved[0]))
self.assertVolumeStatusWait(volume, "available")
self.client.delete_volume(volume.id)
self.cancelResourceCleanUp(cuk)
@test.idempotent_id('c6b60d7a-1af7-4f8e-af21-d539d9496149')
def test_create_volume_from_snapshot(self):
# EC2 Create volume from snapshot
volume = self.client.create_volume(CONF.volume.volume_size, self.zone)
self.addResourceCleanUp(self.client.delete_volume, volume.id)
self.assertVolumeStatusWait(volume, "available")
snap = self.client.create_snapshot(volume.id)
self.addResourceCleanUp(self.destroy_snapshot_wait, snap)
self.assertSnapshotStatusWait(snap, "completed")
svol = self.client.create_volume(CONF.volume.volume_size, self.zone,
snapshot=snap)
cuk = self.addResourceCleanUp(svol.delete)
self.assertVolumeStatusWait(svol, "available")
svol.delete()
self.cancelResourceCleanUp(cuk)
| apache-2.0 | 1,652,370,888,184,995,300 | 38.454545 | 79 | 0.685978 | false |
fdintino/django-templatesadmin | templatesadmin/edithooks/gitcommit.py | 10 | 2041 | from django import forms
from django.utils.translation import ugettext_lazy as _
from templatesadmin import TemplatesAdminException
from templatesadmin.edithooks import TemplatesAdminHook
import subprocess
import os
class GitCommitHook(TemplatesAdminHook):
'''
Commit to git after saving
'''
@classmethod
def post_save(cls, request, form, template_path):
dir, file = os.path.dirname(template_path) + "/", os.path.basename(template_path)
if request.user.first_name and request.user.last_name:
author = "%s %s" % (request.user.first_name, request.user.last_name)
else:
author = request.user.username
message = form.cleaned_data['commitmessage'] or '--'
command = (
            'GIT_COMMITTER_NAME="%(author)s" GIT_COMMITTER_EMAIL="%(email)s" '
'GIT_AUTHOR_NAME="%(author)s" GIT_AUTHOR_EMAIL="%(email)s" '
'git commit -F - -- %(file)s'
) % {
'file': template_path,
'author': author,
'email': request.user.email,
}
# Stolen from gitpython's git/cmd.py
proc = subprocess.Popen(
args=command,
shell=True,
cwd=dir,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
try:
proc.stdin.write(message.encode('utf-8'))
proc.stdin.close()
stderr_value = proc.stderr.read()
stdout_value = proc.stdout.read()
status = proc.wait()
finally:
proc.stderr.close()
if status != 0:
raise TemplatesAdminException("Error while executing %s: %s" % (command, stderr_value.rstrip(), ))
return stdout_value.rstrip()
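    # Worked example (added for clarity; user and file name are made up): for a user
    # "Jane Doe" <[email protected]> saving "index.html", the command above expands to
    #
    #   GIT_COMMITTER_NAME="Jane Doe" GIT_COMMITTER_EMAIL="[email protected]"
    #   GIT_AUTHOR_NAME="Jane Doe" GIT_AUTHOR_EMAIL="[email protected]"
    #   git commit -F - -- index.html
    #
    # with the commit message fed in through stdin.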
@classmethod
def contribute_to_form(cls, template_path):
return dict(commitmessage=forms.CharField(
widget=forms.TextInput(attrs={'size':'100'}),
label = _('Change message'),
required = False,
))
| bsd-3-clause | 7,133,179,707,200,737,000 | 30.4 | 110 | 0.579128 | false |
liggitt/docker-registry | depends/docker-registry-core/setup.py | 5 | 3191 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Docker.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import setuptools
except ImportError:
import distutils.core as setuptools
import os
import re
import sys
import docker_registry.core as core
ver = sys.version_info
if ver < (2, 6):
raise Exception("Docker registry requires Python 2.6 or higher.")
requirements_txt = open('./requirements/main.txt')
requirements = [line for line in requirements_txt]
# 2.6 native json raw_decode doesn't fit the bill, so add simple to our req
if ver < (2, 7):
requirements.insert(0, 'simplejson==3.6.2')
# Using this will relax dependencies to semver major matching
if 'DEPS' in os.environ and os.environ['DEPS'].lower() == 'loose':
loose = []
for item in requirements:
d = re.match(r'([^=]+)==([0-9]+)[.]([0-9]+)[.]([0-9]+)', item)
if d:
d = list(d.groups())
name = d.pop(0)
version = d.pop(0)
item = '%s>=%s,<%s' % (name, int(version), int(version) + 1)
loose.insert(0, item)
requirements = loose
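# Worked example (added for clarity): with DEPS=loose, a pinned requirement such as
# "Flask==0.10.1" is relaxed to "Flask>=0,<1", i.e. any release sharing the same
# major version is accepted.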
setuptools.setup(
name=core.__title__,
version=core.__version__,
author=core.__author__,
author_email=core.__email__,
maintainer=core.__maintainer__,
maintainer_email=core.__email__,
keywords='docker registry core',
url=core.__url__,
description=core.__description__,
long_description=open('./README.md').read(),
download_url=core.__download__,
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
# 'Programming Language :: Python :: 3.2',
# 'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
# 'Programming Language :: Python :: Implementation :: PyPy',
'Operating System :: OS Independent',
'Topic :: Utilities',
'License :: OSI Approved :: Apache Software License'],
platforms=['Independent'],
license=open('./LICENSE').read(),
namespace_packages=['docker_registry', 'docker_registry.drivers'],
packages=['docker_registry', 'docker_registry.core',
'docker_registry.drivers', 'docker_registry.testing'],
install_requires=requirements,
zip_safe=True,
tests_require=open('./requirements/test.txt').read(),
test_suite='nose.collector'
)
| apache-2.0 | -3,957,196,294,774,160,400 | 35.261364 | 79 | 0.621122 | false |
dutradda/myreco | myreco/users/models.py | 1 | 8064 | # MIT License
# Copyright (c) 2016 Diogo Dutra <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import binascii
import re
from base64 import b64decode
import sqlalchemy as sa
from sqlalchemy.ext.declarative import AbstractConcreteBase, declared_attr
from swaggerit.utils import get_swagger_json
class GrantsModelBase(AbstractConcreteBase):
__tablename__ = 'grants'
__table_args__ = (sa.UniqueConstraint('uri_id', 'method_id'),)
__swagger_json__ = get_swagger_json(__file__, 'grants_swagger.json')
id = sa.Column(sa.Integer, primary_key=True)
@declared_attr
def uri_id(cls):
return sa.Column(sa.ForeignKey('uris.id'))
@declared_attr
def method_id(cls):
return sa.Column(sa.ForeignKey('methods.id'))
@declared_attr
def uri(cls):
return sa.orm.relationship('URIsModel')
@declared_attr
def method(cls):
return sa.orm.relationship('MethodsModel')
class URIsModelBase(AbstractConcreteBase):
__tablename__ = 'uris'
__swagger_json__ = get_swagger_json(__file__, 'uris_swagger.json')
id = sa.Column(sa.Integer, primary_key=True)
uri = sa.Column(sa.String(255), unique=True, nullable=False)
class MethodsModelBase(AbstractConcreteBase):
__tablename__ = 'methods'
__swagger_json__ = get_swagger_json(__file__, 'methods_swagger.json')
id = sa.Column(sa.Integer, primary_key=True)
method = sa.Column(sa.String(10), unique=True, nullable=False)
class UsersModelBase(AbstractConcreteBase):
__tablename__ = 'users'
__swagger_json__ = get_swagger_json(__file__)
id = sa.Column(sa.String(255), primary_key=True)
name = sa.Column(sa.String(255), unique=True, nullable=False)
email = sa.Column(sa.String(255), unique=True, nullable=False)
password = sa.Column(sa.String(255), nullable=False)
admin = sa.Column(sa.Boolean, default=False)
@declared_attr
def grants(cls):
return sa.orm.relationship('GrantsModel', uselist=True, secondary='users_grants')
@declared_attr
def stores(cls):
return sa.orm.relationship('StoresModel', uselist=True, secondary='users_stores')
@classmethod
async def authorize(cls, session, authorization, url, method):
try:
authorization = b64decode(authorization).decode()
except binascii.Error:
return None
except UnicodeDecodeError:
return None
if not ':' in authorization:
return None
user = await cls.get(session, {'id': authorization})
user = user[0] if user else user
if user and user.get('admin'):
session.user = user
return True
elif user:
if method == 'OPTIONS':
return True
for grant in user['grants']:
grant_uri = grant['uri']['uri']
if (grant_uri == url or re.match(grant_uri, url)) \
and grant['method']['method'].lower() == method.lower():
session.user = user
return True
return False
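    # Illustrative usage sketch (not from the original source; the credentials and
    # the concrete UsersModel subclass are assumptions): the authorization token is
    # the base64 encoding of "<email>:<password>", matching the id format set in
    # _set_objs_ids_and_grant below.
    #
    #   from base64 import b64encode
    #   token = b64encode(b'[email protected]:secret').decode()
    #   allowed = await UsersModel.authorize(session, token,
    #                                        '/users/[email protected]', 'GET')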
    @classmethod
    async def insert(cls, session, objs, commit=True, todict=True):
        objs = cls._to_list(objs)
        await cls._set_objs_ids_and_grant(objs, session)
        return await type(cls).insert(cls, session, objs, commit, todict)

    @classmethod
    async def _set_objs_ids_and_grant(cls, objs, session):
        # Each user gets an id of the form '<email>:<password>' plus 'patch'
        # and 'get' grants on their own '/users/<email>' URI.
        objs = cls._to_list(objs)

        patch_method = await cls.get_model('methods').get(session, ids={'method': 'patch'}, todict=False)
        if not patch_method:
            patch_method = await cls.get_model('methods').insert(session, [{'method': 'patch'}], todict=False)
        patch_method = patch_method[0]

        get_method = await cls.get_model('methods').get(session, ids={'method': 'get'}, todict=False)
        if not get_method:
            get_method = await cls.get_model('methods').insert(session, [{'method': 'get'}], todict=False)
        get_method = get_method[0]

        for obj in objs:
            new_grants = []
            user_uri = '/users/{}'.format(obj['email'])

            uri = await cls.get_model('uris').get(session, ids={'uri': user_uri}, todict=False)
            if not uri:
                uri = await cls.get_model('uris').insert(session, [{'uri': user_uri}], todict=False)
            uri = uri[0]

            grant = await cls.get_model('grants').get(session, {'uri_id': uri.id, 'method_id': patch_method.id}, todict=False)
            if grant:
                grant = grant[0].todict()
            else:
                grant = {'uri_id': uri.id, 'method_id': patch_method.id, '_operation': 'insert'}
            new_grants.append(grant)

            grant = await cls.get_model('grants').get(session, {'uri_id': uri.id, 'method_id': get_method.id}, todict=False)
            if grant:
                grant = grant[0].todict()
            else:
                grant = {'uri_id': uri.id, 'method_id': get_method.id, '_operation': 'insert'}
            new_grants.append(grant)

            obj['id'] = '{}:{}'.format(obj['email'], obj['password'])
            grants = obj.get('grants', [])
            grants.extend(new_grants)
            obj['grants'] = grants
    @classmethod
    async def update(cls, session, objs, commit=True, todict=True, ids=None, ids_keys=None):
        if not ids:
            ids = []
            objs = cls._to_list(objs)
            for obj in objs:
                id_ = obj.get('id')
                email = obj.get('email')
                if id_ is not None:
                    ids.append({'id': id_})
                    ids_keys = ('id',)
                elif email is not None:
                    ids.append({'email': email})
                    ids_keys = ('email',)

        insts = await type(cls).update(cls, session, objs, commit=False,
                                       todict=False, ids=ids, ids_keys=ids_keys)
        cls._set_insts_ids(insts)

        if commit:
            await session.commit()

        return cls._build_todict_list(insts) if todict else insts

    @classmethod
    def _set_insts_ids(cls, insts):
        insts = cls._to_list(insts)
        for inst in insts:
            inst.id = '{}:{}'.format(inst.email, inst.password)


def build_users_grants_table(metadata, **kwargs):
    return sa.Table(
        'users_grants', metadata,
        sa.Column('user_id', sa.String(255), sa.ForeignKey('users.id', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True),
        sa.Column('grant_id', sa.Integer, sa.ForeignKey('grants.id', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True),
        **kwargs)


def build_users_stores_table(metadata, **kwargs):
    return sa.Table(
        'users_stores', metadata,
        sa.Column('user_id', sa.String(255), sa.ForeignKey('users.id', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True),
        sa.Column('store_id', sa.Integer, sa.ForeignKey('stores.id', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True),
        **kwargs)
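

# A minimal wiring sketch (illustrative only): it assumes a plain SQLAlchemy
# MetaData object and that concrete 'users', 'grants' and 'stores' tables are
# declared against the same metadata elsewhere; nothing below is part of the
# original module's API. Extra keyword arguments are forwarded to sa.Table.
def _example_build_association_tables():
    metadata = sa.MetaData()
    users_grants = build_users_grants_table(metadata)
    users_stores = build_users_stores_table(metadata)
    return users_grants, users_stores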
| mit | 5,895,479,481,810,590,000 | 36.859155 | 130 | 0.611483 | false |
safwanrahman/standup | standup/main.py | 3 | 3715 | from datetime import date, timedelta
from types import FunctionType, ModuleType
from flask import Flask, redirect, request, session, url_for
from flask.ext.funnel import Funnel
from flask.ext.markdown import Markdown
from flask.ext.seasurf import SeaSurf
from standup.apps.status.helpers import get_weeks
from standup.apps.status.models import Project
from standup.apps.users.models import Team, User
from standup.database import get_session
from standup.errors import register_error_handlers
from standup.filters import register_filters
from standup.mdext import nixheaders
csrf = SeaSurf()


def _get_apps_full_names(apps):
    names = []
    for app in apps:
        parts = []
        if not __name__ == '__main__':
            parts = __name__.split('.')
            parts.pop()
        parts.append('apps')
        parts.append(app)
        names.append('.'.join(parts))
    return names


def create_app(settings):
    app = Flask(__name__)

    # Import settings from file
    for name in dir(settings):
        value = getattr(settings, name)
        if not (name.startswith('_') or isinstance(value, ModuleType)
                or isinstance(value, FunctionType)):
            app.config[name] = value

    # Additional settings
    app.installed_apps = _get_apps_full_names(settings.INSTALLED_APPS)
    app.secret_key = app.config.get('SESSION_SECRET')

    # Markdown
    md = Markdown(app)
    # TODO: We might want to expose Markdown extensions to the config
    # file.
    md.register_extension(nixheaders.makeExtension)

    # Flask-Funnel
    Funnel(app)

    # SeaSurf
    csrf.init_app(app)

    # Register error handlers
    register_error_handlers(app)

    # Register template filters
    register_filters(app)

    for a in app.installed_apps:
        # Register blueprints
        app.register_blueprint(
            getattr(__import__('%s.views' % a, fromlist=['blueprint']),
                    'blueprint'))

    @app.context_processor
    def inject_page():
        return dict(page=int(request.args.get('page', 1)))

    @app.context_processor
    def globals():
        db = get_session(app)
        ctx = dict()

        # Projects, teams and current user
        ctx['projects'] = db.query(Project).order_by(Project.name)
        ctx['teams'] = db.query(Team).order_by(Team.name)
        ctx['weeks'] = get_weeks()
        ctx['current_user'] = None

        if session and 'user_id' in session:
            user = db.query(User).get(session['user_id'])
            if user:
                ctx['current_user'] = user

        # Time stuff
        ctx['today'] = date.today()
        ctx['yesterday'] = date.today() - timedelta(1)

        # CSRF
        def csrf_field():
            return ('<div style="display: none;">'
                    '<input type="hidden" name="_csrf_token" value="%s">'
                    '</div>' % csrf._get_token())

        ctx['csrf'] = csrf_field

        return ctx

    @app.before_request
    def validate_user():
        db = get_session(app)
        if session and 'email' in session and 'user_id' not in session:
            user = db.query(User).filter_by(email=session['email']).first()
            if not user:
                if request.endpoint not in ('users.new_profile',
                                            'users.authenticate',
                                            'users.logout',
                                            'static'):
                    return redirect(url_for('users.new_profile'))

    @app.teardown_request
    def teardown_request(exception=None):
        # Remove the database session if it exists
        if hasattr(app, 'db_session'):
            app.db_session.close()

    return app
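

# Illustrative usage sketch: 'standup.settings' is an assumed import path for
# a settings module providing the config referenced above (INSTALLED_APPS,
# SESSION_SECRET, ...); adjust to the actual deployment layout.
if __name__ == '__main__':
    from standup import settings as _settings  # assumed settings module
    create_app(_settings).run()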
| bsd-3-clause | -7,101,002,513,292,639,000 | 28.72 | 75 | 0.58681 | false |