repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
mitsuse/salada | tests/test_segmenter.py | 1 | 1418 | #!/usr/bin/env python
# coding: utf-8
from salada import language
from salada import segmenter
class TestDefault:
def test_segment_text_by_sequence_of_spaces(self):
text = ' foo \n \n\n bar \t\n baz '
expectation = [
language.Segment('', True, False),
language.Segment('foo', False, False),
language.Segment('bar', False, False),
language.Segment('baz', False, False),
language.Segment('', False, True),
]
result = segmenter.Default().segment(text)
assert result == expectation
def test_regard_first_as_headless(self):
text = 'foo \n \n\n bar \t\n baz '
expectation = [
language.Segment('foo', True, False),
language.Segment('bar', False, False),
language.Segment('baz', False, False),
language.Segment('', False, True),
]
result = segmenter.Default().segment(text)
assert result == expectation
def test_regard_last_as_tailless(self):
text = ' foo \n \n\n bar \t\n baz'
expectation = [
language.Segment('', True, False),
language.Segment('foo', False, False),
language.Segment('bar', False, False),
language.Segment('baz', False, True),
]
result = segmenter.Default().segment(text)
assert result == expectation
| mit | -2,338,574,573,679,739,000 | 33.585366 | 54 | 0.559944 | false | 4.051429 | false | false | false |
Onager/plaso | plaso/containers/tasks.py | 1 | 6551 | # -*- coding: utf-8 -*-
"""Task related attribute container definitions."""
import time
import uuid
from plaso.containers import interface
from plaso.containers import manager
from plaso.lib import definitions
class Task(interface.AttributeContainer):
"""Task attribute container.
  A task describes a piece of work for a multi processing worker process,
  for example a task to process a path specification or to analyze an event.
Attributes:
aborted (bool): True if the session was aborted.
completion_time (int): time that the task was completed. Contains the
number of micro seconds since January 1, 1970, 00:00:00 UTC.
file_entry_type (str): dfVFS type of the file entry the path specification
is referencing.
has_retry (bool): True if the task was previously abandoned and a retry
task was created, False otherwise.
identifier (str): unique identifier of the task.
    last_processing_time (int): the last time the task was marked as being
        processed as number of microseconds since January 1, 1970, 00:00:00 UTC.
merge_priority (int): priority used for the task storage file merge, where
a lower value indicates a higher priority to merge.
path_spec (dfvfs.PathSpec): path specification.
session_identifier (str): the identifier of the session the task is part of.
start_time (int): time that the task was started. Contains the number
of micro seconds since January 1, 1970, 00:00:00 UTC.
storage_file_size (int): size of the storage file in bytes.
storage_format (str): the format the task results are to be stored in.
"""
CONTAINER_TYPE = 'task'
def __init__(self, session_identifier=None):
"""Initializes a task attribute container.
Args:
session_identifier (Optional[str]): identifier of the session the task
is part of.
"""
super(Task, self).__init__()
self.aborted = False
self.completion_time = None
self.file_entry_type = None
self.has_retry = False
self.identifier = '{0:s}'.format(uuid.uuid4().hex)
self.last_processing_time = None
self.merge_priority = None
self.path_spec = None
self.session_identifier = session_identifier
self.start_time = int(time.time() * definitions.MICROSECONDS_PER_SECOND)
self.storage_file_size = None
self.storage_format = None
# This method is necessary for heap sort.
def __lt__(self, other):
"""Compares if the task attribute container is less than the other.
Args:
other (Task): task attribute container to compare to.
Returns:
bool: True if the task attribute container is less than the other.
"""
return self.identifier < other.identifier
def CreateRetryTask(self):
"""Creates a new task to retry a previously abandoned task.
The retry task will have a new identifier but most of the attributes
will be a copy of the previously abandoned task.
Returns:
Task: a task to retry a previously abandoned task.
"""
retry_task = Task(session_identifier=self.session_identifier)
retry_task.file_entry_type = self.file_entry_type
retry_task.merge_priority = self.merge_priority
retry_task.path_spec = self.path_spec
retry_task.storage_file_size = self.storage_file_size
retry_task.storage_format = self.storage_format
self.has_retry = True
return retry_task
def CreateTaskCompletion(self):
"""Creates a task completion.
Returns:
TaskCompletion: task completion attribute container.
"""
self.completion_time = int(
time.time() * definitions.MICROSECONDS_PER_SECOND)
task_completion = TaskCompletion()
task_completion.aborted = self.aborted
task_completion.identifier = self.identifier
task_completion.session_identifier = self.session_identifier
task_completion.timestamp = self.completion_time
return task_completion
def CreateTaskStart(self):
"""Creates a task start.
Returns:
TaskStart: task start attribute container.
"""
task_start = TaskStart()
task_start.identifier = self.identifier
task_start.session_identifier = self.session_identifier
task_start.timestamp = self.start_time
return task_start
def UpdateProcessingTime(self):
"""Updates the processing time to now."""
self.last_processing_time = int(
time.time() * definitions.MICROSECONDS_PER_SECOND)
class TaskCompletion(interface.AttributeContainer):
"""Task completion attribute container.
Attributes:
aborted (bool): True if the session was aborted.
identifier (str): unique identifier of the task.
session_identifier (str): the identifier of the session the task
is part of.
timestamp (int): time that the task was completed. Contains the number
of micro seconds since January 1, 1970, 00:00:00 UTC.
"""
CONTAINER_TYPE = 'task_completion'
def __init__(self, identifier=None, session_identifier=None):
"""Initializes a task completion attribute container.
Args:
identifier (Optional[str]): unique identifier of the task.
The identifier should match that of the corresponding
task start information.
session_identifier (Optional[str]): identifier of the session the task
is part of.
"""
super(TaskCompletion, self).__init__()
self.aborted = False
self.identifier = identifier
self.session_identifier = session_identifier
self.timestamp = None
class TaskStart(interface.AttributeContainer):
"""Task start attribute container.
Attributes:
identifier (str): unique identifier of the task.
session_identifier (str): the identifier of the session the task
is part of.
timestamp (int): time that the task was started. Contains the number
of micro seconds since January 1, 1970, 00:00:00 UTC.
"""
CONTAINER_TYPE = 'task_start'
def __init__(self, identifier=None, session_identifier=None):
"""Initializes a task start attribute container.
Args:
identifier (Optional[str]): unique identifier of the task.
The identifier should match that of the corresponding
task completion information.
session_identifier (Optional[str]): identifier of the session the task
is part of.
"""
super(TaskStart, self).__init__()
self.identifier = identifier
self.session_identifier = session_identifier
self.timestamp = None
manager.AttributeContainersManager.RegisterAttributeContainers([
Task, TaskCompletion, TaskStart])
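

# A minimal illustrative sketch (not part of plaso itself) of the task
# lifecycle implemented above: create a task, emit its start and completion
# containers, and derive a retry task from it.
if __name__ == '__main__':
  task = Task(session_identifier='session-1')
  task_start = task.CreateTaskStart()
  task.UpdateProcessingTime()
  task_completion = task.CreateTaskCompletion()
  retry_task = task.CreateRetryTask()
  print('Task {0:s} started at {1:d} and completed at {2:d}'.format(
      task.identifier, task_start.timestamp, task_completion.timestamp))
  print('Retry task identifier: {0:s}'.format(retry_task.identifier))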
| apache-2.0 | -8,643,982,626,627,538,000 | 34.22043 | 80 | 0.70203 | false | 4.278903 | false | false | false |
holvi/python-stdnum | stdnum/es/iban.py | 1 | 2510 | # iban.py - functions for handling Spanish IBANs
# coding: utf-8
#
# Copyright (C) 2016 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Spanish IBAN (International Bank Account Number).
The IBAN is used to identify bank accounts across national borders. The
Spanish IBAN is built up of the IBAN prefix (ES) and check digits, followed
by the 20 digit CCC (Código Cuenta Corriente).
>>> validate('ES77 1234-1234-16 1234567890')
'ES7712341234161234567890'
>>> to_ccc('ES77 1234-1234-16 1234567890')
'12341234161234567890'
>>> format('ES771234-1234-16 1234567890')
'ES77 1234 1234 1612 3456 7890'
>>> validate('GR1601101050000010547023795') # different country
Traceback (most recent call last):
...
InvalidComponent: ...
>>> validate('ES12 1234-1234-16 1234567890') # invalid IBAN check digit
Traceback (most recent call last):
...
InvalidChecksum: ...
>>> validate('ES15 1234-1234-17 1234567890') # invalid CCC check digit
Traceback (most recent call last):
...
InvalidChecksum: ...
"""
from stdnum import iban
from stdnum.es import ccc
from stdnum.exceptions import *
__all__ = ['compact', 'format', 'to_ccc', 'validate', 'is_valid']
compact = iban.compact
format = iban.format
def to_ccc(number):
"""Return the CCC (Código Cuenta Corriente) part of the number."""
number = compact(number)
if not number.startswith('ES'):
raise InvalidComponent()
return number[4:]
def validate(number):
"""Checks to see if the number provided is a valid Spanish IBAN."""
number = iban.validate(number, check_country=False)
ccc.validate(to_ccc(number))
return number
def is_valid(number):
"""Checks to see if the number provided is a valid Spanish IBAN."""
try:
return bool(validate(number))
except ValidationError:
return False
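

# A minimal illustrative sketch (not part of python-stdnum): exercise the
# helpers above with the sample numbers from the module docstring.
if __name__ == '__main__':
    number = 'ES77 1234-1234-16 1234567890'
    print(validate(number))                          # ES7712341234161234567890
    print(to_ccc(number))                            # 12341234161234567890
    print(format(number))                            # ES77 1234 1234 1612 3456 7890
    print(is_valid('ES12 1234-1234-16 1234567890'))  # False: bad IBAN check digits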
| lgpl-2.1 | 2,566,745,078,822,567,400 | 30.746835 | 75 | 0.722887 | false | 3.634783 | false | false | false |
Azure/azure-sdk-for-python | sdk/authorization/azure-mgmt-authorization/azure/mgmt/authorization/v2015_06_01/models/_models.py | 1 | 2367 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import msrest.serialization
class ClassicAdministrator(msrest.serialization.Model):
"""Classic Administrators.
:param id: The ID of the administrator.
:type id: str
:param name: The name of the administrator.
:type name: str
:param type: The type of the administrator.
:type type: str
:param email_address: The email address of the administrator.
:type email_address: str
:param role: The role of the administrator.
:type role: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'email_address': {'key': 'properties.emailAddress', 'type': 'str'},
'role': {'key': 'properties.role', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ClassicAdministrator, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.email_address = kwargs.get('email_address', None)
self.role = kwargs.get('role', None)
class ClassicAdministratorListResult(msrest.serialization.Model):
"""ClassicAdministrator list result information.
:param value: An array of administrators.
:type value: list[~azure.mgmt.authorization.v2015_06_01.models.ClassicAdministrator]
:param next_link: The URL to use for getting the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ClassicAdministrator]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ClassicAdministratorListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
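

# A minimal illustrative sketch (not part of the generated SDK): the models
# above are populated through keyword arguments; the values shown are
# hypothetical.
if __name__ == '__main__':
    admin = ClassicAdministrator(
        name='admin1', email_address='[email protected]', role='ServiceAdministrator')
    listing = ClassicAdministratorListResult(value=[admin], next_link=None)
    print(listing.value[0].role)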
| mit | 6,230,837,319,939,194,000 | 34.328358 | 94 | 0.584284 | false | 4.060034 | false | false | false |
RTHMaK/RPGOne | deep_qa-master/deep_qa/data/instances/instance.py | 1 | 15041 | """
This module contains the base ``Instance`` classes that concrete classes
inherit from. Specifically, there are three classes:
1. ``Instance``, that just exists as a base type with no functionality
2. ``TextInstance``, which adds a ``words()`` method and a method to convert
strings to indices using a DataIndexer.
3. ``IndexedInstance``, which is a ``TextInstance`` that has had all of its
strings converted into indices.
This class has methods to deal with padding (so that sequences all have the
same length) and converting an ``Instance`` into a set of Numpy arrays
suitable for use with Keras.
As this codebase is dealing mostly with textual question answering, pretty much
all of the concrete ``Instance`` types will have both a ``TextInstance`` and a
corresponding ``IndexedInstance``, which you can see in the individual files
for each ``Instance`` type.
"""
import itertools
from typing import Any, Callable, Dict, List
from ..tokenizers import tokenizers
from ..data_indexer import DataIndexer
class Instance:
"""
A data instance, used either for training a neural network or for
testing one.
Parameters
----------
label : boolean or index
For simple ``Instances`` (like ``TextInstance``), this is
either ``True``, ``False``, or ``None``, indicating whether the
instance is a positive, negative or unknown (i.e., test) example,
respectively. For ``MultipleChoiceInstances`` or other more
complicated things, is a class index.
index : int, optional
Used for matching instances with other data, such as background
sentences.
"""
def __init__(self, label, index: int=None):
self.label = label
self.index = index
@staticmethod
def _check_label(label: bool, default_label: bool):
if default_label is not None and label is not None and label != default_label:
raise RuntimeError("Default label given with file, and label in file doesn't match!")
class TextInstance(Instance):
"""
An ``Instance`` that has some attached text, typically either a sentence
or a logical form. This is called a ``TextInstance`` because the
individual tokens here are encoded as strings, and we can
get a list of strings out when we ask what words show up in the instance.
We use these kinds of instances to fit a ``DataIndexer`` (i.e., deciding
which words should be mapped to an unknown token); to use them in training
or testing, we need to first convert them into ``IndexedInstances``.
In order to actually convert text into some kind of indexed sequence,
    we rely on a ``TextEncoder``. There are several ``TextEncoder`` subclasses,
that will let you use word token sequences, character sequences, and other
options. By default we use word tokens. You can override this by setting
the ``encoder`` class variable.
"""
tokenizer = tokenizers['words']({})
def __init__(self, label, index: int=None):
super(TextInstance, self).__init__(label, index)
def _words_from_text(self, text: str) -> Dict[str, List[str]]:
return self.tokenizer.get_words_for_indexer(text)
def _index_text(self, text: str, data_indexer: DataIndexer) -> List[int]:
return self.tokenizer.index_text(text, data_indexer)
def words(self) -> Dict[str, List[str]]:
"""
Returns a list of all of the words in this instance, contained in a
namespace dictionary.
This is mainly used for computing word counts when fitting a word
vocabulary on a dataset. The namespace dictionary allows you to have
several embedding matrices with different vocab sizes, e.g., for words
and for characters (in fact, words and characters are the only use
cases I can think of for now, but this allows you to do other more
crazy things if you want). You can call the namespaces whatever you
want, but if you want the ``DataIndexer`` to work correctly without
namespace arguments, you should use the key 'words' to represent word
tokens.
Returns
-------
namespace : Dictionary of {str: List[str]}
The ``str`` key refers to vocabularies, and the ``List[str]``
should contain the tokens in that vocabulary. For example, you
should use the key ``words`` to represent word tokens, and the
            corresponding value in the dictionary would be a list of all the
words in the instance.
"""
raise NotImplementedError
def to_indexed_instance(self, data_indexer: DataIndexer) -> 'IndexedInstance':
"""
Converts the words in this ``Instance`` into indices using
the ``DataIndexer``.
Parameters
----------
data_indexer : DataIndexer
``DataIndexer`` to use in converting the ``Instance`` to
an ``IndexedInstance``.
Returns
-------
indexed_instance : IndexedInstance
A ``TextInstance`` that has had all of its strings converted into
indices.
"""
raise NotImplementedError
@classmethod
def read_from_line(cls, line: str, default_label: bool=None):
"""
Reads an instance of this type from a line.
Parameters
----------
line : str
A line from a data file.
default_label: bool
If a label is not provided, the default to use. Mainly used in
``TrueFalseInstance``.
Returns
-------
indexed_instance : IndexedInstance
A ``TextInstance`` that has had all of its strings converted into
indices.
Notes
-----
We throw a ``RuntimeError`` here instead of a ``NotImplementedError``,
because it's not expected that all subclasses will implement this.
"""
# pylint: disable=unused-argument
raise RuntimeError("%s instances can't be read from a line!" % str(cls))
class IndexedInstance(Instance):
"""
An indexed data instance has all word tokens replaced with word indices,
along with some kind of label, suitable for input to a Keras model. An
``IndexedInstance`` is created from an ``Instance`` using a
``DataIndexer``, and the indices here have no recoverable meaning without
the ``DataIndexer``.
For example, we might have the following ``Instance``:
- ``TrueFalseInstance('Jamie is nice, Holly is mean', True, 25)``
After being converted into an ``IndexedInstance``, we might have
the following:
- ``IndexedTrueFalseInstance([1, 6, 7, 1, 6, 8], True, 25)``
This would mean that ``"Jamie"`` and ``"Holly"`` were OOV to the
``DataIndexer``, and the other words were given indices.
"""
@classmethod
def empty_instance(cls):
"""
Returns an empty, unpadded instance of this class. Necessary for option
padding in multiple choice instances.
"""
raise NotImplementedError
def get_lengths(self) -> List[int]:
"""
Returns the length of this instance in all dimensions that
require padding.
Different kinds of instances have different fields that are padded,
such as sentence length, number of background sentences, number of
options, etc.
Returns
-------
lengths: List of int
A list of integers, where the value at each index is the
maximum length in each dimension.
"""
raise NotImplementedError
def pad(self, max_lengths: Dict[str, int]):
"""
Add zero-padding to make each data example of equal length for use
in the neural network.
This modifies the current object.
Parameters
----------
max_lengths: Dictionary of {str:int}
In this dictionary, each ``str`` refers to a type of token
(e.g. ``max_words_question``), and the corresponding ``int`` is
the value. This dictionary must have the same dimension as was
returned by ``get_lengths()``. We will use these lengths to pad the
            instance in all of the necessary dimensions to the given lengths.
"""
raise NotImplementedError
def as_training_data(self):
"""
Convert this ``IndexedInstance`` to NumPy arrays suitable for use as
training data to Keras models.
Returns
-------
train_data : (inputs, label)
            The ``IndexedInstance`` as NumPy arrays to be used in Keras.
Note that ``inputs`` might itself be a complex tuple, depending
on the ``Instance`` type.
"""
raise NotImplementedError
@staticmethod
def _get_word_sequence_lengths(word_indices: List) -> Dict[str, int]:
"""
Because ``TextEncoders`` can return complex data structures, we might
actually have several things to pad for a single word sequence. We
check for that and handle it in a single spot here. We return a
dictionary containing 'num_sentence_words', which is the number of
words in word_indices. If the word representations also contain
characters, the dictionary additionally contains a
'num_word_characters' key, with a value corresponding to the longest
word in the sequence.
"""
lengths = {'num_sentence_words': len(word_indices)}
if len(word_indices) > 0 and not isinstance(word_indices[0], int):
if isinstance(word_indices[0], list):
lengths['num_word_characters'] = max([len(word) for word in word_indices])
# There might someday be other cases we're missing here, but we'll punt for now.
return lengths
@staticmethod
def pad_word_sequence(word_sequence: List[int],
lengths: Dict[str, int],
truncate_from_right: bool=True) -> List:
"""
Take a list of indices and pads them.
Parameters
----------
word_sequence : List of int
A list of word indices.
lengths : Dictionary of {str:int}
In this dictionary, each ``str`` refers to a type of token
(e.g. ``max_words_question``), and the corresponding ``int`` is
the value. This dictionary must have the same dimension as was
returned by ``get_lengths()``. We will use these lengths to pad the
            instance in all of the necessary dimensions to the given lengths.
truncate_from_right : bool, default=True
If truncating the indices is necessary, this parameter dictates
whether we do so on the left or right.
Returns
-------
padded_word_sequence : List of int
A padded list of word indices.
Notes
-----
The reason we truncate from the right by default is for
cases that are questions, with long set ups. We at least want to get
the question encoded, which is always at the end, even if we've lost
much of the question set up. If you want to truncate from the other
direction, you can.
"""
default_value = lambda: 0
if 'num_word_characters' in lengths:
default_value = lambda: []
padded_word_sequence = IndexedInstance.pad_sequence_to_length(
word_sequence, lengths['num_sentence_words'], default_value, truncate_from_right)
if 'num_word_characters' in lengths:
desired_length = lengths['num_word_characters']
longest_word = max(padded_word_sequence, key=len)
if desired_length > len(longest_word):
# since we want to pad to greater than the longest word, we add a
# "dummy word" to get the speed of itertools.zip_longest
padded_word_sequence.append([0]*desired_length)
# pad the list of lists to the longest sublist, appending 0's
words_padded_to_longest = list(zip(*itertools.zip_longest(*padded_word_sequence,
fillvalue=0)))
if desired_length > len(longest_word):
# now we remove the "dummy word" if we appended one.
words_padded_to_longest.pop()
            # now we need to truncate all of them to our desired length.
            # character-level padding always keeps the first desired_length
            # characters of each word, chopping extra characters off the end.
padded_word_sequence = [list(word[:desired_length])
for word in words_padded_to_longest]
return padded_word_sequence
@staticmethod
def pad_sequence_to_length(sequence: List,
desired_length: int,
default_value: Callable[[], Any]=lambda: 0,
truncate_from_right: bool=True) -> List:
"""
Take a list of indices and pads them to the desired length.
Parameters
----------
word_sequence : List of int
A list of word indices.
desired_length : int
Maximum length of each sequence. Longer sequences
are truncated to this length, and shorter ones are padded to it.
default_value: Callable, default=lambda: 0
Callable that outputs a default value (of any type) to use as
padding values.
truncate_from_right : bool, default=True
If truncating the indices is necessary, this parameter dictates
whether we do so on the left or right.
Returns
-------
padded_word_sequence : List of int
A padded or truncated list of word indices.
Notes
-----
The reason we truncate from the right by default is for
cases that are questions, with long set ups. We at least want to get
the question encoded, which is always at the end, even if we've lost
much of the question set up. If you want to truncate from the other
direction, you can.
"""
if truncate_from_right:
truncated = sequence[-desired_length:]
else:
truncated = sequence[:desired_length]
if len(truncated) < desired_length:
# If the length of the truncated sequence is less than the desired
# length, we need to pad.
padding_sequence = [default_value()] * (desired_length - len(truncated))
if truncate_from_right:
# When we truncate from the right, we add zeroes to the front.
padding_sequence.extend(truncated)
return padding_sequence
else:
# When we do not truncate from the right, we add zeroes to the end.
truncated.extend(padding_sequence)
return truncated
return truncated
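

# A minimal illustrative sketch (not part of deep_qa) of the padding helpers
# above: with the default truncate_from_right=True, short sequences are padded
# on the left and long sequences keep their rightmost entries.
if __name__ == "__main__":
    assert IndexedInstance.pad_sequence_to_length([1, 2, 3], 5) == [0, 0, 1, 2, 3]
    assert IndexedInstance.pad_sequence_to_length([1, 2, 3, 4, 5, 6], 4) == [3, 4, 5, 6]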
| apache-2.0 | 979,567,012,326,318,200 | 39.761518 | 97 | 0.619773 | false | 4.74929 | false | false | false |
willo12/spacegrids | spacegrids/_iosg.py | 1 | 5429 | #encoding:utf-8
""" io related
"""
import numpy as np
from _config import *
import warnings
warnings.formatwarning = warning_on_one_line
# use_scientificio is set in config
#use_scientificio = False
# fallback is always scipy.io: least dependencies
# cdf_lib set in _config.py and determines which library to use
if cdf_lib =='netcdf4':
try:
from netCDF4 import Dataset
cdf_lib_used = 'netcdf4'
# print 'Using netCDF4'
except:
warnings.warn('no Netcdf4. Reverting to scipy.')
from scipy.io import netcdf
cdf_lib_used = 'scipyio'
elif cdf_lib == 'scientificio':
try:
import Scientific.IO.NetCDF
print 'Using Scientific.IO.NetCDF'
cdf_lib_used = 'scientificio'
except:
warnings.warn('no Scientific io. Reverting to scipy.')
from scipy.io import netcdf
cdf_lib_used = 'scipyio'
else:
from scipy.io import netcdf
cdf_lib_used = 'scipyio'
print 'Using scipyio'
import os
from fieldcls import *
def netcdf_file(filepath,mode = 'r', *args, **kwargs):
"""
Wrapper for opening Netcdf functions from NETCDF4, ScientificIO or Scipy
Depends on cdf_lib_used variable.
For 'netcdf4':
file = Dataset(filepath,mode, format='NETCDF4')
For 'scientificio':
file = Scientific.IO.NetCDF.NetCDFFile(filename = filepath, mode = mode)
Otherwise:
file = netcdf.netcdf_file(filename = filepath, mode = mode, *args, **kwargs)
Args:
filepath: (str) full path to file
mode: (str) mode to use as mode argument to file opening function
Returns:
file handle if successful.
Raises:
IOError if there are problems opening the file.
"""
if cdf_lib_used =='netcdf4':
try:
file = Dataset(filepath,mode, format='NETCDF4', *args, **kwargs)
except IOError:
raise IOError('Cannot open %s using NetCDF4'%filepath)
else:
return file
if cdf_lib_used == 'scientificio':
try:
file = Scientific.IO.NetCDF.NetCDFFile(filename = filepath, mode = mode, *args, **kwargs)
except IOError:
raise IOError('Cannot open %s using Scientific.IO'%filepath)
else:
return file
else:
# Scipy way:
try:
file = netcdf.netcdf_file(filename = filepath, mode = mode, *args, **kwargs)
except IOError:
raise IOError('Cannot open %s using Scipy'%filepath)
else:
return file
def msk_read(filepath='masks/msk', crop = 1):
"""
Reads a text file containing a mask pointed to by filepath and returns a corresponding array.
Due to the way these masks are stored for the UVic model, cropping is needed, as indicated
by the crop flag in the arguments. This is the lowest level mask read function in sg.
Args:
filepath: (str) path to the file
crop: (int) amount of points to crop at the margins.
Return:
ndarray containing mask.
"""
str_data = []
with open(filepath,'r') as fobj:
str_data = fobj.readlines()
data = []
for eachline in str_data:
data_line = []
for elem in eachline:
try:
data_line.append(int(elem))
except:
pass
data.append(data_line)
if crop:
return np.flipud(np.array(data))[1:-1,1:-1]
else:
return np.flipud(np.array(data))
def read_masks(dir_path, msk_shape=None,grids = False, msk_val =2):
"""
  Reads masks and returns a dictionary of Field objects keyed by mask name.
Calls msk_read, see msk_read.
Args:
dir_path: (str) path to directory
msk_shape: (None or tuple of int) describing supposed shape of mask
grids: (Gr) grid to use for masks
msk_val: (int) value that will not be nan in mask
Returns:
Dictionary of masks and their names
"""
if not(grids):
print 'read_masks: Provide grid --> no masks loaded.'
return
if isinstance(grids,Gr):
grids = [grids]
try:
L = os.listdir(dir_path)
except:
print "No mask dir."
L=[]
masks = {}
for it in L:
try:
fpath = os.path.join(dir_path,it)
msk = msk_read(fpath)
if msk_shape is not None:
# only test shape if msk_shape not default value of None
if (msk.shape != tuple(msk_shape)):
print "Warning: mask shape does not agree: " + it,
print msk.shape,
print msk_shape
msk = np.array(msk,dtype = np.float32)
if msk_val:
msk[msk != msk_val] = np.nan
msk[msk == msk_val] = 1
for g in grids:
try:
          print 'Trying to join mask and grid to create Field, ignore possible error.'
mskob = Field(name = it, value = msk, grid =g)
break
except:
pass
masks[it] = mskob
except:
print "No mask."
return masks
def locate(top = '/home/',fname = projnickfile):
"""
Locates all files with filename fname. Helper function to info function.
Args:
top: (str) the start dir
fname: (str)the filename to look for.
Returns:
List of all paths to dirs containing fname.
"""
paths = []
if fname is not None:
for root, dirs, files in os.walk(top=top):
if fname in files:
paths.append(root)
else:
paths = [ os.path.join(top,subdir) for subdir in os.listdir(top) ]
return paths
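

# A minimal illustrative sketch (not part of spacegrids): open a Netcdf file
# through the library-agnostic wrapper above. The path below is hypothetical.
if __name__ == '__main__':
  example_path = '/tmp/example.nc'
  try:
    fobj = netcdf_file(example_path, 'r')
  except IOError as err:
    print 'Could not open %s: %s' % (example_path, err)
  else:
    print 'Opened %s using %s' % (example_path, cdf_lib_used)
    fobj.close()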
| bsd-3-clause | -3,902,353,986,811,476,500 | 21.810924 | 95 | 0.609136 | false | 3.546048 | false | false | false |
oso/qgis-etri | ui/inference_results.py | 1 | 5734 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/inference_results.ui'
#
# Created: Tue Nov 19 19:57:44 2013
# by: PyQt4 UI code generator 4.10.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_inference_results(object):
def setupUi(self, inference_results):
inference_results.setObjectName(_fromUtf8("inference_results"))
inference_results.resize(800, 600)
self.verticalLayout = QtGui.QVBoxLayout(inference_results)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.groupBox = QtGui.QGroupBox(inference_results)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(30)
sizePolicy.setHeightForWidth(self.groupBox.sizePolicy().hasHeightForWidth())
self.groupBox.setSizePolicy(sizePolicy)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.graph_model = _MyGraphicsview(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(5)
sizePolicy.setHeightForWidth(self.graph_model.sizePolicy().hasHeightForWidth())
self.graph_model.setSizePolicy(sizePolicy)
self.graph_model.setStyleSheet(_fromUtf8("background-color: transparent;"))
self.graph_model.setFrameShape(QtGui.QFrame.NoFrame)
self.graph_model.setFrameShadow(QtGui.QFrame.Sunken)
self.graph_model.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.graph_model.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
self.graph_model.setAlignment(QtCore.Qt.AlignCenter)
self.graph_model.setRenderHints(QtGui.QPainter.Antialiasing|QtGui.QPainter.TextAntialiasing)
self.graph_model.setObjectName(_fromUtf8("graph_model"))
self.verticalLayout_2.addWidget(self.graph_model)
self.label_lambda = QtGui.QLabel(self.groupBox)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_lambda.setFont(font)
self.label_lambda.setText(_fromUtf8(""))
self.label_lambda.setObjectName(_fromUtf8("label_lambda"))
self.verticalLayout_2.addWidget(self.label_lambda)
self.verticalLayout.addWidget(self.groupBox)
self.tabWidget = QtGui.QTabWidget(inference_results)
self.tabWidget.setTabPosition(QtGui.QTabWidget.North)
self.tabWidget.setDocumentMode(False)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.verticalLayout_5 = QtGui.QVBoxLayout(self.tab)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.table_comp = qt_performance_table(self.tab)
self.table_comp.setObjectName(_fromUtf8("table_comp"))
self.table_comp.setColumnCount(0)
self.table_comp.setRowCount(0)
self.verticalLayout_5.addWidget(self.table_comp)
self.tabWidget.addTab(self.tab, _fromUtf8(""))
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.tab_2)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.table_incomp = qt_performance_table(self.tab_2)
self.table_incomp.setObjectName(_fromUtf8("table_incomp"))
self.table_incomp.setColumnCount(0)
self.table_incomp.setRowCount(0)
self.verticalLayout_4.addWidget(self.table_incomp)
self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
self.verticalLayout.addWidget(self.tabWidget)
self.buttonBox = QtGui.QDialogButtonBox(inference_results)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Close|QtGui.QDialogButtonBox.Save)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(inference_results)
self.tabWidget.setCurrentIndex(0)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), inference_results.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), inference_results.reject)
QtCore.QMetaObject.connectSlotsByName(inference_results)
def retranslateUi(self, inference_results):
inference_results.setWindowTitle(_translate("inference_results", "ELECTRE-TRI Inference results", None))
self.groupBox.setTitle(_translate("inference_results", "Model learned", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("inference_results", "Compatible alternatives", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("inference_results", "Incompatible alternatives", None))
from table import qt_performance_table
from graphic import _MyGraphicsview
| gpl-3.0 | -2,015,396,951,820,727,000 | 51.605505 | 137 | 0.720788 | false | 3.89538 | false | false | false |
molmod/yaff | yaff/external/test/test_lammpsio.py | 1 | 2291 | # -*- coding: utf-8 -*-
# YAFF is yet another force-field code.
# Copyright (C) 2011 Toon Verstraelen <[email protected]>,
# Louis Vanduyfhuys <[email protected]>, Center for Molecular Modeling
# (CMM), Ghent University, Ghent, Belgium; all rights reserved unless otherwise
# stated.
#
# This file is part of YAFF.
#
# YAFF is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# YAFF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from __future__ import division
from __future__ import print_function
import tempfile
import shutil
import os
import numpy as np
import pkg_resources
from molmod.test.common import tmpdir
from yaff.external.lammpsio import *
from yaff import System
from yaff.test.common import get_system_water32
def test_lammps_system_data_water32():
system = get_system_water32()
with tmpdir(__name__, 'test_lammps_system_water32') as dirname:
fn = os.path.join(dirname,'lammps.system')
write_lammps_system_data(system,fn=fn)
with open(fn,'r') as f: lines = f.readlines()
natom = int(lines[2].split()[0])
assert natom==system.natom
assert (system.natom+system.bonds.shape[0]+23)==len(lines)
def test_lammps_ffconversion_mil53():
fn_system = pkg_resources.resource_filename(__name__, '../../data/test/system_mil53.chk')
fn_pars = pkg_resources.resource_filename(__name__, '../../data/test/parameters_mil53.txt')
system = System.from_file(fn_system)
with tmpdir(__name__, 'test_lammps_ffconversion_mil53') as dirname:
ff2lammps(system, fn_pars, dirname)
# No test for correctness, just check that output files are present
assert os.path.isfile(os.path.join(dirname,'lammps.in'))
assert os.path.isfile(os.path.join(dirname,'lammps.data'))
| gpl-3.0 | -4,221,619,580,670,450,000 | 37.183333 | 95 | 0.714535 | false | 3.379056 | true | false | false |
mathDR/BP-AR-HMM | OLDPY/compute_likelihood_unnorm.py | 1 | 1820 | import numpy as np
def compute_likelihood_unnorm(data_struct, theta, obsModelType, Kz_inds, Kz, Ks):
    # function log_likelihood =
    #   compute_likelihood_unnorm(data_struct,theta,obsModelType,Kz_inds,Kz,Ks)
    # Note: Kz_inds (bound to kz below) is treated as a single regime index,
    # mirroring the loop structure of the original MATLAB code.
    if obsModelType == 'Gaussian':
        invSigma = theta.invSigma
        mu = theta.mu
        dimu, T = data_struct.obs.shape
        log_likelihood = -np.inf*np.ones((Kz, Ks, T))
        kz = Kz_inds
        for ks in range(Ks):
            # Upper-triangular Cholesky factor of the precision matrix, matching
            # MATLAB's chol (numpy's cholesky returns the lower factor).
            cholinvSigma = np.linalg.cholesky(invSigma[:, :, kz, ks]).T
            dcholinvSigma = np.diag(cholinvSigma)
            # Whiten the mean-subtracted observations; broadcasting replaces
            # the MATLAB-style mu(:,kz*ones(1,T),ks) tiling.
            u = np.dot(cholinvSigma, data_struct.obs - mu[:, kz, ks][:, np.newaxis])
            log_likelihood[kz, ks, :] = -0.5*np.sum(u**2, axis=0) + np.sum(np.log(dcholinvSigma))
    elif obsModelType == 'AR' or obsModelType == 'SLDS':
        invSigma = theta.invSigma
        A = theta.A
        X = data_struct.X
        dimu, T = data_struct.obs.shape
        log_likelihood = -np.inf*np.ones((Kz, Ks, T))
        # Use the mean term only if theta provides one.
        if getattr(theta, 'mu', None) is not None:
            mu = theta.mu
            kz = Kz_inds
            for ks in range(Ks):
                cholinvSigma = np.linalg.cholesky(invSigma[:, :, kz, ks]).T
                dcholinvSigma = np.diag(cholinvSigma)
                u = np.dot(cholinvSigma,
                           data_struct.obs - np.dot(A[:, :, kz, ks], X) - mu[:, kz, ks][:, np.newaxis])
                log_likelihood[kz, ks, :] = -0.5*np.sum(u**2, axis=0) + np.sum(np.log(dcholinvSigma))
        else:
            kz = Kz_inds
            for ks in range(Ks):
                cholinvSigma = np.linalg.cholesky(invSigma[:, :, kz, ks]).T
                dcholinvSigma = np.diag(cholinvSigma)
                u = np.dot(cholinvSigma, data_struct.obs - np.dot(A[:, :, kz, ks], X))
                log_likelihood[kz, ks, :] = -0.5*np.sum(u**2, axis=0) + np.sum(np.log(dcholinvSigma))
    elif obsModelType == 'Multinomial':
        log_likelihood = np.log(theta.p[:, :, data_struct.obs])
    else:
        raise ValueError('Error in compute_likelihood_unnorm: obsModelType not defined')
    return log_likelihood
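

# A minimal illustrative sketch (not part of the original code): evaluate the
# Gaussian branch for a single regime (Kz = Ks = 1) on synthetic observations.
# The _Struct container and all shapes below are assumptions for illustration.
if __name__ == '__main__':
    class _Struct(object):
        pass

    dimu, T, Kz, Ks = 2, 5, 1, 1
    data_struct = _Struct()
    data_struct.obs = np.random.randn(dimu, T)
    theta = _Struct()
    theta.invSigma = np.eye(dimu).reshape(dimu, dimu, Kz, Ks)
    theta.mu = np.zeros((dimu, Kz, Ks))
    log_likelihood = compute_likelihood_unnorm(data_struct, theta, 'Gaussian', 0, Kz, Ks)
    print(log_likelihood.shape)  # expected: (1, 1, 5)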
| mit | -170,037,953,198,581,020 | 34.686275 | 102 | 0.625824 | false | 2.732733 | false | false | false |
skuda/client-python | kubernetes/client/models/v1_downward_api_volume_file.py | 1 | 6558 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1DownwardAPIVolumeFile(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, field_ref=None, mode=None, path=None, resource_field_ref=None):
"""
V1DownwardAPIVolumeFile - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'field_ref': 'V1ObjectFieldSelector',
'mode': 'int',
'path': 'str',
'resource_field_ref': 'V1ResourceFieldSelector'
}
self.attribute_map = {
'field_ref': 'fieldRef',
'mode': 'mode',
'path': 'path',
'resource_field_ref': 'resourceFieldRef'
}
self._field_ref = field_ref
self._mode = mode
self._path = path
self._resource_field_ref = resource_field_ref
@property
def field_ref(self):
"""
Gets the field_ref of this V1DownwardAPIVolumeFile.
Required: Selects a field of the pod: only annotations, labels, name and namespace are supported.
:return: The field_ref of this V1DownwardAPIVolumeFile.
:rtype: V1ObjectFieldSelector
"""
return self._field_ref
@field_ref.setter
def field_ref(self, field_ref):
"""
Sets the field_ref of this V1DownwardAPIVolumeFile.
Required: Selects a field of the pod: only annotations, labels, name and namespace are supported.
:param field_ref: The field_ref of this V1DownwardAPIVolumeFile.
:type: V1ObjectFieldSelector
"""
self._field_ref = field_ref
@property
def mode(self):
"""
Gets the mode of this V1DownwardAPIVolumeFile.
Optional: mode bits to use on this file, must be a value between 0 and 0777. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.
:return: The mode of this V1DownwardAPIVolumeFile.
:rtype: int
"""
return self._mode
@mode.setter
def mode(self, mode):
"""
Sets the mode of this V1DownwardAPIVolumeFile.
Optional: mode bits to use on this file, must be a value between 0 and 0777. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.
:param mode: The mode of this V1DownwardAPIVolumeFile.
:type: int
"""
self._mode = mode
@property
def path(self):
"""
Gets the path of this V1DownwardAPIVolumeFile.
Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'
:return: The path of this V1DownwardAPIVolumeFile.
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""
Sets the path of this V1DownwardAPIVolumeFile.
Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'
:param path: The path of this V1DownwardAPIVolumeFile.
:type: str
"""
if path is None:
raise ValueError("Invalid value for `path`, must not be `None`")
self._path = path
@property
def resource_field_ref(self):
"""
Gets the resource_field_ref of this V1DownwardAPIVolumeFile.
Selects a resource of the container: only resources limits and requests (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.
:return: The resource_field_ref of this V1DownwardAPIVolumeFile.
:rtype: V1ResourceFieldSelector
"""
return self._resource_field_ref
@resource_field_ref.setter
def resource_field_ref(self, resource_field_ref):
"""
Sets the resource_field_ref of this V1DownwardAPIVolumeFile.
Selects a resource of the container: only resources limits and requests (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.
:param resource_field_ref: The resource_field_ref of this V1DownwardAPIVolumeFile.
:type: V1ResourceFieldSelector
"""
self._resource_field_ref = resource_field_ref
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
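

# A minimal illustrative sketch (not part of the generated client): build a
# downward API volume file entry and serialize it to a plain dict; the values
# shown are hypothetical.
if __name__ == '__main__':
    volume_file = V1DownwardAPIVolumeFile(path='labels', mode=0o644)
    print(volume_file.to_dict())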
| apache-2.0 | 5,237,129,877,225,858,000 | 32.804124 | 268 | 0.598353 | false | 4.239173 | false | false | false |
chrislyon/django_ds1 | django_ds1/ds/models.py | 1 | 2352 | from django.db import models
from ckeditor.fields import RichTextField
# Create your models here.
## -------------------------------------------------
## Base class containing some basic shared data
## -------------------------------------------------
DEF_TFAC='DEFAUT'
class HoroDatage(models.Model):
h_datcre = models.DateTimeField(auto_now_add=True, verbose_name='Date de creation')
h_datmod = models.DateTimeField(auto_now=True, verbose_name='Date de Modification')
statut = models.BooleanField(verbose_name='Actif', default=True)
class Meta:
abstract = True
##
## Service request (demande de service)
##
class DService(HoroDatage):
## Channel / type of request
TYPE_DS = (
( 'ASS', 'Assistance' ),
( 'DEP', 'Depannage'),
( 'AUD', 'Audit' ),
( 'DEV', 'Developpement' ),
( 'DIV', 'Autres' ),
)
DS_Type = models.CharField(max_length=5, choices=TYPE_DS, default='ASS', verbose_name='Type')
## To review: how the requester (demandeur) is expressed
DS_TiersDemandeur = models.CharField(max_length=20, blank=True, verbose_name='Demandeur')
## To review: how the billed party (facturation) is expressed
DS_TiersFacture = models.CharField(max_length=20, default=DEF_TFAC, blank=True, verbose_name='Tiers Facture')
DS_Sujet = models.CharField(blank=True, max_length=50, verbose_name='Sujet')
DS_Desc = RichTextField( blank=True, verbose_name='Description')
STATUT_DS = (
( 'NEW', 'Nouvelle' ),
( 'CLOSED', 'Termine' ),
( 'ENC', 'En cours' ),
( 'ATT', 'Attente' ),
)
DS_Statut = models.CharField(max_length=6, choices=STATUT_DS, default='NEW', verbose_name='Statut')
PRIORITE_DS = (
('N', 'NORMAL'),
('U', 'URGENT'),
('B', 'BLOQUANT'),
)
DS_Priorite = models.CharField(max_length=3, choices=PRIORITE_DS, default='N', verbose_name='Priorite')
DS_Assigne = models.CharField(max_length=30, blank=True, verbose_name='Assigne')
DS_Horo_Debut = models.DateTimeField(max_length=30, blank=True, verbose_name='Debut')
DS_Horo_Fin = models.DateTimeField(max_length=30, blank=True, verbose_name='Fin')
DS_Echeance = models.CharField(max_length=30, blank=True, verbose_name='Avant le')
DS_TempsEstime = models.CharField(max_length=30, blank=True, verbose_name='Temps Estime')
DS_TempsRealise = models.CharField(max_length=30, blank=True, verbose_name='Temps Realise')
DS_PC_Realise = models.CharField(max_length=30, blank=True, verbose_name='% Realisation')
| gpl-2.0 | -8,140,727,402,061,289,000 | 35.184615 | 110 | 0.672619 | false | 2.850909 | false | false | false |
cscott/wikiserver | whoosh/support/dawg.py | 1 | 19568 | # Copyright 2009 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
"""
This module contains classes and functions for working with Directed Acyclic
Word Graphs (DAWGs). This structure is used to efficiently store a list of
words.
This code should be considered an implementation detail and may change in
future releases.
TODO: try to find a way to traverse the term index efficiently to do within()
instead of storing a DAWG separately.
"""
from array import array
from whoosh.compat import b, xrange, iteritems, iterkeys, unichr
from whoosh.system import _INT_SIZE
from whoosh.util import utf8encode, utf8decode
class BaseNode(object):
"""This is the base class for objects representing nodes in a directed
acyclic word graph (DAWG).
* ``final`` is a property which is True if this node represents the end of
a word.
* ``__contains__(label)`` returns True if the node has an edge with the
given label.
* ``__iter__()`` returns an iterator of the labels for the node's outgoing
edges. ``keys()`` is available as a convenient shortcut to get a list.
* ``__len__()`` returns the number of outgoing edges.
* ``edge(label)`` returns the Node connected to the edge with the given
label.
* ``all_edges()`` returns a dictionary of the node's outgoing edges, where
the keys are the edge labels and the values are the connected nodes.
"""
def __contains__(self, key):
raise NotImplementedError
def __iter__(self):
raise NotImplementedError
def __len__(self):
raise NotImplementedError
def keys(self):
"""Returns a list of the outgoing edge labels.
"""
return list(self)
def edge(self, key, expand=True):
"""Returns the node connected to the outgoing edge with the given
label.
"""
raise NotImplementedError
def all_edges(self):
"""Returns a dictionary mapping outgoing edge labels to nodes.
"""
e = self.edge
return dict((key, e(key)) for key in self)
def edge_count(self):
"""Returns the recursive count of edges in this node and the tree under
it.
"""
return len(self) + sum(self.edge(key).edge_count() for key in self)
class NullNode(BaseNode):
"""An empty node. This is sometimes useful for representing an empty graph.
"""
final = False
    def __contains__(self, key):
return False
def __iter__(self):
return iter([])
def __len__(self):
return 0
def edge(self, key, expand=True):
raise KeyError(key)
def all_edges(self):
return {}
def edge_count(self):
return 0
class BuildNode(object):
"""Node type used by DawgBuilder when constructing a graph from scratch.
"""
def __init__(self):
self.final = False
self._edges = {}
self._hash = None
def __repr__(self):
return "<%s:%s %s>" % (self.__class__.__name__,
",".join(sorted(self._edges.keys())),
self.final)
def __hash__(self):
if self._hash is not None:
return self._hash
h = int(self.final)
for key, node in iteritems(self._edges):
h ^= hash(key) ^ hash(node)
self._hash = h
return h
def __eq__(self, other):
if self is other:
return True
if self.final != other.final:
return False
mine, theirs = self.all_edges(), other.all_edges()
if len(mine) != len(theirs):
return False
for key in iterkeys(mine):
if key not in theirs or not mine[key] == theirs[key]:
return False
return True
def __ne__(self, other):
return not(self.__eq__(other))
def __contains__(self, key):
return key in self._edges
def __iter__(self):
return iter(self._edges)
def __len__(self):
return len(self._edges)
def put(self, key, node):
self._hash = None # Invalidate the cached hash value
self._edges[key] = node
def edge(self, key, expand=True):
return self._edges[key]
def all_edges(self):
return self._edges
class DawgBuilder(object):
"""Class for building a graph from scratch.
>>> db = DawgBuilder()
>>> db.insert(u"alfa")
>>> db.insert(u"bravo")
>>> db.write(dbfile)
This class does not have the cleanest API, because it was cobbled together
to support the spelling correction system.
"""
def __init__(self, reduced=True, field_root=False):
"""
:param dbfile: an optional StructFile. If you pass this argument to the
initializer, you don't have to pass a file to the ``write()``
method after you construct the graph.
:param reduced: when the graph is finished, branches of single-edged
nodes will be collapsed into single nodes to form a Patricia tree.
:param field_root: treats the root node edges as field names,
preventing them from being reduced and allowing them to be inserted
out-of-order.
"""
self._reduced = reduced
self._field_root = field_root
self.lastword = None
# List of nodes that have not been checked for duplication.
self.unchecked = []
# List of unique nodes that have been checked for duplication.
self.minimized = {}
self.root = BuildNode()
def insert(self, word):
"""Add the given "word" (a string or list of strings) to the graph.
Words must be inserted in sorted order.
"""
lw = self.lastword
prefixlen = 0
if lw:
if self._field_root and lw[0] != word[0]:
# If field_root == True, caller can add entire fields out-of-
# order (but not individual terms)
pass
elif word < lw:
raise Exception("Out of order %r..%r." % (self.lastword, word))
else:
# find common prefix between word and previous word
for i in xrange(min(len(word), len(lw))):
if word[i] != lw[i]: break
prefixlen += 1
# Check the unchecked for redundant nodes, proceeding from last
# one down to the common prefix size. Then truncate the list at
# that point.
self._minimize(prefixlen)
# Add the suffix, starting from the correct node mid-way through the
# graph
if not self.unchecked:
node = self.root
else:
node = self.unchecked[-1][2]
for letter in word[prefixlen:]:
nextnode = BuildNode()
node.put(letter, nextnode)
self.unchecked.append((node, letter, nextnode))
node = nextnode
node.final = True
self.lastword = word
def _minimize(self, downto):
# Proceed from the leaf up to a certain point
for i in xrange(len(self.unchecked) - 1, downto - 1, -1):
            (parent, letter, child) = self.unchecked[i]
if child in self.minimized:
# Replace the child with the previously encountered one
parent.put(letter, self.minimized[child])
else:
# Add the state to the minimized nodes.
                self.minimized[child] = child
self.unchecked.pop()
def finish(self):
"""Minimize the graph by merging duplicates, and reduce branches of
single-edged nodes. You can call this explicitly if you are building
a graph to use in memory. Otherwise it is automatically called by
the write() method.
"""
self._minimize(0)
if self._reduced:
self.reduce(self.root, self._field_root)
def write(self, dbfile):
self.finish()
DawgWriter(dbfile).write(self.root)
@staticmethod
def reduce(root, field_root=False):
if not field_root:
reduce(root)
else:
for key in root:
v = root.edge(key)
reduce(v)
class DawgWriter(object):
def __init__(self, dbfile):
self.dbfile = dbfile
self.offsets = {}
def write(self, root):
"""Write the graph to the given StructFile. If you passed a file to
the initializer, you don't have to pass it here.
"""
dbfile = self.dbfile
dbfile.write(b("GR01")) # Magic number
dbfile.write_int(0) # File flags
dbfile.write_uint(0) # Pointer to root node
offset = self._write_node(dbfile, root)
# Seek back and write the pointer to the root node
dbfile.flush()
dbfile.seek(_INT_SIZE * 2)
dbfile.write_uint(offset)
dbfile.close()
def _write_node(self, dbfile, node):
keys = node._edges.keys()
ptrs = array("I")
for key in keys:
sn = node._edges[key]
if id(sn) in self.offsets:
ptrs.append(self.offsets[id(sn)])
else:
ptr = self._write_node(dbfile, sn)
self.offsets[id(sn)] = ptr
ptrs.append(ptr)
start = dbfile.tell()
# The low bit indicates whether this node represents the end of a word
flags = int(node.final)
# The second lowest bit = whether this node has children
flags |= bool(keys) << 1
# The third lowest bit = whether all keys are single chars
singles = all(len(k) == 1 for k in keys)
flags |= singles << 2
# The fourth lowest bit = whether all keys are one byte
if singles:
sbytes = all(ord(key) <= 255 for key in keys)
flags |= sbytes << 3
dbfile.write_byte(flags)
if keys:
dbfile.write_varint(len(keys))
dbfile.write_array(ptrs)
if singles:
for key in keys:
o = ord(key)
if sbytes:
dbfile.write_byte(o)
else:
dbfile.write_ushort(o)
else:
for key in keys:
dbfile.write_string(utf8encode(key)[0])
return start
class DiskNode(BaseNode):
def __init__(self, dbfile, offset, expand=True):
self.id = offset
self.dbfile = dbfile
dbfile.seek(offset)
flags = dbfile.read_byte()
self.final = bool(flags & 1)
self._edges = {}
if flags & 2:
singles = flags & 4
bytes = flags & 8
nkeys = dbfile.read_varint()
ptrs = dbfile.read_array("I", nkeys)
for i in xrange(nkeys):
ptr = ptrs[i]
if singles:
if bytes:
charnum = dbfile.read_byte()
else:
charnum = dbfile.read_ushort()
self._edges[unichr(charnum)] = ptr
else:
key = utf8decode(dbfile.read_string())[0]
if len(key) > 1 and expand:
self._edges[key[0]] = PatNode(dbfile, key[1:], ptr)
else:
self._edges[key] = ptr
def __repr__(self):
return "<%s %s:%s %s>" % (self.__class__.__name__, self.id,
",".join(sorted(self._edges.keys())),
self.final)
def __contains__(self, key):
return key in self._edges
def __iter__(self):
return iter(self._edges)
def __len__(self):
return len(self._edges)
def edge(self, key, expand=True):
v = self._edges[key]
if not isinstance(v, BaseNode):
# Convert pointer to disk node
v = DiskNode(self.dbfile, v, expand=expand)
#if self.caching:
self._edges[key] = v
return v
@classmethod
def load(cls, dbfile, expand=True):
dbfile.seek(0)
magic = dbfile.read(4)
if magic != b("GR01"):
raise Exception("%r does not seem to be a graph file" % dbfile)
_ = dbfile.read_int() # File flags (currently unused)
return DiskNode(dbfile, dbfile.read_uint(), expand=expand)
class PatNode(BaseNode):
final = False
def __init__(self, dbfile, label, nextptr, i=0):
self.dbfile = dbfile
self.label = label
self.nextptr = nextptr
self.i = i
def __repr__(self):
return "<%r(%d) %s>" % (self.label, self.i, self.final)
def __contains__(self, key):
if self.i < len(self.label) and key == self.label[self.i]:
return True
else:
return False
def __iter__(self):
if self.i < len(self.label):
return iter(self.label[self.i])
else:
            return iter([])
def __len__(self):
if self.i < len(self.label):
return 1
else:
return 0
def edge(self, key, expand=True):
label = self.label
i = self.i
if i < len(label) and key == label[i]:
i += 1
if i < len(self.label):
return PatNode(self.dbfile, label, self.nextptr, i)
else:
return DiskNode(self.dbfile, self.nextptr)
else:
raise KeyError(key)
def edge_count(self):
return DiskNode(self.dbfile, self.nextptr).edge_count()
class ComboNode(BaseNode):
"""Base class for DAWG nodes that blend the nodes of two different graphs.
Concrete subclasses need to implement the ``edge()`` method and possibly
the ``final`` property.
"""
def __init__(self, a, b):
self.a = a
self.b = b
def __repr__(self):
return "<%s %r %r>" % (self.__class__.__name__, self.a, self.b)
def __contains__(self, key):
return key in self.a or key in self.b
def __iter__(self):
return iter(set(self.a) | set(self.b))
def __len__(self):
return len(set(self.a) | set(self.b))
@property
def final(self):
return self.a.final or self.b.final
class UnionNode(ComboNode):
"""Makes two graphs appear to be the union of the two graphs.
"""
def edge(self, key, expand=True):
a = self.a
b = self.b
if key in a and key in b:
return UnionNode(a.edge(key), b.edge(key))
elif key in a:
return a.edge(key)
else:
return b.edge(key)
class IntersectionNode(ComboNode):
"""Makes two graphs appear to be the intersection of the two graphs.
"""
def edge(self, key, expand=True):
a = self.a
b = self.b
if key in a and key in b:
return IntersectionNode(a.edge(key), b.edge(key))
# Functions
def reduce(node):
edges = node._edges
if edges:
for key, sn in edges.items():
reduce(sn)
if len(sn) == 1 and not sn.final:
skey, ssn = list(sn._edges.items())[0]
del edges[key]
edges[key + skey] = ssn
def edge_count(node):
c = len(node)
return c + sum(edge_count(node.edge(key)) for key in node)
def flatten(node, sofar=""):
if node.final:
yield sofar
for key in sorted(node):
for word in flatten(node.edge(key, expand=False), sofar + key):
yield word
def dump_dawg(node, tab=0):
print("%s%s %s" % (" " * tab, hex(id(node)), node.final))
for key in sorted(node):
print("%s%r:" % (" " * tab, key))
dump_dawg(node.edge(key), tab + 1)
def within(node, text, k=1, prefix=0, seen=None):
if seen is None:
seen = set()
sofar = ""
if prefix:
node = skip_prefix(node, text, prefix)
if node is None:
return
sofar, text = text[:prefix], text[prefix:]
for sug in _within(node, text, k, sofar=sofar):
if sug in seen:
continue
yield sug
seen.add(sug)
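# Illustrative usage of within() (assuming `root` is a graph root node, e.g. one
# loaded via DiskNode.load(dbfile), and that the graph contains the word "tape"):
#   list(within(root, "tap", k=1))   # -> stored words within edit distance 1, e.g. ["tape"]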
def _within(node, word, k=1, i=0, sofar=""):
assert k >= 0
if i == len(word) and node.final:
yield sofar
# Match
if i < len(word) and word[i] in node:
for w in _within(node.edge(word[i]), word, k, i + 1, sofar + word[i]):
yield w
if k > 0:
dk = k - 1
ii = i + 1
# Insertions
for key in node:
for w in _within(node.edge(key), word, dk, i, sofar + key):
yield w
if i < len(word):
char = word[i]
# Transposition
if i < len(word) - 1 and char != word[ii] and word[ii] in node:
second = node.edge(word[i + 1])
if char in second:
for w in _within(second.edge(char), word, dk, i + 2,
sofar + word[ii] + char):
yield w
# Deletion
for w in _within(node, word, dk, ii, sofar):
yield w
# Replacements
for key in node:
if key != char:
for w in _within(node.edge(key), word, dk, ii,
sofar + key):
yield w
def skip_prefix(node, text, prefix):
for key in text[:prefix]:
if key in node:
node = node.edge(key)
else:
return None
return node
def find_nearest(node, prefix):
sofar = []
for i in xrange(len(prefix)):
char = prefix[i]
if char in node:
            sofar.append(char)
node = node.edge(char)
else:
break
sofar.extend(run_out(node, sofar))
return "".join(sofar)
def run_out(node, sofar):
sofar = []
while not node.final:
first = min(node.keys())
sofar.append(first)
node = node.edge(first)
return sofar
| gpl-2.0 | -5,617,002,949,850,297,000 | 28.874809 | 79 | 0.553659 | false | 4.015596 | false | false | false |
skyfromwell/paperwallet | encryption.py | 1 | 1425 | #remove all others only keep Bip38 here. Need to learn more about this.
from bitcoin.bip38 import Bip38
from bitcoin.key import CKey
from bitcoin.base58 import CBase58Data
__b58chars = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
__b58base = len(__b58chars)
def encode_pw(key, pw):
    # decode the base58-encoded private key *before* the name is rebound to a
    # fresh CKey instance (decoding the CKey object itself would be meaningless)
    decode_string = __decode_b58(key)[1:-4]
    key = CKey()
    key.generate(decode_string)
    key.set_compressed(False)
    bt = Bip38(key, pw)
    return str(CBase58Data(bt.get_encrypted(), 0x01))
def __encode_b58(v):
value = 0L
for (i, c) in enumerate(v[::-1]):
value += (256**i) * ord(c)
result = ""
while value >= __b58base:
div, mod = divmod(value, __b58base)
result = __b58chars[mod] + result
value = div
result = __b58chars[value] + result
pad = 0
for c in v:
if c=='\0':
pad += 1
else:
break
return (__b58chars[0]*pad) + result
def __decode_b58(v):
value = 0L
for (i, c) in enumerate(v[::-1]):
value += __b58chars.find(c) * (__b58base**i)
result = ""
while value >= 256:
div, mod = divmod(value, 256)
result = chr(mod) + result
value = div
result = chr(value) + result
pad = 0
for c in v:
if c==__b58chars[0]:
pad += 1
else:
break
result = chr(0)*pad + result
return result
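# Minimal sanity check of the base58 helpers above (illustrative, not part of the API):
#   __encode_b58('\x01')   -> '2'       (value 1 maps to the second base58 digit)
#   __decode_b58('2')      -> '\x01'    (round-trips back to the original byte)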
| gpl-3.0 | 7,973,097,293,177,781,000 | 23.568966 | 73 | 0.554386 | false | 3.268349 | false | false | false |
UltronAI/Deep-Learning | Pattern-Recognition/hw2-Feature-Selection/skfeature/example/test_JMI.py | 1 | 1528 | import scipy.io
from sklearn.metrics import accuracy_score
from sklearn import cross_validation
from sklearn import svm
from skfeature.function.information_theoretical_based import JMI
def main():
# load data
mat = scipy.io.loadmat('../data/colon.mat')
X = mat['X'] # data
X = X.astype(float)
y = mat['Y'] # label
y = y[:, 0]
n_samples, n_features = X.shape # number of samples and number of features
# split data into 10 folds
ss = cross_validation.KFold(n_samples, n_folds=10, shuffle=True)
# perform evaluation on classification task
num_fea = 10 # number of selected features
clf = svm.LinearSVC() # linear SVM
correct = 0
for train, test in ss:
# obtain the index of each feature on the training set
idx,_,_ = JMI.jmi(X[train], y[train], n_selected_features=num_fea)
# obtain the dataset on the selected features
features = X[:, idx[0:num_fea]]
# train a classification model with the selected features on the training dataset
clf.fit(features[train], y[train])
# predict the class labels of test data
y_predict = clf.predict(features[test])
# obtain the classification accuracy on the test data
acc = accuracy_score(y[test], y_predict)
correct = correct + acc
# output the average classification accuracy over all 10 folds
print 'Accuracy:', float(correct)/10
if __name__ == '__main__':
main()
| mit | 376,455,140,267,402,400 | 31.217391 | 89 | 0.628927 | false | 3.968831 | false | false | false |
naterh/chipsec | source/tool/chipsec/hal/cpuid.py | 1 | 1845 | #!/usr/local/bin/python
#CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2015, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#[email protected]
#
# -------------------------------------------------------------------------------
#
# CHIPSEC: Platform Hardware Security Assessment Framework
# (c) 2010-2012 Intel Corporation
#
# -------------------------------------------------------------------------------
## \addtogroup hal
# chipsec/hal/cpuid.py
# ======================
# CPUID information
# ~~~
# #usage:
#     cpuid(0, 0)
# ~~~
#
__version__ = '1.0'
import struct
import sys
import os.path
from chipsec.logger import logger
class CpuIDRuntimeError (RuntimeError):
pass
class CpuID:
def __init__( self, cs ):
self.helper = cs.helper
self.cs = cs
def cpuid(self, eax, ecx ):
if logger().VERBOSE: logger().log( "[cpuid] in: EAX=0x%08X, ECX=0x%08X" % (eax, ecx) )
(eax, ebx, ecx, edx) = self.helper.cpuid( eax, ecx )
if logger().VERBOSE: logger().log( "[cpuid] out: EAX=0x%08X, EBX=0x%08X, ECX=0x%08X, EDX=0x%08X" % (eax, ebx, ecx, edx) )
return (eax, ebx, ecx, edx)
| gpl-2.0 | 5,467,598,300,744,505,000 | 28.783333 | 129 | 0.601626 | false | 3.324324 | false | false | false |
carrdelling/project_euler | problem17.py | 1 | 1728 | #!/usr/bin/env python
################################################################################
#
# Project Euler - Problem 17
#
# If the numbers 1 to 5 are written out in words: one, two, three, four, five,
# then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total.
#
# If all the numbers from 1 to 1000 (one thousand) inclusive were written out in
# words, how many letters would be used?
#
# NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and
# forty-two) contains 23 letters and 115 (one hundred and fifteen) contains 20
# letters. The use of "and" when writing out numbers is in compliance with
# British usage.
#
# Joaquin Derrac - [email protected]
#
################################################################################
units = {1: 3, 2: 3, 3: 5, 4: 4, 5: 4, 6: 3, 7: 5, 8: 5, 9: 4, 0: 0}
tens = {2: 6, 3: 6, 4: 5, 5: 5, 6: 5, 7: 7, 8: 6, 9: 6}
hundreds = {0: 0, 1: 13, 2: 13, 3: 15, 4: 14, 5: 14, 6: 13, 7: 15, 8: 15, 9: 14}
ten_to_nineteen = {10: 3, 11: 6, 12: 6, 13: 8, 14: 8, 15: 7, 16: 7, 17: 9,
18: 8, 19: 8}
def number_str_length(number):
h = number / 100
du = number % 100
d = du / 10
u = du % 10
if du < 1:
# no need for the 'and'
num_length = hundreds[h] - 3
elif 0 < du <= 9:
num_length = hundreds[h] + units[u]
elif 9 < du <= 19:
num_length = hundreds[h] + ten_to_nineteen[du]
else:
num_length = hundreds[h] + tens[d] + units[u]
return num_length
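# Worked examples from the problem statement (illustrative sanity checks):
#   number_str_length(342) == 23   # "threehundredandfortytwo"
#   number_str_length(115) == 20   # "onehundredandfifteen"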
if __name__ == "__main__":
solution = 0
for i in range(1, 1000):
        length = number_str_length(i)
solution += length
# the last one - 1000
solution += 11
print(solution)
| gpl-2.0 | 492,748,719,102,900,900 | 27.327869 | 80 | 0.513889 | false | 2.963979 | false | false | false |
aceway/cppite | src/py/cppite.py | 1 | 13042 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
########################################################
# ITE command start with: #//
# ITE command keywords:quit,exit,byebye,bye, begin, end,
# verbose, concise, dump_project, dump_make_file, dump_cpp,
# dump_fragment,load_fragment, compile, run, edit
#
########################################################
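# Illustrative console session (assuming the surrounding REPL feeds each input
# line to CppIte.is_ite_cmd / do_ite_cmd):
#   #// verbose                 <- an ITE command: switch to verbose mode
#   std::cout << 1 + 1;         <- a plain line: cached as a c++ fragment
#   #// run                     <- compile the cached fragment and run it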
import os
import commands
import settings as st
import utils as ut
from cpp_fragment_tmpl import hpp_tmpl, cpp_tmpl
from CMakeLists_tmpl import cmakelists_tmpl
class CppIte:
def __init__(self):
self.cpp_fragment = []
self.ite_cmd = []
self.include_files = []
self.include_dirs = []
self.static_files = []
self.is_verbose=False
# command full name and its shortkeys
self.ite_cmd_keymap={
'RUN': ("R", "RU"),
'COMPILE': ("C", "CO", "COM", "COMP"),
'VERBOSE': ("V", "VE", "VERB"),
'SIMPLE': ("S", "SI", "SIM"),
'CLEAR': ("CL", "CLE", ),
'SHOW': ("SH", "SHO", ),
'HELP': ("H", "HEL", ),
'RELOAD_SETTING': ('RS', 'REST'),
'CMD_CLEAR': ("CCL", "CCLE", ),
'CMD_HISTORY': ("CH", "CHIS", ),
'ADD_INCLUDE_FILE': ("AIF", ),
'RM_INCLUDE_FILE': ("RIF", "REMOVE_INCLUDE_FILE"),
'ADD_INCLUDE_DIR': ("AID", ),
'RM_INCLUDE_DIR': ("RID", "REMOVE_INCLUDE_DIR"),
'LIST_INCLUDE_FILE':("LIF", ),
'LIST_INCLUDE_DIR': ("LID", ),
'ADD_STATIC_FILE': ('ASF', ),
'LIST_STATIC_FILE': ('LSF', ),
'RM_STATIC_FILE': ('RSF', "REMOVE_STATIC_FILE"),
'LOAD_FRAG_FILE': ('LFF', 'LDFF'),
}
def is_ite_cmd(self, ri):
""" Test wether the raw input is a ITE(interactive test environment) command
or its c++ code fragment.
"""
if ri.strip().startswith( "#//" ):
self.ite_cmd.append( ri.strip().strip("#//") )
return True
else:
self.cpp_fragment.append( ri )
return False
def do_ite_cmd(self):
""" Do the ITE command """
cmd = self.ite_cmd[-1].strip().split(" ")
ite_cmd=cmd[0].upper()
args=cmd[1:]
if ite_cmd in self.ite_cmd_keymap:
ite_cmd=cmd[0].upper()
args=cmd[1:]
else:
for k, v in self.ite_cmd_keymap.items():
if ite_cmd in v:
ite_cmd=k.upper()
args=cmd[1:]
break
if self.is_verbose:
print "Do c++ ITE command:{c} {a}".format( c = ite_cmd, a=args )
self._do_cmd( ite_cmd.lower(), args )
def _do_cmd( self, cmd, *args, **keywords ):
"""
        Private command proxy: dispatch to the matching cmd_<name> method.
"""
if hasattr( self, "cmd_" + cmd.strip().lower() ) \
and callable( getattr(self, "cmd_" + cmd.strip().lower() ) ):
func = getattr(self, "cmd_" + cmd.strip().lower() )
try:
ret = apply( func, *args, **keywords )
except Exception, e:
print "{e}".format( e = e )
ret = None
return ret
else:
print "{c}Not surpported command:{cmd}{e}".format( c=st.color.FG_RED, cmd=cmd, e=st.color.END )
return None
def cmd_help(self, name=None):
"""Print the cppite command help info."""
if name is None:
print "{c}cppite command start with '#//' in the console line, here is all the supported commands:{e}"\
.format(c=st.color.FG_GREEN, e=st.color.END)
cmds = [ c for c in dir(self) if c.startswith("cmd_") ]
for c in cmds:
sc = ",".join( self.ite_cmd_keymap[ c[4:].upper() ] )
print "{c}: {s}. Short command:{sc}\n".format( c=c[4:], s=getattr(self, c).__doc__, sc=sc)
else:
name = name.lower()
cmd_name = "cmd_{n}".format( n= name )
if hasattr(self, cmd_name):
sc = ",".join( self.ite_cmd_keymap[ name.upper() ] )
print "{n}: {s}. Short command:{sc}".format( n=name, s= getattr(self, cmd_name).__doc__, sc=sc)
else:
print "{c}Not surpported command:{n}{e}".format( n=name, c=st.color.FG_RED, e=st.color.END )
def cmd_reload_setting(self):
"""Reload the settings.py"""
reload( st )
def cmd_cmd_history(self):
"""Show cppite commands history that you inputted before."""
for cmd in self.ite_cmd[:-1]:
print "{c}".format( c = cmd.strip() )
def cmd_cmd_clear(self):
"""Clear cppite cached commands"""
self.ite_cmd = []
def cmd_verbose(self):
"""Run in verbose mode, print process detail info."""
self.is_verbose = True
def cmd_simple(self):
"""Run in simple mode, only print the result but no process info."""
self.is_verbose = False
def cmd_show(self):
"""Show the inputted c++ code that cached in cppite temp memory"""
if self.is_verbose:
print "{c}Show the cached c++ code:{e}".format( c=st.color.FG_GREEN, e=st.color.END )
for c in self.cpp_fragment:
print c
def cmd_clear(self):
"""Clear the inputted c++ code that cached in cppite temp memory"""
if self.is_verbose:
print "{c}Clear the cached c++ code:\n{cd}\n{e}". \
format( c=st.color.FG_YELLOW, cd="\n".join(self.cpp_fragment), e=st.color.END )
self.cpp_fragment = []
def cmd_compile(self):
"""Compile the c++ code in cppite caching memory."""
if self.is_verbose:
print "Compile c++ code: {cpp}".format( cpp="\n".join(self.cpp_fragment) )
self.gen_cpp_code_file()
self.gen_cmakelist_file()
return self.exec_bash_cmd( st.compile_tool )
def cmd_run(self):
"""Compile the inputted c++ code and run it"""
if self.is_verbose:
print "Run c++ code fragment: {cpp}".format( cpp="\n".join(self.cpp_fragment) )
if os.path.isfile( st.out_bin_exe ):
status, output = self.exec_bash_cmd( st.out_bin_exe )
if status == 0: print output
else:
print "{c}Cannot find and gen {bf}!{e}".format( c=st.color.FG_RED, bf=st.out_bin_exe, e=st.color.END )
def cmd_list_include_file(self):
"""List c++ include header files"""
print "Now c++ include header file:"
for hf in st.default_include_headers:
print "\t", hf
for hf in self.include_files:
print "\t", hf
def cmd_list_include_dir(self):
"""List c++ include header dirs"""
print "Now c++ include header dir:"
for hd in st.default_include_dirs:
print "\t", hd
for hd in self.include_dirs:
print "\t", hd
def cmd_list_static_file(self):
"""List cmake link static file"""
print "Now cmake link static files:"
for sf in st.default_static_files:
print "\t", sf
for sf in self.static_files:
print "\t", sf
def cmd_add_include_file(self, *file_list):
"""Add c++ include header files"""
if len(file_list) == 0:
print "Need header file name!"
for f in file_list:
if f.strip() in self.include_files:
pass
else:
self.include_files.append( f.strip() )
def cmd_add_include_dir(self, *dir_list):
"""Add c++ include header dirs"""
if len(dir_list) == 0:
print "Need dir name!"
for d in dir_list:
if d.strip() in self.include_dirs:
pass
else:
self.include_dirs.append( d.strip() )
def cmd_add_static_file(self, *file_list):
"""Add static file"""
for f in file_list:
if f.strip() in self.static_files:
pass
else:
self.static_files.append( f.strip() )
def cmd_rm_include_file(self, *file_list):
"""Remove c++ include header files"""
for f in file_list:
if f.strip() in self.include_files:
self.include_files.remove( f.strip() )
else:
pass
def cmd_rm_include_dir(self, *dir_list):
"""Remove c++ include header dirs"""
for d in dir_list:
if d.strip() in self.include_dirs:
self.include_dirs.remove( d.strip() )
else:
pass
def cmd_rm_static_file(self, *file_list):
"""Remove static file from cache"""
for f in file_list:
if f.strip() in self.static_files:
self.static_files.remove( f.strip() )
else:
pass
def cmd_load_frag_file(self, *the_file):
"""Load frag code from a file"""
if len(the_file) == 1:
if os.path.isfile( the_file[0] ):
with open(the_file[0], 'r') as rf:
for line in rf:
                        self.cpp_fragment.append( line )
else:
print "{c}It's not valid file:{f}.{e}".format( c = st.color.FG_RED, e = st.color.END, f=the_file[0] )
pass
else:
print "{c}Only one file once, but now({ln}):{tf}{e}".format( c = st.color.FG_RED, e = st.color.END, ln=len(the_file), tf=the_file )
def gen_cpp_code_file(self):
"""Use the input c++ code fragment(cached in the list) to generate c++ hpp/cpp file."""
if self.is_verbose:
print "Generating c++ code... {f}".format( f = st.cpp_code_dir )
includes=""
for f in st.default_include_headers:
if f.find('.') < 0 or f.endswith('.h') or f.endswith('.hpp'):
the_include = "#include <{f}>\n".format( f=f )
if includes.find( the_include ) < 0:
includes += the_include
for f in self.include_files:
if f.find('.') < 0 or f.endswith('.h') or f.endswith('.hpp'):
the_include = "#include <{f}>\n".format( f=f )
if includes.find( the_include ) < 0:
includes += the_include
hpp_code= hpp_tmpl.format( includes=includes )
cpp_code = cpp_tmpl.format( head_file=st.hpp_filename, tmp_cpp= "\n".join(self.cpp_fragment) )
with open( st.cpp_code_dir + st.hpp_filename, 'w') as hf:
hf.write( hpp_code )
with open( st.cpp_code_dir + st.cpp_filename, 'w') as cf:
cf.write( cpp_code )
def gen_cmakelist_file(self):
"""Use the input and default config data to generate cmake's CMakeLists.txt"""
include_dirs = ""
for ind in st.default_include_dirs:
include_dirs += "{d}\n".format( d = ind )
for ind in self.include_dirs:
include_dirs += "{d}\n".format( d = ind )
static_files = ""
for sf in st.default_static_files:
static_files += "{s}\n".format( s = sf )
for sf in self.static_files:
static_files += "{s}\n".format( s = sf )
cmake_tmpl=cmakelists_tmpl.format( add_include_dirs=include_dirs, add_static_libs=static_files )
with open( st.cmakelists_dir + st.cmakelists_filename, 'w') as cmf:
cmf.write( cmake_tmpl )
def exec_bash_cmd(self, cmd):
"""
Call the bash command or scripts, and get the return info.
"""
the_data = {}
cmd = "{sh} ".format(sh=cmd)
(status, output) = commands.getstatusoutput(cmd)
if status == 0:
the_data['code'] = 0
the_data['data'] = output
the_data['desc'] = "OK"
else:
info = output.split(" ")
new_info = []
            # mask any password arguments before echoing the command output
for d in info:
if len(d) > 2 and d.lower().startswith("-p"):
d = "-p******"
elif len(d) > 2 and d.lower().startswith('"-p'):
d = "-p******"
elif len(d) > 2 and d.lower().startswith("'-p"):
d = "-p******"
else:
d = d
new_info.append(d)
output = " ".join(new_info)
the_data['code'] = -1
the_data['data'] = "<br>{op}".format(op=output)
the_data['desc'] = "{op}".format(op=output)
if status != 0:
print "{c}{out}{e}".format( c=st.color.FG_RED, out=output, e=st.color.END )
elif self.is_verbose:
print "{c}{out}{e}".format( c=st.color.FG_GREEN, out=output, e=st.color.END )
return status, output
| mit | 8,411,447,687,974,248,000 | 35.205556 | 143 | 0.491023 | false | 3.635704 | false | false | false |
bdh1011/wau | venv/lib/python2.7/site-packages/notebook/services/kernelspecs/handlers.py | 1 | 2798 | """Tornado handlers for kernel specifications.
Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-25%3A-Registry-of-installed-kernels#rest-api
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import glob
import json
import os
pjoin = os.path.join
from tornado import web
from ...base.handlers import APIHandler, json_errors
from ...utils import url_path_join
def kernelspec_model(handler, name):
"""Load a KernelSpec by name and return the REST API model"""
ksm = handler.kernel_spec_manager
spec = ksm.get_kernel_spec(name)
d = {'name': name}
d['spec'] = spec.to_dict()
d['resources'] = resources = {}
resource_dir = spec.resource_dir
for resource in ['kernel.js', 'kernel.css']:
if os.path.exists(pjoin(resource_dir, resource)):
resources[resource] = url_path_join(
handler.base_url,
'kernelspecs',
name,
resource
)
for logo_file in glob.glob(pjoin(resource_dir, 'logo-*')):
fname = os.path.basename(logo_file)
no_ext, _ = os.path.splitext(fname)
resources[no_ext] = url_path_join(
handler.base_url,
'kernelspecs',
name,
fname
)
return d
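# Illustrative shape of the model returned above (actual values depend on the
# installed kernelspec; "argv"/"display_name"/"language" are standard kernelspec keys):
# {
#   "name": "python3",
#   "spec": {"argv": [...], "display_name": "Python 3", "language": "python"},
#   "resources": {"kernel.js": "<base_url>/kernelspecs/python3/kernel.js",
#                 "logo-64x64": "<base_url>/kernelspecs/python3/logo-64x64.png"}
# }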
class MainKernelSpecHandler(APIHandler):
SUPPORTED_METHODS = ('GET', 'OPTIONS')
@web.authenticated
@json_errors
def get(self):
ksm = self.kernel_spec_manager
km = self.kernel_manager
model = {}
model['default'] = km.default_kernel_name
model['kernelspecs'] = specs = {}
for kernel_name in ksm.find_kernel_specs():
try:
d = kernelspec_model(self, kernel_name)
except Exception:
self.log.error("Failed to load kernel spec: '%s'", kernel_name, exc_info=True)
continue
specs[kernel_name] = d
self.set_header("Content-Type", 'application/json')
self.finish(json.dumps(model))
@web.authenticated
@json_errors
def options(self):
self.finish()
class KernelSpecHandler(APIHandler):
SUPPORTED_METHODS = ('GET',)
@web.authenticated
@json_errors
def get(self, kernel_name):
try:
model = kernelspec_model(self, kernel_name)
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.set_header("Content-Type", 'application/json')
self.finish(json.dumps(model))
# URL to handler mappings
kernel_name_regex = r"(?P<kernel_name>\w+)"
default_handlers = [
(r"/api/kernelspecs", MainKernelSpecHandler),
(r"/api/kernelspecs/%s" % kernel_name_regex, KernelSpecHandler),
]
| mit | 4,596,191,801,277,789,700 | 28.765957 | 118 | 0.611866 | false | 3.755705 | false | false | false |
pythondigest/pythondigest | digest/forms.py | 1 | 3789 | # -*- encoding: utf-8 -*-
from ckeditor.widgets import CKEditorWidget, json_encode
from django import forms
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.options import get_ul_class
from django.forms import ChoiceField, ModelForm
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
try:
# Django >=1.7
from django.forms.utils import flatatt
except ImportError:
# Django <1.7
from django.forms.util import flatatt
from digest.models import Item
ITEM_STATUS_CHOICES = (('queue', 'В очередь'),
('moderated', 'Отмодерировано'),)
class GlavRedWidget(CKEditorWidget):
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
self._set_config()
external_plugin_resources = [
[force_text(a), force_text(b), force_text(c)]
for a, b, c in self.external_plugin_resources]
return mark_safe(
render_to_string('custom_widget/ckeditor_widget.html', {
'final_attrs': flatatt(final_attrs),
'value': conditional_escape(force_text(value)),
'id': final_attrs['id'],
'config': json_encode(self.config),
'external_plugin_resources': json_encode(
external_plugin_resources)
}))
class ItemStatusForm(ModelForm):
status = ChoiceField(label='Статус',
widget=widgets.AdminRadioSelect(
attrs={'class': get_ul_class(admin.HORIZONTAL)}),
choices=ITEM_STATUS_CHOICES)
class Meta:
model = Item
fields = '__all__'
widgets = {
'description': GlavRedWidget,
}
EMPTY_VALUES = (None, '')
class HoneypotWidget(forms.TextInput):
is_hidden = True
def __init__(self, attrs=None, html_comment=False, *args, **kwargs):
self.html_comment = html_comment
super(HoneypotWidget, self).__init__(attrs, *args, **kwargs)
if 'class' not in self.attrs:
self.attrs['style'] = 'display:none'
def render(self, *args, **kwargs):
html = super(HoneypotWidget, self).render(*args, **kwargs)
if self.html_comment:
html = '<!-- %s -->' % html
return html
class HoneypotField(forms.Field):
widget = HoneypotWidget
def clean(self, value):
if self.initial in EMPTY_VALUES and value in EMPTY_VALUES or value == self.initial:
return value
raise forms.ValidationError('Anti-spam field changed in value.')
class AddNewsForm(forms.ModelForm):
name = HoneypotField()
class Meta:
model = Item
fields = ('link', 'section', 'title', 'language', 'description',)
def __init__(self, *args, **kwargs):
kwargs['initial'] = {
'section': 6
        }  # section 6 will be pre-selected on the form
super(AddNewsForm, self).__init__(*args, **kwargs)
self.fields['title'].widget.attrs = {
'class': 'form-control small',
}
self.fields['title'].required = False
self.fields['link'].widget.attrs = {
'class': 'form-control small',
}
self.fields['language'].widget.attrs = {
'class': 'form-control',
}
self.fields['description'].widget.attrs = {
'class': 'form-control',
}
self.fields['section'].widget.attrs = {
'class': 'form-control',
}
| mit | -280,419,232,003,009,540 | 29.892562 | 91 | 0.590155 | false | 3.86157 | false | false | false |
MrMinimal64/timezonefinder | build_n_install.py | 1 | 1317 | import os
import sys
PACKAGE = 'timezonefinder'
VERSION_FILE = 'VERSION'
VIRT_ENVS = ['APIenv']
VIRT_ENV_COMMAND = '. ~/miniconda3/etc/profile.d/conda.sh; conda activate {virt_env}; '
PY_VERSION_IDS = ['36', '37', '38'] # the supported python versions to create wheels for
PYTHON_TAG = '.'.join([f'py{v}' for v in PY_VERSION_IDS])
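# e.g. with the version ids above, PYTHON_TAG evaluates to "py36.py37.py38"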
if __name__ == "__main__":
print('building now:')
# routine("python3 setup.py sdist bdist_wheel upload", 'Uploading the package now.') # deprecated
# new twine publishing routine:
# https://packaging.python.org/tutorials/packaging-projects/
# delete the build folder before to get a fresh build
# TODO do not remove dist in the future
os.system('rm -r -f build')
os.system('rm -r -f dist')
build_cmd = f"python setup.py sdist bdist_wheel --python-tag {PYTHON_TAG}"
os.system(build_cmd)
# in all specified virtual environments
for virt_env in VIRT_ENVS:
virt_env_cmd = VIRT_ENV_COMMAND.format(virt_env=virt_env)
install_cmd = f'{virt_env_cmd} python setup.py install'
os.system(install_cmd)
# routine(build_cmd, 'building the package now.',
# 'build done. check the included files! installing package in virtual environment next.')
# routine(install_cmd)
os.system('rm -r -f build')
| mit | 1,168,199,045,781,017,300 | 36.628571 | 102 | 0.664389 | false | 3.411917 | false | false | false |
ligovirgo/seismon | RfPrediction/BLRMS_Prediction/condor_seismic_peaks.py | 1 | 1969 |
import os, sys
import glob
import optparse
import tables
import pandas as pd
import numpy as np
import h5py
def parse_commandline():
"""
Parse the options given on the command-line.
"""
parser = optparse.OptionParser()
parser.add_option('-i','--ifos', type=str, default='LHO,LLO', help='GW Observatories: LLO,LHO...')
opts, args = parser.parse_args()
return opts
# Parse command line
opts = parse_commandline()
condorDir = './'
logDir = os.path.join(condorDir,'logs')
if not os.path.isdir(logDir):
os.makedirs(logDir)
condordag = os.path.join(condorDir,'condor.dag')
fid = open(condordag,'w')
condorsh = os.path.join(condorDir,'condor.sh')
fid1 = open(condorsh,'w')
job_number = 0
ifos = opts.ifos.split(",")
for ifo in ifos:
x = np.genfromtxt('./masterlists/{}.dat'.format(ifo))
for ii,row in enumerate(x):
fid1.write('python fetch_seismic_peaks.py -i %s -ID %d -blrmsBand 30M_100M -saveResult 1 -saveImage 0\n'%(ifo,ii))
fid.write('JOB %d condor.sub\n'%(job_number))
fid.write('RETRY %d 3\n'%(job_number))
fid.write('VARS %d jobNumber="%d" ifo="%s" id="%d"\n'%(job_number,job_number, ifo, ii))
fid.write('\n\n')
job_number = job_number + 1
fid1.close()
fid.close()
fid = open(os.path.join(condorDir,'condor.sub'),'w')
fid.write('executable = ./fetch_seismic_peaks.py\n')
fid.write('output = logs/out.$(jobNumber)\n');
fid.write('error = logs/err.$(jobNumber)\n');
fid.write('arguments = -IFO $(ifo) -ID $(id) -blrmsBand 30M_100M -saveResult 1 -saveImage 0\n')
fid.write('requirements = OpSys == "LINUX"\n');
fid.write('request_memory = 8192\n');
fid.write('request_cpus = 1\n');
fid.write('accounting_group = ligo.dev.o2.burst.allsky.stamp\n');
fid.write('notification = never\n');
fid.write('getenv = true\n');
fid.write('log = /usr1/mcoughlin/seismon.log\n')
fid.write('+MaxHours = 24\n');
fid.write('universe = vanilla\n');
fid.write('queue 1\n');
fid.close()
| gpl-3.0 | 4,905,851,540,327,796,000 | 27.536232 | 122 | 0.655663 | false | 2.757703 | false | false | false |
lepinsk/pydub | setup.py | 1 | 1425 | __doc__ = """
Manipulate audio with a simple and easy high level interface.
See the README file for details, usage info, and a list of gotchas.
"""
from setuptools import setup
setup(
name='pydub',
version='0.9.0',
author='James Robert',
author_email='[email protected]',
    description='Manipulate audio with a simple and easy high level interface',
license='MIT',
keywords='audio sound high-level',
url='http://pydub.com',
packages=['pydub'],
long_description=__doc__,
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Multimedia :: Sound/Audio :: Analysis",
"Topic :: Multimedia :: Sound/Audio :: Conversion",
"Topic :: Multimedia :: Sound/Audio :: Editors",
"Topic :: Multimedia :: Sound/Audio :: Mixers",
"Topic :: Software Development :: Libraries",
'Topic :: Utilities',
]
)
| mit | 3,066,871,106,902,735,400 | 34.625 | 80 | 0.602807 | false | 4.048295 | false | false | false |
keenondrums/sovrin-node | sovrin_client/agent/endpoint.py | 1 | 2195 | from typing import Callable
from plenum import config
from plenum.common.message_processor import MessageProcessor
from stp_core.common.log import getlogger
from stp_core.network.auth_mode import AuthMode
from stp_raet.util import getHaFromLocalEstate
from plenum.common.util import randomString
from stp_core.crypto.util import randomSeed
from stp_raet.rstack import SimpleRStack
from stp_core.types import HA
from stp_zmq.simple_zstack import SimpleZStack
logger = getlogger()
class EndpointCore(MessageProcessor):
def tracedMsgHandler(self, msg):
logger.debug("Got {}".format(msg))
self.msgHandler(msg)
class REndpoint(SimpleRStack, EndpointCore):
def __init__(self, port: int, msgHandler: Callable,
name: str=None, basedirpath: str=None):
if name and basedirpath:
ha = getHaFromLocalEstate(name, basedirpath)
if ha and ha[1] != port:
port = ha[1]
stackParams = {
"name": name or randomString(8),
"ha": HA("0.0.0.0", port),
"main": True,
"auth_mode": AuthMode.ALLOW_ANY.value,
"mutable": "mutable",
"messageTimeout": config.RAETMessageTimeout
}
if basedirpath:
stackParams["basedirpath"] = basedirpath
SimpleRStack.__init__(self, stackParams, self.tracedMsgHandler)
self.msgHandler = msgHandler
class ZEndpoint(SimpleZStack, EndpointCore):
def __init__(self, port: int, msgHandler: Callable,
name: str=None, basedirpath: str=None, seed=None,
onlyListener=False, msgRejectHandler=None):
stackParams = {
"name": name or randomString(8),
"ha": HA("0.0.0.0", port),
"auth_mode": AuthMode.ALLOW_ANY.value
}
if basedirpath:
stackParams["basedirpath"] = basedirpath
seed = seed or randomSeed()
SimpleZStack.__init__(
self,
stackParams,
self.tracedMsgHandler,
seed=seed,
onlyListener=onlyListener,
msgRejectHandler=msgRejectHandler)
self.msgHandler = msgHandler
| apache-2.0 | -4,259,224,821,556,638,000 | 30.357143 | 75 | 0.62369 | false | 3.878092 | false | false | false |
ltucker/radarpost | radarpost/commands/useradmin.py | 1 | 4133 | from couchdb import Server, ResourceNotFound
from radarpost.cli import COMMANDLINE_PLUGIN, BasicCommand, get_basic_option_parser
from radarpost import plugins
from radarpost.user import User, ROLE_ADMIN
from getpass import getpass
class CreateUserCommand(BasicCommand):
command_name = 'create_user'
description = 'create a user'
@classmethod
def setup_options(cls, parser):
parser.set_usage(r"%prog" + "%s <username> [options]" % cls.command_name)
parser.add_option('--admin', action="store_true", dest="is_admin",
default=False, help="create an administrative user")
parser.add_option('--locked', action="store_true", dest="is_locked",
default=False,
help="create with locked password, do not prompt for password.")
def __call__(self, username, is_locked=False, is_admin=False):
"""
Create a user with the given username.
is_locked - if True, create with a locked password
is_admin - if True, grant administrative rights to the user
"""
couchdb = Server(self.config['couchdb.address'])
try:
udb = couchdb[self.config['couchdb.users_database']]
except:
print "Failed to connect to couchdb at %s/%s" % (self.config['couchdb.address'],
self.config['couchdb.users_database'])
return 1
new_user = User(username=username)
if new_user.id in udb:
print 'User "%s" already exists' % username
return 1
if not is_locked:
done = False
while(not done):
password = getpass(prompt="Password for %s: " % username)
password2 = getpass(prompt="Repeat password: ")
if password == password2:
done = True
else:
print "Passwords did not match, try again.\n"
new_user.set_password(password)
if is_admin:
new_user.roles = [ROLE_ADMIN]
new_user.store(udb)
print 'Created user "%s"' % username
plugins.register(CreateUserCommand, COMMANDLINE_PLUGIN)
class ResetPasswordCommand(BasicCommand):
command_name = 'reset_password'
description = "reset a user's password"
@classmethod
def setup_options(cls, parser):
parser.set_usage(r"%prog" + "%s <username> [options]" % cls.command_name)
parser.add_option('--locked', action="store_true", dest="is_locked",
default=False,
help="lock the user's password, do not prompt for password.")
def __call__(self, username, is_locked=False):
"""
Reset the password of the user with the given username.
is_locked - if True, lock the user's password
"""
couchdb = Server(self.config['couchdb.address'])
try:
udb = couchdb[self.config['couchdb.users_database']]
except:
print "Failed to connect to couchdb at %s/%s" % (self.config['couchdb.address'],
self.config['couchdb.users_database'])
return 1
try:
user = User.get_by_username(udb, username)
except ResourceNotFound:
print 'User "%s" does not exist' % username
return 1
if not is_locked:
done = False
while(not done):
password = getpass(prompt="New password for %s: " % username)
password2 = getpass(prompt="Repeat password: ")
if password == password2:
done = True
else:
print "Passwords did not match, try again.\n"
user.set_password(password)
else:
user.lock_password()
user.store(udb)
print 'Password changed for user "%s"' % username
plugins.register(ResetPasswordCommand, COMMANDLINE_PLUGIN) | gpl-2.0 | -4,687,873,527,966,541,000 | 36.926606 | 99 | 0.553109 | false | 4.453664 | true | false | false |
pcm17/tensorflow | tensorflow/contrib/distributions/python/ops/inverse_gamma.py | 1 | 10539 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The InverseGamma distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import distribution
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.contrib.framework.python.framework import tensor_util as contrib_tensor_util
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
__all__ = [
"InverseGamma",
"InverseGammaWithSoftplusConcentrationRate",
]
class InverseGamma(distribution.Distribution):
"""InverseGamma distribution.
The `InverseGamma` distribution is defined over positive real numbers using
parameters `concentration` (aka "alpha") and `rate` (aka "beta").
#### Mathematical Details
The probability density function (pdf) is,
```none
pdf(x; alpha, beta, x > 0) = x**(-alpha - 1) exp(-beta / x) / Z
Z = Gamma(alpha) beta**-alpha
```
where:
* `concentration = alpha`,
* `rate = beta`,
* `Z` is the normalizing constant, and,
* `Gamma` is the [gamma function](
https://en.wikipedia.org/wiki/Gamma_function).
The cumulative density function (cdf) is,
```none
cdf(x; alpha, beta, x > 0) = GammaInc(alpha, beta / x) / Gamma(alpha)
```
where `GammaInc` is the [upper incomplete Gamma function](
https://en.wikipedia.org/wiki/Incomplete_gamma_function).
The parameters can be intuited via their relationship to mean and stddev,
```none
concentration = alpha = (mean / stddev)**2
rate = beta = mean / stddev**2
```
Distribution parameters are automatically broadcast in all functions; see
examples for details.
WARNING: This distribution may draw 0-valued samples for small concentration
values. See note in `tf.random_gamma` docstring.
#### Examples
```python
dist = InverseGamma(concentration=3.0, rate=2.0)
dist2 = InverseGamma(concentration=[3.0, 4.0], rate=[2.0, 3.0])
```
"""
def __init__(self,
concentration,
rate,
validate_args=False,
allow_nan_stats=True,
name="InverseGamma"):
"""Construct InverseGamma with `concentration` and `rate` parameters.
The parameters `concentration` and `rate` must be shaped in a way that
supports broadcasting (e.g. `concentration + rate` is a valid operation).
Args:
concentration: Floating point tensor, the concentration params of the
distribution(s). Must contain only positive values.
rate: Floating point tensor, the inverse scale params of the
distribution(s). Must contain only positive values.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
Raises:
TypeError: if `concentration` and `rate` are different dtypes.
"""
parameters = locals()
with ops.name_scope(name, values=[concentration, rate]) as ns:
with ops.control_dependencies([
check_ops.assert_positive(concentration),
check_ops.assert_positive(rate),
] if validate_args else []):
self._concentration = array_ops.identity(
concentration, name="concentration")
self._rate = array_ops.identity(rate, name="rate")
contrib_tensor_util.assert_same_float_dtype(
[self._concentration, self._rate])
super(InverseGamma, self).__init__(
dtype=self._concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._concentration,
self._rate],
name=ns)
@staticmethod
def _param_shapes(sample_shape):
return dict(
zip(("concentration", "rate"), ([ops.convert_to_tensor(
sample_shape, dtype=dtypes.int32)] * 2)))
@property
def concentration(self):
"""Concentration parameter."""
return self._concentration
@property
def rate(self):
"""Rate parameter."""
return self._rate
def _batch_shape_tensor(self):
return array_ops.broadcast_dynamic_shape(
array_ops.shape(self.concentration),
array_ops.shape(self.rate))
def _batch_shape(self):
return array_ops.broadcast_static_shape(
self.concentration.get_shape(),
self.rate.get_shape())
def _event_shape_tensor(self):
return constant_op.constant([], dtype=dtypes.int32)
def _event_shape(self):
return tensor_shape.scalar()
@distribution_util.AppendDocstring(
"""Note: See `tf.random_gamma` docstring for sampling details and
caveats.""")
def _sample_n(self, n, seed=None):
return 1. / random_ops.random_gamma(
shape=[n],
alpha=self.concentration,
beta=self.rate,
dtype=self.dtype,
seed=seed)
def _log_prob(self, x):
return self._log_unnormalized_prob(x) - self._log_normalization()
def _prob(self, x):
return math_ops.exp(self._log_prob(x))
def _log_cdf(self, x):
return math_ops.log(self._cdf(x))
def _cdf(self, x):
x = self._maybe_assert_valid_sample(x)
# Note that igammac returns the upper regularized incomplete gamma
# function Q(a, x), which is what we want for the CDF.
return math_ops.igammac(self.concentration, self.rate / x)
def _log_unnormalized_prob(self, x):
x = self._maybe_assert_valid_sample(x)
return -(1. + self.concentration) * math_ops.log(x) - self.rate / x
def _log_normalization(self):
return (math_ops.lgamma(self.concentration)
- self.concentration * math_ops.log(self.rate))
def _entropy(self):
return (self.concentration
+ math_ops.log(self.rate)
+ math_ops.lgamma(self.concentration)
- ((1. + self.concentration) *
math_ops.digamma(self.concentration)))
@distribution_util.AppendDocstring(
"""The mean of an inverse gamma distribution is
`rate / (concentration - 1)`, when `concentration > 1`, and `NaN`
otherwise. If `self.allow_nan_stats` is `False`, an exception will be
raised rather than returning `NaN`""")
def _mean(self):
mean = self.rate / (self.concentration - 1.)
if self.allow_nan_stats:
nan = array_ops.fill(
self.batch_shape_tensor(),
np.array(np.nan, dtype=self.dtype.as_numpy_dtype()),
name="nan")
return array_ops.where(self.concentration > 1., mean, nan)
else:
return control_flow_ops.with_dependencies([
check_ops.assert_less(
array_ops.ones([], self.dtype), self.concentration,
message="mean undefined when any concentration <= 1"),
], mean)
@distribution_util.AppendDocstring(
"""Variance for inverse gamma is defined only for `concentration > 2`. If
`self.allow_nan_stats` is `False`, an exception will be raised rather
than returning `NaN`.""")
def _variance(self):
var = (math_ops.square(self.rate)
/ math_ops.square(self.concentration - 1.)
/ (self.concentration - 2.))
if self.allow_nan_stats:
nan = array_ops.fill(
self.batch_shape_tensor(),
np.array(np.nan, dtype=self.dtype.as_numpy_dtype()),
name="nan")
return array_ops.where(self.concentration > 2., var, nan)
else:
return control_flow_ops.with_dependencies([
check_ops.assert_less(
constant_op.constant(2., dtype=self.dtype),
self.concentration,
message="variance undefined when any concentration <= 2"),
], var)
@distribution_util.AppendDocstring(
"""The mode of an inverse gamma distribution is `rate / (concentration +
1)`.""")
def _mode(self):
return self.rate / (1. + self.concentration)
def _maybe_assert_valid_sample(self, x):
contrib_tensor_util.assert_same_float_dtype(
tensors=[x], dtype=self.dtype)
if not self.validate_args:
return x
return control_flow_ops.with_dependencies([
check_ops.assert_positive(x),
], x)
class InverseGammaWithSoftplusConcentrationRate(InverseGamma):
"""`InverseGamma` with softplus of `concentration` and `rate`."""
def __init__(self,
concentration,
rate,
validate_args=False,
allow_nan_stats=True,
name="InverseGammaWithSoftplusConcentrationRate"):
parameters = locals()
with ops.name_scope(name, values=[concentration, rate]) as ns:
super(InverseGammaWithSoftplusConcentrationRate, self).__init__(
concentration=nn.softplus(concentration,
name="softplus_concentration"),
rate=nn.softplus(rate, name="softplus_rate"),
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
name=ns)
self._parameters = parameters
| apache-2.0 | 5,066,371,313,782,107,000 | 34.365772 | 92 | 0.656988 | false | 3.981488 | false | false | false |
WorldViews/Spirals | YEI/foo_api.py | 1 | 146347 | #!/usr/bin/env python2.7
from __future__ import print_function
""" This module is an API module for ThreeSpace devices.
The ThreeSpace API module is a collection of classes, functions, structures,
and static variables use exclusivly for ThreeSpace devices. This module can
be used with a system running Python 2.5 and newer (including Python 3.x).
"""
__version__ = "2.0.2.3"
__authors__ = [
'"Chris George" <[email protected]>',
'"Dan Morrison" <[email protected]>',
]
import threading
import sys
import serial
import struct
import collections
import traceback
import time
import os
# chose an implementation, depending on os
if os.name == 'nt': # sys.platform == 'win32':
from win32_threespace_utils import *
else:
from threespace_utils import *
print("WARNING: No additional utils are loaded!!!!!!")
### Globals ###
global_file_path = os.getcwd()
global_error = None
global_counter = 0
global_donglist = {}
global_sensorlist = {}
global_broadcaster = None
TSS_TIMESTAMP_SENSOR = 0
TSS_TIMESTAMP_SYSTEM = 1
TSS_TIMESTAMP_NONE = 2
TSS_JOYSTICK = 0
TSS_MOUSE = 2
TSS_BUTTON_LEFT = 0
TSS_BUTTON_RIGHT = 1
### Private ###
_baudrate = 115200
_allowed_baudrates = [1200, 2400, 4800, 9600, 19200, 28800, 38400, 57600, 115200, 230400, 460800, 921600]
_wireless_retries = 5
### Functions ###
if sys.version_info >= (3, 0):
def makeWriteArray(startbyte, index_byte=None, command_byte=None, data=None):
rtn_array = bytearray((startbyte,))
if index_byte is not None:
rtn_array.append(index_byte)
if command_byte is not None:
rtn_array.append(command_byte)
if data is not None:
rtn_array += data
rtn_array.append((sum(rtn_array) - startbyte) % 256) # checksum
_hexDump(rtn_array)
return rtn_array
else:
def makeWriteArray(startbyte, index_byte=None, command_byte=None, data=None):
rtn_array = chr(startbyte)
if index_byte is not None:
rtn_array += chr(index_byte)
if command_byte is not None:
rtn_array += chr(command_byte)
if data is not None:
rtn_array += data
rtn_array += chr((sum(bytearray(rtn_array)) - startbyte) % 256) # checksum
_hexDump(rtn_array)
return rtn_array
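# Illustrative packet layout produced by makeWriteArray (using start byte 0xf7, as
# in f7WriteRead below, and the 0xdf "getFirmwareVersionString" command byte from
# command_dict):
#   makeWriteArray(0xf7, command_byte=0xdf) -> bytes 0xf7, 0xdf, 0xdf
# where the trailing 0xdf is the checksum: the sum of the bytes after the start byte, mod 256.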
def _hexDump(serial_string, header='i'):
if "-d_hex" in sys.argv:
ba = bytearray(serial_string)
print('{0}('.format(header), end='')
for i in range(len(ba)):
if i == len(ba)-1:
print('0x{0:02x}'.format(ba[i]), end='')
else:
print('0x{0:02x},'.format(ba[i]), end='')
print(')')
def _print(string):
if "-d" in sys.argv:
print(string)
def _echoCallback(sensor, state):
_print('{0}:{1}'.format(sensor, state))
def _generateProtocolHeader(success_failure=False,
timestamp=False,
command_echo=False,
checksum=False,
logical_id=False,
serial_number=False,
data_length=False):
byte = 0
struct_str = '>'
idx_list = []
if success_failure:
byte += 0x1
struct_str += '?'
idx_list.append(0)
if timestamp:
byte += 0x2
struct_str += 'I'
idx_list.append(1)
if command_echo:
byte += 0x4
struct_str += 'B'
idx_list.append(2)
if checksum:
byte += 0x8
struct_str += 'B'
idx_list.append(3)
if logical_id:
byte += 0x10
struct_str += 'B'
idx_list.append(4)
if serial_number:
byte += 0x20
struct_str += 'I'
idx_list.append(5)
if data_length:
byte += 0x40
struct_str += 'B'
idx_list.append(6)
return (byte, struct.Struct(struct_str), idx_list)
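# Illustrative call: requesting the success/fail flag and the timestamp field,
#   _generateProtocolHeader(success_failure=True, timestamp=True)
# returns (0x3, struct.Struct('>?I'), [0, 1]) -- a 1-byte bool followed by a
# 4-byte unsigned int in the parsed response header.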
def _generateSensorClass(sensor_inst, serial_port, allowed_device_types):
sensor_inst.compatibility = checkSoftwareVersionFromPort(serial_port)
sensor_inst.port_name = serial_port.name
sensor_inst.serial_port_settings = serial_port.getSettingsDict()
sensor_inst.serial_port = serial_port
hardware_version = convertString(sensor_inst.f7WriteRead('getHardwareVersionString'))
dev_type = hardware_version[4:-8].strip()
if dev_type not in allowed_device_types:
raise Exception("This is a %s device, not one of these devices %s!" % (dev_type, allowed_device_types))
sensor_inst.device_type = dev_type
serial_number = sensor_inst.f7WriteRead('getSerialNumber')
sensor_inst.serial_number = serial_number
if dev_type == "DNG":
if serial_number in global_donglist:
rtn_inst = global_donglist[serial_number]
rtn_inst.close()
rtn_inst.compatibility = sensor_inst.compatibility
rtn_inst.port_name = serial_port.name
rtn_inst.serial_port_settings = serial_port.getSettingsDict()
rtn_inst.serial_port = serial_port
return rtn_inst
global_donglist[serial_number] = sensor_inst
else:
if serial_number in global_sensorlist:
rtn_inst = global_sensorlist[serial_number]
rtn_inst.close()
rtn_inst.compatibility = sensor_inst.compatibility
rtn_inst.port_name = serial_port.name
rtn_inst.serial_port_settings = serial_port.getSettingsDict()
rtn_inst.serial_port = serial_port
if "BT" in dev_type:
rtn_inst.serial_port.timeout = 1.5
rtn_inst.serial_port.writeTimeout = 1.5
if "WL" in dev_type:
rtn_inst.switchToWiredMode()
return rtn_inst
if "BT" in dev_type:
sensor_inst.serial_port.timeout = 1.5
sensor_inst.serial_port.writeTimeout = 1.5
elif "WL" in dev_type:
sensor_inst.switchToWiredMode()
global_sensorlist[serial_number] = sensor_inst
return sensor_inst
def parseAxisDirections(axis_byte):
axis_order_num = axis_byte & 7
if axis_order_num == 0:
axis_order = "XYZ"
elif axis_order_num == 1:
axis_order = "XZY"
elif axis_order_num == 2:
axis_order = "YXZ"
elif axis_order_num == 3:
axis_order = "YZX"
elif axis_order_num == 4:
axis_order = "ZXY"
elif axis_order_num == 5:
axis_order = "ZYX"
else:
raise ValueError
neg_x = neg_y = neg_z = False
if (axis_byte & 32) > 0:
neg_x = True
if (axis_byte & 16) > 0:
neg_y = True
if (axis_byte & 8) > 0:
neg_z = True
return axis_order, neg_x, neg_y, neg_z
def generateAxisDirections(axis_order, neg_x=False, neg_y=False, neg_z=False):
axis_order = axis_order.upper()
if axis_order == "XYZ":
axis_byte = 0
elif axis_order == "XZY":
axis_byte = 1
elif axis_order == "YXZ":
axis_byte = 2
elif axis_order == "YZX":
axis_byte = 3
elif axis_order == "ZXY":
axis_byte = 4
elif axis_order == "ZYX":
axis_byte = 5
else:
raise ValueError
if neg_x:
axis_byte = axis_byte | 32
if neg_y:
axis_byte = axis_byte | 16
if neg_z:
axis_byte = axis_byte | 8
return axis_byte
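# Illustrative round trip between the two axis-direction helpers above:
#   generateAxisDirections("XZY", neg_y=True)  -> 17  (0b010001)
#   parseAxisDirections(17)                    -> ("XZY", False, True, False)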
def getSystemWirelessRetries():
return _wireless_retries
def setSystemWirelessRetries(retries):
global _wireless_retries
_wireless_retries = retries
def getDefaultCreateDeviceBaudRate():
return _baudrate
def setDefaultCreateDeviceBaudRate(new_baudrate):
global _baudrate
if new_baudrate in _allowed_baudrates:
_baudrate = new_baudrate
def padProtocolHeader69(header_data, sys_timestamp):
fail_byte, cmd_echo, data_size = header_data
return (fail_byte, sys_timestamp, cmd_echo, None, None, None, data_size)
def padProtocolHeader71(header_data):
fail_byte, timestamp, cmd_echo, data_size = header_data
return (fail_byte, timestamp, cmd_echo, None, None, None, data_size)
def padProtocolHeader85(header_data, sys_timestamp):
fail_byte, cmd_echo, rtn_log_id, data_size = header_data
return (fail_byte, sys_timestamp, cmd_echo, None, rtn_log_id, None, data_size)
def padProtocolHeader87(header_data):
fail_byte, timestamp, cmd_echo, rtn_log_id, data_size = header_data
return (fail_byte, timestamp, cmd_echo, None, rtn_log_id, None, data_size)
### Classes ###
class Broadcaster(object):
def __init__(self):
self.retries = 10
def setRetries(self, retries=10):
self.retries = retries
def sequentialWriteRead(self, command, input_list=None, filter=None):
if filter is None:
filter = list(global_sensorlist.values())
val_list = {}
for i in range(self.retries):
for sensor in reversed(filter):
packet = sensor.writeRead(command, input_list)
if packet[0]: # fail_byte
continue
val_list[sensor.serial_number] = packet
filter.remove(sensor)
if not filter:
break
# _print("##Attempt: {0} complete".format(i))
else:
# _print("sensor failed to succeed")
for sensor in filter:
val_list[sensor.serial_number] = (True, None, None)
return val_list
def writeRead(self, command, input_list=None, filter=None):
q = TSCommandQueue()
if filter is None:
filter = list(global_sensorlist.values())
for sensor in filter:
q.queueWriteRead(sensor, sensor.serial_number, self.retries, command, input_list)
return q.proccessQueue()
def _broadcastMethod(self, filter, method, default=None, *args):
# _print(filter)
if filter is None:
filter = list(global_sensorlist.values())
val_list = {}
for i in range(self.retries):
for sensor in reversed(filter):
packet = getattr(sensor, method)(*args)
if packet is default: # fail_byte
continue
val_list[sensor.serial_number] = packet
filter.remove(sensor)
if not filter:
break
# _print("##Attempt: {0} complete".format(i))
else:
# _print("sensor failed to succeed")
for sensor in filter:
val_list[sensor.serial_number] = default
return val_list
def broadcastMethod(self, method, default=None, args=[], filter=None, callback_func=None):
q = TSCommandQueue()
if filter is None:
filter = list(global_sensorlist.values())
for sensor in filter:
q.queueMethod( getattr(sensor, method),
sensor,
self.retries,
default,
args,
callback_func)
return q.proccessQueue()
def setStreamingSlots(self, slot0='null',
slot1='null',
slot2='null',
slot3='null',
slot4='null',
slot5='null',
slot6='null',
slot7='null',
filter=None,
callback_func=None):
args = (slot0, slot1, slot2, slot3, slot4, slot5, slot6, slot7)
return self.broadcastMethod('setStreamingSlots', False, args, filter, callback_func)
def getStreamingSlots(self, filter=None, callback_func=None):
return self.broadcastMethod('getStreamingSlots', None, [], filter, callback_func)
def startStreaming(self, record_data=False, filter=None, callback_func=None):
return self.broadcastMethod('startStreaming', False, [record_data], filter, callback_func)
def stopStreaming(self, filter=None, callback_func=None):
return self.broadcastMethod('stopStreaming', False, [], filter, callback_func)
def setStreamingTiming(self, interval, duration, delay, delay_offset, filter=None, callback_func=None):
if filter is None:
filter = list(global_sensorlist.values())
else:
filter = list(filter)
val_list = {}
for sensor in reversed(filter):
success = False
for i in range(self.retries):
if sensor.setStreamingTiming(interval, duration, delay):
if callback_func is not None:
callback_func(sensor, True)
success = True
break
# _print("##Attempt: {0} complete".format(i))
if callback_func is not None:
callback_func(sensor, False)
else:
# _print("sensor failed to succeed")
pass
val_list[sensor] = success
filter.remove(sensor)
delay += delay_offset
return val_list
def startRecordingData(self, filter=None, callback_func=None):
if filter is None:
filter = list(global_sensorlist.values())
for sensor in filter:
sensor.record_data = True
if callback_func is not None:
callback_func(sensor, True)
def stopRecordingData(self, filter=None, callback_func=None):
if filter is None:
filter = list(global_sensorlist.values())
for sensor in filter:
sensor.record_data = False
if callback_func is not None:
callback_func(sensor, True)
def debugPrint(self, broadcast_dict):
for sensor, data in broadcast_dict.items():
_print('Sensor {0:08X}: {1}'.format(sensor, data))
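# Example usage sketch for Broadcaster (values and command names are
# illustrative). Assumes one or more sensors have already been created through
# the concrete TS*Sensor device classes defined elsewhere in this module, so
# that global_sensorlist is populated; the command and slot names come from the
# command_dict tables below.
def _example_broadcaster_usage():
    broadcaster = Broadcaster()
    broadcaster.setRetries(5)
    # Query every known sensor for its tared orientation in one batched pass.
    results = broadcaster.writeRead('getTaredOrientationAsQuaternion')
    broadcaster.debugPrint(results)
    # Point slot 0 of every sensor at the same command, then start streaming.
    broadcaster.setStreamingSlots(slot0='getTaredOrientationAsQuaternion')
    broadcaster.startStreaming()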
class TSCommandQueue(object):
def __init__(self):
self.queue = []
self.return_dict = {}
def queueWriteRead(self, sensor, rtn_key, retries, command, input_list=None):
self.queue.append(("queueWriteRead", sensor, (self.return_dict, rtn_key, retries, command, input_list)))
def queueMethod(self, method_obj, rtn_key, retries, default=None, input_list=None, callback_func=None):
self.queue.append(("queueMethod", (method_obj, rtn_key, retries, default, input_list, callback_func)))
def _queueMethod(self, method_obj, rtn_key, retries, default=None, input_list=None, callback_func=None):
try:
for i in range(retries):
packet = method_obj(*input_list)
if packet is default: # fail_byte
if callback_func is not None:
callback_func(rtn_key, False)
continue
if callback_func is not None:
callback_func(rtn_key, True)
self.return_dict[rtn_key] = packet
break
else:
self.return_dict[rtn_key] = default
except(KeyboardInterrupt):
print('\n! Received keyboard interrupt, quitting threads.\n')
            raise KeyboardInterrupt  # fix bug where a thread eats the interrupt
def createThreads(self):
thread_queue = []
for item in self.queue:
if item[0] == "queueWriteRead":
thread_queue.append(item[1].queueWriteRead(*item[2]))
elif item[0] == "queueMethod":
qThread = threading.Thread(target=self._queueMethod, args=item[1])
thread_queue.append(qThread)
return thread_queue
def proccessQueue(self, clear_queue=False):
thread_queue = self.createThreads()
[qThread.start() for qThread in thread_queue]
[qThread.join() for qThread in thread_queue]
if clear_queue:
self.queue = []
return self.return_dict
# Base class should not be used directly
class _TSBase(object):
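    # Each command_dict entry maps a command name to a tuple describing its
    # wire format:
    #   (command byte, response length in bytes, response struct format,
    #    argument length in bytes, argument struct format, minimum firmware
    #    compatibility level)
    # f7WriteRead/f9WriteRead unpack these tuples to build and parse packets.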
command_dict = {
'checkLongCommands': (0x19, 1, '>B', 0, None, 1),
'startStreaming': (0x55, 0, None, 0, None, 1),
'stopStreaming': (0x56, 0, None, 0, None, 1),
'updateCurrentTimestamp': (0x5f, 0, None, 4, '>I', 1),
'setLEDMode': (0xc4, 0, None, 1, '>B', 1),
'getLEDMode': (0xc8, 1, '>B', 0, None, 1),
'_setWiredResponseHeaderBitfield': (0xdd, 0, None, 4, '>I', 1),
'_getWiredResponseHeaderBitfield': (0xde, 4, '>I', 0, None, 1),
'getFirmwareVersionString': (0xdf, 12, '>12s', 0, None, 1),
'commitSettings': (0xe1, 0, None, 0, None, 1),
'softwareReset': (0xe2, 0, None, 0, None, 1),
'getHardwareVersionString': (0xe6, 32, '>32s', 0, None, 1),
'getSerialNumber': (0xed, 4, '>I', 0, None, 1),
'setLEDColor': (0xee, 0, None, 12, '>fff', 1),
'getLEDColor': (0xef, 12, '>fff', 0, None, 1),
'setJoystickAndMousePresentRemoved': (0xfd, 0, None, 2, '>BB', 1),
'getJoystickAndMousePresentRemoved': (0xfe, 2, '>B', 0, None, 1),
'null': (0xff, 0, None, 0, None, 1)
}
def __init__(self, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
self.protocol_args = { 'success_failure': True,
'timestamp': True,
'command_echo': True,
'data_length': True}
if timestamp_mode != TSS_TIMESTAMP_SENSOR:
self.protocol_args['timestamp'] = False
self.timestamp_mode = timestamp_mode
self.baudrate = baudrate
reinit = False
        try:  # if this attribute exists, the instance was already initialized (reconnect case)
            check = self.stream_parse
            reinit = True
            # _print("sensor reinit!!!")
        except AttributeError:
self._setupBaseVariables()
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
if reinit:
if self.stream_timing is not None:
self.setStreamingTiming(*self.stream_timing)
if self.stream_slot_cmds is not None:
self.setStreamingSlots(*self.stream_slot_cmds)
def _setupBaseVariables(self):
self.serial_number_hex = '{0:08X}'.format(self.serial_number)
self.stream_timing = None
self.stream_parse = None
self.stream_slot_cmds = ['null'] * 8
self.stream_last_data = None
self.stream_data = []
self.record_data = False
self.data_loop = False
def _setupProtocolHeader(self, success_failure=False,
timestamp=False,
command_echo=False,
checksum=False,
logical_id=False,
serial_number=False,
data_length=False):
protocol_header = _generateProtocolHeader( success_failure,
timestamp,
command_echo,
checksum,
logical_id,
serial_number,
data_length)
protocol_byte, self.header_parse, self.header_idx_lst = protocol_header
d_header = self.f7WriteRead('_getWiredResponseHeaderBitfield')
if d_header != protocol_byte:
self.f7WriteRead('_setWiredResponseHeaderBitfield', protocol_byte)
d_header = self.f7WriteRead('_getWiredResponseHeaderBitfield')
if d_header != protocol_byte:
print("!!!!!fail d_header={0}, protocol_header_byte={1}".format(d_header, protocol_byte))
                raise Exception("Failed to set the wired response header bitfield")
def _setupThreadedReadLoop(self):
self.read_lock = threading.Condition(threading.Lock())
self.read_queue = collections.deque()
self.read_dict = {}
self.data_loop = True
self.read_thread = threading.Thread(target=self._dataReadLoop)
self.read_thread.daemon = True
self.read_thread.start()
def __repr__(self):
return "<YEI3Space {0}:{1}>".format(self.device_type, self.serial_number_hex)
def __str__(self):
return self.__repr__()
def close(self):
self.data_loop = False
if self.serial_port:
self.serial_port.close()
self.serial_port = None
self.read_thread.join()
def reconnect(self):
self.close()
if not tryPort(self.port_name):
_print("tryport fail")
try:
serial_port = serial.Serial(self.port_name, baudrate=self.baudrate, timeout=0.5, writeTimeout=0.5)
serial_port.applySettingsDict(self.serial_port_settings)
self.serial_port = serial_port
except:
traceback.print_exc()
return False
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
if self.stream_timing is not None:
self.setStreamingTiming(*self.stream_timing)
if self.stream_slot_cmds is not None:
self.setStreamingSlots(*self.stream_slot_cmds)
return True
# Wired Old Protocol WriteRead
def f7WriteRead(self, command, input_list=None):
command_args = self.command_dict[command]
cmd_byte, out_len, out_struct, in_len, in_struct, compatibility = command_args
packed_data = None
if in_struct:
if type(input_list) in (list, tuple):
packed_data = struct.pack(in_struct, *input_list)
else:
packed_data = struct.pack(in_struct, input_list)
write_array = makeWriteArray(0xf7, None, cmd_byte, packed_data)
self.serial_port.write(write_array)
if out_struct:
output_data = self.serial_port.read(out_len)
rtn_list = struct.unpack(out_struct, output_data)
if len(rtn_list) != 1:
return rtn_list
return rtn_list[0]
# requires the dataloop, do not call
# Wired New Protocol WriteRead
def f9WriteRead(self, command, input_list=None):
global global_counter
command_args = self.command_dict[command]
cmd_byte, out_len, out_struct, in_len, in_struct, compatibility = command_args
if self.compatibility < compatibility:
raise Exception("Firmware for device on ( %s ) is out of date for this function. Recommend updating to latest firmware." % self.serial_port.name)
packed_data = None
if in_struct:
if type(input_list) in (list, tuple):
packed_data = struct.pack(in_struct, *input_list)
else:
packed_data = struct.pack(in_struct, input_list)
write_array = makeWriteArray(0xf9, None, cmd_byte, packed_data)
self.read_lock.acquire()
uid = global_counter
global_counter += 1
try:
self.serial_port.write(write_array) # release in reader thread
except serial.SerialTimeoutException:
self.read_lock.release()
self.serial_port.close()
# _print("SerialTimeoutException!!!!")
# !!!!!Reconnect
return (True, None, None)
except ValueError:
try:
# _print("trying to open it back up!!!!")
self.serial_port.open()
# _print("aaand open!!!!")
except serial.SerialException:
self.read_lock.release()
# _print("SerialTimeoutException!!!!")
# !!!!!Reconnect
return (True, None, None)
queue_packet = (uid, cmd_byte)
timeout_time = 0.5 + (len(self.read_queue) * 0.150) # timeout increases as queue gets larger
self.read_queue.append(queue_packet)
start_time = time.clock() + timeout_time
read_data = None
while(timeout_time > 0):
self.read_lock.wait(timeout_time)
read_data = self.read_dict.get(uid, None)
if read_data is not None:
break
            timeout_time = start_time - time.clock()
# _print("Still waiting {0} {1} {2}".format(uid, command, timeout_time))
else:
# _print("Operation timed out!!!!")
try:
self.read_queue.remove(queue_packet)
except:
traceback.print_exc()
self.read_lock.release()
return (True, None, None)
self.read_lock.release()
del self.read_dict[uid]
header_list, output_data = read_data
fail_byte, timestamp, cmd_echo, ck_sum, rtn_log_id, sn, data_size = header_list
if cmd_echo != cmd_byte:
# _print("!!!!!!!!cmd_echo!=cmd_byte!!!!!")
# _print('cmd_echo= 0x{0:02x} cmd_byte= 0x{1:02x}'.format(cmd_echo, cmd_byte))
return (True, timestamp, None)
rtn_list = None
if not fail_byte:
if out_struct:
rtn_list = struct.unpack(out_struct, output_data)
if len(rtn_list) == 1:
rtn_list = rtn_list[0]
else:
# _print("fail_byte!!!!triggered")
pass
return (fail_byte, timestamp, rtn_list)
writeRead = f9WriteRead
def isConnected(self, try_reconnect=False):
try:
            serial_number = self.getSerialNumber()
            if serial_number is not None:
return True
except:
pass
return False
## generated functions USB and WL_ and DNG and EM_ and DL_ and BT_
## 85(0x55)
def stopStreaming(self):
fail_byte, t_stamp, data = self.writeRead('stopStreaming')
return not fail_byte
## 86(0x56)
def startStreaming(self):
fail_byte, t_stamp, data = self.writeRead('startStreaming')
return not fail_byte
## 95(0x5f)
def updateCurrentTimestamp(self, time, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('updateCurrentTimestamp', time)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 196(0xc4)
def setLEDMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setLEDMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 200(0xc8)
def getLEDMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getLEDMode')
if timestamp:
return (data, t_stamp)
return data
## 223(0xdf)
def getFirmwareVersionString(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getFirmwareVersionString')
data = convertString(data)
if timestamp:
return (data, t_stamp)
return data
## 225(0xe1)
def commitSettings(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('commitSettings')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 230(0xe6)
def getHardwareVersionString(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getHardwareVersionString')
data = convertString(data)
if timestamp:
return (data, t_stamp)
return data
## 237(0xed)
def getSerialNumber(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getSerialNumber')
if timestamp:
return (data, t_stamp)
return data
## 238(0xee)
def setLEDColor(self, rgb, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setLEDColor', rgb)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 239(0xef)
def getLEDColor(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getLEDColor')
if timestamp:
return (data, t_stamp)
return data
## 253(0xfd)
def setJoystickAndMousePresentRemoved(self, joystick, mouse, timestamp=False):
arg_list = (joystick, mouse)
fail_byte, t_stamp, data = self.writeRead('setJoystickAndMousePresentRemoved', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 254(0xfe)
def getJoystickAndMousePresentRemoved(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getJoystickAndMousePresentRemoved')
if timestamp:
return (data, t_stamp)
return data
## END generated functions USB and WL_ and DNG and EM_ and DL_ and BT_
class _TSSensor(_TSBase):
command_dict = _TSBase.command_dict.copy()
command_dict.update({
'getTaredOrientationAsQuaternion': (0x0, 16, '>4f', 0, None, 1),
'getTaredOrientationAsEulerAngles': (0x1, 12, '>fff', 0, None, 1),
'getTaredOrientationAsRotationMatrix': (0x2, 36, '>9f', 0, None, 1),
'getTaredOrientationAsAxisAngle': (0x3, 16, '>4f', 0, None, 1),
'getTaredOrientationAsTwoVector': (0x4, 24, '>6f', 0, None, 1),
'getDifferenceQuaternion': (0x5, 16, '>4f', 0, None, 1),
'getUntaredOrientationAsQuaternion': (0x6, 16, '>4f', 0, None, 1),
'getUntaredOrientationAsEulerAngles': (0x7, 12, '>fff', 0, None, 1),
'getUntaredOrientationAsRotationMatrix': (0x8, 36, '>9f', 0, None, 1),
'getUntaredOrientationAsAxisAngle': (0x9, 16, '>4f', 0, None, 1),
'getUntaredOrientationAsTwoVector': (0xa, 24, '>6f', 0, None, 1),
'getTaredTwoVectorInSensorFrame': (0xb, 24, '>6f', 0, None, 1),
'getUntaredTwoVectorInSensorFrame': (0xc, 24, '>6f', 0, None, 1),
'setEulerAngleDecompositionOrder': (0x10, 0, None, 1, '>B', 1),
'setMagnetoresistiveThreshold': (0x11, 0, None, 16, '>fIff', 3),
'setAccelerometerResistanceThreshold': (0x12, 0, None, 8, '>fI', 3),
'offsetWithCurrentOrientation': (0x13, 0, None, 0, None, 3),
'resetBaseOffset': (0x14, 0, None, 0, None, 3),
'offsetWithQuaternion': (0x15, 0, None, 16, '>4f', 3),
'setBaseOffsetWithCurrentOrientation': (0x16, 0, None, 0, None, 3),
'getAllNormalizedComponentSensorData': (0x20, 36, '>9f', 0, None, 1),
'getNormalizedGyroRate': (0x21, 12, '>fff', 0, None, 1),
'getNormalizedAccelerometerVector': (0x22, 12, '>fff', 0, None, 1),
'getNormalizedCompassVector': (0x23, 12, '>fff', 0, None, 1),
'getAllCorrectedComponentSensorData': (0x25, 36, '>9f', 0, None, 1),
'getCorrectedGyroRate': (0x26, 12, '>fff', 0, None, 1),
'getCorrectedAccelerometerVector': (0x27, 12, '>fff', 0, None, 1),
'getCorrectedCompassVector': (0x28, 12, '>fff', 0, None, 1),
'getCorrectedLinearAccelerationInGlobalSpace': (0x29, 12, '>fff', 0, None, 1),
'getTemperatureC': (0x2b, 4, '>f', 0, None, 1),
'getTemperatureF': (0x2c, 4, '>f', 0, None, 1),
'getConfidenceFactor': (0x2d, 4, '>f', 0, None, 1),
'getAllRawComponentSensorData': (0x40, 36, '>9f', 0, None, 1),
'getRawGyroscopeRate': (0x41, 12, '>fff', 0, None, 1),
'getRawAccelerometerData': (0x42, 12, '>fff', 0, None, 1),
'getRawCompassData': (0x43, 12, '>fff', 0, None, 1),
'_setStreamingSlots': (0x50, 0, None, 8, '>8B', 1),
'_getStreamingSlots': (0x51, 8, '>8B', 0, None, 1),
'_setStreamingTiming': (0x52, 0, None, 12, '>III', 1),
'_getStreamingTiming': (0x53, 12, '>III', 0, None, 1),
'_getStreamingBatch': (0x54, 0, None, 0, None, 1),
'tareWithCurrentOrientation': (0x60, 0, None, 0, None, 1),
'tareWithQuaternion': (0x61, 0, None, 16, '>4f', 1),
'tareWithRotationMatrix': (0x62, 0, None, 36, '>9f', 1),
'setStaticAccelerometerTrustValue': (0x63, 0, None, 4, '>f', 2),
'setConfidenceAccelerometerTrustValues': (0x64, 0, None, 8, '>ff', 2),
'setStaticCompassTrustValue': (0x65, 0, None, 4, '>f', 2),
'setConfidenceCompassTrustValues': (0x66, 0, None, 8, '>ff', 2),
'setDesiredUpdateRate': (0x67, 0, None, 4, '>I', 1),
'setReferenceVectorMode': (0x69, 0, None, 1, '>B', 1),
'setOversampleRate': (0x6a, 0, None, 1, '>B', 1),
'setGyroscopeEnabled': (0x6b, 0, None, 1, '>B', 1),
'setAccelerometerEnabled': (0x6c, 0, None, 1, '>B', 1),
'setCompassEnabled': (0x6d, 0, None, 1, '>B', 1),
'setAxisDirections': (0x74, 0, None, 1, '>B', 1),
'setRunningAveragePercent': (0x75, 0, None, 4, '>f', 1),
'setCompassReferenceVector': (0x76, 0, None, 12, '>fff', 1),
'setAccelerometerReferenceVector': (0x77, 0, None, 12, '>fff', 1),
'resetKalmanFilter': (0x78, 0, None, 0, None, 1),
'setAccelerometerRange': (0x79, 0, None, 1, '>B', 1),
'setFilterMode': (0x7b, 0, None, 1, '>B', 1),
'setRunningAverageMode': (0x7c, 0, None, 1, '>B', 1),
'setGyroscopeRange': (0x7d, 0, None, 1, '>B', 1),
'setCompassRange': (0x7e, 0, None, 1, '>B', 1),
'getTareAsQuaternion': (0x80, 16, '>4f', 0, None, 1),
'getTareAsRotationMatrix': (0x81, 36, '>9f', 0, None, 1),
'getAccelerometerTrustValues': (0x82, 8, '>ff', 0, None, 2),
'getCompassTrustValues': (0x83, 8, '>ff', 0, None, 2),
'getCurrentUpdateRate': (0x84, 4, '>I', 0, None, 1),
'getCompassReferenceVector': (0x85, 12, '>fff', 0, None, 1),
'getAccelerometerReferenceVector': (0x86, 12, '>fff', 0, None, 1),
'getGyroscopeEnabledState': (0x8c, 1, '>B', 0, None, 1),
'getAccelerometerEnabledState': (0x8d, 1, '>B', 0, None, 1),
'getCompassEnabledState': (0x8e, 1, '>B', 0, None, 1),
'getAxisDirections': (0x8f, 1, '>B', 0, None, 1),
'getOversampleRate': (0x90, 1, '>B', 0, None, 1),
'getRunningAveragePercent': (0x91, 4, '>f', 0, None, 1),
'getDesiredUpdateRate': (0x92, 4, '>I', 0, None, 1),
'getAccelerometerRange': (0x94, 1, '>B', 0, None, 1),
'getFilterMode': (0x98, 1, '>B', 0, None, 1),
'getRunningAverageMode': (0x99, 1, '>B', 0, None, 1),
'getGyroscopeRange': (0x9a, 1, '>B', 0, None, 1),
'getCompassRange': (0x9b, 1, '>B', 0, None, 1),
'getEulerAngleDecompositionOrder': (0x9c, 1, '>B', 0, None, 1),
'getMagnetoresistiveThreshold': (0x9d, 16, '>fIff', 0, None, 3),
'getAccelerometerResistanceThreshold': (0x9e, 8, '>fI', 0, None, 3),
'getOffsetOrientationAsQuaternion': (0x9f, 16, '>4f', 0, None, 3),
'setCompassCalibrationCoefficients': (0xa0, 0, None, 48, '>12f', 1),
'setAccelerometerCalibrationCoefficients': (0xa1, 0, None, 48, '>12f', 1),
'getCompassCalibrationCoefficients': (0xa2, 48, '>12f', 0, None, 1),
'getAccelerometerCalibrationCoefficients': (0xa3, 48, '>12f', 0, None, 1),
'getGyroscopeCalibrationCoefficients': (0xa4, 48, '>12f', 0, None, 1),
'beginGyroscopeAutoCalibration': (0xa5, 0, None, 0, None, 1),
'setGyroscopeCalibrationCoefficients': (0xa6, 0, None, 48, '>12f', 1),
'setCalibrationMode': (0xa9, 0, None, 1, '>B', 1),
'getCalibrationMode': (0xaa, 1, '>B', 0, None, 1),
'setOrthoCalibrationDataPointFromCurrentOrientation': (0xab, 0, None, 0, None, 1),
'setOrthoCalibrationDataPointFromVector': (0xac, 0, None, 14, '>BBfff', 1),
'getOrthoCalibrationDataPoint': (0xad, 12, '>fff', 2, '>BB', 1),
'performOrthoCalibration': (0xae, 0, None, 0, None, 1),
'clearOrthoCalibrationData': (0xaf, 0, None, 0, None, 1),
'setSleepMode': (0xe3, 0, None, 1, '>B', 1),
'getSleepMode': (0xe4, 1, '>B', 0, None, 1),
'setJoystickEnabled': (0xf0, 0, None, 1, '>B', 1),
'setMouseEnabled': (0xf1, 0, None, 1, '>B', 1),
'getJoystickEnabled': (0xf2, 1, '>B', 0, None, 1),
'getMouseEnabled': (0xf3, 1, '>B', 0, None, 1),
'setControlMode': (0xf4, 0, None, 3, '>BBB', 1),
'setControlData': (0xf5, 0, None, 7, '>BBBf', 1),
'getControlMode': (0xf6, 1, '>B', 2, '>BB', 1),
'getControlData': (0xf7, 4, '>f', 3, '>BBB', 1),
'setMouseAbsoluteRelativeMode': (0xfb, 0, None, 1, '>B', 1),
'getMouseAbsoluteRelativeMode': (0xfc, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["!BASE"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
return _generateSensorClass(new_inst, serial_port, _TSSensor._device_types)
_print('Error serial port was not made')
def __init__(self, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
self.protocol_args = { 'success_failure': True,
'timestamp': True,
'command_echo': True,
'data_length': True}
if timestamp_mode != TSS_TIMESTAMP_SENSOR:
self.protocol_args['timestamp'] = False
self.timestamp_mode = timestamp_mode
self.baudrate = baudrate
reinit = False
        try:  # if this attribute exists, the instance was already initialized (reconnect case)
            check = self.stream_parse
            reinit = True
            # _print("sensor reinit!!!")
        except AttributeError:
self._setupBaseVariables()
self.callback_func = None
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
self.latest_lock = threading.Condition(threading.Lock())
self.new_data = False
if reinit:
if self.stream_timing is not None:
self.setStreamingTiming(*self.stream_timing)
if self.stream_slot_cmds is not None:
self.setStreamingSlots(*self.stream_slot_cmds)
def _queueWriteRead(self, rtn_dict, rtn_key, retries, command, input_list=None):
try:
for i in range(retries):
packet = self.writeRead(command, input_list)
if packet[0]:
# _print("##Attempt: {0} complete".format(i))
time.sleep(0.1)
continue
rtn_dict[rtn_key] = packet
break
else:
# _print("sensor failed to succeed")
rtn_dict[rtn_key] = (True, None, None)
except(KeyboardInterrupt):
print('\n! Received keyboard interrupt, quitting threads.\n')
            raise KeyboardInterrupt  # fix bug where a thread eats the interrupt
def queueWriteRead(self, rtn_dict, rtn_key, retries, command, input_list=None):
return threading.Thread(target=self._queueWriteRead, args=(rtn_dict, rtn_key, retries, command, input_list))
def _generateStreamParse(self):
stream_string = '>'
if self.stream_slot_cmds is None:
self.getStreamingSlots()
for slot_cmd in self.stream_slot_cmds:
            if slot_cmd != 'null':
out_struct = self.command_dict[slot_cmd][2]
stream_string += out_struct[1:] # stripping the >
self.stream_parse = struct.Struct(stream_string)
# Set streaming batch command
self.command_dict['_getStreamingBatch'] = (0x54, self.stream_parse.size, stream_string, 0, None, 1)
def _parseStreamData(self, protocol_data, output_data):
rtn_list = self.stream_parse.unpack(output_data)
if len(rtn_list) == 1:
rtn_list = rtn_list[0]
self.latest_lock.acquire()
self.new_data = True
self.latest_lock.notify()
self.latest_lock.release()
data = (protocol_data, rtn_list)
self.stream_last_data = data
if self.record_data:
self.stream_data.append(data)
if self.callback_func:
self.callback_func(data)
def _dataReadLoop(self):
while self.data_loop:
try:
self._readDataWiredProHeader()
except(KeyboardInterrupt):
print('\n! Received keyboard interrupt, quitting threads.\n')
                raise KeyboardInterrupt  # fix bug where a thread eats the interrupt
except:
# traceback.print_exc()
# _print("bad _parseStreamData parse")
# _print('!!!!!inWaiting = {0}'.format(self.serial_port.inWaiting()))
self._read_data = None
try:
self.read_lock.release()
except:
pass
def _readDataWiredProHeader(self):
_serial_port = self.serial_port
# in_wait = _serial_port.inWaiting()
# if in_wait:
# _print('!666! inWaiting = {0}'.format(in_wait))
header_bytes = _serial_port.read(self.header_parse.size)
if header_bytes:
if self.timestamp_mode == TSS_TIMESTAMP_SENSOR:
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader71(header_data)
elif self.timestamp_mode == TSS_TIMESTAMP_SYSTEM:
                sys_timestamp = time.clock()  # time the packet was parsed; it may have sat in the system buffer for a few ms
sys_timestamp *= 1000000
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader69(header_data, sys_timestamp)
else:
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader69(header_data, None)
fail_byte, timestamp, cmd_echo, ck_sum, rtn_log_id, sn, data_size = header_list
output_data = _serial_port.read(data_size)
            if cmd_echo == 0xff:
if data_size:
self._parseStreamData(timestamp, output_data)
return
self.read_lock.acquire()
            if len(self.read_queue):  # guard against responses that arrive with no pending request
uid, cmd_byte = self.read_queue.popleft()
if cmd_byte == cmd_echo:
self.read_dict[uid] = (header_list, output_data)
self.read_lock.notify() # dies in 3 seconds if there is a writeRead in wait
else:
# _print('Unrequested packet found!!!')
# _hexDump(header_bytes, 'o')
# _hexDump(output_data, 'o')
self.read_queue.appendleft((uid, cmd_byte))
self.read_lock.release()
return
# _print('Unrequested packet found!!!')
# _hexDump(header_bytes, 'o')
# _hexDump(output_data, 'o')
self.read_lock.release()
def getLatestStreamData(self, timeout):
self.latest_lock.acquire()
self.new_data = False
self.latest_lock.wait(timeout)
self.latest_lock.release()
if self.new_data:
return self.stream_last_data
def setNewDataCallBack(self, callback):
self.callback_func = callback
def startRecordingData(self):
self.record_data = True
def stopRecordingData(self):
self.record_data = False
def clearRecordingData(self):
        self.stream_data = []
# Convenience functions to replace commands 244(0xf4) and 245(0xf5)
def setGlobalAxis(self, hid_type, config_axis, local_axis, global_axis, deadzone, scale, power):
""" Sets an axis of the desired emulated input device as a 'Global Axis'
            style axis. Axes operating under this style use a reference vector
            and a consistent local vector to determine the state of the device's
axis. As the local vector rotates, it is projected onto the global
vector. Once the distance of that projection on the global vector
            exceeds the inputted "deadzone", the device will begin transmitting
non-zero values for the device's desired axis.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param config_axis: A string whose value may be either 'X' or 'Y'
for a mouse or 'X', 'Y', or 'Z' for a joystick. This string
defines what axis of the device is to be configured.
@param local_axis: A list of 3 Floats whose value is a normalized
Vector3. This vector represents the sensor's local vector to
track.
@param global_axis: A list of 3 Floats whose value is a normalized
Vector3. This vector represents the global vector to project the
                local vector onto (should be orthogonal to the local vector).
@param deadzone: A float that defines the minimum distance necessary
for the device's axis to read a non-zero value.
@param scale: A float that defines the linear scale for the values
being returned for the axis.
            @param power: A float whose value is an exponential power used to
further modify data being returned from the sensor.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = hid_type
# Set index
axis_idx = ["X", "Y", "Z"]
if cntl_class == TSS_MOUSE:
axis_idx.pop(-1)
config_axis = config_axis.upper()
cntl_idx = -1
try:
cntl_idx = axis_idx.index(config_axis)
except:
_print("Invalid command for config_axis: {0:s}".format(config_axis))
return False
# Set mode
if not self.setControlMode(cntl_class, cntl_idx, 0):
return False
# Create data array
data_array = local_axis + global_axis + [deadzone, scale, power]
# Set data
for i in range(len(data_array)):
if not self.setControlData(cntl_class, cntl_idx, i, data_array[i]):
return False
return True
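    # A minimal call sketch for setGlobalAxis (values are illustrative only):
    # map rotation of the sensor's local X axis onto the joystick X axis,
    # projected against the global -Z axis.
    #
    #   sensor.setGlobalAxis(TSS_JOYSTICK, "X",
    #                        [1.0, 0.0, 0.0],   # local_axis to track
    #                        [0.0, 0.0, -1.0],  # global_axis to project onto
    #                        0.25,              # deadzone
    #                        1.0,               # scale
    #                        1.0)               # power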
def setScreenPointAxis(self, hid_type, config_axis, dist_from_screen, dist_on_axis, collision_component, sensor_dir, button_halt):
""" Sets an axis of the desired emulated input device as a 'Screen Point
Axis' style axis. An axis operating under this style projects a
            vector along the sensor's direction vector into a mathematical plane.
The collision point on the plane is then used to determine what the
device's axis's current value is. The direction vector is rotated
based on the orientation of the sensor.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param config_axis: A string whose value may be either 'X' or 'Y'
for a mouse or 'X', 'Y', or 'Z' for a joystick. This string
defines what axis of the device is to be configured.
@param dist_from_screen: A float whose value is the real world
distance the sensor is from the user's screen. Must be the same
units as dist_on_axis.
@param dist_on_axis: A float whose value is the real world length of
the axis along the user's screen (width of screen for x-axis,
height of screen for y-axis). Must be the same units as
dist_from_screen.
@param collision_component: A string whose value may be 'X', 'Y', or
'Z'. This string defines what component of the look vector's
collision point on the virtual plane to use for manipulating the
device's axis.
@param sensor_dir: A string whose value may be 'X', 'Y', or 'Z'.
This string defines which of the sensor's local axis to use for
creating the vector to collide with the virtual plane.
@param button_halt: A float whose value is a pause time in
milliseconds. When a button is pressed on the emulated device,
transmission of changes to the axis is paused for the inputted
amount of time to prevent undesired motion detection when
pressing buttons.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = hid_type
# Set index
axis_idx = ["X", "Y", "Z"]
if cntl_class == TSS_MOUSE:
axis_idx.pop(-1)
config_axis = config_axis.upper()
cntl_idx = -1
try:
cntl_idx = axis_idx.index(config_axis)
except:
_print("Invalid command for config_axis: {0:s}".format(config_axis))
return False
# Set mode
if not self.setControlMode(cntl_class, cntl_idx, 1):
return False
# Create data array
axis_idx = ["X", "Y", "Z"]
data_array = []
data_array.append(dist_from_screen)
data_array.append(dist_on_axis)
collision_component = collision_component.upper()
try:
data_array.append(axis_idx.index(collision_component))
except:
_print("Invalid command for collision_component: {0:s}".format(collision_component))
return False
sensor_dir = sensor_dir.upper()
try:
data_array.append(axis_idx.index(sensor_dir))
except:
_print("Invalid command for sensor_dir: {0:s}".format(sensor_dir))
return False
data_array.append(0)
data_array.append(0)
data_array.append(0)
data_array.append(button_halt)
data_array.append(0)
data_array.append(0)
# Set data
for i in range(len(data_array)):
if not self.setControlData(cntl_class, cntl_idx, i, data_array[i]):
return False
return True
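    # A minimal call sketch for setScreenPointAxis (values are illustrative
    # only): a mouse X axis driven by where the sensor's local Z vector hits a
    # virtual screen 24 units away and 52 units wide, with a 50 ms button halt.
    #
    #   sensor.setScreenPointAxis(TSS_MOUSE, "X", 24.0, 52.0, "X", "Z", 50)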
def disableAxis(self, hid_type, config_axis):
""" Disables an axis on the passed in device.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param config_axis: A string whose value may be either 'X' or 'Y'
for a mouse or 'X', 'Y', or 'Z' for a joystick. This string
defines what axis of the device is to be configured.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = hid_type
# Set index
axis_idx = ["X", "Y", "Z"]
if cntl_class == TSS_MOUSE:
axis_idx.pop(-1)
config_axis = config_axis.upper()
cntl_idx = -1
try:
cntl_idx = axis_idx.index(config_axis)
except:
_print("Invalid command for config_axis: {0:s}".format(config_axis))
return False
# Set mode
return self.setControlMode(cntl_class, cntl_idx, 255)
def setPhysicalButton(self, hid_type, button_idx, button_bind):
""" Binds a sensor's physical button to an emulated device's button.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param button_idx: An integer whose value defines which button on
the emulated device to configure. Default range is 0 through 7.
@param button_bind: An integer whose value defines which physical
button to bind to the emulated device's button to as defined by
button_idx, either TSS_BUTTON_LEFT or TSS_BUTTON_RIGHT.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = 1 + hid_type
# Set mode
if not self.setControlMode(cntl_class, button_idx, 0):
return False
# Create data
if button_bind != TSS_BUTTON_LEFT and button_bind != TSS_BUTTON_RIGHT:
_print("Invalid command for button_bind: {0:d}".format(button_bind))
return False
data = button_bind
# Set data
return self.setControlData(cntl_class, button_idx, 0, data)
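    # Sketch: bind the sensor's physical left button to emulated mouse button 0
    # (the same binding used by setupSimpleMouse below).
    #
    #   sensor.setPhysicalButton(TSS_MOUSE, 0, TSS_BUTTON_LEFT)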
def setOrientationButton(self, hid_type, button_idx, local_axis, global_axis, max_dist):
""" Sets up a device's button such that it is 'pressed' when a reference
vector aligns itself with a local vector.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param button_idx: An integer whose value defines which button on
the emulated device to configure. Default range is 0 through 7.
@param local_axis: A list of 3 floats whose value represents a
normalized Vector3. This vector represents the sensor's local
vector to track.
@param global_axis: A list of 3 floats whose value is a normalized
Vector3. This vector represents the global vector to move the
                local vector towards for "pressing" (should not be collinear to
the local vector).
@param max_dist: A float whose value defines how close the local
vector's orientation must be to the global vector for the button
to be 'pressed'.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = 1 + hid_type
# Set mode
if not self.setControlMode(cntl_class, button_idx, 1):
return False
# Create data array
data_array = local_axis + global_axis + [max_dist]
# Set data
for i in range(7):
if not self.setControlData(cntl_class, button_idx, i, data_array[i]):
return False
return True
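    # Sketch: 'press' joystick button 3 when the sensor's local Y axis comes
    # within 0.1 of the global -X axis (the max_dist value is illustrative;
    # setupSimpleJoystick below uses the same pattern).
    #
    #   sensor.setOrientationButton(TSS_JOYSTICK, 3, [0, 1, 0], [-1, 0, 0], 0.1)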
def setShakeButton(self, hid_type, button_idx, threshold):
""" Sets up an emulated device's button such that it is 'pressed' when
the sensor is shaken.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param button_idx: An integer whose value defines which button on
the emulated device to configure. Default range is 0 through 7.
@param threshold: A float whose value defines how many Gs of force
must be experienced by the sensor before the button is
'pressed'.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = 1 + hid_type
# Set mode
if not self.setControlMode(cntl_class, button_idx, 2):
return False
# Create data array
data_array = [0, 0, 0, threshold]
# Set data
for i in range(4):
if not self.setControlData(cntl_class, button_idx, i, data_array[i]):
return False
return True
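    # Sketch: 'press' joystick button 2 when the sensor experiences more than
    # 1.5 G of force (the threshold value is illustrative).
    #
    #   sensor.setShakeButton(TSS_JOYSTICK, 2, 1.5)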
def disableButton(self, hid_type, button_idx):
""" Disables a button on the passed in emulated device.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param button_idx: An integer whose value defines which button on
the emulated device to configure. Default range is 0 through 7.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = 1 + hid_type
# Set mode
return self.setControlMode(cntl_class, button_idx, 255)
    # Convenience functions for setting up simple mouse/joystick implementations
def setupSimpleMouse(self, diagonal_size, dist_from_screen, aspect_ratio, is_relative=True):
""" Creates a simple emulated mouse device using the features of the
sensor. Left button and right button emulate the mouse's left and
            right buttons respectively, and using the sensor as a pointing device
with the front of the device facing towards the screen will move the
mouse cursor.
@param diagonal_size: A float whose value is the real world diagonal
size of the user's screen.
@param dist_from_screen: A float whose value is the real world
distance the sensor is from the user's screen. Must be the same
units as diagonal_size.
@param aspect_ratio: A float whose value is the real world aspect
ratio of the user's screen.
@param is_relative: A boolean whose value expresses whether the
mouse is to operate in relative mode (True) or absolute mode
(False).
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
cur_mouse_rel = self.getMouseAbsoluteRelativeMode()
if cur_mouse_rel != is_relative:
if self.setMouseAbsoluteRelativeMode(is_relative):
fail_byte, t_stamp, data = self.writeRead('softwareReset')
if not fail_byte:
while self.getSerialNumber():
pass
self.close()
time.sleep(5)
                    while not self.reconnect():
                        pass
unit_hyp = (aspect_ratio ** 2 + 1) ** 0.5
screen_multiplyer = diagonal_size / unit_hyp
screen_width = screen_multiplyer * aspect_ratio
screen_height = screen_multiplyer
_print("Height: {0:2f}".format(screen_height))
_print("Width: {0:2f}".format(screen_width))
self.setScreenPointAxis(TSS_MOUSE, "X", dist_from_screen, screen_width, "X", "Z", 50)
self.setScreenPointAxis(TSS_MOUSE, "Y", dist_from_screen, screen_height, "Y", "Z", 50)
self.setPhysicalButton(TSS_MOUSE, 0, TSS_BUTTON_LEFT)
self.setPhysicalButton(TSS_MOUSE, 1, TSS_BUTTON_RIGHT)
self.disableButton(TSS_MOUSE, 2)
self.disableButton(TSS_MOUSE, 3)
self.disableButton(TSS_MOUSE, 4)
self.disableButton(TSS_MOUSE, 5)
self.disableButton(TSS_MOUSE, 6)
self.disableButton(TSS_MOUSE, 7)
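    # Sketch: turn a sensor into an absolute-mode pointing device for a 24"
    # (diagonal) 16:9 screen viewed from roughly 30" away. Units only need to
    # be consistent between diagonal_size and dist_from_screen.
    #
    #   sensor.setupSimpleMouse(24.0, 30.0, 16.0 / 9.0, is_relative=False)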
def setupSimpleJoystick(self, deadzone, scale, power, shake_threshold, max_dist):
""" Creates a simple emulated joystick device using the features of the
sensor. The left and right physical buttons on the sensor act as
buttons 0 and 1 for the joystick. Button 2 is a shake button.
Buttons 3 and 4 are pressed when the sensor is rotated +-90 degrees
on the Z-axis. Rotations on the sensor's Y and X axis correspond to
movements on the joystick's X and Y axis.
@param deadzone: A float that defines the minimum distance necessary
for the device's axis to read a non-zero value.
@param scale: A float that defines the linear scale for the values
being returned for the axis.
            @param power: A float whose value is an exponential power used to
further modify data being returned from the sensor.
@param shake_threshold: A float whose value defines how many Gs of
force must be experienced by the sensor before the button 2 is
'pressed'.
@param max_dist: A float whose value defines how close the local
vector's orientation must be to the global vector for buttons 3
                and 4 to be "pressed".
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
self.setGlobalAxis(TSS_JOYSTICK, "X", [1, 0, 0], [0, 0, -1], deadzone, scale, power)
self.setGlobalAxis(TSS_JOYSTICK, "Y", [0, 1, 0], [0, 0, -1], deadzone, scale, power)
self.setPhysicalButton(TSS_JOYSTICK, 0, TSS_BUTTON_LEFT)
self.setPhysicalButton(TSS_JOYSTICK, 1, TSS_BUTTON_RIGHT)
self.setShakeButton(TSS_JOYSTICK, 2, shake_threshold)
self.setOrientationButton(TSS_JOYSTICK, 3, [0, 1, 0], [-1, 0, 0], max_dist)
self.setOrientationButton(TSS_JOYSTICK, 4, [0, 1, 0], [1, 0, 0], max_dist)
self.disableButton(TSS_JOYSTICK, 5)
self.disableButton(TSS_JOYSTICK, 6)
self.disableButton(TSS_JOYSTICK, 7)
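    # Sketch: a joystick with a 0.25 deadzone, linear response (power 1.0),
    # a 2 G shake button, and roll buttons that trigger within 0.1 of the
    # global axes (all values illustrative).
    #
    #   sensor.setupSimpleJoystick(0.25, 1.0, 1.0, 2.0, 0.1)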
# LightGun Functions
def setupSimpleLightgun(self, diagonal_size, dist_from_screen, aspect_ratio, is_relative=True):
""" Creates a simple emulated mouse based lightgun device using the
features of the sensor. Left button of the sensor emulates the
mouse's left button. Shaking the sensor emulates the mouse's right
            button. This configuration uses the sensor as a pointing device;
            with the front of the device facing toward the screen, moving the
            sensor will move the mouse cursor.
@param diagonal_size: A float whose value is the real world diagonal
size of the user's screen.
@param dist_from_screen: A float whose value is the real world
distance the sensor is from the user's screen. Must be the same
units as diagonal_size.
@param aspect_ratio: A float whose value is the real world aspect
ratio of the user's screen.
@param is_relative: A boolean whose value expresses whether the
mouse is to operate in relative mode (True) or absolute mode
(False).
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
cur_mouse_rel = self.getMouseAbsoluteRelativeMode()
if cur_mouse_rel != is_relative:
if self.setMouseAbsoluteRelativeMode(is_relative):
fail_byte, t_stamp, data = self.writeRead('softwareReset')
if not fail_byte:
while self.getSerialNumber():
pass
self.close()
time.sleep(5)
                    while not self.reconnect():
                        pass
unit_hyp = (aspect_ratio ** 2 + 1) ** 0.5
screen_multiplyer = diagonal_size / unit_hyp
screen_width = screen_multiplyer * aspect_ratio
screen_height = screen_multiplyer
_print("Height: {0:2f}".format(screen_height))
_print("Width: {0:2f}".format(screen_width))
self.setScreenPointAxis(TSS_MOUSE, "X", dist_from_screen, screen_width, "X", "Z", 50)
self.setScreenPointAxis(TSS_MOUSE, "Y", dist_from_screen, screen_height, "Y", "Z", 50)
self.setPhysicalButton(TSS_MOUSE, 0, TSS_BUTTON_LEFT)
self.setShakeButton(TSS_MOUSE, 1, 1.0)
self.disableButton(TSS_MOUSE, 2)
self.disableButton(TSS_MOUSE, 3)
self.disableButton(TSS_MOUSE, 4)
self.disableButton(TSS_MOUSE, 5)
self.disableButton(TSS_MOUSE, 6)
self.disableButton(TSS_MOUSE, 7)
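    # Sketch: lightgun-style setup for the same 24" 16:9 screen used in the
    # setupSimpleMouse example above (values illustrative).
    #
    #   sensor.setupSimpleLightgun(24.0, 30.0, 16.0 / 9.0, is_relative=False)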
## 80(0x50)
def setStreamingSlots(self, slot0='null',
slot1='null',
slot2='null',
slot3='null',
slot4='null',
slot5='null',
slot6='null',
slot7='null'):
slots = [slot0, slot1, slot2, slot3, slot4, slot5, slot6, slot7]
slot_bytes = []
for slot in slots:
cmd_byte = self.command_dict[slot][0]
slot_bytes.append(cmd_byte)
fail_byte, timestamp, filler = self.writeRead('_setStreamingSlots', slot_bytes)
self.stream_slot_cmds = slots
self._generateStreamParse()
return not fail_byte
## 81(0x51)
def getStreamingSlots(self):
if self.stream_slot_cmds is None:
self.stream_slot_cmds = ['null'] * 8
fail_byte, timestamp, slot_bytes = self.writeRead('_getStreamingSlots')
need_update = False
if slot_bytes:
for slot_idx in range(len(self.stream_slot_cmds)):
cmd_byte = slot_bytes[slot_idx]
cmd_string = self.reverse_command_dict[cmd_byte]
if self.stream_slot_cmds[slot_idx] != cmd_string:
self.stream_slot_cmds[slot_idx] = cmd_string
need_update = True
if need_update:
self._generateStreamParse()
return self.stream_slot_cmds
## 82(0x52)
def setStreamingTiming(self, interval, duration, delay, timestamp=False):
arg_list = (interval, duration, delay)
fail_byte, t_stamp, data = self.writeRead('_setStreamingTiming', arg_list)
if not fail_byte:
self.stream_timing = arg_list
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 83(0x53)
def getStreamingTiming(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('_getStreamingTiming')
if data:
self.stream_timing = data
if timestamp:
return (data, t_stamp)
return data
## 84(0x54)
def getStreamingBatch(self, timestamp=False):
if self.stream_parse is None:
self._generateStreamParse()
fail_byte, t_stamp, data = self.writeRead('_getStreamingBatch')
if timestamp:
return (data, t_stamp)
return data
## 85(0x55)
def stopStreaming(self):
self.record_data = False
fail_byte, timestamp, slot_bytes = self.writeRead('stopStreaming')
return not fail_byte
## 86(0x56)
def startStreaming(self, start_record=False):
self.record_data = start_record
if self.stream_parse is None:
self._generateStreamParse()
fail_byte, timestamp, slot_bytes = self.writeRead('startStreaming')
return not fail_byte
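    # Sketch of a typical streaming session built from the commands above
    # (slot names and timing values are illustrative; interval, duration and
    # delay are assumed to be in microseconds, with 0xFFFFFFFF meaning
    # "stream until stopped"):
    #
    #   sensor.setStreamingSlots('getTaredOrientationAsQuaternion',
    #                            'getCorrectedGyroRate')
    #   sensor.setStreamingTiming(10000, 0xFFFFFFFF, 0)
    #   sensor.startStreaming(start_record=False)
    #   sample = sensor.getLatestStreamData(timeout=1.0)
    #   sensor.stopStreaming()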
## generated functions USB and WL_ and EM_ and DL_ and BT_
## 0(0x00)
def getTaredOrientationAsQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 1(0x01)
def getTaredOrientationAsEulerAngles(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsEulerAngles')
if timestamp:
return (data, t_stamp)
return data
## 2(0x02)
def getTaredOrientationAsRotationMatrix(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsRotationMatrix')
if timestamp:
return (data, t_stamp)
return data
## 3(0x03)
def getTaredOrientationAsAxisAngle(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsAxisAngle')
if timestamp:
return (data, t_stamp)
return data
## 4(0x04)
def getTaredOrientationAsTwoVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsTwoVector')
if timestamp:
return (data, t_stamp)
return data
## 5(0x05)
def getDifferenceQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getDifferenceQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 6(0x06)
def getUntaredOrientationAsQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 7(0x07)
def getUntaredOrientationAsEulerAngles(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsEulerAngles')
if timestamp:
return (data, t_stamp)
return data
## 8(0x08)
def getUntaredOrientationAsRotationMatrix(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsRotationMatrix')
if timestamp:
return (data, t_stamp)
return data
## 9(0x09)
def getUntaredOrientationAsAxisAngle(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsAxisAngle')
if timestamp:
return (data, t_stamp)
return data
## 10(0x0a)
def getUntaredOrientationAsTwoVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsTwoVector')
if timestamp:
return (data, t_stamp)
return data
## 11(0x0b)
def getTaredTwoVectorInSensorFrame(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredTwoVectorInSensorFrame')
if timestamp:
return (data, t_stamp)
return data
## 12(0x0c)
def getUntaredTwoVectorInSensorFrame(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredTwoVectorInSensorFrame')
if timestamp:
return (data, t_stamp)
return data
## 16(0x10)
def setEulerAngleDecompositionOrder(self, angle_order, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setEulerAngleDecompositionOrder', angle_order)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 17(0x11)
def setMagnetoresistiveThreshold(self, threshold, trust_frames, lockout_decay, perturbation_detection_value, timestamp=False):
arg_list = (threshold, trust_frames, lockout_decay, perturbation_detection_value)
fail_byte, t_stamp, data = self.writeRead('setMagnetoresistiveThreshold', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 18(0x12)
def setAccelerometerResistanceThreshold(self, threshold, lockout_decay, timestamp=False):
arg_list = (threshold, lockout_decay)
fail_byte, t_stamp, data = self.writeRead('setAccelerometerResistanceThreshold', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 19(0x13)
def offsetWithCurrentOrientation(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('offsetWithCurrentOrientation')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 20(0x14)
def resetBaseOffset(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('resetBaseOffset')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 21(0x15)
def offsetWithQuaternion(self, quaternion, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('offsetWithQuaternion', quaternion)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 22(0x16)
def setBaseOffsetWithCurrentOrientation(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setBaseOffsetWithCurrentOrientation')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 32(0x20)
def getAllNormalizedComponentSensorData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAllNormalizedComponentSensorData')
if timestamp:
return (data, t_stamp)
return data
## 33(0x21)
def getNormalizedGyroRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getNormalizedGyroRate')
if timestamp:
return (data, t_stamp)
return data
## 34(0x22)
def getNormalizedAccelerometerVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getNormalizedAccelerometerVector')
if timestamp:
return (data, t_stamp)
return data
## 35(0x23)
def getNormalizedCompassVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getNormalizedCompassVector')
if timestamp:
return (data, t_stamp)
return data
## 37(0x25)
def getAllCorrectedComponentSensorData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAllCorrectedComponentSensorData')
if timestamp:
return (data, t_stamp)
return data
## 38(0x26)
def getCorrectedGyroRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCorrectedGyroRate')
if timestamp:
return (data, t_stamp)
return data
## 39(0x27)
def getCorrectedAccelerometerVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCorrectedAccelerometerVector')
if timestamp:
return (data, t_stamp)
return data
## 40(0x28)
def getCorrectedCompassVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCorrectedCompassVector')
if timestamp:
return (data, t_stamp)
return data
## 41(0x29)
def getCorrectedLinearAccelerationInGlobalSpace(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCorrectedLinearAccelerationInGlobalSpace')
if timestamp:
return (data, t_stamp)
return data
## 43(0x2b)
def getTemperatureC(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTemperatureC')
if timestamp:
return (data, t_stamp)
return data
## 44(0x2c)
def getTemperatureF(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTemperatureF')
if timestamp:
return (data, t_stamp)
return data
## 45(0x2d)
def getConfidenceFactor(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getConfidenceFactor')
if timestamp:
return (data, t_stamp)
return data
## 64(0x40)
def getAllRawComponentSensorData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAllRawComponentSensorData')
if timestamp:
return (data, t_stamp)
return data
## 65(0x41)
def getRawGyroscopeRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRawGyroscopeRate')
if timestamp:
return (data, t_stamp)
return data
## 66(0x42)
def getRawAccelerometerData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRawAccelerometerData')
if timestamp:
return (data, t_stamp)
return data
## 67(0x43)
def getRawCompassData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRawCompassData')
if timestamp:
return (data, t_stamp)
return data
## 96(0x60)
def tareWithCurrentOrientation(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('tareWithCurrentOrientation')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 97(0x61)
def tareWithQuaternion(self, quaternion, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('tareWithQuaternion', quaternion)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 98(0x62)
def tareWithRotationMatrix(self, rotation_matrix, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('tareWithRotationMatrix', rotation_matrix)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 99(0x63)
def setStaticAccelerometerTrustValue(self, trust_value, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setStaticAccelerometerTrustValue', trust_value)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 100(0x64)
def setConfidenceAccelerometerTrustValues(self, min_trust_value, max_trust_value, timestamp=False):
arg_list = (min_trust_value, max_trust_value)
fail_byte, t_stamp, data = self.writeRead('setConfidenceAccelerometerTrustValues', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 101(0x65)
def setStaticCompassTrustValue(self, trust_value, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setStaticCompassTrustValue', trust_value)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 102(0x66)
def setConfidenceCompassTrustValues(self, min_trust_value, max_trust_value, timestamp=False):
arg_list = (min_trust_value, max_trust_value)
fail_byte, t_stamp, data = self.writeRead('setConfidenceCompassTrustValues', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 103(0x67)
def setDesiredUpdateRate(self, update_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setDesiredUpdateRate', update_rate)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 105(0x69)
def setReferenceVectorMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setReferenceVectorMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 106(0x6a)
def setOversampleRate(self, samples_per_iteration, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setOversampleRate', samples_per_iteration)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 107(0x6b)
def setGyroscopeEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setGyroscopeEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 108(0x6c)
def setAccelerometerEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setAccelerometerEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 109(0x6d)
def setCompassEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setCompassEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 116(0x74)
def setAxisDirections(self, axis_direction_byte, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setAxisDirections', axis_direction_byte)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 117(0x75)
def setRunningAveragePercent(self, running_average_percent, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setRunningAveragePercent', running_average_percent)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 118(0x76)
def setCompassReferenceVector(self, reference_vector, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setCompassReferenceVector', reference_vector)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 119(0x77)
def setAccelerometerReferenceVector(self, reference_vector, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setAccelerometerReferenceVector', reference_vector)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 120(0x78)
def resetKalmanFilter(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('resetKalmanFilter')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 121(0x79)
def setAccelerometerRange(self, accelerometer_range_setting, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setAccelerometerRange', accelerometer_range_setting)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 123(0x7b)
def setFilterMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setFilterMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 124(0x7c)
def setRunningAverageMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setRunningAverageMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 125(0x7d)
def setGyroscopeRange(self, gyroscope_range_setting, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setGyroscopeRange', gyroscope_range_setting)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 126(0x7e)
def setCompassRange(self, compass_range_setting, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setCompassRange', compass_range_setting)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 128(0x80)
def getTareAsQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTareAsQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 129(0x81)
def getTareAsRotationMatrix(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTareAsRotationMatrix')
if timestamp:
return (data, t_stamp)
return data
## 130(0x82)
def getAccelerometerTrustValues(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerTrustValues')
if timestamp:
return (data, t_stamp)
return data
## 131(0x83)
def getCompassTrustValues(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassTrustValues')
if timestamp:
return (data, t_stamp)
return data
## 132(0x84)
def getCurrentUpdateRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCurrentUpdateRate')
if timestamp:
return (data, t_stamp)
return data
## 133(0x85)
def getCompassReferenceVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassReferenceVector')
if timestamp:
return (data, t_stamp)
return data
## 134(0x86)
def getAccelerometerReferenceVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerReferenceVector')
if timestamp:
return (data, t_stamp)
return data
## 140(0x8c)
def getGyroscopeEnabledState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getGyroscopeEnabledState')
if timestamp:
return (data, t_stamp)
return data
## 141(0x8d)
def getAccelerometerEnabledState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerEnabledState')
if timestamp:
return (data, t_stamp)
return data
## 142(0x8e)
def getCompassEnabledState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassEnabledState')
if timestamp:
return (data, t_stamp)
return data
## 143(0x8f)
def getAxisDirections(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAxisDirections')
if timestamp:
return (data, t_stamp)
return data
## 144(0x90)
def getOversampleRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getOversampleRate')
if timestamp:
return (data, t_stamp)
return data
## 145(0x91)
def getRunningAveragePercent(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRunningAveragePercent')
if timestamp:
return (data, t_stamp)
return data
## 146(0x92)
def getDesiredUpdateRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getDesiredUpdateRate')
if timestamp:
return (data, t_stamp)
return data
## 148(0x94)
def getAccelerometerRange(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerRange')
if timestamp:
return (data, t_stamp)
return data
## 152(0x98)
def getFilterMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getFilterMode')
if timestamp:
return (data, t_stamp)
return data
## 153(0x99)
def getRunningAverageMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRunningAverageMode')
if timestamp:
return (data, t_stamp)
return data
## 154(0x9a)
def getGyroscopeRange(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getGyroscopeRange')
if timestamp:
return (data, t_stamp)
return data
## 155(0x9b)
def getCompassRange(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassRange')
if timestamp:
return (data, t_stamp)
return data
## 156(0x9c)
def getEulerAngleDecompositionOrder(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getEulerAngleDecompositionOrder')
if timestamp:
return (data, t_stamp)
return data
## 157(0x9d)
def getMagnetoresistiveThreshold(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getMagnetoresistiveThreshold')
if timestamp:
return (data, t_stamp)
return data
## 158(0x9e)
def getAccelerometerResistanceThreshold(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerResistanceThreshold')
if timestamp:
return (data, t_stamp)
return data
## 159(0x9f)
def getOffsetOrientationAsQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getOffsetOrientationAsQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 160(0xa0)
def setCompassCalibrationCoefficients(self, matrix, bias, timestamp=False):
arg_list = []
arg_list.extend(matrix)
arg_list.extend(bias)
fail_byte, t_stamp, data = self.writeRead('setCompassCalibrationCoefficients', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 161(0xa1)
def setAccelerometerCalibrationCoefficients(self, matrix, bias, timestamp=False):
arg_list = []
arg_list.extend(matrix)
arg_list.extend(bias)
fail_byte, t_stamp, data = self.writeRead('setAccelerometerCalibrationCoefficients', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 162(0xa2)
def getCompassCalibrationCoefficients(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassCalibrationCoefficients')
if timestamp:
return (data, t_stamp)
return data
## 163(0xa3)
def getAccelerometerCalibrationCoefficients(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerCalibrationCoefficients')
if timestamp:
return (data, t_stamp)
return data
## 164(0xa4)
def getGyroscopeCalibrationCoefficients(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getGyroscopeCalibrationCoefficients')
if timestamp:
return (data, t_stamp)
return data
## 165(0xa5)
def beginGyroscopeAutoCalibration(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('beginGyroscopeAutoCalibration')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 166(0xa6)
def setGyroscopeCalibrationCoefficients(self, matrix, bias, timestamp=False):
arg_list = []
arg_list.extend(matrix)
arg_list.extend(bias)
fail_byte, t_stamp, data = self.writeRead('setGyroscopeCalibrationCoefficients', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 169(0xa9)
def setCalibrationMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setCalibrationMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 170(0xaa)
def getCalibrationMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCalibrationMode')
if timestamp:
return (data, t_stamp)
return data
## 171(0xab)
def setOrthoCalibrationDataPointFromCurrentOrientation(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setOrthoCalibrationDataPointFromCurrentOrientation')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 172(0xac)
def setOrthoCalibrationDataPointFromVector(self, type, index, vector, timestamp=False):
arg_list = (type, index, vector)
fail_byte, t_stamp, data = self.writeRead('setOrthoCalibrationDataPointFromVector', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 173(0xad)
def getOrthoCalibrationDataPoint(self, type, index, timestamp=False):
arg_list = (type, index)
fail_byte, t_stamp, data = self.writeRead('getOrthoCalibrationDataPoint', arg_list)
if timestamp:
return (data, t_stamp)
return data
## 174(0xae)
def performOrthoCalibration(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('performOrthoCalibration')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 175(0xaf)
def clearOrthoCalibrationData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('clearOrthoCalibrationData')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 227(0xe3)
def setSleepMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setSleepMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 228(0xe4)
def getSleepMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getSleepMode')
if timestamp:
return (data, t_stamp)
return data
## 240(0xf0)
def setJoystickEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setJoystickEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 241(0xf1)
def setMouseEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setMouseEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 242(0xf2)
def getJoystickEnabled(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getJoystickEnabled')
if timestamp:
return (data, t_stamp)
return data
## 243(0xf3)
def getMouseEnabled(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getMouseEnabled')
if timestamp:
return (data, t_stamp)
return data
## 244(0xf4)
def setControlMode(self, control_class, control_index, handler_index, timestamp=False):
arg_list = (control_class, control_index, handler_index)
fail_byte, t_stamp, data = self.writeRead('setControlMode', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 245(0xf5)
def setControlData(self, control_class, control_index, data_point_index, data_point, timestamp=False):
arg_list = (control_class, control_index, data_point_index, data_point)
fail_byte, t_stamp, data = self.writeRead('setControlData', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 246(0xf6)
def getControlMode(self, control_class, control_index, timestamp=False):
arg_list = (control_class, control_index)
fail_byte, t_stamp, data = self.writeRead('getControlMode', arg_list)
if timestamp:
return (data, t_stamp)
return data
## 247(0xf7)
def getControlData(self, control_class, control_index, handler_index, timestamp=False):
arg_list = (control_class, control_index, handler_index)
fail_byte, t_stamp, data = self.writeRead('getControlData', arg_list)
if timestamp:
return (data, t_stamp)
return data
## 251(0xfb)
def setMouseAbsoluteRelativeMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setMouseAbsoluteRelativeMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 252(0xfc)
def getMouseAbsoluteRelativeMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getMouseAbsoluteRelativeMode')
if timestamp:
return (data, t_stamp)
return data
## END generated functions USB and WL_ and EM_ and DL_ and BT_
class TSUSBSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'_setUARTBaudRate': (0xe7, 0, None, 4, '>I', 1),
'getUARTBaudRate': (0xe8, 4, '>I', 0, None, 1),
'getButtonState': (0xfa, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["USB", "USB-HH", "MUSB", "MUSB-HH", "USBWT", "USBWT-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.01)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSUSBSensor._device_types)
_print('Error serial port was not made')
## 231(0xe7)
def setUARTBaudRate(self, baud_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('_setUARTBaudRate', baud_rate)
if not fail_byte:
self.baudrate = baud_rate
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions USB
## 232(0xe8)
def getUARTBaudRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUARTBaudRate')
if timestamp:
return (data, t_stamp)
return data
## 250(0xfa)
def getButtonState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getButtonState')
if timestamp:
return (data, t_stamp)
return data
## END generated functions USB
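# Illustrative helper showing how the TSUSBSensor wrappers above fit together.
# This is a sketch only: the default port name is an assumption, real hardware
# is required for it to return anything useful, and close() is assumed to be
# inherited from the sensor base class.
def _example_usb_sensor_usage(port_name='/dev/ttyACM0'):
    sensor = TSUSBSensor(com_port=port_name)  # may return None on failure
    if sensor is None:
        return None
    try:
        tare_quat = sensor.getTareAsQuaternion()  # command 128(0x80) above
        baud_rate = sensor.getUARTBaudRate()      # command 232(0xe8) above
        return (tare_quat, baud_rate)
    finally:
        sensor.close()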
class TSWLSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'_getWirelessPanID': (0xc0, 2, '>H', 0, None, 1),
'_setWirelessPanID': (0xc1, 0, None, 2, '>H', 1),
'_getWirelessChannel': (0xc2, 1, '>B', 0, None, 1),
'_setWirelessChannel': (0xc3, 0, None, 1, '>B', 1),
'commitWirelessSettings': (0xc5, 0, None, 0, None, 1),
'getWirelessAddress': (0xc6, 2, '>H', 0, None, 1),
'getBatteryVoltage': (0xc9, 4, '>f', 0, None, 1),
'getBatteryPercentRemaining': (0xca, 1, '>B', 0, None, 1),
'getBatteryStatus': (0xcb, 1, '>B', 0, None, 1),
'getButtonState': (0xfa, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["WL", "WL-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR, logical_id=None, dongle=None):
if com_port is None and logical_id is None and dongle is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
new_inst.dongle = None
new_inst.logical_id = None
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.01)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSWLSensor._device_types)
_print('Error serial port was not made')
if logical_id is not None and dongle:
for tries in range(_wireless_retries + 1):
fail_byte, timestamp, serial_number = dongle.faWriteRead(logical_id, 'getSerialNumber')
if not fail_byte:
if serial_number in global_sensorlist:
rtn_inst = global_sensorlist[serial_number]
if rtn_inst.dongle:
_print("sensor was already paired before")
pass
rtn_inst.dongle = dongle
rtn_inst.logical_id = logical_id
dongle.wireless_table[logical_id] = serial_number
rtn_inst.switchToWirelessMode()
return rtn_inst
else:
new_inst = super(_TSSensor, cls).__new__(cls)
for tries in range(_wireless_retries + 1):
fail_byte, timestamp, hardware_version = dongle.faWriteRead(logical_id, 'getHardwareVersionString')
if not fail_byte:
new_inst.device_type = convertString(hardware_version)[4:-8].strip()
break
else:
new_inst.device_type = "WL"
new_inst.dongle = dongle
new_inst.logical_id = logical_id
new_inst.port_name = ""
new_inst.serial_port_settings = {}
new_inst.serial_port = None
new_inst.switchToWirelessMode()
new_inst.serial_number = serial_number
global_sensorlist[serial_number] = new_inst
return new_inst
_print("raise wireless fail error here")
return None
        _print('this should never happen')
return None
def __init__(self, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR, logical_id=None, dongle=None):
self.protocol_args = { 'success_failure': True,
'timestamp': True,
'command_echo': True,
'data_length': True}
if timestamp_mode != TSS_TIMESTAMP_SENSOR:
self.protocol_args['timestamp'] = False
self.timestamp_mode = timestamp_mode
self.baudrate = baudrate
reinit = False
try: # if this is set the class had been there before
check = self.stream_parse
reinit = True
# _print("sensor reinit!!!")
except:
self._setupBaseVariables()
self.callback_func = None
if self.serial_port and not self.data_loop:
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
self.latest_lock = threading.Condition(threading.Lock())
self.new_data = False
if reinit:
if self.stream_timing is not None:
self.setStreamingTiming(*self.stream_timing)
if self.stream_slot_cmds is not None:
self.setStreamingSlots(*self.stream_slot_cmds)
def close(self):
if self.serial_port is not None:
super(TSWLSensor, self).close()
def _wirlessWriteRead(self, command, input_list=None):
result = (True, None, None)
for i in range(_wireless_retries + 1):
result = self.dongle.faWriteRead(self.logical_id, command, input_list)
if not result[0]:
break
return result
def switchToWirelessMode(self):
if self.dongle and self.logical_id is not None:
self.writeRead = self._wirlessWriteRead
self.wireless_com = True
return True
return False
def switchToWiredMode(self):
if self.serial_port:
self.writeRead = self.f9WriteRead
self.wireless_com = False
return True
return False
## 192(0xc0)
def getWirelessPanID(self, timestamp=False):
t_stamp = None
data = None
fail_byte, t_stamp, data = self.writeRead('_getWirelessPanID')
if timestamp:
return (data, t_stamp)
return data
## 193(0xc1)
def setWirelessPanID(self, PanID, timestamp=False):
t_stamp = None
fail_byte = True
if not self.wireless_com:
fail_byte, t_stamp, data = self.writeRead('_setWirelessPanID', PanID)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 194(0xc2)
def getWirelessChannel(self, timestamp=False):
t_stamp = None
data = None
fail_byte, t_stamp, data = self.writeRead('_getWirelessChannel')
if timestamp:
return (data, t_stamp)
return data
## 195(0xc3)
def setWirelessChannel(self, channel, timestamp=False):
t_stamp = None
fail_byte = True
if not self.wireless_com:
fail_byte, t_stamp, data = self.writeRead('_setWirelessChannel', channel)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions WL_
## 197(0xc5)
def commitWirelessSettings(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('commitWirelessSettings')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 198(0xc6)
def getWirelessAddress(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessAddress')
if timestamp:
return (data, t_stamp)
return data
## 201(0xc9)
def getBatteryVoltage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryVoltage')
if timestamp:
return (data, t_stamp)
return data
## 202(0xca)
def getBatteryPercentRemaining(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryPercentRemaining')
if timestamp:
return (data, t_stamp)
return data
## 203(0xcb)
def getBatteryStatus(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryStatus')
if timestamp:
return (data, t_stamp)
return data
## 250(0xfa)
def getButtonState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getButtonState')
if timestamp:
return (data, t_stamp)
return data
## END generated functions WL_
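# Note on the TSWLSensor construction paths above (the port name and logical id
# shown are illustrative): a wired instance is made with
# TSWLSensor(com_port='/dev/ttyACM0'), a wireless one with
# TSWLSensor(dongle=<TSDongle>, logical_id=0). In the wireless case
# switchToWirelessMode() routes writeRead through the dongle (_wirlessWriteRead),
# while switchToWiredMode() restores the direct serial path (f9WriteRead).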
class TSDongle(_TSBase):
command_dict = _TSBase.command_dict.copy()
command_dict.update({
'setWirelessStreamingAutoFlushMode': (0xb0, 0, None, 1, '>B', 1),
'getWirelessStreamingAutoFlushMode': (0xb1, 1, '>B', 0, None, 1),
'_setWirelessStreamingManualFlushBitfield': (0xb2, 0, None, 2, '>H', 1),
'_getWirelessStreamingManualFlushBitfield': (0xb3, 2, '>H', 0, None, 1),
'_getManualFlushSingle': (0xb4, 0, None, 1, '>B', 1),
'_getManualFlushBulk': (0xb5, 0, None, 0, None, 1),
'broadcastSynchronizationPulse': (0xb6, 0, None, 0, None, 1),
'getReceptionBitfield': (0xb7, 2, '>H', 0, None, 1),
'getWirelessPanID': (0xc0, 2, '>H', 0, None, 1),
'setWirelessPanID': (0xc1, 0, None, 2, '>H', 1),
'getWirelessChannel': (0xc2, 1, '>B', 0, None, 1),
'setWirelessChannel': (0xc3, 0, None, 1, '>B', 1),
'commitWirelessSettings': (0xc5, 0, None, 0, None, 1),
'getWirelessAddress': (0xc6, 2, '>H', 0, None, 1),
'getSerialNumberAtLogicalID': (0xd0, 4, '>I', 1, '>B', 1),
'_setSerialNumberAtLogicalID': (0xd1, 0, None, 5, '>BI', 1),
'getWirelessChannelNoiseLevels': (0xd2, 16, '>16B', 0, None, 1),
'setWirelessRetries': (0xd3, 0, None, 1, '>B', 1),
'getWirelessRetries': (0xd4, 1, '>B', 0, None, 1),
'getWirelessSlotsOpen': (0xd5, 1, '>B', 0, None, 1),
'getSignalStrength': (0xd6, 1, '>B', 0, None, 1),
'setWirelessHIDUpdateRate': (0xd7, 0, None, 1, '>B', 1),
'getWirelessHIDUpdateRate': (0xd8, 1, '>B', 0, None, 1),
'setWirelessHIDAsynchronousMode': (0xd9, 0, None, 1, '>B', 1),
'getWirelessHIDAsynchronousMode': (0xda, 1, '>B', 0, None, 1),
'_setWirelessResponseHeaderBitfield': (0xdb, 0, None, 4, '>I', 1),
'_getWirelessResponseHeaderBitfield': (0xdc, 4, '>I', 0, None, 1),
'setJoystickLogicalID': (0xf0, 0, None, 1, '>B', 1),
'setMouseLogicalID': (0xf1, 0, None, 1, '>B', 1),
'getJoystickLogicalID': (0xf2, 1, '>B', 0, None, 1),
'getMouseLogicalID': (0xf3, 1, '>B', 0, None, 1)
})
wl_command_dict = TSWLSensor.command_dict.copy()
_device_types = ["DNG"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(TSDongle, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.05)
serial_port.flushInput()
checkSoftwareVersionFromPort(serial_port)
serial_port.write(bytearray((0xf7, 0xb7, 0xb7)))
reception_bitfield = struct.unpack('>H', serial_port.read(2))[0]
idx = 1
for i in range(15):
if idx & reception_bitfield:
count = 0
serial_port.write(bytearray((0xf7, 0xd0, i, 0xd0 + i)))
wl_id = struct.unpack('>I', serial_port.read(4))[0]
while count < 15:
count += 1
serial_port.write(bytearray((0xf8, i, 0x56, 0x56 + i)))
did_fail = struct.unpack('>B', serial_port.read(1))[0]
if did_fail:
serial_port.read(1)
else:
_print("Stopped {0:08X} on try {1:d}".format(wl_id, count))
serial_port.read(2)
break
idx <<= 1
return _generateSensorClass(new_inst, serial_port, TSDongle._device_types)
_print('Error serial port was not made')
def __init__(self, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
self.protocol_args = { 'success_failure': True,
'timestamp': True,
'command_echo': True,
'logical_id': True,
'data_length': True}
if timestamp_mode != TSS_TIMESTAMP_SENSOR:
self.protocol_args['timestamp'] = False
self.timestamp_mode = timestamp_mode
self.baudrate = baudrate
reinit = False
try: # if this is set the class had been there before
check = self.wireless_table
reinit = True
# _print("sensor reinit!!!")
except:
self._setupBaseVariables()
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
self.setWirelessStreamingAutoFlushMode(1)
self.startStreaming()
def reconnect(self):
self.close()
if not tryPort(self.port_name):
_print("tryport fail")
try:
serial_port = serial.Serial(self.port_name, baudrate=self.baudrate, timeout=0.5, writeTimeout=0.5)
serial_port.applySettingsDict(self.serial_port_settings)
self.serial_port = serial_port
self.setWirelessStreamingAutoFlushMode(0)
time.sleep(0.05)
self.serial_port.flushInput()
for i in range(15):
serial_port.write(bytearray((0xf7, 0xd0, i, 0xd0 + i)))
                wl_id = 0  # default if every read attempt below fails
                for _retry in range(10):  # renamed so it does not shadow the logical id 'i' from the outer loop
try:
wl_id = struct.unpack('>I', serial_port.read(4))[0]
except:
continue
break
if wl_id != 0:
count = 0
while count < 25:
count += 1
serial_port.write(bytearray((0xf8, i, 0x56, 0x56 + i)))
did_fail = struct.unpack('>B', serial_port.read(1))[0]
if did_fail:
serial_port.read(1)
else:
_print("Stopped {0:08X} on try {1:d}".format(wl_id, count))
serial_port.read(2)
break
except:
traceback.print_exc()
return False
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
self.setWirelessStreamingAutoFlushMode(1)
return True
def _setupBaseVariables(self):
self.serial_number_hex = '{0:08X}'.format(self.serial_number)
self.wireless_table = [0] * 15
for i in range(15):
tmp_id = self.f7WriteRead('getSerialNumberAtLogicalID', i)
if tmp_id not in self.wireless_table or tmp_id == 0:
self.wireless_table[i] = tmp_id
else:
self.f7WriteRead('_setSerialNumberAtLogicalID', (i, 0))
def _setupProtocolHeader(self, success_failure=False,
timestamp=False,
command_echo=False,
checksum=False,
logical_id=False,
serial_number=False,
data_length=False):
protocol_header =_generateProtocolHeader( success_failure,
timestamp,
command_echo,
checksum,
logical_id,
serial_number,
data_length)
protocol_byte, self.header_parse, self.header_idx_lst = protocol_header
d_header = self.f7WriteRead('_getWiredResponseHeaderBitfield')
dwl_header = self.f7WriteRead('_getWirelessResponseHeaderBitfield')
if d_header != protocol_byte or dwl_header != protocol_byte:
self.f7WriteRead('_setWiredResponseHeaderBitfield', protocol_byte)
self.f7WriteRead('_setWirelessResponseHeaderBitfield', protocol_byte)
d_header = self.f7WriteRead('_getWiredResponseHeaderBitfield')
dwl_header = self.f7WriteRead('_getWirelessResponseHeaderBitfield')
if d_header != protocol_byte or dwl_header != protocol_byte:
print("!!!!!fail d_header={0}, dwl_header={1}, protocol_header_byte={2}".format(d_header, dwl_header, protocol_byte))
raise Exception
# Wireless Old Protocol WriteRead
def f8WriteRead(self, logical_id, command, input_list=None):
command_args = self.command_dict[command]
cmd_byte, out_len, out_struct, in_len, in_struct, compatibility = command_args
packed_data = None
if in_struct:
if type(input_list) in (list, tuple):
packed_data = struct.pack(in_struct, *input_list)
else:
packed_data = struct.pack(in_struct, input_list)
write_array = makeWriteArray(0xf8, logical_id, cmd_byte, packed_data)
self.serial_port.write(write_array)
rtn_list = []
output_data = self.serial_port.read(2)
if len(output_data) == 2:
fail_byte = struct.unpack('>B', output_data[0])[0]
logical_id_byte = struct.unpack('>B', output_data[1])[0]
rtn_list.append(fail_byte)
if not fail_byte:
self.serial_port.read(1)
else:
return True
if out_struct:
output_data = self.serial_port.read(out_len)
rtn_list.append(struct.unpack(out_struct, output_data))
if len(rtn_list) != 1:
return rtn_list
return rtn_list[0]
return True
## Wireless New Protocol WriteRead
def faWriteRead(self, logical_id, command, input_list=None):
global global_counter
command_args = self.wl_command_dict[command]
cmd_byte, out_len, out_struct, in_len, in_struct, compatibility = command_args
if self.compatibility < compatibility:
raise Exception("Firmware for device on ( %s ) is out of date for this function. Recommend updating to latest firmware." % self.serial_port.name)
packed_data = None
if in_struct:
if type(input_list) in (list, tuple):
                packed_data = struct.pack(in_struct, *input_list)
            else:
                packed_data = struct.pack(in_struct, input_list)
write_array = makeWriteArray(0xfa, logical_id, cmd_byte, packed_data)
while len(self.read_queue) > 15:
_print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!too many commands!!!!!")
time.sleep(0.01)
self.read_lock.acquire()
uid = global_counter
global_counter += 1
try:
self.serial_port.write(write_array) # release in reader thread
except serial.SerialTimeoutException:
self.read_lock.release()
self.serial_port.close()
# _print("SerialTimeoutException!!!!")
return (True, None, None)
except ValueError:
try:
# _print("trying to open it back up!!!!")
self.serial_port.open()
# _print("aaand open!!!!")
except serial.SerialException:
self.read_lock.release()
# _print("SerialTimeoutException!!!!")
return (True, None, None)
queue_packet = (uid, cmd_byte)
timeout_time = 0.5 + (len(self.read_queue) * 0.150) # timeout increases as queue gets larger
self.read_queue.append(queue_packet)
start_time = time.clock() + timeout_time
read_data = None
while(timeout_time > 0):
self.read_lock.wait(timeout_time)
read_data = self.read_dict.get(uid, None)
if read_data is not None:
break
            timeout_time = start_time - time.clock()
# _print("Still waiting {0} {1} {2} {3}".format(uid, command,logical_id, timeout_time))
else:
# _print("Operation timed out!!!!")
try:
self.read_queue.remove(queue_packet)
except:
traceback.print_exc()
self.read_lock.release()
return (True, None, None)
self.read_lock.release()
del self.read_dict[uid]
header_list, output_data = read_data
fail_byte, timestamp, cmd_echo, ck_sum, rtn_log_id, sn, data_size = header_list
# _print("RESponse {0} {1} {2} {3}".format(uid, command,logical_id, timeout_time))
if logical_id != rtn_log_id:
# _print("!!!!!!!!logical_id != rtn_log_id!!!!!")
# _print(header_list)
# _hexDump(output_data, 'o')
# _print('!!!!!inWaiting = {0}'.format(self.serial_port.inWaiting()))
return (True, timestamp, None)
if cmd_echo != cmd_byte:
# _print("!!!!!!!!cmd_echo!=cmd_byte!!!!!")
# _print('cmd_echo= 0x{0:02x} cmd_byte= 0x{1:02x}'.format(cmd_echo, cmd_byte))
# _print(header_list)
# _hexDump(output_data, 'o')
# _print('!!!!!inWaiting = {0}'.format(self.serial_port.inWaiting()))
# _print('!!!!!!end')
return (True, timestamp, None)
rtn_list = None
if not fail_byte:
if out_struct:
rtn_list = struct.unpack(out_struct, output_data)
if len(rtn_list) == 1:
rtn_list = rtn_list[0]
elif cmd_echo == 0x54:
rtn_list = self[logical_id].stream_parse.unpack(output_data)
if len(rtn_list) == 1:
rtn_list = rtn_list[0]
else:
# _print("fail_byte!!!!triggered")
pass
self._read_data = None
return (fail_byte, timestamp, rtn_list)
def __getitem__(self, idx):
hw_id = self.wireless_table[idx]
if hw_id == 0:
return None
# Check if sensor exists.
if hw_id in global_sensorlist:
rtn_inst = global_sensorlist[hw_id]
if rtn_inst.dongle is self:
return rtn_inst
elif rtn_inst.dongle is None:
_print("updating sensor {0:08X} to be wireless".format(hw_id))
return TSWLSensor(timestamp_mode=self.timestamp_mode, dongle=self, logical_id=idx)
return None
# Else, make a new TSWLSensor
else:
_print("making new sensor {0:08X}".format(hw_id))
return TSWLSensor(timestamp_mode=self.timestamp_mode, dongle=self, logical_id=idx)
def getSensorFromDongle(self, idx):
return self.__getitem__(idx)
def setSensorToDongle(self, idx, hw_id):
other_hw_id = self.wireless_table[idx]
if other_hw_id != 0:
if other_hw_id in global_sensorlist:
other_sens = global_sensorlist[other_hw_id]
other_sens.dongle = None
other_sens.logical_id = None
if hw_id not in self.wireless_table:
if hw_id in global_sensorlist:
sensor = global_sensorlist[hw_id]
sensor.dongle = None
sensor.logical_id = None
self.setSerialNumberAtLogicalID(idx, hw_id)
else:
if other_hw_id != hw_id:
other_idx = self.wireless_table.index(hw_id)
self.setSerialNumberAtLogicalID(other_idx, 0)
self.setSerialNumberAtLogicalID(idx, hw_id)
return self.__getitem__(idx)
elif hw_id != 0:
self.setSerialNumberAtLogicalID(idx, hw_id)
return self.__getitem__(idx)
def _dataReadLoop(self):
while self.data_loop:
try:
self._readDataWirelessProHeader()
except(KeyboardInterrupt):
print('\n! Received keyboard interrupt, quitting threads.\n')
                raise KeyboardInterrupt # fix bug where a thread eats the interrupt
break
except:
# traceback.print_exc()
# _print("bad _parseStreamData parse")
# _print('!!!!!inWaiting = {0}'.format(self.serial_port.inWaiting()))
try:
self.read_lock.release()
except:
pass
def _readDataWirelessProHeader(self):
_serial_port = self.serial_port
# in_wait = _serial_port.inWaiting()
# if in_wait:
# _print('!1025! inWaiting = {0}'.format(in_wait))
header_bytes = _serial_port.read(self.header_parse.size)
if header_bytes:
# _hexDump(header_bytes, 'o')
if self.timestamp_mode == TSS_TIMESTAMP_SENSOR:
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader87(header_data)
elif self.timestamp_mode == TSS_TIMESTAMP_SYSTEM:
                sys_timestamp = time.clock() # time the packet was parsed; it might have been in the system buffer a few ms
sys_timestamp *= 1000000
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader85(header_data, sys_timestamp)
else:
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader85(header_data, None)
fail_byte, timestamp, cmd_echo, ck_sum, rtn_log_id, sn, data_size = header_list
# _print("!!!!fail_byte={0}, cmd_echo={1}, rtn_log_id={2}, data_size={3}".format(fail_byte, cmd_echo, rtn_log_id, data_size))
output_data = _serial_port.read(data_size)
            if cmd_echo == 0xff:
if data_size:
self[rtn_log_id]._parseStreamData(timestamp, output_data)
return
self.read_lock.acquire()
# _print('retrning data!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
if len(self.read_queue): # here for a bug in the code
uid, cmd_byte = self.read_queue.popleft()
if cmd_byte == cmd_echo:
self.read_dict[uid] = (header_list, output_data)
self.read_lock.notifyAll() # dies in 3 seconds if there is a writeRead in wait
else:
# _print('Unrequested packet found!!!')
# _hexDump(header_bytes, 'o')
# _hexDump(output_data, 'o')
self.read_queue.appendleft((uid, cmd_byte))
self.read_lock.release()
return
# _print('Unrequested packet found (read_queue is empty)!!!')
# _hexDump(header_bytes, 'o')
# _hexDump(output_data, 'o')
# _print("no status bytes")
self.read_lock.release()
## 209(0xd1)
def setSerialNumberAtLogicalID(self, logical_id, serial_number, timestamp=False):
arg_list = (logical_id, serial_number)
fail_byte, t_stamp, data = self.writeRead('_setSerialNumberAtLogicalID', arg_list)
if not fail_byte:
self.wireless_table[logical_id] = serial_number
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions DNG
## 176(0xb0)
def setWirelessStreamingAutoFlushMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessStreamingAutoFlushMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 177(0xb1)
def getWirelessStreamingAutoFlushMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessStreamingAutoFlushMode')
if timestamp:
return (data, t_stamp)
return data
## 182(0xb6)
def broadcastSynchronizationPulse(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('broadcastSynchronizationPulse')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 183(0xb7)
def getReceptionBitfield(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getReceptionBitfield')
if timestamp:
return (data, t_stamp)
return data
## 192(0xc0)
def getWirelessPanID(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessPanID')
if timestamp:
return (data, t_stamp)
return data
## 193(0xc1)
def setWirelessPanID(self, PanID, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessPanID', PanID)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 194(0xc2)
def getWirelessChannel(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessChannel')
if timestamp:
return (data, t_stamp)
return data
## 195(0xc3)
def setWirelessChannel(self, channel, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessChannel', channel)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 197(0xc5)
def commitWirelessSettings(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('commitWirelessSettings')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 198(0xc6)
def getWirelessAddress(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessAddress')
if timestamp:
return (data, t_stamp)
return data
## 208(0xd0)
def getSerialNumberAtLogicalID(self, logical_id, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getSerialNumberAtLogicalID', logical_id)
if timestamp:
return (data, t_stamp)
return data
## 210(0xd2)
def getWirelessChannelNoiseLevels(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessChannelNoiseLevels')
if timestamp:
return (data, t_stamp)
return data
## 211(0xd3)
def setWirelessRetries(self, retries, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessRetries', retries)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 212(0xd4)
def getWirelessRetries(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessRetries')
if timestamp:
return (data, t_stamp)
return data
## 213(0xd5)
def getWirelessSlotsOpen(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessSlotsOpen')
if timestamp:
return (data, t_stamp)
return data
## 214(0xd6)
def getSignalStrength(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getSignalStrength')
if timestamp:
return (data, t_stamp)
return data
## 215(0xd7)
def setWirelessHIDUpdateRate(self, update_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessHIDUpdateRate', update_rate)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 216(0xd8)
def getWirelessHIDUpdateRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessHIDUpdateRate')
if timestamp:
return (data, t_stamp)
return data
## 217(0xd9)
def setWirelessHIDAsynchronousMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessHIDAsynchronousMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 218(0xda)
def getWirelessHIDAsynchronousMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessHIDAsynchronousMode')
if timestamp:
return (data, t_stamp)
return data
## 240(0xf0)
def setJoystickLogicalID(self, logical_id, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setJoystickLogicalID', logical_id)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 241(0xf1)
def setMouseLogicalID(self, logical_id, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setMouseLogicalID', logical_id)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 242(0xf2)
def getJoystickLogicalID(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getJoystickLogicalID')
if timestamp:
return (data, t_stamp)
return data
## 243(0xf3)
def getMouseLogicalID(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getMouseLogicalID')
if timestamp:
return (data, t_stamp)
return data
## END generated functions DNG
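# Illustrative helper showing how a paired wireless sensor is reached through the
# dongle class above. Sketch only: the port name and logical id are assumptions,
# and the sensor at that logical id must already be paired (see setSensorToDongle).
def _example_dongle_usage(port_name='/dev/ttyUSB0', logical_id=0):
    dongle = TSDongle(com_port=port_name)  # may return None on failure
    if dongle is None:
        return None
    serial_number = dongle.getSerialNumberAtLogicalID(logical_id)  # command 208(0xd0) above
    wl_sensor = dongle[logical_id]  # TSWLSensor instance via __getitem__, or None
    battery = wl_sensor.getBatteryVoltage() if wl_sensor is not None else None
    return (serial_number, battery)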
class TSEMSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'setPinMode': (0x1d, 0, None, 2, '>BB', 1),
'getPinMode': (0x1e, 2, '>BB', 0, None, 1),
'getInterruptStatus': (0x1f, 1, '>B', 0, None, 1),
'_setUARTBaudRate': (0xe7, 0, None, 4, '>I', 1),
'getUARTBaudRate': (0xe8, 4, '>I', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["EM", "EM-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.01)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSEMSensor._device_types)
_print('Error serial port was not made')
## 231(0xe7)
def setUARTBaudRate(self, baud_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('_setUARTBaudRate', baud_rate)
if not fail_byte:
self.baudrate = baud_rate
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions EM_
## 29(0x1d)
def setPinMode(self, mode, pin, timestamp=False):
arg_list = (mode, pin)
fail_byte, t_stamp, data = self.writeRead('setPinMode', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 30(0x1e)
def getPinMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getPinMode')
if timestamp:
return (data, t_stamp)
return data
## 31(0x1f)
def getInterruptStatus(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getInterruptStatus')
if timestamp:
return (data, t_stamp)
return data
## 232(0xe8)
def getUARTBaudRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUARTBaudRate')
if timestamp:
return (data, t_stamp)
return data
## END generated functions EM_
class TSDLSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'turnOnMassStorage': (0x39, 0, None, 0, None, 1),
'turnOffMassStorage': (0x3a, 0, None, 0, None, 1),
'formatAndInitializeSDCard': (0x3b, 0, None, 0, None, 1),
'beginDataLoggingSession': (0x3c, 0, None, 0, None, 1),
'endDataLoggingSession': (0x3d, 0, None, 0, None, 1),
'setClockValues': (0x3e, 0, None, 6, '>6B', 1),
'getClockValues': (0x3f, 6, '>6B', 0, None, 1),
'getBatteryVoltage': (0xc9, 4, '>f', 0, None, 1),
'getBatteryPercentRemaining': (0xca, 1, '>B', 0, None, 1),
'getBatteryStatus': (0xcb, 1, '>B', 0, None, 1),
'getButtonState': (0xfa, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["DL", "DL-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.01)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSDLSensor._device_types)
_print('Error serial port was not made')
## generated functions DL_
## 57(0x39)
def turnOnMassStorage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('turnOnMassStorage')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 58(0x3a)
def turnOffMassStorage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('turnOffMassStorage')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 59(0x3b)
def formatAndInitializeSDCard(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('formatAndInitializeSDCard')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 60(0x3c)
def beginDataLoggingSession(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('beginDataLoggingSession')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 61(0x3d)
def endDataLoggingSession(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('endDataLoggingSession')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 62(0x3e)
def setClockValues(self, month, day, year, hour, minute, second, timestamp=False):
arg_list = (month, day, year, hour, minute, second)
fail_byte, t_stamp, data = self.writeRead('setClockValues', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 63(0x3f)
def getClockValues(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getClockValues')
if timestamp:
return (data, t_stamp)
return data
## 201(0xc9)
def getBatteryVoltage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryVoltage')
if timestamp:
return (data, t_stamp)
return data
## 202(0xca)
def getBatteryPercentRemaining(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryPercentRemaining')
if timestamp:
return (data, t_stamp)
return data
## 203(0xcb)
def getBatteryStatus(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryStatus')
if timestamp:
return (data, t_stamp)
return data
## 250(0xfa)
def getButtonState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getButtonState')
if timestamp:
return (data, t_stamp)
return data
## END generated functions DL_
class TSBTSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'getBatteryVoltage': (0xc9, 4, '>f', 0, None, 1),
'getBatteryPercentRemaining': (0xca, 1, '>B', 0, None, 1),
'getBatteryStatus': (0xcb, 1, '>B', 0, None, 1),
'_setUARTBaudRate': (0xe7, 0, None, 4, '>I', 1),
'getUARTBaudRate': (0xe8, 4, '>I', 0, None, 1),
'getButtonState': (0xfa, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["BT", "BT-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=2.5, writeTimeout=2.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.25)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSBTSensor._device_types)
_print('Error serial port was not made')
## 231(0xe7)
def setUARTBaudRate(self, baud_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('_setUARTBaudRate', baud_rate)
if not fail_byte:
self.baudrate = baud_rate
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions BT_
## 201(0xc9)
def getBatteryVoltage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryVoltage')
if timestamp:
return (data, t_stamp)
return data
## 202(0xca)
def getBatteryPercentRemaining(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryPercentRemaining')
if timestamp:
return (data, t_stamp)
return data
## 203(0xcb)
def getBatteryStatus(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryStatus')
if timestamp:
return (data, t_stamp)
return data
## 232(0xe8)
def getUARTBaudRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUARTBaudRate')
if timestamp:
return (data, t_stamp)
return data
## 250(0xfa)
def getButtonState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getButtonState')
if timestamp:
return (data, t_stamp)
return data
## END generated functions BT_
global_broadcaster = Broadcaster()
| mit | 2,051,369,659,077,840,100 | 39.913335 | 157 | 0.573452 | false | 3.788331 | false | false | false |
lioncui/pybix | client/plugin/RedisPlugin.py | 1 | 3753 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from lib import pybixlib
import traceback
from p_class import plugins
import redis
class RedisPlugin(plugins.plugin):
def __init__(self, uuid, taskConf, agentType):
plugins.plugin.__init__(
self, uuid, taskConf, agentType)
    def data_format_MB(self, data):
        # Use float division so fractional megabytes are not truncated
        # (plain '/' between ints is floor division under Python 2).
        data = int(data)
        data = data / 1048576.0
        data = "%.2f" % data
        data = float(data)
        return data
    def data_format_Ratio(self, hit, mis):
        # Hit ratio in percent; multiply by 100.0 to force float division.
        hit = int(hit)
        mis = int(mis)
        if (hit + mis) == 0:
            return 0
        data = (hit * 100.0) / (hit + mis)
        data = "%.2f" % data
        data = float(data)
        return data
def data_format_connected_per_min(self, connected, min):
data = float(connected)/min
data = "%.2f" % data
return data
def data_format_command_per_min(self, command, min):
data = float(command)/min
data = "%.2f" % data
return data
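    # Worked example of the formatting helpers above (input values are made up):
    #   data_format_MB(3145728)                -> 3.0     bytes converted to MB
    #   data_format_Ratio(90, 10)              -> 90.0    keyspace hit percentage
    #   data_format_command_per_min(1200, 60)  -> '20.00' (note: returns a string)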
def getData(self):
status_content = {}
try:
host = self.taskConf.get("host")
port = self.taskConf.get("port")
password = self.taskConf.get("password")
self.server = redis.StrictRedis(host=host, port=port,
password=password,
socket_connect_timeout=30)
self.info = self.server.info()
status_content['redis_version'] = self.info['redis_version']
status_content['used_memory'] = self.info['used_memory']
status_content['connected_clients'] = self.info[
'connected_clients']
status_content['connected_slaves'] = self.info['connected_slaves']
status_content['uptime_in_minutes'] = self.info[
'uptime_in_seconds'] / 60
#status_content['connected_per_min'] = self.data_format_connected_per_min(status_content['connected_clients'], status_content['uptime_in_minutes'])
status_content['rejected_connections'] = self.info[
'rejected_connections']
status_content['pubsub_patterns'] = self.info['pubsub_patterns']
status_content['pubsub_channels'] = self.info['pubsub_channels']
status_content['keyspace_hits'] = self.info['keyspace_hits']
status_content['keyspace_misses'] = self.info['keyspace_misses']
#status_content['keyspace_hits'] = self.data_format_Ratio(self.info['keyspace_hits'], self.info['keyspace_misses'])
status_content['commands_total'] = self.info[
'total_commands_processed']
#status_content['command_per_min'] = self.data_format_command_per_min(self.info['total_commands_processed'], status_content['uptime_in_minutes'])
status_content['usedMemoryRss'] = self.info['used_memory_rss']
status_content['memFragmentationRatio'] = self.info[
'mem_fragmentation_ratio']
status_content['blockedClients'] = self.info['blocked_clients']
totalKey = 0
for key in self.info:
if key.startswith('db'):
totalKey = totalKey + self.info[key]['keys']
status_content['totalKeys'] = totalKey
except Exception:
pybixlib.error(self.logHead + traceback.format_exc())
self.errorInfoDone(traceback.format_exc())
status_content = {}
finally:
self.setData({'agentType': self.agentType, 'uuid': self.uuid,
'code': self.code, 'time': self.getCurTime(),
'data': status_content, 'error_info': self.error_info})
self.intStatus()
| gpl-3.0 | -5,436,894,713,689,930,000 | 41.647727 | 159 | 0.565681 | false | 4.048544 | false | false | false |
kernsuite-debian/lofar | SAS/ResourceAssignment/ResourceAssignmentEditor/lib/webservice.py | 1 | 39598 | #!/usr/bin/env python3
# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy)
# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This file is part of the LOFAR software suite.
# The LOFAR software suite is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The LOFAR software suite is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
# $Id$
'''ResourceAssignmentEditor webservice serves an interactive HTML5 website for
viewing and editing LOFAR resources.'''
import sys
import os
import time
from optparse import OptionParser
from threading import Condition, Lock, current_thread, Thread
import _strptime
from datetime import datetime, timedelta
from json import loads as json_loads
import time
import logging
import subprocess
from dateutil import parser, tz
from flask import Flask
from flask import render_template
from flask import request
from flask import abort
from flask import url_for
from lofar.common.flask_utils import gzipped
from lofar.messaging.rpc import RPCException
from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME
from lofar.sas.resourceassignment.resourceassignmenteditor.fakedata import *
from lofar.sas.resourceassignment.resourceassignmenteditor.changeshandler import ChangesHandler, CHANGE_DELETE_TYPE
from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
from lofar.mom.momqueryservice.momqueryrpc import MoMQueryRPC
from lofar.sas.resourceassignment.resourceassignmenteditor.mom import updateTaskMomDetails
from lofar.sas.resourceassignment.resourceassignmenteditor.storage import updateTaskStorageDetails
from lofar.sas.datamanagement.cleanup.rpc import CleanupRPC
from lofar.sas.datamanagement.storagequery.rpc import StorageQueryRPC
from lofar.sas.otdb.otdbrpc import OTDBRPC
from lofar.common import isProductionEnvironment, isTestEnvironment
from lofar.common.util import humanreadablesize
from lofar.common.subprocess_utils import communicate_returning_strings
from lofar.common import dbcredentials
from lofar.sas.resourceassignment.database.radb import RADatabase
logger = logging.getLogger(__name__)
def asDatetime(isoString):
if isoString[-1] == 'Z':
isoString = isoString[:-1]
if isoString[-4] == '.':
isoString += '000'
return datetime.strptime(isoString, '%Y-%m-%dT%H:%M:%S.%f')
def asIsoFormat(timestamp):
return datetime.strftime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')
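# Illustrative round trip of the two helpers above (the values are made up):
#   asDatetime('2017-06-01T12:00:00.000Z')   -> datetime(2017, 6, 1, 12, 0)
#   asIsoFormat(datetime(2017, 6, 1, 12, 0)) -> '2017-06-01T12:00:00.000000Z'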
__root_path = os.path.dirname(os.path.realpath(__file__))
'''The flask webservice app'''
app = Flask('Scheduler',
instance_path=__root_path,
template_folder=os.path.join(__root_path, 'templates'),
static_folder=os.path.join(__root_path, 'static'),
instance_relative_config=True)
# Load the default configuration
app.config.from_object('lofar.sas.resourceassignment.resourceassignmenteditor.config.default')
try:
import ujson
def convertDictDatetimeValuesToString(obj):
        '''recursively convert all datetime values in the dict (or list) to ISO formatted strings'''
if isinstance(obj, list):
return [convertDictDatetimeValuesToString(x) if (isinstance(x, dict) or isinstance(x, list)) else x for x in obj]
return dict( (k, convertDictDatetimeValuesToString(v) if (isinstance(v, dict) or isinstance(v, list)) else asIsoFormat(v) if isinstance(v, datetime) else v) for k,v in list(obj.items()))
def jsonify(obj):
'''faster implementation of flask.json.jsonify using ultrajson and the above datetime->string convertor'''
json_str = ujson.dumps(dict(convertDictDatetimeValuesToString(obj)))
return app.response_class(json_str, mimetype='application/json')
except:
from flask.json import jsonify
from flask.json import JSONEncoder
class CustomJSONEncoder(JSONEncoder):
def default(self, obj):
try:
if isinstance(obj, datetime):
return asIsoFormat(obj)
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, obj)
app.json_encoder = CustomJSONEncoder
rarpc = None
otdbrpc = None
curpc = None
sqrpc = None
momqueryrpc = None
changeshandler = None
_radb_pool = {}
_radb_pool_lock = Lock()
_radb_dbcreds = None
def radb():
global _radb_pool, _radb_pool_lock
if _radb_dbcreds:
with _radb_pool_lock:
thread = current_thread()
tid = thread.ident
now = datetime.utcnow()
if tid not in _radb_pool:
logger.info('creating radb connection for thread %s', tid)
_radb_pool[tid] = { 'connection': RADatabase(dbcreds=_radb_dbcreds),
'last_used': now }
thread_conn_obj = _radb_pool[tid]
thread_conn_obj['last_used'] = now
threshold = timedelta(minutes=5)
obsolete_connections_tids = [tid for tid,tco in list(_radb_pool.items()) if now - tco['last_used'] > threshold]
for tid in obsolete_connections_tids:
logger.info('deleting radb connection for thread %s', tid)
del _radb_pool[tid]
return thread_conn_obj['connection']
return rarpc
@app.route('/')
@app.route('/index.htm')
@app.route('/index.html')
@gzipped
def index():
'''Serves the ResourceAssignmentEditor's index page'''
return render_template('index.html', title='Scheduler')
@app.route('/projects')
@app.route('/projects.htm')
@app.route('/projects.html')
@gzipped
def projects():
return render_template('projects.html', title='Projects')
@app.route('/rest/config')
@gzipped
def config():
config = {'mom_base_url':'',
'lta_base_url':'',
'inspection_plots_base_url':'https://proxy.lofar.eu/inspect/HTML/',
'sky_view_base_url':'http://dop344.astron.nl:5000/uvis/id'}
if isProductionEnvironment():
config['mom_base_url'] = 'https://lofar.astron.nl/mom3'
config['lta_base_url'] = 'http://lofar.target.rug.nl/'
elif isTestEnvironment():
config['mom_base_url'] = 'http://lofartest.control.lofar:8080/mom3'
config['lta_base_url'] = 'http://lofar-test.target.rug.nl/'
return jsonify({'config': config})
@app.route('/rest/resources')
@gzipped
def resources():
result = radb().getResources(include_availability=True)
return jsonify({'resources': result})
@app.route('/rest/resources/<int:resource_id>')
@gzipped
def resource(resource_id):
result = radb().getResources(resource_ids=[resource_id], include_availability=True)
if result:
return jsonify(result[0])
return jsonify({})
@app.route('/rest/resources/<int:resource_id>/resourceclaims')
@gzipped
def resourceclaimsForResource(resource_id):
return resourceclaimsForResourceFromUntil(resource_id, None, None)
@app.route('/rest/resources/<int:resource_id>/resourceclaims/<string:fromTimestamp>')
@gzipped
def resourceclaimsForResourceFrom(resource_id, fromTimestamp=None):
return resourceclaimsForResourceFromUntil(resource_id, fromTimestamp, None)
@app.route('/rest/resources/<int:resource_id>/resourceclaims/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def resourceclaimsForResourceFromUntil(resource_id, fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
claims = radb().getResourceClaims(lower_bound=fromTimestamp,
upper_bound=untilTimestamp,
resource_ids=[resource_id],
extended=False,
include_properties=True)
return jsonify({'resourceclaims': claims})
@app.route('/rest/resourcegroups')
@gzipped
def resourcegroups():
result = radb().getResourceGroups()
return jsonify({'resourcegroups': result})
@app.route('/rest/resourcegroupmemberships')
@gzipped
def resourcegroupsmemberships():
result = radb().getResourceGroupMemberships()
return jsonify({'resourcegroupmemberships': result})
@app.route('/rest/resourceclaims')
def resourceclaims():
return resourceclaimsFromUntil(None, None)
@app.route('/rest/resourceclaims/<string:fromTimestamp>')
def resourceclaimsFrom(fromTimestamp=None):
return resourceclaimsFromUntil(fromTimestamp, None)
@app.route('/rest/resourceclaims/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def resourceclaimsFromUntil(fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
claims = radb().getResourceClaims(lower_bound=fromTimestamp, upper_bound=untilTimestamp, include_properties=True)
return jsonify({'resourceclaims': claims})
@app.route('/rest/resourceusages')
@gzipped
def resourceUsages():
return resourceUsagesFromUntil(None, None)
@app.route('/rest/resourceusages/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def resourceUsagesFromUntil(fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
result = radb().getResourceUsages(lower_bound=fromTimestamp, upper_bound=untilTimestamp)
return jsonify({'resourceusages': result})
@app.route('/rest/resources/<int:resource_id>/usages', methods=['GET'])
@app.route('/rest/resourceusages/<int:resource_id>', methods=['GET'])
@gzipped
def resourceUsagesForResource(resource_id):
return resourceUsagesForResourceFromUntil(resource_id, None, None)
@app.route('/rest/resources/<int:resource_id>/usages/<string:fromTimestamp>/<string:untilTimestamp>', methods=['GET'])
@app.route('/rest/resourceusages/<int:resource_id>/<string:fromTimestamp>/<string:untilTimestamp>', methods=['GET'])
@gzipped
def resourceUsagesForResourceFromUntil(resource_id, fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
result = radb().getResourceUsages(resource_ids=[resource_id], lower_bound=fromTimestamp, upper_bound=untilTimestamp)
return jsonify({'resourceusages': result})
@app.route('/rest/tasks/<int:task_id>/resourceusages', methods=['GET'])
@gzipped
def resourceUsagesForTask(task_id):
result = radb().getResourceUsages(task_ids=[task_id])
return jsonify({'resourceusages': result})
@app.route('/rest/tasks/<int:task_id>/resourceclaims', methods=['GET'])
@gzipped
def resourceClaimsForTask(task_id):
result = radb().getResourceClaims(task_ids=[task_id], extended=True, include_properties=True)
return jsonify({'resourceclaims': result})
@app.route('/rest/tasks')
def getTasks():
return getTasksFromUntil(None, None)
@app.route('/rest/tasks/<string:fromTimestamp>')
def getTasksFrom(fromTimestamp):
return getTasksFromUntil(fromTimestamp, None)
@app.route('/rest/tasks/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def getTasksFromUntil(fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
tasks = radb().getTasks(fromTimestamp, untilTimestamp)
updateTaskDetails(tasks)
return jsonify({'tasks': tasks})
def updateTaskDetails(tasks):
#update the mom details and the storage details in parallel
t1 = Thread(target=updateTaskMomDetails, args=(tasks, momqueryrpc))
t2 = Thread(target=updateTaskStorageDetails, args=(tasks, sqrpc, curpc))
t1.daemon = True
t2.daemon = True
t1.start()
t2.start()
#wait for mom details thread to finish
t1.join()
#task details (such as name/description) from MoM are done
#get extra details on reserved resources for reservations (while the storage details still run in t2)
reservationTasks = [t for t in tasks if t['type'] == 'reservation']
if reservationTasks:
reservationClaims = radb().getResourceClaims(task_ids=[t['id'] for t in reservationTasks], extended=True, include_properties=False)
task2claims = {}
for claim in reservationClaims:
if claim['task_id'] not in task2claims:
task2claims[claim['task_id']] = []
task2claims[claim['task_id']].append(claim)
for task in reservationTasks:
claims = task2claims.get(task['id'], [])
task['name'] = ', '.join(c['resource_name'] for c in claims)
task['description'] = 'Reservation on ' + task['name']
#wait for storage details thread to finish
t2.join()
@app.route('/rest/tasks/<int:task_id>', methods=['GET'])
@gzipped
def getTask(task_id):
try:
task = radb().getTask(task_id)
if not task:
abort(404)
task['name'] = 'Task %d' % task['id']
updateTaskDetails([task])
return jsonify({'task': task})
except Exception as e:
abort(404)
return jsonify({'task': None})
@app.route('/rest/tasks/otdb/<int:otdb_id>', methods=['GET'])
@gzipped
def getTaskByOTDBId(otdb_id):
try:
task = radb().getTask(otdb_id=otdb_id)
if not task:
abort(404)
task['name'] = 'Task %d' % task['id']
updateTaskDetails([task])
return jsonify({'task': task})
except Exception as e:
abort(404)
return jsonify({'task': None})
@app.route('/rest/tasks/mom/<int:mom_id>', methods=['GET'])
@gzipped
def getTaskByMoMId(mom_id):
try:
task = radb().getTask(mom_id=mom_id)
if not task:
abort(404)
task['name'] = 'Task %d' % task['id']
updateTaskDetails([task])
return jsonify({'task': task})
except Exception as e:
abort(404)
return jsonify({'task': None})
@app.route('/rest/tasks/mom/group/<int:mom_group_id>', methods=['GET'])
@gzipped
def getTasksByMoMGroupId(mom_group_id):
try:
mom_ids = momqueryrpc.getTaskIdsInGroup(mom_group_id)[str(mom_group_id)]
tasks = radb().getTasks(mom_ids=mom_ids)
updateTaskDetails(tasks)
return jsonify({'tasks': tasks})
except Exception as e:
abort(404)
@app.route('/rest/tasks/mom/parentgroup/<int:mom_parent_group_id>', methods=['GET'])
@gzipped
def getTasksByMoMParentGroupId(mom_parent_group_id):
try:
mom_ids = momqueryrpc.getTaskIdsInParentGroup(mom_parent_group_id)[str(mom_parent_group_id)]
tasks = radb().getTasks(mom_ids=mom_ids)
updateTaskDetails(tasks)
return jsonify({'tasks': tasks})
except Exception as e:
abort(404)
@app.route('/rest/tasks/<int:task_id>', methods=['PUT'])
def putTask(task_id):
if 'Content-Type' in request.headers and \
request.headers['Content-Type'].startswith('application/json'):
try:
updatedTask = json_loads(request.data.decode('utf-8'))
if task_id != int(updatedTask['id']):
abort(404, 'task_id in url is not equal to id in request.data')
#check if task is known
task = radb().getTask(task_id)
if not task:
abort(404, "unknown task %s" % str(updatedTask))
# first handle start- endtimes...
if 'starttime' in updatedTask or 'endtime' in updatedTask:
logger.info('starttime or endtime in updatedTask: %s', updatedTask)
if isProductionEnvironment():
abort(403, 'Editing of start/end times of tasks by users is not yet approved')
#update dict for otdb spec
spec_update = {}
for timeprop in ['starttime', 'endtime']:
if timeprop in updatedTask:
try:
updatedTask[timeprop] = asDatetime(updatedTask[timeprop])
except ValueError:
abort(400, 'timestamp not in iso format: ' + updatedTask[timeprop])
otdb_key = 'LOFAR.ObsSW.Observation.' + ('startTime' if timeprop == 'starttime' else 'stopTime')
spec_update[otdb_key] = updatedTask[timeprop].strftime('%Y-%m-%d %H:%M:%S')
#update timestamps in both otdb and radb
otdbrpc.taskSetSpecification(task['otdb_id'], spec_update)
# update the task's (and its claims) start/endtime
# do not update the task's status directly via the radb. See a few lines below: task status is routed via otdb (and then ends up in radb automatically)
# it might be that editing the start/end time results in a (radb) task status update (for example to 'conflict' due to conflicting claims)
# that's ok, since we'll update the status to the requested status later via otdb (see few lines below)
radb().updateTaskAndResourceClaims(task_id,
starttime=updatedTask.get('starttime'),
endtime=updatedTask.get('endtime'))
# ...then, handle status update which might trigger resource assignment,
# for which the above updated times are needed
if 'status' in updatedTask:
if isProductionEnvironment() and task['type'] == 'observation' and updatedTask['status'] == 'prescheduled':
abort(403, 'Scheduling of observations via the webscheduler by users is not (yet) allowed')
try:
#update status in otdb only
#the status change will propagate automatically into radb via other services (by design)
otdbrpc.taskSetStatus(task['otdb_id'], updatedTask['status'])
#we expect the status in otdb/radb to eventually become what we asked for...
expected_status = updatedTask['status']
#block until radb and mom task status are equal to the expected_statuses (with timeout)
start_wait = datetime.utcnow()
while True:
task = radb().getTask(otdb_id=task['otdb_id'])
otdb_status = otdbrpc.taskGetStatus(task['otdb_id'])
logger.info('waiting for otdb/radb task status to be in [%s].... otdb:%s radb:%s',
expected_status, otdb_status, task['status'])
if (task['status'] == expected_status and otdb_status == expected_status):
logger.info('otdb/radb task status now has the expected status %s otdb:%s radb:%s',
expected_status, otdb_status, task['status'])
break
if datetime.utcnow() - start_wait > timedelta(seconds=10):
logger.warning('timeout while waiting for otdb/radb task status to get the expected status %s otdb:%s radb:%s',
expected_status, otdb_status, task['status'])
break
time.sleep(0.1)
except RPCException as e:
if 'does not exist' in str(e):
# task does not exist (anymore) in otdb
#so remove it from radb as well (with cascading deletes on specification)
logger.warning('task with otdb_id %s does not exist anymore in OTDB. removing task radb_id %s from radb', task['otdb_id'], task['id'])
radb().deleteSpecification(task['specification_id'])
if 'data_pinned' in updatedTask:
task = radb().getTask(task_id)
if not task:
abort(404, "unknown task %s" % str(updatedTask))
curpc.setTaskDataPinned(task['otdb_id'], updatedTask['data_pinned'])
return "", 204
except Exception as e:
logger.error(e)
abort(404, str(e))
abort(406)
@app.route('/rest/tasks/<int:task_id>/cleanup', methods=['DELETE'])
def cleanupTaskData(task_id):
try:
delete_params = {}
if 'Content-Type' in request.headers and (request.headers['Content-Type'].startswith('application/json') or request.headers['Content-Type'].startswith('text/plain')):
delete_params = json_loads(request.data.decode('utf-8'))
task = radb().getTask(task_id)
if not task:
abort(404, 'No such task (id=%s)' % task_id)
logger.info("cleanup task data id=%s otdb_id=%s delete_params=%s", task_id, task['otdb_id'], delete_params)
result = curpc.removeTaskData(task['otdb_id'],
delete_is=delete_params.get('delete_is', True),
delete_cs=delete_params.get('delete_cs', True),
delete_uv=delete_params.get('delete_uv', True),
delete_im=delete_params.get('delete_im', True),
delete_img=delete_params.get('delete_img', True),
delete_pulp=delete_params.get('delete_pulp', True),
delete_scratch=delete_params.get('delete_scratch', True),
force=delete_params.get('force_delete', False))
logger.info(result)
return jsonify(result)
except Exception as e:
abort(500)
@app.route('/rest/tasks/<int:task_id>/datapath', methods=['GET'])
@gzipped
def getTaskDataPath(task_id):
try:
task = radb().getTask(task_id)
if not task:
abort(404, 'No such task (id=%s)' % task_id)
result = sqrpc.getPathForOTDBId(task['otdb_id'])
except Exception as e:
abort(500, str(e))
if result['found']:
return jsonify({'datapath': result['path']})
abort(404, result['message'] if result and 'message' in result else '')
@app.route('/rest/tasks/otdb/<int:otdb_id>/diskusage', methods=['GET'])
@gzipped
def getTaskDiskUsageByOTDBId(otdb_id):
try:
result = sqrpc.getDiskUsageForTaskAndSubDirectories(otdb_id=otdb_id, force_update=request.args.get('force')=='true')
except Exception as e:
abort(500, str(e))
if result['found']:
return jsonify(result)
abort(404, result['message'] if result and 'message' in result else '')
@app.route('/rest/tasks/<int:task_id>/diskusage', methods=['GET'])
@gzipped
def getTaskDiskUsage(task_id):
try:
result = sqrpc.getDiskUsageForTaskAndSubDirectories(radb_id=task_id, force_update=request.args.get('force')=='true')
except Exception as e:
abort(500, str(e))
if result['found']:
return jsonify(result)
abort(404, result['message'] if result and 'message' in result else '')
@app.route('/rest/tasks/<int:task_id>/parset', methods=['GET'])
@gzipped
def getParset(task_id):
try:
task = radb().getTask(task_id)
if not task:
abort(404)
return getParsetByOTDBId(task['otdb_id'])
except Exception as e:
abort(404)
abort(404)
@app.route('/rest/tasks/otdb/<int:otdb_id>/parset', methods=['GET'])
@gzipped
def getParsetByOTDBId(otdb_id):
try:
logger.info('getParsetByOTDBId(%s)', otdb_id)
parset = otdbrpc.taskGetSpecification(otdb_id=otdb_id)['specification']
return '\n'.join(['%s=%s' % (k,parset[k]) for k in sorted(parset.keys())]), 200, {'Content-Type': 'text/plain; charset=utf-8'}
except Exception as e:
abort(404)
abort(404)
@app.route('/rest/tasks/<int:task_id>/resourceclaims')
@gzipped
def taskResourceClaims(task_id):
return jsonify({'taskResourceClaims': radb().getResourceClaims(task_ids=[task_id], include_properties=True)})
@app.route('/rest/tasktypes')
@gzipped
def tasktypes():
result = radb().getTaskTypes()
result = sorted(result, key=lambda q: q['id'])
return jsonify({'tasktypes': result})
@app.route('/rest/taskstatustypes')
@gzipped
def getTaskStatusTypes():
result = radb().getTaskStatuses()
result = sorted(result, key=lambda q: q['id'])
return jsonify({'taskstatustypes': result})
@app.route('/rest/resourcetypes')
@gzipped
def resourcetypes():
result = radb().getResourceTypes()
result = sorted(result, key=lambda q: q['id'])
return jsonify({'resourcetypes': result})
@app.route('/rest/resourceclaimpropertytypes')
@gzipped
def resourceclaimpropertytypes():
result = radb().getResourceClaimPropertyTypes()
result = sorted(result, key=lambda q: q['id'])
return jsonify({'resourceclaimpropertytypes': result})
@app.route('/rest/projects')
@gzipped
def getProjects():
projects = []
try:
projects = momqueryrpc.getProjects()
projects = [x for x in projects if x['status_id'] in [1, 7]]
for project in projects:
project['mom_id'] = project.pop('mom2id')
except Exception as e:
logger.error(e)
projects.append({'name':'<unknown>', 'mom_id':-99, 'description': 'Container project for tasks for which we could not find a MoM project'})
projects.append({'name':'OTDB Only', 'mom_id':-98, 'description': 'Container project for tasks which exists only in OTDB'})
projects.append({'name':'Reservations', 'mom_id':-97, 'description': 'Container project for reservation tasks'})
return jsonify({'momprojects': projects})
@app.route('/rest/projects/<int:project_mom2id>')
@gzipped
def getProject(project_mom2id):
try:
projects = momqueryrpc.getProjects()
project = next(x for x in projects if x['mom2id'] == project_mom2id)
return jsonify({'momproject': project})
except StopIteration as e:
logger.error(e)
abort(404, "No project with mom2id %s" % project_mom2id)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/<int:project_mom2id>/tasks')
@gzipped
def getProjectTasks(project_mom2id):
return getProjectTasksFromUntil(project_mom2id, None, None)
@app.route('/rest/projects/<int:project_mom2id>/tasks/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def getProjectTasksFromUntil(project_mom2id, fromTimestamp=None, untilTimestamp=None):
try:
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
task_mom2ids = momqueryrpc.getProjectTaskIds(project_mom2id)['task_mom2ids']
tasks = radb().getTasks(mom_ids=task_mom2ids, lower_bound=fromTimestamp, upper_bound=untilTimestamp)
updateTaskDetails(tasks)
return jsonify({'tasks': tasks})
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/<int:project_mom2id>/taskstimewindow')
@gzipped
def getProjectTasksTimeWindow(project_mom2id):
try:
task_mom2ids = momqueryrpc.getProjectTaskIds(project_mom2id)['task_mom2ids']
timewindow = radb().getTasksTimeWindow(mom_ids=task_mom2ids)
return jsonify(timewindow)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/<int:project_mom2id>/diskusage')
@gzipped
def getProjectDiskUsageById(project_mom2id):
try:
project = momqueryrpc.getProject(project_mom2id=project_mom2id)
return getProjectDiskUsageByName(project['name'])
except StopIteration as e:
logger.error(e)
abort(404, "No project with mom2id %s" % project_mom2id)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/<string:project_name>/diskusage')
@gzipped
def getProjectDiskUsageByName(project_name):
try:
result = sqrpc.getDiskUsageForProjectDirAndSubDirectories(project_name=project_name, force_update=request.args.get('force')=='true')
return jsonify(result)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/diskusage')
@gzipped
def getProjectsDiskUsage():
try:
result = sqrpc.getDiskUsageForProjectsDirAndSubDirectories(force_update=request.args.get('force')=='true')
return jsonify(result)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/momobjectdetails/<int:mom2id>')
@gzipped
def getMoMObjectDetails(mom2id):
details = momqueryrpc.getObjectDetails(mom2id)
details = list(details.values())[0] if details else None
if details:
details['project_mom_id'] = details.pop('project_mom2id')
details['object_mom_id'] = details.pop('object_mom2id')
return jsonify({'momobjectdetails': details})
@app.route('/rest/updates/<int:sinceChangeNumber>')
@gzipped
def getUpdateEventsSince(sinceChangeNumber):
changesSince = changeshandler.getChangesSince(sinceChangeNumber)
return jsonify({'changes': changesSince})
@app.route('/rest/mostRecentChangeNumber')
@gzipped
def getMostRecentChangeNumber():
mrcn = changeshandler.getMostRecentChangeNumber()
return jsonify({'mostRecentChangeNumber': mrcn})
@app.route('/rest/updates')
def getUpdateEvents():
return getUpdateEventsSince(-1)
@app.route('/rest/logEvents')
@gzipped
def getMostRecentLogEvents():
return getLogEventsSince(datetime.utcnow() - timedelta(hours=6))
@app.route('/rest/logEvents/<string:fromTimestamp>')
@gzipped
def getLogEventsSince(fromTimestamp=None):
if not fromTimestamp:
fromTimestamp = datetime.utcnow() - timedelta(hours=6)
eventsSince = changeshandler.getEventsSince(fromTimestamp)
return jsonify({'logEvents': eventsSince})
@app.route('/rest/lofarTime')
@gzipped
def getLofarTime():
return jsonify({'lofarTime': asIsoFormat(datetime.utcnow())})
#ugly method to generate html tables for all tasks
@app.route('/tasks.html')
@gzipped
def getTasksHtml():
tasks = radb().getTasks()
if not tasks:
abort(404)
updateTaskDetails(tasks)
html = '<!DOCTYPE html><html><head><title>Tasks</title><style>table, th, td {border: 1px solid black; border-collapse: collapse; padding: 4px;}</style></head><body><table style="width:100%">\n'
props = sorted(tasks[0].keys())
html += '<tr>%s</tr>\n' % ''.join('<th>%s</th>' % prop for prop in props)
for task in tasks:
html += '<tr>'
for prop in props:
if prop in task:
if prop == 'id':
html += '<td><a href="/rest/tasks/%s.html">%s</a></td> ' % (task[prop], task[prop])
else:
html += '<td>%s</td> ' % task[prop]
html += '</tr>\n'
html += '</table></body></html>\n'
return html
#ugly method to generate html tables for the task and its claims
@app.route('/tasks/<int:task_id>.html', methods=['GET'])
@gzipped
def getTaskHtml(task_id):
task = radb().getTask(task_id)
if not task:
abort(404, 'No such task %s' % task_id)
task['name'] = 'Task %d' % task['id']
updateTaskDetails([task])
html = '<!DOCTYPE html><html><head><title>Tasks</title><style>table, th, td {border: 1px solid black; border-collapse: collapse; padding: 4px;}</style></head><body><table style="">\n'
html += '<h1>Task %s</h1>' % task_id
html += '<p><a href="/tasks/%s/log.html">%s log</a></p> ' % (task['id'], task['type'])
html += '<p><a href="/rest/tasks/%s/parset">view %s parset</a></p> ' % (task['id'], task['type'])
props = sorted(task.keys())
html += '<tr><th>key</th><th>value</th></tr>\n'
for prop in props:
html += '<tr><td>%s</td>' % prop
if prop == 'id':
html += '<td><a href="/tasks/%s.html">%s</a></td> ' % (task[prop], task[prop])
elif prop == 'predecessor_ids' or prop == 'successor_ids':
ids = task[prop]
if ids:
html += '<td>%s</td> ' % ', '.join('<a href="/tasks/%s.html">%s</a>' % (id, id) for id in ids)
else:
html += '<td></td> '
else:
html += '<td>%s</td> ' % task[prop]
html += '</tr>'
html += '</table>\n<br>'
claims = radb().getResourceClaims(task_ids=[task_id], extended=True, include_properties=True)
if claims:
html += '<h1>Claims</h1>'
for claim in claims:
html += '<table>'
for claim_key,claim_value in list(claim.items()):
if claim_key == 'properties':
html += '<tr><td>properties</td><td><table>'
if claim_value:
propnames = sorted(claim_value[0].keys())
html += '<tr>%s</tr>\n' % ''.join('<th>%s</th>' % propname for propname in propnames)
for prop in claim_value:
html += '<tr>%s</tr>\n' % ''.join('<td>%s</td>' % prop[propname] for propname in propnames)
html += '</table></td></tr>'
elif claim_key == 'saps':
html += '<tr><td>saps</td><td><table>'
saps = claim_value
if saps:
sap_keys = ['sap_nr', 'properties']
html += '<tr>%s</tr>\n' % ''.join('<th>%s</th>' % sap_key for sap_key in sap_keys)
for sap in saps:
html += '<tr>'
for sap_key in sap_keys:
if sap_key == 'properties':
html += '<td><table>'
sap_props = sap[sap_key]
if sap_props:
propnames = sorted(sap_props[0].keys())
html += '<tr>%s</tr>\n' % ''.join('<th>%s</th>' % propname for propname in propnames)
for prop in sap_props:
html += '<tr>%s</tr>\n' % ''.join('<td>%s</td>' % prop[propname] for propname in propnames)
html += '</table></td>'
else:
html += '<td>%s</td>' % (sap[sap_key])
html += '</tr>'
html += '</table></td></tr>'
else:
html += '<tr><td>%s</td><td>%s</td></tr>' % (claim_key,claim_value)
html += '</table>'
html += '<br>'
html += '</body></html>\n'
return html
@app.route('/rest/tasks/<int:task_id>/resourceclaims.html', methods=['GET'])
@gzipped
def resourceClaimsForTaskHtml(task_id):
claims = radb().getResourceClaims(task_ids=[task_id], extended=True, include_properties=True)
if not claims:
abort(404, 'No resource claims for task %s' % task_id)
html = '<!DOCTYPE html><html><head><title>Tasks</title><style>table, th, td {border: 1px solid black; border-collapse: collapse; padding: 4px;}</style></head><body><table style="">\n'
for claim in claims:
html += '<tr><td>%s</td>' % claim
html += '</table></body></html>\n'
return html
@app.route('/tasks/<int:task_id>/log.html', methods=['GET'])
@gzipped
def getTaskLogHtml(task_id):
task = radb().getTask(task_id)
cmd = []
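# pipeline logs and observation logs live on different hosts; fetch the right one over ssh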
if task['type'] == 'pipeline':
cmd = ['ssh', '[email protected]', 'cat /data/log/pipeline-%s-*.log' % task['otdb_id']]
else:
cmd = ['ssh', 'mcu001.control.lofar', 'cat /opt/lofar/var/log/mcu001\\:ObservationControl\\[0\\]\\{%s\\}.log*' % task['otdb_id']]
logger.info(' '.join(cmd))
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = communicate_returning_strings(proc)
if proc.returncode == 0:
return out, 200, {'Content-Type': 'text/plain; charset=utf-8'}
else:
return err, 500, {'Content-Type': 'text/plain; charset=utf-8'}
def main():
# make sure we run in UTC timezone
import os
os.environ['TZ'] = 'UTC'
# Check the invocation arguments
parser = OptionParser('%prog [options]',
description='run the resource assignment editor web service')
parser.add_option('--webserver_port', dest='webserver_port', type='int', default=7412, help='port number on which to host the webservice, default: %default')
parser.add_option('-q', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the qpid broker, default: %default')
parser.add_option('--exchange', dest='exchange', type='string', default=DEFAULT_BUSNAME, help='Name of the bus exchange on the qpid broker, default: %default')
parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging')
parser.add_option_group(dbcredentials.options_group(parser))
parser.set_defaults(dbcredentials="RADB")
(options, args) = parser.parse_args()
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
level=logging.DEBUG if options.verbose else logging.INFO)
global _radb_dbcreds
_radb_dbcreds = dbcredentials.parse_options(options)
if _radb_dbcreds.database:
logger.info("Using dbcreds for direct RADB access: %s" % _radb_dbcreds.stringWithHiddenPassword())
else:
_radb_dbcreds = None
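# create the message bus RPC clients shared by all REST endpoints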
global rarpc
rarpc = RADBRPC.create(exchange=options.exchange, broker=options.broker)
global otdbrpc
otdbrpc = OTDBRPC.create(exchange=options.exchange, broker=options.broker)
global curpc
curpc = CleanupRPC.create(exchange=options.exchange, broker=options.broker)
global sqrpc
sqrpc = StorageQueryRPC.create(exchange=options.exchange, timeout=10, broker=options.broker)
global momqueryrpc
momqueryrpc = MoMQueryRPC.create(exchange=options.exchange, timeout=10, broker=options.broker)
global changeshandler
changeshandler = ChangesHandler(exchange=options.exchange,
broker=options.broker, momqueryrpc=momqueryrpc, radbrpc=rarpc, sqrpc=sqrpc)
with changeshandler, rarpc, otdbrpc, curpc, sqrpc, momqueryrpc:
# Start the webserver
app.run(debug=options.verbose, threaded=True, host='0.0.0.0', port=options.webserver_port)
if __name__ == '__main__':
main()
| gpl-3.0 | -5,672,104,624,823,130,000 | 38.244797 | 197 | 0.632027 | false | 3.726169 | false | false | false |
moschlar/SAUCE | migration/versions/530b45f11128_public_submission.py | 1 | 1291 | """public_submission
Revision ID: 530b45f11128
Revises: 282efa88cdbc
Create Date: 2013-10-02 18:31:40.722832
"""
#
# # SAUCE - System for AUtomated Code Evaluation
# # Copyright (C) 2013 Moritz Schlarb
# #
# # This program is free software: you can redistribute it and/or modify
# # it under the terms of the GNU Affero General Public License as published by
# # the Free Software Foundation, either version 3 of the License, or
# # any later version.
# #
# # This program is distributed in the hope that it will be useful,
# # but WITHOUT ANY WARRANTY; without even the implied warranty of
# # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# # GNU Affero General Public License for more details.
# #
# # You should have received a copy of the GNU Affero General Public License
# # along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# revision identifiers, used by Alembic.
revision = '530b45f11128'
down_revision = '26d123af03a7'
from alembic import op
#from alembic.operations import Operations as op
import sqlalchemy as sa
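# Adds a non-nullable 'public' flag to submissions; the server_default ensures existing rows get False.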
def upgrade():
op.add_column('submissions',
sa.Column('public', sa.Boolean(), nullable=False,
default=False, server_default='False'))
def downgrade():
op.drop_column('submissions', 'public')
| agpl-3.0 | 4,536,200,629,361,021,000 | 29.738095 | 79 | 0.726569 | false | 3.576177 | false | false | false |
anurag03/integration_tests | cfme/configure/access_control/__init__.py | 1 | 58461 | import attr
import six
from navmazing import NavigateToSibling, NavigateToAttribute
from widgetastic.widget import Checkbox, View, Text, ConditionalSwitchableView
from widgetastic_patternfly import (
BootstrapSelect, Button, Input, Tab, CheckableBootstrapTreeview as CbTree,
BootstrapSwitch, CandidateNotFound, Dropdown)
from widgetastic_manageiq import (
UpDownSelect, PaginationPane, SummaryFormItem, Table, SummaryForm)
from widgetastic_manageiq.expression_editor import GroupTagExpressionEditor
from cfme.base.credential import Credential
from cfme.base.ui import ConfigurationView
from cfme.common import Taggable
from cfme.exceptions import CFMEException, RBACOperationBlocked
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to
from cfme.utils.blockers import BZ
from cfme.utils.log import logger
from cfme.utils.pretty import Pretty
from cfme.utils.update import Updateable
from cfme.utils.wait import wait_for
EVM_DEFAULT_GROUPS = [
'evmgroup-super_administrator',
'evmgroup-administrator',
'evmgroup-approver',
'evmgroup-auditor',
'evmgroup-desktop',
'evmgroup-operator',
'evmgroup-security',
'evmgroup-support',
'evmgroup-user',
'evmgroup-vm_user'
]
class AccessControlToolbar(View):
""" Toolbar on the Access Control page """
configuration = Dropdown('Configuration')
policy = Dropdown('Policy')
####################################################################################################
# RBAC USER METHODS
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
class UserForm(ConfigurationView):
""" User Form View."""
name_txt = Input(name='name')
userid_txt = Input(name='userid')
password_txt = Input(id='password')
password_verify_txt = Input(id='verify')
email_txt = Input(name='email')
user_group_select = BootstrapSelect(id='chosen_group')
cancel_button = Button('Cancel')
class UsersEntities(View):
table = Table("//div[@id='records_div' or @id='main_div']//table")
class AllUserView(ConfigurationView):
""" All Users View."""
toolbar = View.nested(AccessControlToolbar)
entities = View.nested(UsersEntities)
paginator = PaginationPane()
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Access Control EVM Users'
)
class AddUserView(UserForm):
""" Add User View."""
add_button = Button('Add')
@property
def is_displayed(self):
return self.accordions.accesscontrol.is_opened and self.title.text == "Adding a new User"
class DetailsUserEntities(View):
smart_management = SummaryForm('Smart Management')
class DetailsUserView(ConfigurationView):
""" User Details view."""
toolbar = View.nested(AccessControlToolbar)
entities = View.nested(DetailsUserEntities)
@property
def is_displayed(self):
return (
self.title.text == 'EVM User "{}"'.format(self.context['object'].name) and
self.accordions.accesscontrol.is_opened
)
class EditUserView(UserForm):
""" User Edit View."""
save_button = Button('Save')
reset_button = Button('Reset')
change_stored_password = Text('#change_stored_password')
cancel_password_change = Text('#cancel_password_change')
@property
def is_displayed(self):
return (
self.title.text == 'Editing User "{}"'.format(self.context['object'].name) and
self.accordions.accesscontrol.is_opened
)
@attr.s
class User(Updateable, Pretty, BaseEntity, Taggable):
""" Class represents an user in CFME UI
Args:
name: Name of the user
credential: User's credentials
email: User's email
groups: Add User to multiple groups in Versions >= 5.9.
cost_center: User's cost center
value_assign: user's value to assign
appliance: appliance under test
"""
pretty_attrs = ['name', 'group']
name = attr.ib(default=None)
credential = attr.ib(default=None)
email = attr.ib(default=None)
groups = attr.ib(default=None)
cost_center = attr.ib(default=None)
value_assign = attr.ib(default=None)
_restore_user = attr.ib(default=None, init=False)
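# Using a User as a context manager temporarily switches the appliance's active user to this
# user and restores the previously active user again on exit.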
def __enter__(self):
if self._restore_user != self.appliance.user:
logger.info('Switching to new user: %s', self.credential.principal)
self._restore_user = self.appliance.user
self.appliance.server.logout()
self.appliance.user = self
def __exit__(self, *args, **kwargs):
if self._restore_user != self.appliance.user:
logger.info('Restoring to old user: %s', self._restore_user.credential.principal)
self.appliance.server.logout()
self.appliance.user = self._restore_user
self._restore_user = None
def update(self, updates):
""" Update user method
Args:
updates: user data that should be changed
Note: In case updates is the same as original user data, update will be canceled,
as 'Save' button will not be active
"""
view = navigate_to(self, 'Edit')
self.change_stored_password()
new_updates = {}
if 'credential' in updates:
new_updates.update({
'userid_txt': updates.get('credential').principal,
'password_txt': updates.get('credential').secret,
'password_verify_txt': updates.get('credential').verify_secret
})
new_updates.update({
'name_txt': updates.get('name'),
'email_txt': updates.get('email'),
'user_group_select': getattr(
updates.get('group'),
'description', None)
})
changed = view.fill({
'name_txt': new_updates.get('name_txt'),
'userid_txt': new_updates.get('userid_txt'),
'password_txt': new_updates.get('password_txt'),
'password_verify_txt': new_updates.get('password_verify_txt'),
'email_txt': new_updates.get('email_txt'),
'user_group_select': new_updates.get('user_group_select')
})
if changed:
view.save_button.click()
flash_message = 'User "{}" was saved'.format(updates.get('name', self.name))
else:
view.cancel_button.click()
flash_message = 'Edit of User was cancelled by the user'
view = self.create_view(DetailsUserView, override=updates)
view.flash.assert_message(flash_message)
assert view.is_displayed
def copy(self):
""" Creates copy of existing user
return: User object of copied user
"""
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Copy this User to a new User')
view = self.create_view(AddUserView)
new_user = self.parent.instantiate(
name="{}copy".format(self.name),
credential=Credential(principal='redhat', secret='redhat')
)
view.fill({
'name_txt': new_user.name,
'userid_txt': new_user.credential.principal,
'password_txt': new_user.credential.secret,
'password_verify_txt': new_user.credential.verify_secret
})
view.add_button.click()
view = self.create_view(AllUserView)
view.flash.assert_success_message('User "{}" was saved'.format(new_user.name))
assert view.is_displayed
return new_user
def delete(self, cancel=True):
"""Delete existing user
Args:
cancel: Default value 'True', user will be deleted
'False' - deletion of user will be canceled
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR delete is not allowed
for currently selected user
"""
flash_success_msg = 'EVM User "{}": Delete successful'.format(self.name)
flash_blocked_msg = "Default EVM User \"{}\" cannot be deleted".format(self.name)
delete_user_txt = 'Delete this User'
view = navigate_to(self, 'Details')
if not view.toolbar.configuration.item_enabled(delete_user_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
delete_user_txt))
view.toolbar.configuration.item_select(delete_user_txt, handle_alert=cancel)
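# If the "cannot be deleted" flash message shows up, the delete was blocked by RBAC; the
# AssertionError below means the message was absent and the delete request actually went through.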
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_message(flash_success_msg)
if cancel:
view = self.create_view(AllUserView)
view.flash.assert_success_message(flash_success_msg)
else:
view = self.create_view(DetailsUserView)
assert view.is_displayed
# TODO update elements, after 1469035 fix
def change_stored_password(self, changes=None, cancel=False):
""" Changes user password
Args:
changes: dict with fields to be changed;
if None, the password fields are only enabled
cancel: True, if you want to cancel the password change (the fields are disabled again)
"""
view = navigate_to(self, 'Edit')
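# the password fields are unlocked by executing the change-link's onClick javascript directly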
self.browser.execute_script(
self.browser.get_attribute(
'onClick', self.browser.element(view.change_stored_password)))
if changes:
view.fill(changes)
if cancel:
self.browser.execute_script(
self.browser.get_attribute(
'onClick', self.browser.element(view.cancel_password_change)))
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except CandidateNotFound:
return False
@property
def description(self):
return self.credential.principal
@property
def my_settings(self):
from cfme.configure.settings import MySettings
my_settings = MySettings(appliance=self.appliance)
return my_settings
@attr.s
class UserCollection(BaseCollection):
ENTITY = User
def simple_user(self, userid, password, fullname=None):
"""If a fullname is not supplied, userid is used for credential principal and user name"""
creds = Credential(principal=userid, secret=password)
return self.instantiate(name=fullname or userid, credential=creds)
def create(self, name=None, credential=None, email=None, groups=None, cost_center=None,
value_assign=None, cancel=False):
""" User creation method
Args:
name: Name of the user
credential: User's credentials, credential.principal is used as username
email: User's email
groups: Add User to multiple groups in Versions >= 5.9.
cost_center: User's cost center
value_assign: user's value to assign
cancel: True - if you want to cancel user creation;
by default the user will be created
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR update is not allowed
for currently selected role
"""
if self.appliance.version < "5.8":
user_blocked_msg = "Userid has already been taken"
else:
user_blocked_msg = ("Userid is not unique within region {}".format(
self.appliance.server.zone.region.number))
if type(groups) is not list:
groups = [groups]
if self.appliance.version < "5.9" and len(groups) > 1:
raise CFMEException(
"Assigning a user to multiple groups is only supported in CFME versions > 5.8")
user = self.instantiate(
name=name, credential=credential, email=email, groups=groups, cost_center=cost_center,
value_assign=value_assign
)
# view.fill supports iteration over a list when selecting pulldown list items but
# will throw an exception when the item doesn't appear in the list so filter out
# null items since they "shouldn't" exist
user_group_names = [getattr(ug, 'description', None) for ug in user.groups if ug]
fill_values = {
'name_txt': user.name,
'userid_txt': user.credential.principal,
'email_txt': user.email,
'user_group_select': user_group_names
}
# only fill password if auth_mode is set to Database
if self.appliance.server.authentication.auth_mode.lower() == 'database':
fill_values.update({
'password_txt': user.credential.secret,
'password_verify_txt': user.credential.verify_secret}
)
view = navigate_to(self, 'Add')
view.fill(fill_values)
if cancel:
view.cancel_button.click()
flash_message = 'Add of new User was cancelled by the user'
else:
view.add_button.click()
flash_message = 'User "{}" was saved'.format(user.name)
try:
view.flash.assert_message(user_blocked_msg)
raise RBACOperationBlocked(user_blocked_msg)
except AssertionError:
pass
view = self.create_view(AllUserView)
view.flash.assert_success_message(flash_message)
assert view.is_displayed
# To ensure tree update
view.browser.refresh()
return user
@navigator.register(UserCollection, 'All')
class UserAll(CFMENavigateStep):
VIEW = AllUserView
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Users')
@navigator.register(UserCollection, 'Add')
class UserAdd(CFMENavigateStep):
VIEW = AddUserView
def prerequisite(self):
navigate_to(self.obj.appliance.server, 'Configuration')
return navigate_to(self.obj, 'All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select("Add a new User")
@navigator.register(User, 'Details')
class UserDetails(CFMENavigateStep):
VIEW = DetailsUserView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
try:
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Users', self.obj.name)
except CandidateNotFound:
self.obj.appliance.browser.widgetastic.refresh()
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Users', self.obj.name)
@navigator.register(User, 'Edit')
class UserEdit(CFMENavigateStep):
VIEW = EditUserView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this User')
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# RBAC USER METHODS
####################################################################################################
####################################################################################################
# RBAC GROUP METHODS
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
class MyCompanyTagsTree(View):
tree_locator = 'tags_treebox'
tree = CbTree(tree_locator)
class MyCompanyTagsExpressionView(View):
tag_expression = GroupTagExpressionEditor()
class MyCompanyTagsWithExpression(Tab):
""" Represents 'My company tags' tab in Group Form """
TAB_NAME = "My Company Tags"
tag_mode = BootstrapSelect(id='use_filter_expression')
tag_settings = ConditionalSwitchableView(reference='tag_mode')
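# tag_settings switches between the checkable tag tree ('Specific Tags') and the tag
# expression editor ('Tags Based On Expression'), depending on the selected tag_mode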
tag_settings.register('Specific Tags', default=True, widget=MyCompanyTagsTree)
tag_settings.register('Tags Based On Expression', widget=MyCompanyTagsExpressionView)
class Hosts_And_Clusters(Tab): # noqa
""" Represents 'Hosts and Clusters' tab in Group Form """
TAB_NAME = "Hosts & Clusters"
tree = CbTree('hac_treebox')
class Vms_And_Templates(Tab): # noqa
""" Represents 'VM's and Templates' tab in Group Form """
TAB_NAME = "VMs & Templates"
tree = CbTree('vat_treebox')
class GroupForm(ConfigurationView):
""" Group Form in CFME UI."""
ldap_groups_for_user = BootstrapSelect(id='ldap_groups_user')
description_txt = Input(name='description')
lookup_ldap_groups_chk = Checkbox(name='lookup')
role_select = BootstrapSelect(id='group_role')
group_tenant = BootstrapSelect(id='group_tenant')
user_to_look_up = Input(name='user')
username = Input(name='user_id')
password = Input(name='password')
tag = SummaryFormItem('Smart Management', 'My Company Tags')
cancel_button = Button('Cancel')
retrieve_button = Button('Retrieve')
my_company_tags = View.nested(MyCompanyTagsWithExpression)
hosts_and_clusters = View.nested(Hosts_And_Clusters)
vms_and_templates = View.nested(Vms_And_Templates)
class AddGroupView(GroupForm):
""" Add Group View in CFME UI """
add_button = Button("Add")
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == "Adding a new Group"
)
class DetailsGroupEntities(View):
smart_management = SummaryForm('Smart Management')
my_company_tags = View.nested(MyCompanyTagsWithExpression)
hosts_and_clusters = View.nested(Hosts_And_Clusters)
vms_and_templates = View.nested(Vms_And_Templates)
class DetailsGroupView(ConfigurationView):
""" Details Group View in CFME UI """
toolbar = View.nested(AccessControlToolbar)
entities = View.nested(DetailsGroupEntities)
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'EVM Group "{}"'.format(self.context['object'].description)
)
class EditGroupView(GroupForm):
""" Edit Group View in CFME UI """
save_button = Button("Save")
reset_button = Button('Reset')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Editing Group "{}"'.format(self.context['object'].description)
)
class AllGroupView(ConfigurationView):
""" All Groups View in CFME UI """
toolbar = View.nested(AccessControlToolbar)
table = Table("//div[@id='main_div']//table")
paginator = PaginationPane()
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Access Control EVM Groups'
)
class EditGroupSequenceView(ConfigurationView):
""" Edit Groups Sequence View in CFME UI """
group_order_selector = UpDownSelect(
'#seq_fields',
'//button[@title="Move selected fields up"]/i',
'//button[@title="Move selected fields down"]/i')
save_button = Button('Save')
reset_button = Button('Reset')
cancel_button = Button('Cancel')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == "Editing Sequence of User Groups"
)
@attr.s
class Group(BaseEntity, Taggable):
"""Represents a group in CFME UI
Properties:
description: group description
role: group role
tenant: group tenant
user_to_lookup: ldap user to lookup
ldap_credentials: ldap user credentials
tag: tag for group restriction
host_cluster: host/cluster for group restriction
vm_template: vm/template for group restriction
appliance: appliance under test
"""
pretty_attrs = ['description', 'role']
description = attr.ib(default=None)
role = attr.ib(default=None)
tenant = attr.ib(default="My Company")
ldap_credentials = attr.ib(default=None)
user_to_lookup = attr.ib(default=None)
tag = attr.ib(default=None)
host_cluster = attr.ib(default=None)
vm_template = attr.ib(default=None)
def _retrieve_ldap_user_groups(self):
""" Retrive ldap user groups
return: AddGroupView
"""
view = navigate_to(self.parent, 'Add')
view.fill({'lookup_ldap_groups_chk': True,
'user_to_look_up': self.user_to_lookup,
'username': self.ldap_credentials.principal,
'password': self.ldap_credentials.secret})
view.retrieve_button.click()
return view
def _retrieve_ext_auth_user_groups(self):
""" Retrive external authorization user groups
return: AddGroupView
"""
view = navigate_to(self.parent, 'Add')
view.fill({'lookup_ldap_groups_chk': True,
'user_to_look_up': self.user_to_lookup})
view.retrieve_button.click()
return view
def _fill_ldap_group_lookup(self, view):
""" Fills ldap info for group lookup
Args: view: view for group creation(AddGroupView)
"""
view.fill({'ldap_groups_for_user': self.description,
'description_txt': self.description,
'role_select': self.role,
'group_tenant': self.tenant})
view.add_button.click()
view = self.create_view(AllGroupView)
view.flash.assert_success_message('Group "{}" was saved'.format(self.description))
assert view.is_displayed
def add_group_from_ldap_lookup(self):
"""Adds a group from ldap lookup"""
view = self._retrieve_ldap_user_groups()
self._fill_ldap_group_lookup(view)
def add_group_from_ext_auth_lookup(self):
"""Adds a group from external authorization lookup"""
view = self._retrieve_ext_auth_user_groups()
self._fill_ldap_group_lookup(view)
def update(self, updates):
""" Update group method
Args:
updates: group data that should be changed
Note: In case updates is the same as original group data, update will be canceled,
as 'Save' button will not be active
"""
edit_group_txt = 'Edit this Group'
view = navigate_to(self, 'Details')
if not view.toolbar.configuration.item_enabled(edit_group_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
edit_group_txt))
view = navigate_to(self, 'Edit')
changed = view.fill({
'description_txt': updates.get('description'),
'role_select': updates.get('role'),
'group_tenant': updates.get('tenant')
})
changed_tag = self._set_group_restriction(view.my_company_tags, updates.get('tag'))
changed_host_cluster = self._set_group_restriction(
view.hosts_and_clusters, updates.get('host_cluster'))
changed_vm_template = self._set_group_restriction(
view.vms_and_templates, updates.get('vm_template'))
if changed or changed_tag or changed_host_cluster or changed_vm_template:
view.save_button.click()
flash_message = 'Group "{}" was saved'.format(
updates.get('description', self.description))
else:
view.cancel_button.click()
flash_message = 'Edit of Group was cancelled by the user'
view = self.create_view(DetailsGroupView, override=updates)
view.flash.assert_message(flash_message)
assert view.is_displayed
def delete(self, cancel=True):
"""
Delete existing group
Args:
cancel: Default value 'True', group will be deleted
'False' - deletion of group will be canceled
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR delete is not allowed
for currently selected group
"""
flash_success_msg = 'EVM Group "{}": Delete successful'.format(self.description)
flash_blocked_msg_list = [
('EVM Group "{}": '
'Error during delete: A read only group cannot be deleted.'.format(self.description)),
('EVM Group "{}": Error during delete: '
'The group has users assigned that do not '
'belong to any other group'.format(self.description))]
delete_group_txt = 'Delete this Group'
view = navigate_to(self, 'Details')
if not view.toolbar.configuration.item_enabled(delete_group_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
delete_group_txt))
view.toolbar.configuration.item_select(delete_group_txt, handle_alert=cancel)
for flash_blocked_msg in flash_blocked_msg_list:
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_no_error()
view.flash.assert_message(flash_success_msg)
if cancel:
view = self.create_view(AllGroupView)
view.flash.assert_success_message(flash_success_msg)
else:
view = self.create_view(DetailsGroupView)
assert view.is_displayed, (
"Access Control Group {} Detail View is not displayed".format(self.description))
def set_group_order(self, updated_order):
""" Sets group order for group lookup
Args:
updated_order: group order list
"""
if self.appliance.version < "5.9.2":
name_column = "Name"
else:
name_column = "Description"
find_row_kwargs = {name_column: self.description}
view = navigate_to(self.parent, 'All')
row = view.paginator.find_row_on_pages(view.table, **find_row_kwargs)
original_sequence = row.sequence.text
original_order = self.group_order[:len(updated_order)]
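# reading self.group_order just navigated the browser to the Edit Sequence page,
# so the EditGroupSequenceView can be bound directly without another navigation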
view = self.create_view(EditGroupSequenceView)
assert view.is_displayed
# We pick only the same amount of items for comparing
if updated_order == original_order:
return # Ignore that, would cause error on Save click
view.group_order_selector.fill(updated_order)
view.save_button.click()
view = self.create_view(AllGroupView)
assert view.is_displayed
row = view.paginator.find_row_on_pages(view.table, **find_row_kwargs)
changed_sequence = row.sequence.text
assert original_sequence != changed_sequence, "{} Group Edit Sequence Failed".format(
self.description)
def _set_group_restriction(self, tab_view, item, update=True):
""" Sets tag/host/template restriction for the group
Args:
tab_view: tab view
item: path to check box that should be selected/deselected
ex. _set_group_restriction([parent, child], True)
or tags expression(string) to be set in My company tags in expression editor
ex. _set_group_restriction('fill_tag(My Company Tags : Auto Approve - Max CPU, 1)'),
_set_group_restriction('delete_whole_expression')
update: If True - checkbox state will be updated
Returns: True - if update is successful
"""
updated_result = False
if item is not None:
if update:
if isinstance(item, six.string_types):
updated_result = tab_view.fill({
'tag_mode': 'Tags Based On Expression',
'tag_settings': {'tag_expression': item}})
else:
path, action_type = item
if isinstance(path, list):
tab_form = getattr(tab_view, 'form', tab_view)
tree_view = getattr(tab_form, 'tag_settings', tab_form)
node = (tree_view.tree.CheckNode(path) if action_type else
tree_view.tree.UncheckNode(path))
updated_result = tree_view.tree.fill(node)
return updated_result
@property
def group_order(self):
view = navigate_to(self, 'EditGroupSequence')
return view.group_order_selector.items
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except CandidateNotFound:
return False
@attr.s
class GroupCollection(BaseCollection):
""" Collection object for the :py:class: `cfme.configure.access_control.Group`. """
ENTITY = Group
def create(self, description=None, role=None, tenant="My Company", ldap_credentials=None,
user_to_lookup=None, tag=None, host_cluster=None, vm_template=None, cancel=False):
""" Create group method
Args:
description: group description
role: group role
tenant: group tenant
user_to_lookup: ldap user to lookup
ldap_credentials: ldap user credentials
tag: tag for group restriction
host_cluster: host/cluster for group restriction
vm_template: vm/template for group restriction
appliance: appliance under test
cancel: True - if you want to cancel group creation,
by default group will be created
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR delete is not allowed
for currently selected user
"""
if self.appliance.version < "5.8":
flash_blocked_msg = ("Description has already been taken")
else:
flash_blocked_msg = "Description is not unique within region {}".format(
self.appliance.server.zone.region.number)
view = navigate_to(self, 'Add')
group = self.instantiate(
description=description, role=role, tenant=tenant, ldap_credentials=ldap_credentials,
user_to_lookup=user_to_lookup, tag=tag, host_cluster=host_cluster,
vm_template=vm_template)
view.fill({
'description_txt': group.description,
'role_select': group.role,
'group_tenant': group.tenant
})
group._set_group_restriction(view.my_company_tags, group.tag)
group._set_group_restriction(view.hosts_and_clusters, group.host_cluster)
group._set_group_restriction(view.vms_and_templates, group.vm_template)
if cancel:
view.cancel_button.click()
flash_message = 'Add of new Group was cancelled by the user'
else:
view.add_button.click()
flash_message = 'Group "{}" was saved'.format(group.description)
view = self.create_view(AllGroupView)
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_success_message(flash_message)
assert view.is_displayed
# To ensure that the group list is updated
view.browser.refresh()
return group
@navigator.register(GroupCollection, 'All')
class GroupAll(CFMENavigateStep):
VIEW = AllGroupView
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Groups')
def resetter(self, *args, **kwargs):
self.obj.appliance.browser.widgetastic.browser.refresh()
@navigator.register(GroupCollection, 'Add')
class GroupAdd(CFMENavigateStep):
VIEW = AddGroupView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select("Add a new Group")
@navigator.register(Group, 'EditGroupSequence')
class EditGroupSequence(CFMENavigateStep):
VIEW = EditGroupSequenceView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select(
'Edit Sequence of User Groups for LDAP Look Up')
@navigator.register(Group, 'Details')
class GroupDetails(CFMENavigateStep):
VIEW = DetailsGroupView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Groups', self.obj.description)
@navigator.register(Group, 'Edit')
class GroupEdit(CFMENavigateStep):
VIEW = EditGroupView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this Group')
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# END RBAC GROUP METHODS
####################################################################################################
####################################################################################################
# RBAC ROLE METHODS
####################################################################################################
class RoleForm(ConfigurationView):
""" Role Form for CFME UI """
name_txt = Input(name='name')
vm_restriction_select = BootstrapSelect(id='vm_restriction')
features_tree = CbTree("features_treebox")
cancel_button = Button('Cancel')
class AddRoleView(RoleForm):
""" Add Role View """
add_button = Button('Add')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Adding a new Role'
)
class EditRoleView(RoleForm):
""" Edit Role View """
save_button = Button('Save')
reset_button = Button('Reset')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Editing Role "{}"'.format(self.context['object'].name)
)
class DetailsRoleView(RoleForm):
""" Details Role View """
toolbar = View.nested(AccessControlToolbar)
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Role "{}"'.format(self.context['object'].name)
)
class AllRolesView(ConfigurationView):
""" All Roles View """
toolbar = View.nested(AccessControlToolbar)
table = Table("//div[@id='main_div']//table")
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Access Control Roles'
)
@attr.s
class Role(Updateable, Pretty, BaseEntity):
""" Represents a role in CFME UI
Args:
name: role name
vm_restriction: restriction used for role
product_features: product feature to select
        appliance: appliance under test
"""
pretty_attrs = ['name', 'product_features']
name = attr.ib(default=None)
vm_restriction = attr.ib(default=None)
product_features = attr.ib(default=None)
def __attrs_post_init__(self):
if not self.product_features:
self.product_features = []
def update(self, updates):
""" Update role method
Args:
updates: role data that should be changed
        Note: If updates matches the original role data, the update is cancelled,
        as the 'Save' button will not be active
"""
flash_blocked_msg = "Read Only Role \"{}\" can not be edited".format(self.name)
edit_role_txt = 'Edit this Role'
view = navigate_to(self, 'Details')
# TODO: Remove following code when toolbar disappear issue (BZ1630012) get patched
if not view.toolbar.configuration.is_displayed:
view.browser.refresh()
if not view.toolbar.configuration.item_enabled(edit_role_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
edit_role_txt))
view = navigate_to(self, 'Edit')
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
changed = view.fill({
'name_txt': updates.get('name'),
'vm_restriction_select': updates.get('vm_restriction')
})
feature_changed = self.set_role_product_features(view, updates.get('product_features'))
if changed or feature_changed:
view.save_button.click()
flash_message = 'Role "{}" was saved'.format(updates.get('name', self.name))
else:
view.cancel_button.click()
flash_message = 'Edit of Role was cancelled by the user'
view = self.create_view(DetailsRoleView, override=updates)
view.flash.assert_message(flash_message)
# Typically this would be a safe check but BZ 1561698 will sometimes cause the accordion
# to fail to update the role name w/o a manual refresh causing is_displayed to fail
# Instead of inserting a blind refresh, just disable this until the bug is resolved since
# it's a good check for accordion UI failures
# See BZ https://bugzilla.redhat.com/show_bug.cgi?id=1561698
if not BZ(1561698, forced_streams=['5.9']).blocks:
assert view.is_displayed
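    # Hedged usage sketch for update() (the dict values below are illustrative
    # assumptions, not taken from this module):
    #   role.update({'name': 'qa-role-updated',
    #                'vm_restriction': 'Only User Owned'})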
def delete(self, cancel=True):
""" Delete existing role
Args:
cancel: Default value 'True', role will be deleted
'False' - deletion of role will be canceled
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR delete is not allowed
for currently selected role
"""
flash_blocked_msg = ("Role \"{}\": Error during delete: Cannot delete record "
"because of dependent entitlements".format(self.name))
flash_success_msg = 'Role "{}": Delete successful'.format(self.name)
delete_role_txt = 'Delete this Role'
view = navigate_to(self, 'Details')
if not view.toolbar.configuration.item_enabled(delete_role_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
delete_role_txt))
view.toolbar.configuration.item_select(delete_role_txt, handle_alert=cancel)
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_message(flash_success_msg)
if cancel:
view = self.create_view(AllRolesView)
view.flash.assert_success_message(flash_success_msg)
else:
view = self.create_view(DetailsRoleView)
assert view.is_displayed
def copy(self, name=None):
""" Creates copy of existing role
Returns: Role object of copied role
"""
if name is None:
name = "{}_copy".format(self.name)
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Copy this Role to a new Role')
view = self.create_view(AddRoleView)
new_role = self.parent.instantiate(name=name)
view.fill({'name_txt': new_role.name})
view.add_button.click()
view = self.create_view(AllRolesView)
view.flash.assert_success_message('Role "{}" was saved'.format(new_role.name))
assert view.is_displayed
return new_role
def set_role_product_features(self, view, product_features):
""" Sets product features for role restriction
Args:
view: AddRoleView or EditRoleView
product_features: list of product features with options to select
"""
if product_features is not None and isinstance(product_features, (list, tuple, set)):
changes = [
view.fill({
'features_tree': CbTree.CheckNode(path) if option else CbTree.UncheckNode(path)
})
for path, option in product_features
]
return True in changes
else:
return False
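# Hedged note on Role.set_role_product_features (illustrative, not from the
# original source): ``product_features`` is iterated above as ``(path, option)``
# pairs, where ``path`` is a tree path accepted by CbTree and ``option`` selects
# check/uncheck, e.g.
#   [(['Everything'], False), (['Everything', 'Settings'], True)]
# (the concrete paths here are assumptions).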
@attr.s
class RoleCollection(BaseCollection):
ENTITY = Role
def create(self, name=None, vm_restriction=None, product_features=None, cancel=False):
""" Create role method
Args:
cancel: True - if you want to cancel role creation,
by default, role will be created
Raises:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR update is not allowed
for currently selected role
"""
flash_blocked_msg = "Name has already been taken"
role = self.instantiate(
name=name, vm_restriction=vm_restriction, product_features=product_features
)
view = navigate_to(self, 'Add')
view.fill({'name_txt': role.name,
'vm_restriction_select': role.vm_restriction})
role.set_role_product_features(view, role.product_features)
if cancel:
view.cancel_button.click()
flash_message = 'Add of new Role was cancelled by the user'
else:
view.add_button.click()
flash_message = 'Role "{}" was saved'.format(role.name)
view = self.create_view(AllRolesView)
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_success_message(flash_message)
assert view.is_displayed
return role
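# Hedged usage sketch for RoleCollection.create (collection name and argument
# values are illustrative assumptions):
#   role = appliance.collections.roles.create(
#       name='qa-role', vm_restriction='None',
#       product_features=[(['Everything'], True)])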
@navigator.register(RoleCollection, 'All')
class RoleAll(CFMENavigateStep):
VIEW = AllRolesView
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Roles')
@navigator.register(RoleCollection, 'Add')
class RoleAdd(CFMENavigateStep):
VIEW = AddRoleView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select("Add a new Role")
@navigator.register(Role, 'Details')
class RoleDetails(CFMENavigateStep):
VIEW = DetailsRoleView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
        self.prerequisite_view.browser.refresh()  # workaround for 5.9 issue of role not shown
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Roles', self.obj.name)
@navigator.register(Role, 'Edit')
class RoleEdit(CFMENavigateStep):
VIEW = EditRoleView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this Role')
####################################################################################################
# RBAC TENANT METHODS
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
class TenantForm(ConfigurationView):
""" Tenant Form """
name = Input(name='name')
description = Input(name='description')
add_button = Button('Add')
cancel_button = Button('Cancel')
class TenantQuotaForm(View):
cpu_cb = BootstrapSwitch(id='cpu_allocated')
memory_cb = BootstrapSwitch(id='mem_allocated')
storage_cb = BootstrapSwitch(id='storage_allocated')
vm_cb = BootstrapSwitch(id='vms_allocated')
template_cb = BootstrapSwitch(id='templates_allocated')
cpu_txt = Input(id='id_cpu_allocated')
memory_txt = Input(id='id_mem_allocated')
storage_txt = Input(id='id_storage_allocated')
vm_txt = Input(id='id_vms_allocated')
template_txt = Input(id='id_templates_allocated')
class TenantQuotaView(ConfigurationView):
""" Tenant Quota View """
form = View.nested(TenantQuotaForm)
save_button = Button('Save')
reset_button = Button('Reset')
cancel_button = Button('Cancel')
@property
def is_displayed(self):
return (
self.form.template_cb.is_displayed and
self.title.text == 'Manage quotas for {} "{}"'.format(self.context['object'].obj_type,
self.context['object'].name))
class AllTenantView(ConfigurationView):
""" All Tenants View """
toolbar = View.nested(AccessControlToolbar)
table = Table('//*[@id="miq-gtl-view"]/miq-data-table/div/table')
paginator = PaginationPane()
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Access Control Tenants'
)
class AddTenantView(ConfigurationView):
""" Add Tenant View """
form = View.nested(TenantForm)
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.form.description.is_displayed and
self.title.text in ('Adding a new Project', 'Adding a new Tenant')
)
class DetailsTenantEntities(View):
smart_management = SummaryForm('Smart Management')
class DetailsTenantView(ConfigurationView):
""" Details Tenant View """
entities = View.nested(DetailsTenantEntities)
# Todo move to entities
toolbar = View.nested(AccessControlToolbar)
name = Text('Name')
description = Text('Description')
parent = Text('Parent')
table = Table('//*[self::fieldset or @id="fieldset"]/table')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == '{} "{}"'.format(self.context['object'].obj_type,
self.context['object'].name)
)
class ParentDetailsTenantView(DetailsTenantView):
""" Parent Tenant Details View """
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == '{} "{}"'.format(self.context['object'].parent_tenant.obj_type,
self.context['object'].parent_tenant.name)
)
class EditTenantView(View):
""" Edit Tenant View """
form = View.nested(TenantForm)
save_button = Button('Save')
reset_button = Button('Reset')
@property
def is_displayed(self):
return (
self.form.accordions.accesscontrol.is_opened and
self.form.description.is_displayed and
self.form.title.text == 'Editing {} "{}"'.format(self.context['object'].obj_type,
self.context['object'].name)
)
@attr.s
class Tenant(Updateable, BaseEntity, Taggable):
""" Class representing CFME tenants in the UI.
* Kudos to mfalesni *
The behaviour is shared with Project, which is the same except it cannot create more nested
tenants/projects.
Args:
name: Name of the tenant
description: Description of the tenant
parent_tenant: Parent tenant, can be None, can be passed as string or object
"""
obj_type = 'Tenant'
name = attr.ib()
description = attr.ib(default="")
parent_tenant = attr.ib(default=None)
_default = attr.ib(default=False)
def update(self, updates):
""" Update tenant/project method
Args:
updates: tenant/project data that should be changed
        Note: If updates matches the original tenant/project data, the update is cancelled,
        as the 'Save' button will not be active
"""
view = navigate_to(self, 'Edit')
changed = view.form.fill(updates)
if changed:
view.save_button.click()
if self.appliance.version < '5.9':
flash_message = 'Project "{}" was saved'.format(updates.get('name', self.name))
else:
flash_message = '{} "{}" has been successfully saved.'.format(
self.obj_type, updates.get('name', self.name))
else:
view.cancel_button.click()
if self.appliance.version < '5.9':
flash_message = 'Edit of Project "{}" was cancelled by the user'.format(
updates.get('name', self.name))
else:
flash_message = 'Edit of {} "{}" was canceled by the user.'.format(
self.obj_type, updates.get('name', self.name))
view = self.create_view(DetailsTenantView, override=updates)
view.flash.assert_message(flash_message)
def delete(self, cancel=True):
""" Delete existing role
Args:
cancel: Default value 'True', role will be deleted
'False' - deletion of role will be canceled
"""
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select(
'Delete this item', handle_alert=cancel)
if cancel:
view = self.create_view(ParentDetailsTenantView)
view.flash.assert_success_message(
'Tenant "{}": Delete successful'.format(self.description))
else:
            view = self.create_view(DetailsTenantView)
assert view.is_displayed
def set_quota(self, **kwargs):
""" Sets tenant quotas """
view = navigate_to(self, 'ManageQuotas')
changed = view.form.fill({'cpu_cb': kwargs.get('cpu_cb'),
'cpu_txt': kwargs.get('cpu'),
'memory_cb': kwargs.get('memory_cb'),
'memory_txt': kwargs.get('memory'),
'storage_cb': kwargs.get('storage_cb'),
'storage_txt': kwargs.get('storage'),
'vm_cb': kwargs.get('vm_cb'),
'vm_txt': kwargs.get('vm'),
'template_cb': kwargs.get('template_cb'),
'template_txt': kwargs.get('template')})
if changed:
view.save_button.click()
expected_msg = 'Quotas for {} "{}" were saved'.format(self.obj_type, self.name)
else:
view.cancel_button.click()
expected_msg = 'Manage quotas for {} "{}" was cancelled by the user'\
.format(self.obj_type, self.name)
view = self.create_view(DetailsTenantView)
view.flash.assert_success_message(expected_msg)
assert view.is_displayed
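    # Hedged usage sketch for set_quota() (the quota values are illustrative
    # assumptions):
    #   tenant.set_quota(cpu_cb=True, cpu='10', memory_cb=True, memory='50.0',
    #                    storage_cb=True, storage='150.0', vm_cb=True, vm='100',
    #                    template_cb=True, template='10')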
@property
def quota(self):
view = navigate_to(self, 'Details')
quotas = {
'cpu': 'Allocated Virtual CPUs',
'memory': 'Allocated Memory in GB',
'storage': 'Allocated Storage in GB',
'num_vms': 'Allocated Number of Virtual Machines',
'templates': 'Allocated Number of Templates'
}
for field in quotas:
item = view.table.row(name=quotas[field])
quotas[field] = {
'total': item.total_quota.text,
'in_use': item.in_use.text,
'allocated': item.allocated.text,
'available': item.available.text
}
return quotas
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
else:
return self.tree_path == other.tree_path
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except CandidateNotFound:
return False
@property
def tree_path(self):
if self._default:
return [self.name]
else:
return self.parent_tenant.tree_path + [self.name]
@property
def parent_path(self):
return self.parent_tenant.tree_path
@attr.s
class TenantCollection(BaseCollection):
"""Collection class for Tenant"""
ENTITY = Tenant
def get_root_tenant(self):
return self.instantiate(str(self.appliance.rest_api.collections.tenants[0].name),
default=True)
def create(self, name, description, parent):
if self.appliance.version > '5.9':
tenant_success_flash_msg = 'Tenant "{}" has been successfully added.'
else:
tenant_success_flash_msg = 'Tenant "{}" was saved'
tenant = self.instantiate(name, description, parent)
view = navigate_to(tenant.parent_tenant, 'Details')
view.toolbar.configuration.item_select('Add child Tenant to this Tenant')
view = self.create_view(AddTenantView)
wait_for(lambda: view.is_displayed, timeout=5)
changed = view.form.fill({'name': name,
'description': description})
if changed:
view.form.add_button.click()
else:
view.form.cancel_button.click()
view = self.create_view(ParentDetailsTenantView)
view.flash.assert_success_message(tenant_success_flash_msg.format(name))
return tenant
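    # Hedged usage sketch for create() (names are illustrative assumptions):
    #   root = appliance.collections.tenants.get_root_tenant()
    #   child = appliance.collections.tenants.create(
    #       name='tenant1', description='test tenant', parent=root)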
def delete(self, *tenants):
view = navigate_to(self, 'All')
for tenant in tenants:
try:
row = view.table.row(name=tenant.name)
row[0].check()
except Exception:
logger.exception('Failed to check element "%s"', tenant.name)
else:
view.toolbar.configuration.item_select('Delete selected items', handle_alert=True)
@navigator.register(TenantCollection, 'All')
class TenantAll(CFMENavigateStep):
VIEW = AllTenantView
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Tenants')
@navigator.register(Tenant, 'Details')
class TenantDetails(CFMENavigateStep):
VIEW = DetailsTenantView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Tenants', *self.obj.tree_path)
@navigator.register(Tenant, 'Edit')
class TenantEdit(CFMENavigateStep):
VIEW = EditTenantView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this item')
@navigator.register(Tenant, 'ManageQuotas')
class TenantManageQuotas(CFMENavigateStep):
VIEW = TenantQuotaView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Manage Quotas')
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# END TENANT METHODS
####################################################################################################
####################################################################################################
# RBAC PROJECT METHODS
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
class Project(Tenant):
""" Class representing CFME projects in the UI.
Project cannot create more child tenants/projects.
Args:
name: Name of the project
description: Description of the project
parent_tenant: Parent project, can be None, can be passed as string or object
"""
obj_type = 'Project'
class ProjectCollection(TenantCollection):
"""Collection class for Projects under Tenants"""
ENTITY = Project
def get_root_tenant(self):
# returning Tenant directly because 'My Company' needs to be treated like Tenant object,
# to be able to make child tenant/project under it
return self.appliance.collections.tenants.instantiate(
name=str(self.appliance.rest_api.collections.tenants[0].name), default=True)
def create(self, name, description, parent):
if self.appliance.version > '5.9':
project_success_flash_msg = 'Project "{}" has been successfully added.'
else:
project_success_flash_msg = 'Project "{}" was saved'
project = self.instantiate(name, description, parent)
view = navigate_to(project.parent_tenant, 'Details')
view.toolbar.configuration.item_select('Add Project to this Tenant')
view = self.create_view(AddTenantView)
wait_for(lambda: view.is_displayed, timeout=5)
changed = view.form.fill({'name': name,
'description': description})
if changed:
view.form.add_button.click()
else:
view.form.cancel_button.click()
view = self.create_view(ParentDetailsTenantView)
view.flash.assert_success_message(project_success_flash_msg.format(name))
return project
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# END PROJECT METHODS
####################################################################################################
| gpl-2.0 | 2,881,992,309,818,430,000 | 34.821691 | 100 | 0.600349 | false | 4.180265 | true | false | false |
uksf/modpack | tools/build.py | 1 | 3612 | #!/usr/bin/env python3
import os
import sys
import subprocess
######## GLOBALS #########
MAINPREFIX = "u"
PREFIX = "uksf_"
##########################
def tryHemttBuild(projectpath):
hemttExe = os.path.join(projectpath, "hemtt.exe")
if os.path.isfile(hemttExe):
os.chdir(projectpath)
ret = subprocess.call([hemttExe, "pack"], stderr=subprocess.STDOUT)
return True
else:
print("hemtt not installed");
return False
def mod_time(path):
if not os.path.isdir(path):
return os.path.getmtime(path)
maxi = os.path.getmtime(path)
for p in os.listdir(path):
maxi = max(mod_time(os.path.join(path, p)), maxi)
return maxi
def check_for_changes(addonspath, module):
if not os.path.exists(os.path.join(addonspath, "{}{}.pbo".format(PREFIX,module))):
return True
return mod_time(os.path.join(addonspath, module)) > mod_time(os.path.join(addonspath, "{}{}.pbo".format(PREFIX,module)))
def check_for_obsolete_pbos(addonspath, file):
module = file[len(PREFIX):-4]
if not os.path.exists(os.path.join(addonspath, module)):
return True
return False
def main(argv):
print("""
#####################
# UKSF Debug Build #
#####################
""")
compile_extensions = False
if "compile" in argv:
argv.remove("compile")
compile_extensions = True
scriptpath = os.path.realpath(__file__)
projectpath = os.path.dirname(os.path.dirname(scriptpath))
addonspath = os.path.join(projectpath, "addons")
extensionspath = os.path.join(projectpath, "extensions")
if (not tryHemttBuild(projectpath)):
os.chdir(addonspath)
made = 0
failed = 0
skipped = 0
removed = 0
for file in os.listdir(addonspath):
if os.path.isfile(file):
if check_for_obsolete_pbos(addonspath, file):
removed += 1
print(" Removing obsolete file => " + file)
os.remove(file)
print("")
for p in os.listdir(addonspath):
path = os.path.join(addonspath, p)
if not os.path.isdir(path):
continue
if p[0] == ".":
continue
if not check_for_changes(addonspath, p):
skipped += 1
print(" Skipping {}.".format(p))
continue
print("# Making {} ...".format(p))
try:
subprocess.check_output([
"makepbo",
"-NUP",
"-@={}\\{}\\addons\\{}".format(MAINPREFIX,PREFIX.rstrip("_"),p),
p,
"{}{}.pbo".format(PREFIX,p)
], stderr=subprocess.STDOUT)
except:
failed += 1
print(" Failed to make {}.".format(p))
else:
made += 1
print(" Successfully made {}.".format(p))
print("\n# Done.")
print(" Made {}, skipped {}, removed {}, failed to make {}.".format(made, skipped, removed, failed))
if (compile_extensions):
try:
print("\nCompiling extensions in {}".format(extensionspath))
os.chdir(extensionspath)
# Prepare 64bit build dirs
ret = subprocess.call(["msbuild", "uksf.sln", "/m", "/p:Configuration=Release", "/p:Platform=x64"])
if ret == 1:
return 1
except:
print("Failed to compile extension")
raise
if __name__ == "__main__":
sys.exit(main(sys.argv))
| gpl-3.0 | 6,139,524,776,622,342,000 | 29.1 | 124 | 0.52381 | false | 3.89644 | false | false | false |
jacobgilroy/FinalYearProject | MainView.py | 1 | 2934 | from PyQt5.QtWidgets import QWidget, QSplitter, QVBoxLayout, QFrame, QFileDialog, QScrollArea, QMenuBar, QAction, QToolBar
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QIcon
from JamSpace.Views.LaneSpaceView import LaneSpaceView
from JamSpace.Views.ControlBar import ControlBar
class MainView(QWidget):
def __init__(self):
super().__init__()
# declare member variables:
self.laneSpace = LaneSpaceView(parent=self)
self.controlBar = ControlBar(parent=self)
self.menuBar = QMenuBar(self)
self.toolBar = QToolBar(self)
self.toolBar.show()
self.laneScrollArea = QScrollArea()
self.laneScrollArea.setWidgetResizable(True)
self.WIDTH = 900
self.HEIGHT = 700
# Initialise the UI:
self.initUI()
def initUI(self):
self.setGeometry(20, 30, self.WIDTH, self.HEIGHT)
self.setWindowTitle('JamSpace')
# configure the menu bar:
# create menus:
fileMenu = self.menuBar.addMenu('&File')
editMenu = self.menuBar.addMenu('&Edit')
# create actions:
self.exitAction = QAction('Exit', self)
self.exitAction.setStatusTip('Close the application')
self.addLaneAction = QAction(QIcon('addLaneIcon.png'), 'Add Lane', self)
self.playAction = QAction(QIcon('playIcon.png'), 'Play', self)
self.stopAction = QAction(QIcon('stopIcon.ico'), 'Stop', self)
self.addLaneAction.setStatusTip('Add a new lane')
self.playAction.setStatusTip('Start playback')
self.stopAction.setStatusTip('Stop playback')
# add the actions to the menus/toolbar:
fileMenu.addAction(self.exitAction)
self.toolBar.addAction(self.playAction)
self.toolBar.addAction(self.stopAction)
self.toolBar.addAction(self.addLaneAction)
self.laneScrollArea.setWidget(self.laneSpace)
# Instantiate UI components:
laneEditSpace = QFrame(self)
laneEditSpace.setFrameShape(QFrame.StyledPanel)
clipEditSpace = QFrame(self)
clipEditSpace.setFrameShape(QFrame.StyledPanel)
# Apply layout:
vSplitter = QSplitter(Qt.Vertical)
hSplitter = QSplitter(Qt.Horizontal)
hSplitter.addWidget(laneEditSpace)
hSplitter.addWidget(clipEditSpace)
vSplitter.addWidget(self.controlBar)
vSplitter.addWidget(self.laneScrollArea)
vSplitter.addWidget(hSplitter)
vbox = QVBoxLayout(self)
vbox.addWidget(vSplitter)
#vbox.setAlignment(Qt.AlignTop)
self.setLayout(vbox)
self.show()
def showDirectoryDialog(self):
dirSelectionDialog = QFileDialog(self)
projectDir = QFileDialog.getExistingDirectory(dirSelectionDialog, 'Select Project Folder')
return projectDir | gpl-3.0 | 15,029,270,134,049,856 | 29.569892 | 122 | 0.647921 | false | 3.932976 | false | false | false |
evanbiederstedt/CMBintheLikeHoodz | source_code/CAMB_vary_OmegaB_lmax1100_Feb2016.py | 1 | 137613 |
# coding: utf-8
# In[1]:
#
#
# hundred_samples = np.linspace(0.05, 0.5, num=100)
#
# Planck found \Omega_CDM
# GAVO simulated map set at \Omega_CDM = 0.122
# CAMB default below at omch2=0.122
#
# In[2]:
#
# First output 200 CAMB scalar outputs
#
# 0.005 to 0.05
#
# In[3]:
from matplotlib import pyplot as plt
import numpy as np
import camb
from camb import model, initialpower
# In[4]:
"""
#Set up a new set of parameters for CAMB
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.022, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(2000, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
for name in powers:
print(name)
# In[5]:
# plot the total lensed CMB power spectra versus unlensed, and fractional difference
totCL=powers['total']
unlensedCL=powers['unlensed_scalar']
print(totCL.shape)
# Python CL arrays are all zero based (starting at L=0), Note L=0,1 entries will be zero by default.
# The differenent CL are always in the order TT, EE, BB, TE (with BB=0 for unlensed scalar results).
ls = np.arange(totCL.shape[0])
print(ls)
#print(totCL[:30]) # print first 30 totCL
fig, ax = plt.subplots(2,2, figsize = (12,12))
ax[0,0].plot(ls,totCL[:,0], color='k')
ax[0,0].plot(ls,unlensedCL[:,0], color='r')
ax[0,0].set_title('TT')
ax[0,1].plot(ls[2:], 1-unlensedCL[2:,0]/totCL[2:,0]);
ax[0,1].set_title(r'$\Delta TT$')
ax[1,0].plot(ls,totCL[:,1], color='k')
ax[1,0].plot(ls,unlensedCL[:,1], color='r')
ax[1,0].set_title(r'$EE$')
ax[1,1].plot(ls,totCL[:,3], color='k')
ax[1,1].plot(ls,unlensedCL[:,3], color='r')
ax[1,1].set_title(r'$TE$');
for ax in ax.reshape(-1): ax.set_xlim([2,2500])
"""
# In[6]:
twohundred_samples = np.linspace(0.005, 0.05, num=200)
#print(twohundred_samples)
#Set up a new set of parameters for CAMB
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.022, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
pars.set_for_lmax(2500, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
for name in powers:
print(name)
"""
array([ 0.005 , 0.00522613, 0.00545226, 0.00567839, 0.00590452,
0.00613065, 0.00635678, 0.00658291, 0.00680905, 0.00703518,
0.00726131, 0.00748744, 0.00771357, 0.0079397 , 0.00816583,
0.00839196, 0.00861809, 0.00884422, 0.00907035, 0.00929648,
0.00952261, 0.00974874, 0.00997487, 0.01020101, 0.01042714,
0.01065327, 0.0108794 , 0.01110553, 0.01133166, 0.01155779,
0.01178392, 0.01201005, 0.01223618, 0.01246231, 0.01268844,
0.01291457, 0.0131407 , 0.01336683, 0.01359296, 0.0138191 ,
0.01404523, 0.01427136, 0.01449749, 0.01472362, 0.01494975,
0.01517588, 0.01540201, 0.01562814, 0.01585427, 0.0160804 ,
0.01630653, 0.01653266, 0.01675879, 0.01698492, 0.01721106,
0.01743719, 0.01766332, 0.01788945, 0.01811558, 0.01834171,
0.01856784, 0.01879397, 0.0190201 , 0.01924623, 0.01947236,
0.01969849, 0.01992462, 0.02015075, 0.02037688, 0.02060302,
0.02082915, 0.02105528, 0.02128141, 0.02150754, 0.02173367,
0.0219598 , 0.02218593, 0.02241206, 0.02263819, 0.02286432,
0.02309045, 0.02331658, 0.02354271, 0.02376884, 0.02399497,
0.02422111, 0.02444724, 0.02467337, 0.0248995 , 0.02512563,
0.02535176, 0.02557789, 0.02580402, 0.02603015, 0.02625628,
0.02648241, 0.02670854, 0.02693467, 0.0271608 , 0.02738693,
0.02761307, 0.0278392 , 0.02806533, 0.02829146, 0.02851759,
0.02874372, 0.02896985, 0.02919598, 0.02942211, 0.02964824,
0.02987437, 0.0301005 , 0.03032663, 0.03055276, 0.03077889,
0.03100503, 0.03123116, 0.03145729, 0.03168342, 0.03190955,
0.03213568, 0.03236181, 0.03258794, 0.03281407, 0.0330402 ,
0.03326633, 0.03349246, 0.03371859, 0.03394472, 0.03417085,
0.03439698, 0.03462312, 0.03484925, 0.03507538, 0.03530151,
0.03552764, 0.03575377, 0.0359799 , 0.03620603, 0.03643216,
0.03665829, 0.03688442, 0.03711055, 0.03733668, 0.03756281,
0.03778894, 0.03801508, 0.03824121, 0.03846734, 0.03869347,
0.0389196 , 0.03914573, 0.03937186, 0.03959799, 0.03982412,
0.04005025, 0.04027638, 0.04050251, 0.04072864, 0.04095477,
0.0411809 , 0.04140704, 0.04163317, 0.0418593 , 0.04208543,
0.04231156, 0.04253769, 0.04276382, 0.04298995, 0.04321608,
0.04344221, 0.04366834, 0.04389447, 0.0441206 , 0.04434673,
0.04457286, 0.04479899, 0.04502513, 0.04525126, 0.04547739,
0.04570352, 0.04592965, 0.04615578, 0.04638191, 0.04660804,
0.04683417, 0.0470603 , 0.04728643, 0.04751256, 0.04773869,
0.04796482, 0.04819095, 0.04841709, 0.04864322, 0.04886935,
0.04909548, 0.04932161, 0.04954774, 0.04977387, 0.05 ])
"""
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls0 = unlencl[:,0][2:1101]
print(len(cls0))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls1 = unlencl[:,0][2:1101]
print(len(cls1))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls2 = unlencl[:,0][2:1101]
print(len(cls2))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls3 = unlencl[:,0][2:1101]
print(len(cls3))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls4 = unlencl[:,0][2:1101]
print(len(cls4))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls5 = unlencl[:,0][2:1101]
print(len(cls5))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls6 = unlencl[:,0][2:1101]
print(len(cls6))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls7 = unlencl[:,0][2:1101]
print(len(cls7))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls8 = unlencl[:,0][2:1101]
print(len(cls8))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls9 = unlencl[:,0][2:1101]
print(len(cls9))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls10 = unlencl[:,0][2:1101]
print(len(cls10))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls11 = unlencl[:,0][2:1101]
print(len(cls11))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls12 = unlencl[:,0][2:1101]
print(len(cls12))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls13 = unlencl[:,0][2:1101]
print(len(cls13))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls14 = unlencl[:,0][2:1101]
print(len(cls14))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls15 = unlencl[:,0][2:1101]
print(len(cls15))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls16 = unlencl[:,0][2:1101]
print(len(cls16))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls17 = unlencl[:,0][2:1101]
print(len(cls17))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls18 = unlencl[:,0][2:1101]
print(len(cls18))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls19 = unlencl[:,0][2:1101]
print(len(cls19))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls20 = unlencl[:,0][2:1101]
print(len(cls20))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls21 = unlencl[:,0][2:1101]
print(len(cls21))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls22 = unlencl[:,0][2:1101]
print(len(cls22))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls23 = unlencl[:,0][2:1101]
print(len(cls23))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls24 = unlencl[:,0][2:1101]
print(len(cls24))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls25 = unlencl[:,0][2:1101]
print(len(cls25))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls26 = unlencl[:,0][2:1101]
print(len(cls26))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls27 = unlencl[:,0][2:1101]
print(len(cls27))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls28 = unlencl[:,0][2:1101]
print(len(cls28))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls29 = unlencl[:,0][2:1101]
print(len(cls29))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls30 = unlencl[:,0][2:1101]
print(len(cls30))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls31 = unlencl[:,0][2:1101]
print(len(cls31))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls32 = unlencl[:,0][2:1101]
print(len(cls32))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls33 = unlencl[:,0][2:1101]
print(len(cls33))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls34 = unlencl[:,0][2:1101]
print(len(cls34))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls35 = unlencl[:,0][2:1101]
print(len(cls35))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls36 = unlencl[:,0][2:1101]
print(len(cls36))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls37 = unlencl[:,0][2:1101]
print(len(cls37))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls38 = unlencl[:,0][2:1101]
print(len(cls38))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls39 = unlencl[:,0][2:1101]
print(len(cls39))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls40 = unlencl[:,0][2:1101]
print(len(cls40))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls41 = unlencl[:,0][2:1101]
print(len(cls41))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls42 = unlencl[:,0][2:1101]
print(len(cls42))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls43 = unlencl[:,0][2:1101]
print(len(cls43))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls44 = unlencl[:,0][2:1101]
print(len(cls44))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls45 = unlencl[:,0][2:1101]
print(len(cls45))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls46 = unlencl[:,0][2:1101]
print(len(cls46))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls47 = unlencl[:,0][2:1101]
print(len(cls47))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls48 = unlencl[:,0][2:1101]
print(len(cls48))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls49 = unlencl[:,0][2:1101]
print(len(cls49))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls50 = unlencl[:,0][2:1101]
print(len(cls50))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls51 = unlencl[:,0][2:1101]
print(len(cls51))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls52 = unlencl[:,0][2:1101]
print(len(cls52))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls53 = unlencl[:,0][2:1101]
print(len(cls53))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls54 = unlencl[:,0][2:1101]
print(len(cls54))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls55 = unlencl[:,0][2:1101]
print(len(cls55))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls56 = unlencl[:,0][2:1101]
print(len(cls56))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls57 = unlencl[:,0][2:1101]
print(len(cls57))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls58 = unlencl[:,0][2:1101]
print(len(cls58))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls59 = unlencl[:,0][2:1101]
print(len(cls59))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls60 = unlencl[:,0][2:1101]
print(len(cls60))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls61 = unlencl[:,0][2:1101]
print(len(cls61))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls62 = unlencl[:,0][2:1101]
print(len(cls62))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls63 = unlencl[:,0][2:1101]
print(len(cls63))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls64 = unlencl[:,0][2:1101]
print(len(cls64))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls65 = unlencl[:,0][2:1101]
print(len(cls65))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls66 = unlencl[:,0][2:1101]
print(len(cls66))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls67 = unlencl[:,0][2:1101]
print(len(cls67))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls68 = unlencl[:,0][2:1101]
print(len(cls68))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls69 = unlencl[:,0][2:1101]
print(len(cls69))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls70 = unlencl[:,0][2:1101]
print(len(cls70))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls71 = unlencl[:,0][2:1101]
print(len(cls71))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls72 = unlencl[:,0][2:1101]
print(len(cls72))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls73 = unlencl[:,0][2:1101]
print(len(cls73))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls74 = unlencl[:,0][2:1101]
print(len(cls74))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls75 = unlencl[:,0][2:1101]
print(len(cls75))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls76 = unlencl[:,0][2:1101]
print(len(cls76))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls77 = unlencl[:,0][2:1101]
print(len(cls77))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls78 = unlencl[:,0][2:1101]
print(len(cls78))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls79 = unlencl[:,0][2:1101]
print(len(cls79))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls80 = unlencl[:,0][2:1101]
print(len(cls80))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls81 = unlencl[:,0][2:1101]
print(len(cls81))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls82 = unlencl[:,0][2:1101]
print(len(cls82))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls83 = unlencl[:,0][2:1101]
print(len(cls83))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls84 = unlencl[:,0][2:1101]
print(len(cls84))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls85 = unlencl[:,0][2:1101]
print(len(cls85))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls86 = unlencl[:,0][2:1101]
print(len(cls86))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls87 = unlencl[:,0][2:1101]
print(len(cls87))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls88 = unlencl[:,0][2:1101]
print(len(cls88))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls89 = unlencl[:,0][2:1101]
print(len(cls89))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls90 = unlencl[:,0][2:1101]
print(len(cls90))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls91 = unlencl[:,0][2:1101]
print(len(cls91))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls92 = unlencl[:,0][2:1101]
print(len(cls92))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls93 = unlencl[:,0][2:1101]
print(len(cls93))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls94 = unlencl[:,0][2:1101]
print(len(cls94))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls95 = unlencl[:,0][2:1101]
print(len(cls95))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls96 = unlencl[:,0][2:1101]
print(len(cls96))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls97 = unlencl[:,0][2:1101]
print(len(cls97))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls98 = unlencl[:,0][2:1101]
print(len(cls98))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls99 = unlencl[:,0][2:1101]
print(len(cls99))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls100 = unlencl[:,0][2:1101]
print(len(cls100))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls101 = unlencl[:,0][2:1101]
print(len(cls101))
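#
# Editorial note (assumption): the cosmology passed to set_cosmology is identical in
# every repeated block of this section, so the stored spectra should agree to
# numerical precision. A quick consistency check, using only arrays already defined
# above (kept commented out so the script's behaviour is unchanged):
#
# assert np.allclose(cls39, cls101), "identical parameters should give identical spectra"
#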
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls102 = unlencl[:,0][2:1101]
print(len(cls102))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls103 = unlencl[:,0][2:1101]
print(len(cls103))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls104 = unlencl[:,0][2:1101]
print(len(cls104))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls105 = unlencl[:,0][2:1101]
print(len(cls105))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls106 = unlencl[:,0][2:1101]
print(len(cls106))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls107 = unlencl[:,0][2:1101]
print(len(cls107))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls108 = unlencl[:,0][2:1101]
print(len(cls108))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls109 = unlencl[:,0][2:1101]
print(len(cls109))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls110 = unlencl[:,0][2:1101]
print(len(cls110))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls111 = unlencl[:,0][2:1101]
print(len(cls111))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls112 = unlencl[:,0][2:1101]
print(len(cls112))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls113 = unlencl[:,0][2:1101]
print(len(cls113))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls114 = unlencl[:,0][2:1101]
print(len(cls114))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls115 = unlencl[:,0][2:1101]
print(len(cls115))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls116 = unlencl[:,0][2:1101]
print(len(cls116))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls117 = unlencl[:,0][2:1101]
print(len(cls117))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls118 = unlencl[:,0][2:1101]
print(len(cls118))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls119 = unlencl[:,0][2:1101]
print(len(cls119))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls120 = unlencl[:,0][2:1101]
print(len(cls120))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls121 = unlencl[:,0][2:1101]
print(len(cls121))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls122 = unlencl[:,0][2:1101]
print(len(cls122))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls123 = unlencl[:,0][2:1101]
print(len(cls123))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls124 = unlencl[:,0][2:1101]
print(len(cls124))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls125 = unlencl[:,0][2:1101]
print(len(cls125))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls126 = unlencl[:,0][2:1101]
print(len(cls126))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls127 = unlencl[:,0][2:1101]
print(len(cls127))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls128 = unlencl[:,0][2:1101]
print(len(cls128))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls129 = unlencl[:,0][2:1101]
print(len(cls129))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls130 = unlencl[:,0][2:1101]
print(len(cls130))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls131 = unlencl[:,0][2:1101]
print(len(cls131))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls132 = unlencl[:,0][2:1101]
print(len(cls132))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls133 = unlencl[:,0][2:1101]
print(len(cls133))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls134 = unlencl[:,0][2:1101]
print(len(cls134))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls135 = unlencl[:,0][2:1101]
print(len(cls135))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls136 = unlencl[:,0][2:1101]
print(len(cls136))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls137 = unlencl[:,0][2:1101]
print(len(cls137))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls138 = unlencl[:,0][2:1101]
print(len(cls138))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls139 = unlencl[:,0][2:1101]
print(len(cls139))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls140 = unlencl[:,0][2:1101]
print(len(cls140))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls141 = unlencl[:,0][2:1101]
print(len(cls141))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls142 = unlencl[:,0][2:1101]
print(len(cls142))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls143 = unlencl[:,0][2:1101]
print(len(cls143))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls144 = unlencl[:,0][2:1101]
print(len(cls144))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls145 = unlencl[:,0][2:1101]
print(len(cls145))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls146 = unlencl[:,0][2:1101]
print(len(cls146))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls147 = unlencl[:,0][2:1101]
print(len(cls147))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls148 = unlencl[:,0][2:1101]
print(len(cls148))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls149 = unlencl[:,0][2:1101]
print(len(cls149))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls150 = unlencl[:,0][2:1101]
print(len(cls150))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls151 = unlencl[:,0][2:1101]
print(len(cls151))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls152 = unlencl[:,0][2:1101]
print(len(cls152))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls153 = unlencl[:,0][2:1101]
print(len(cls153))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls154 = unlencl[:,0][2:1101]
print(len(cls154))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls155 = unlencl[:,0][2:1101]
print(len(cls155))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls156 = unlencl[:,0][2:1101]
print(len(cls156))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls157 = unlencl[:,0][2:1101]
print(len(cls157))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls158 = unlencl[:,0][2:1101]
print(len(cls158))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls159 = unlencl[:,0][2:1101]
print(len(cls159))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls160 = unlencl[:,0][2:1101]
print(len(cls160))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls161 = unlencl[:,0][2:1101]
print(len(cls161))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls162 = unlencl[:,0][2:1101]
print(len(cls162))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls163 = unlencl[:,0][2:1101]
print(len(cls163))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls164 = unlencl[:,0][2:1101]
print(len(cls164))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls165 = unlencl[:,0][2:1101]
print(len(cls165))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls166 = unlencl[:,0][2:1101]
print(len(cls166))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls167 = unlencl[:,0][2:1101]
print(len(cls167))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls168 = unlencl[:,0][2:1101]
print(len(cls168))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls169 = unlencl[:,0][2:1101]
print(len(cls169))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls170 = unlencl[:,0][2:1101]
print(len(cls170))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls171 = unlencl[:,0][2:1101]
print(len(cls171))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls172 = unlencl[:,0][2:1101]
print(len(cls172))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls173 = unlencl[:,0][2:1101]
print(len(cls173))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls174 = unlencl[:,0][2:1101]
print(len(cls174))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls175 = unlencl[:,0][2:1101]
print(len(cls175))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls176 = unlencl[:,0][2:1101]
print(len(cls176))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls177 = unlencl[:,0][2:1101]
print(len(cls177))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls178 = unlencl[:,0][2:1101]
print(len(cls178))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls179 = unlencl[:,0][2:1101]
print(len(cls179))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls180 = unlencl[:,0][2:1101]
print(len(cls180))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls181 = unlencl[:,0][2:1101]
print(len(cls181))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls182 = unlencl[:,0][2:1101]
print(len(cls182))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls183 = unlencl[:,0][2:1101]
print(len(cls183))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls184 = unlencl[:,0][2:1101]
print(len(cls184))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls185 = unlencl[:,0][2:1101]
print(len(cls185))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls186 = unlencl[:,0][2:1101]
print(len(cls186))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls187 = unlencl[:,0][2:1101]
print(len(cls187))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls188 = unlencl[:,0][2:1101]
print(len(cls188))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls189 = unlencl[:,0][2:1101]
print(len(cls189))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls190 = unlencl[:,0][2:1101]
print(len(cls190))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls191 = unlencl[:,0][2:1101]
print(len(cls191))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls192 = unlencl[:,0][2:1101]
print(len(cls192))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls193 = unlencl[:,0][2:1101]
print(len(cls193))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls194 = unlencl[:,0][2:1101]
print(len(cls194))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls195 = unlencl[:,0][2:1101]
print(len(cls195))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls196 = unlencl[:,0][2:1101]
print(len(cls196))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls197 = unlencl[:,0][2:1101]
print(len(cls197))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls198 = unlencl[:,0][2:1101]
print(len(cls198))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls199 = unlencl[:,0][2:1101]
print(len(cls199))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls200 = unlencl[:,0][2:1101]
print(len(cls200))
"""
0.005
0.00522613065327
0.00545226130653
0.0056783919598
0.00590452261307
0.00613065326633
0.0063567839196
0.00658291457286
0.00680904522613
0.0070351758794
0.00726130653266
0.00748743718593
0.0077135678392
0.00793969849246
0.00816582914573
0.00839195979899
0.00861809045226
0.00884422110553
0.00907035175879
0.00929648241206
0.00952261306533
0.00974874371859
0.00997487437186
0.0102010050251
0.0104271356784
0.0106532663317
0.0108793969849
0.0111055276382
0.0113316582915
0.0115577889447
0.011783919598
0.0120100502513
0.0122361809045
0.0124623115578
0.0126884422111
0.0129145728643
0.0131407035176
0.0133668341709
0.0135929648241
0.0138190954774
0.0140452261307
0.0142713567839
0.0144974874372
0.0147236180905
0.0149497487437
0.015175879397
0.0154020100503
0.0156281407035
0.0158542713568
0.0160804020101
0.0163065326633
0.0165326633166
0.0167587939698
0.0169849246231
0.0172110552764
0.0174371859296
0.0176633165829
0.0178894472362
0.0181155778894
0.0183417085427
0.018567839196
0.0187939698492
0.0190201005025
0.0192462311558
0.019472361809
0.0196984924623
0.0199246231156
0.0201507537688
0.0203768844221
0.0206030150754
0.0208291457286
0.0210552763819
0.0212814070352
0.0215075376884
0.0217336683417
0.021959798995
0.0221859296482
0.0224120603015
0.0226381909548
0.022864321608
0.0230904522613
0.0233165829146
0.0235427135678
0.0237688442211
0.0239949748744
0.0242211055276
0.0244472361809
0.0246733668342
0.0248994974874
0.0251256281407
0.025351758794
0.0255778894472
0.0258040201005
0.0260301507538
0.026256281407
0.0264824120603
0.0267085427136
0.0269346733668
0.0271608040201
0.0273869346734
0.0276130653266
0.0278391959799
0.0280653266332
0.0282914572864
0.0285175879397
0.028743718593
0.0289698492462
0.0291959798995
0.0294221105528
0.029648241206
0.0298743718593
0.0301005025126
0.0303266331658
0.0305527638191
0.0307788944724
0.0310050251256
0.0312311557789
0.0314572864322
0.0316834170854
0.0319095477387
0.032135678392
0.0323618090452
0.0325879396985
0.0328140703518
0.033040201005
0.0332663316583
0.0334924623116
0.0337185929648
0.0339447236181
0.0341708542714
0.0343969849246
0.0346231155779
0.0348492462312
0.0350753768844
0.0353015075377
0.035527638191
0.0357537688442
0.0359798994975
0.0362060301508
0.036432160804
0.0366582914573
0.0368844221106
0.0371105527638
0.0373366834171
0.0375628140704
0.0377889447236
0.0380150753769
0.0382412060302
0.0384673366834
0.0386934673367
0.0389195979899
0.0391457286432
0.0393718592965
0.0395979899497
0.039824120603
0.0400502512563
0.0402763819095
0.0405025125628
0.0407286432161
0.0409547738693
0.0411809045226
0.0414070351759
0.0416331658291
0.0418592964824
0.0420854271357
0.0423115577889
0.0425376884422
0.0427638190955
0.0429899497487
0.043216080402
0.0434422110553
0.0436683417085
0.0438944723618
0.0441206030151
0.0443467336683
0.0445728643216
0.0447989949749
0.0450251256281
0.0452512562814
0.0454773869347
0.0457035175879
0.0459296482412
0.0461557788945
0.0463819095477
0.046608040201
0.0468341708543
0.0470603015075
0.0472864321608
0.0475125628141
0.0477386934673
0.0479648241206
0.0481909547739
0.0484170854271
0.0486432160804
0.0488693467337
0.0490954773869
0.0493216080402
0.0495477386935
0.0497738693467
0.05
"""
# In[50]:
cl_array = np.array([cls0, cls1, cls2, cls3, cls4, cls5, cls6, cls7, cls8, cls9, cls10,
cls11, cls12, cls13, cls14, cls15, cls16, cls17, cls18, cls19, cls20,
cls21, cls22, cls23, cls24, cls25, cls26, cls27, cls28, cls29, cls30,
cls31, cls32, cls33, cls34, cls35, cls36, cls37, cls38, cls39, cls40,
cls41, cls42, cls43, cls44, cls45, cls46, cls47, cls48, cls49, cls50,
cls51, cls52, cls53, cls54, cls55, cls56, cls57, cls58, cls59, cls60,
cls61, cls62, cls63, cls64, cls65, cls66, cls67, cls68, cls69, cls70,
cls71, cls72, cls73, cls74, cls75, cls76, cls77, cls78, cls79, cls80,
cls81, cls82, cls83, cls84, cls85, cls86, cls87, cls88, cls89, cls90,
cls91, cls92, cls93, cls94, cls95, cls96, cls97, cls98, cls99, cls100,
cls101, cls102, cls103, cls104, cls105, cls106, cls107, cls108, cls109, cls110,
cls111, cls112, cls113, cls114, cls115, cls116, cls117, cls118, cls119, cls120,
cls121, cls122, cls123, cls124, cls125, cls126, cls127, cls128, cls129, cls130,
cls131, cls132, cls133, cls134, cls135, cls136, cls137, cls138, cls139, cls140,
cls141, cls142, cls143, cls144, cls145, cls146, cls147, cls148, cls149, cls150,
cls151, cls152, cls153, cls154, cls155, cls156, cls157, cls158, cls159, cls160,
cls161, cls162, cls163, cls164, cls165, cls166, cls167, cls168, cls169, cls170,
cls171, cls172, cls173, cls174, cls175, cls176, cls177, cls178, cls179, cls180,
cls181, cls182, cls183, cls184, cls185, cls186, cls187, cls188, cls189, cls190,
cls191, cls192, cls193, cls194, cls195, cls196, cls197, cls198, cls199, cls200])
# In[51]:
print(cl_array.shape)
# In[52]:
f = "CAMB_cl_varyBaryon_lmax1100varyFeb2016.npy"
np.save(f, cl_array)
| mit | 5,311,655,598,611,238,000 | 26.201621 | 108 | 0.741165 | false | 2.454964 | false | false | false |
anish/buildbot | master/buildbot/reporters/gerrit_verify_status.py | 1 | 8571 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import failure
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters import http
from buildbot.util import httpclientservice
from buildbot.util.logger import Logger
log = Logger()
class GerritVerifyStatusPush(http.HttpStatusPushBase):
name = "GerritVerifyStatusPush"
neededDetails = dict(wantProperties=True)
# overridable constants
RESULTS_TABLE = {
SUCCESS: 1,
WARNINGS: 1,
FAILURE: -1,
SKIPPED: 0,
EXCEPTION: 0,
RETRY: 0,
CANCELLED: 0
}
DEFAULT_RESULT = -1
@defer.inlineCallbacks
def reconfigService(self,
baseURL,
auth,
startDescription=None,
endDescription=None,
verification_name=None,
abstain=False,
category=None,
reporter=None,
verbose=False,
**kwargs):
auth = yield self.renderSecrets(auth)
yield super().reconfigService(**kwargs)
if baseURL.endswith('/'):
baseURL = baseURL[:-1]
self._http = yield httpclientservice.HTTPClientService.getService(
self.master, baseURL, auth=auth,
debug=self.debug, verify=self.verify)
self._verification_name = verification_name or Interpolate(
'%(prop:buildername)s')
self._reporter = reporter or "buildbot"
self._abstain = abstain
self._category = category
self._startDescription = startDescription or 'Build started.'
self._endDescription = endDescription or 'Build done.'
self._verbose = verbose
def createStatus(self,
change_id,
revision_id,
name,
value,
abstain=None,
rerun=None,
comment=None,
url=None,
reporter=None,
category=None,
duration=None):
"""
Abstract the POST REST api documented here:
https://gerrit.googlesource.com/plugins/verify-status/+/master/src/main/resources/Documentation/rest-api-changes.md
:param change_id: The change_id for the change tested (can be in the long form e.g:
myProject~master~I8473b95934b5732ac55d26311a706c9c2bde9940 or in the short integer form).
:param revision_id: the revision_id tested can be the patchset number or
the commit id (short or long).
:param name: The name of the job.
:param value: The pass/fail result for this job: -1: fail 0: unstable, 1: succeed
:param abstain: Whether the value counts as a vote (defaults to false)
:param rerun: Whether this result is from a re-test on the same patchset
:param comment: A short comment about this job
:param url: The url link to more info about this job
        :param reporter: The user that verified this job
        :param category: A category for this job
        :param duration: The time it took to run this job
:return: A deferred with the result from Gerrit.
"""
payload = {'name': name, 'value': value}
if abstain is not None:
payload['abstain'] = abstain
if rerun is not None:
payload['rerun'] = rerun
if comment is not None:
payload['comment'] = comment
if url is not None:
payload['url'] = url
if reporter is not None:
payload['reporter'] = reporter
if category is not None:
payload['category'] = category
if duration is not None:
payload['duration'] = duration
if self._verbose:
log.debug(
'Sending Gerrit status for {change_id}/{revision_id}: data={data}',
change_id=change_id,
revision_id=revision_id,
data=payload)
return self._http.post(
'/'.join([
'/a/changes', str(change_id), 'revisions', str(revision_id),
'verify-status~verifications'
]),
json=payload)
def formatDuration(self, duration):
"""Format the duration.
This method could be overridden if really needed, as the duration format in gerrit
is an arbitrary string.
:param duration: duration in timedelta
"""
days = duration.days
hours, remainder = divmod(duration.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
if days:
return '{} day{} {}h {}m {}s'.format(days, "s" if days > 1 else "",
hours, minutes, seconds)
elif hours:
return '{}h {}m {}s'.format(hours, minutes, seconds)
return '{}m {}s'.format(minutes, seconds)
@staticmethod
def getGerritChanges(props):
""" Get the gerrit changes
        This method could be overridden if really needed to accommodate other
        custom ways of fetching gerrit changes.
        :param props: an IProperty
        :return: (optionally via deferred) a list of dictionaries with at least
            change_id and revision_id, in the format accepted by the gerrit
            REST API for /changes/:change_id/revisions/:revision_id paths
            (see the gerrit documentation)
"""
if 'gerrit_changes' in props:
return props.getProperty('gerrit_changes')
if 'event.change.number' in props:
return [{
'change_id': props.getProperty('event.change.number'),
'revision_id': props.getProperty('event.patchSet.number')
}]
return []
@defer.inlineCallbacks
def send(self, build):
props = Properties.fromDict(build['properties'])
if build['complete']:
value = self.RESULTS_TABLE.get(build['results'],
self.DEFAULT_RESULT)
comment = yield props.render(self._endDescription)
duration = self.formatDuration(build['complete_at'] - build[
'started_at'])
else:
value = 0
comment = yield props.render(self._startDescription)
duration = 'pending'
name = yield props.render(self._verification_name)
reporter = yield props.render(self._reporter)
category = yield props.render(self._category)
abstain = yield props.render(self._abstain)
# TODO: find reliable way to find out whether its a rebuild
rerun = None
changes = yield self.getGerritChanges(props)
for change in changes:
try:
yield self.createStatus(
change['change_id'],
change['revision_id'],
name,
value,
abstain=abstain,
rerun=rerun,
comment=comment,
url=build['url'],
reporter=reporter,
category=category,
duration=duration)
except Exception:
log.failure(
'Failed to send status!', failure=failure.Failure())
| gpl-2.0 | 5,102,035,959,566,791,000 | 36.265217 | 123 | 0.582896 | false | 4.643012 | false | false | false |
lixiangning888/whole_project | modules/signatures_orginal_20151110/dyre_apis.py | 1 | 6073 | # Copyright (C) 2015 Optiv, Inc. ([email protected]), KillerInstinct
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
try:
import re2 as re
except ImportError:
import re
from lib.cuckoo.common.abstracts import Signature
class Dyre_APIs(Signature):
name = "dyre_behavior"
description = "Exhibits behavior characteristic of Dyre malware"
weight = 3
severity = 3
categories = ["banker", "trojan"]
families = ["dyre"]
authors = ["Optiv", "KillerInstinct"]
minimum = "1.3"
evented = True
    # Try to parse a process memory dump and extract C2 nodes from it via regex.
extract_c2s = True
def __init__(self, *args, **kwargs):
Signature.__init__(self, *args, **kwargs)
self.cryptoapis = False
self.networkapis = set()
self.syncapis = False
self.compname = self.get_environ_entry(self.get_initial_process(),
"ComputerName")
filter_apinames = set(["CryptHashData", "HttpOpenRequestA",
"NtCreateNamedPipeFile"])
def on_call(self, call, process):
# Legacy, modern Dyre doesn't have hardcoded hashes in
# CryptHashData anymore
iocs = [
"J7dnlDvybciDvu8d46D\\x00",
"qwererthwebfsdvjaf+\\x00",
]
pipe = [
"\\??\\pipe\\3obdw5e5w4",
"\\??\\pipe\\g2fabg5713",
]
if call["api"] == "CryptHashData":
buf = self.get_argument(call, "Buffer")
if buf in iocs:
self.cryptoapis = True
tmp = re.sub(r"\\x[0-9A-Fa-f]{2}", "", buf)
if self.compname in tmp:
if re.match("^" + self.compname + "[0-9 ]+$", tmp):
self.cryptoapis = True
elif call["api"] == "HttpOpenRequestA":
buf = self.get_argument(call, "Path")
if len(buf) > 10:
self.networkapis.add(buf)
elif call["api"] == "NtCreateNamedPipeFile":
buf = self.get_argument(call, "PipeName")
for npipe in pipe:
if buf == npipe:
self.syncapis = True
break
return None
def on_complete(self):
ret = False
networkret = False
campaign = set()
mutexs = [
"^(Global|Local)\\\\pen3j3832h$",
"^(Global|Local)\\\\u1nyj3rt20",
]
for mutex in mutexs:
if self.check_mutex(pattern=mutex, regex=True):
self.syncapis = True
break
# C2 Beacon check
if self.networkapis:
# Gather computer name
for httpreq in self.networkapis:
# Generate patterns (should only ever be one per indicator)
indicators = [
"/(\d{4}[a-z]{2}\d{2})/" + self.compname + "_",
"/([^/]+)/" + self.compname + "/\d+/\d+/\d+/$",
"/([^/]+)/" + self.compname + "_W\d{6}\.[0-9A-F]{32}",
]
for indicator in indicators:
buf = re.match(indicator, httpreq)
if buf:
networkret = True
campaign.add(buf.group(1))
# Check if there are any winners
if self.cryptoapis or self.syncapis or networkret:
ret = True
if (self.cryptoapis or self.syncapis) and networkret:
self.confidence = 100
self.description = "Exhibits behaviorial and network characteristics of Upatre+Dyre/Mini-Dyre malware"
for camp in campaign:
self.data.append({"Campaign": camp})
elif networkret:
self.description = "Exhibits network behavior characteristic of Upatre+Dyre/Mini-Dyre malware"
for camp in campaign:
self.data.append({"Campaign": camp})
if self.extract_c2s:
dump_pid = 0
for proc in self.results["behavior"]["processtree"]:
for child in proc["children"]:
# Look for lowest PID svchost.exe
if not dump_pid or child["pid"] < dump_pid:
if child["name"] == "svchost.exe":
dump_pid = child["pid"]
if dump_pid:
dump_path = ""
if len(self.results["procmemory"]):
for memdump in self.results["procmemory"]:
if dump_pid == memdump["pid"]:
dump_path = memdump["file"]
if dump_path:
whitelist = [
"1.2.3.4",
"0.0.0.0",
]
with open(dump_path, "rb") as dump_file:
dump_data = dump_file.read()
ippat = "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{2,5}"
ips = re.findall(ippat, dump_data)
for ip in set(ips):
addit = True
for item in whitelist:
if ip.startswith(item):
addit = False
if addit:
self.data.append({"C2": ip})
return ret
| lgpl-3.0 | -8,487,877,113,169,990,000 | 38.435065 | 118 | 0.494978 | false | 4.067649 | false | false | false |
lafranceinsoumise/api-django | agir/people/management/commands/mailtrain_update.py | 1 | 1382 | from datetime import datetime
import string
from uuid import UUID
from django.core.management import BaseCommand
from django.utils import timezone
from agir.lib.mailtrain import update_person
from agir.people.models import Person
PADDING = "0000000-0000-0000-0000-000000000000"
class Command(BaseCommand):
help = "Synchronize all the database with mailtrain"
def handle(self, *args, **kwargs):
start = datetime.now()
i = 0
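        # People are partitioned by the first hex digit of their UUID: the current
        # day of the month selects the [min_letter, max_letter) prefix range below,
        # so each daily run only synchronizes a slice of the database.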
min_letter = string.hexdigits[timezone.now().day % 8 * 2]
max_letter = string.hexdigits[(timezone.now().day + 1) % 8 * 2]
qs = Person.objects.filter(id__gte=UUID(min_letter + PADDING))
if max_letter > min_letter:
qs = qs.filter(id__lt=UUID(max_letter + PADDING))
try:
for person in qs.iterator():
update_person(person)
if kwargs["verbosity"] > 1:
print("Updated %s " % person.email)
i += 1
except Exception as e:
duration = datetime.now() - start
print(
f"Updated {i} people over {qs.count()} in {str(duration.seconds)} seconds."
)
raise e
duration = datetime.now() - start
print(
f"Updated people from {min_letter} to {max_letter} ({str(i)}) in {str(duration.seconds)} seconds."
)
| agpl-3.0 | 1,208,920,517,310,383,900 | 28.404255 | 110 | 0.586831 | false | 3.982709 | false | false | false |
DayGitH/Python-Challenges | DailyProgrammer/DP20140625B.py | 1 | 4512 | """
[6/25/2014] Challenge #168 [Intermediate] Block Count, Length & Area
https://www.reddit.com/r/dailyprogrammer/comments/291x9h/6252014_challenge_168_intermediate_block_count/
#Description:
In construction there comes a need to compute the length and area of a jobsite. The areas and lengths computed are used
by estimators
to price out the cost to build that jobsite. If for example a jobsite was a building with a parking lot and had
concrete walkways and some nice
pavers and landscaping it would be good to know the areas of all these and some lengths (for concrete curbs, landscape
headerboard, etc)
So for today's challenge we are going to automate the tedious process of calculating the length and area of aerial
plans or photos.
#ASCII Photo:
To keep this within our scope we have converted the plans into an ASCII picture. We have scaled the plans so 1
character is a square
with dimensions of 10 ft x 10 ft.
The photo is case sensitive. so a "O" and "o" are 2 different blocks of areas to compute.
#Blocks Counts, Lengths and Areas:
Some shorthand to follow:
* SF = square feet
* LF = linear feet
If you have the following picture.
####
OOOO
####
mmmm
* # has a block count of 2. we have 2 areas not joined made up of #
* O and m have a block count of 1. They each have only 1 area made up of their ASCII character.
* O has 4 blocks. Each block is 100 SF and so you have 400 SF of O.
* O's single area has a circumference length of 100 LF.
* m also has 4 blocks so there is 400 SF of m and circumference length of 100 LF
* # has 2 block counts each of 4. So # has a total area of 800 SF and a total circumference length of 200 LF.
Pay close attention to how "#" was handled. It was seen as being 2 areas made up of # but the final length and area
adds them together even though they are not joined. It recognizes the two areas by having a block count of 2 (2 non-joined
areas made up of "#" characters) while the others only have a block count of 1.
#Input:
Your input is a 2-D ASCII picture. The ASCII characters used are any non-whitespace characters.
##Example:
####
@@oo
o*@!
****
#Output:
You give a Length and Area report of all the blocks.
##Example: (using the example input)
Block Count, Length & Area Report
=================================
#: Total SF (400), Total Circumference LF (100) - Found 1 block
@: Total SF (300), Total Circumference LF (100) - Found 2 blocks
o: Total SF (300), Total Circumference LF (100) - Found 2 blocks
*: Total SF (500), Total Circumference LF (120) - Found 1 block
!: Total SF (100), Total Circumference LF (40) - Found 1 block
#Easy Mode (optional):
Remove the need to compute the block count. Just focus on area and circumference length.
#Challenge Input:
So we have a "B" building. It has a "D" driveway. "O" and "o" landscaping. "c" concrete walks. "p" pavers. "V" & "v"
valley gutters. @ and T tree planting.
Finally we have # as Asphalt Paving.
ooooooooooooooooooooooDDDDDooooooooooooooooooooooooooooo
ooooooooooooooooooooooDDDDDooooooooooooooooooooooooooooo
ooo##################o#####o#########################ooo
o@o##################o#####o#########################ooo
ooo##################o#####o#########################oTo
o@o##################################################ooo
ooo##################################################oTo
o@o############ccccccccccccccccccccccc###############ooo
pppppppppppppppcOOOOOOOOOOOOOOOOOOOOOc###############oTo
o@o############cOBBBBBBBBBBBBBBBBBBBOc###############ooo
ooo####V#######cOBBBBBBBBBBBBBBBBBBBOc###############oTo
o@o####V#######cOBBBBBBBBBBBBBBBBBBBOc###############ooo
ooo####V#######cOBBBBBBBBBBBBBBBBBBBOcpppppppppppppppppp
o@o####V#######cOBBBBBBBBBBBBBBBBBBBOc###############ooo
ooo####V#######cOBBBBBBBBBBBBBBBBBBBOc######v########oTo
o@o####V#######cOBBBBBBBBBBBBBBBBBBBOc######v########ooo
ooo####V#######cOOOOOOOOOOOOOOOOOOOOOc######v########oTo
o@o####V#######ccccccccccccccccccccccc######v########ooo
ooo####V#######ppppppppppppppppppppppp######v########oTo
o@o############ppppppppppppppppppppppp###############ooo
oooooooooooooooooooooooooooooooooooooooooooooooooooooooo
oooooooooooooooooooooooooooooooooooooooooooooooooooooooo
#FAQ:
Diagonals do not connect. The small example shows this. The @ areas are 2 blocks and not 1 because of the Diagonal.
"""
def main():
pass
if __name__ == "__main__":
main()
| mit | -3,471,949,555,615,891,000 | 46.494737 | 119 | 0.649379 | false | 3.481481 | false | false | false |
Upward-Spiral-Science/team1 | code/test_assumptions.py | 1 | 1525 | import numpy as np
import matplotlib.pyplot as plt
import urllib2
#%matplotlib inline
sample_size = 1000
np.random.seed(1)
url = ('https://raw.githubusercontent.com/Upward-Spiral-Science'
'/data/master/syn-density/output.csv')
data = urllib2.urlopen(url)
csv = np.genfromtxt(data, delimiter=",")[1:]
csv_rand = None
for i in range (1, sample_size):
#Randomly sample from dataset
a = np.random.permutation(np.arange(csv.shape[0]))[:100]
csv_rand_sample = csv[a]
# Normalize
mean_unmask = np.mean(csv_rand_sample[:,3])
std_unmask = np.std(csv_rand_sample[:,3])
csv_rand_sample[:,3] = (csv_rand_sample[:,3]-mean_unmask)/std_unmask
#Stack matrix
if i == 1:
csv_rand = csv_rand_sample
else:
csv_rand = np.dstack((csv_rand,csv_rand_sample))
#Average across random samples
csv_rand = np.mean(csv_rand,axis=2)
#Independence Assumption
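# If the sampled rows were independent, the off-diagonal covariance terms would be
# close to zero, so comparing (log-)determinants built from the diagonal-only and
# off-diagonal-only parts of the matrix below gives a rough check of that assumption.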
covar = np.cov(csv_rand)
plt.figure(figsize=(7,7))
plt.imshow(covar)
plt.title('Covariance of Synapse Density dataset')
plt.colorbar()
plt.show()
diag = covar.diagonal()*np.eye(covar.shape[0])
hollow = covar-diag
d_det = np.linalg.slogdet(diag)[1]
h_det = np.linalg.slogdet(hollow)[1]
print d_det
print h_det
plt.figure(figsize=(11,8))
plt.subplot(121)
plt.imshow(diag)
plt.clim([0, np.max(covar)])
plt.title('Determinant of on-diagonal: ' + str(d_det))
plt.subplot(122)
plt.imshow(hollow)
plt.clim([0, np.max(covar)])
plt.title('Determinant of off-diagonal: ' + str(h_det))
plt.show()
print "Ratio of on and off-diagonal determinants: " + str(d_det/h_det)
| apache-2.0 | 4,933,376,767,457,469,000 | 23.596774 | 70 | 0.70623 | false | 2.602389 | false | false | false |
SUSE/kiwi | kiwi/storage/raid_device.py | 1 | 4198 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import os
import logging
# project
from kiwi.command import Command
from kiwi.storage.device_provider import DeviceProvider
from kiwi.storage.mapped_device import MappedDevice
from kiwi.exceptions import (
KiwiRaidSetupError
)
log = logging.getLogger('kiwi')
class RaidDevice(DeviceProvider):
"""
**Implement raid setup on a storage device**
:param object storage_provider: Instance of class based on DeviceProvider
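
    Example (illustrative sketch; assumes ``storage_provider`` is an already
    set up DeviceProvider based instance and the config path is arbitrary)::

        raid = RaidDevice(storage_provider)
        raid.create_degraded_raid(raid_level='mirroring')
        device = raid.get_device()
        raid.create_raid_config('/etc/mdadm.conf')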
"""
def __init__(self, storage_provider):
# bind the underlaying block device providing class instance
# to this object (e.g loop) if present. This is done to guarantee
# the correct destructor order when the device should be released.
self.storage_provider = storage_provider
self.raid_level_map = {
'mirroring': '1',
'striping': '0'
}
self.raid_device = None
def get_device(self):
"""
Instance of MappedDevice providing the raid device
:return: mapped raid device
:rtype: MappedDevice
"""
if self.raid_device:
return MappedDevice(
device=self.raid_device, device_provider=self
)
def create_degraded_raid(self, raid_level):
"""
Create a raid array in degraded mode with one device missing.
This only works in the raid levels 0(striping) and 1(mirroring)
:param string raid_level: raid level name
"""
if raid_level not in self.raid_level_map:
raise KiwiRaidSetupError(
'Only raid levels 0(striping) and 1(mirroring) are supported'
)
raid_device = None
for raid_id in range(9):
raid_device = '/dev/md' + format(raid_id)
if os.path.exists(raid_device):
raid_device = None
else:
break
if not raid_device:
raise KiwiRaidSetupError(
'Could not find free raid device in range md0-8'
)
log.info(
'Creating raid array in %s mode as %s',
raid_level, raid_device
)
Command.run(
[
'mdadm', '--create', '--run', raid_device,
'--level', self.raid_level_map[raid_level],
'--raid-disks', '2',
self.storage_provider.get_device(), 'missing'
]
)
self.raid_device = raid_device
def create_raid_config(self, filename):
"""
Create mdadm config file from mdadm request
:param string filename: config file name
"""
mdadm_call = Command.run(
['mdadm', '-Db', self.raid_device]
)
with open(filename, 'w') as mdadmconf:
mdadmconf.write(mdadm_call.output)
def is_loop(self):
"""
Check if storage provider is loop based
Return loop status from base storage provider
:return: True or False
:rtype: bool
"""
return self.storage_provider.is_loop()
def __del__(self):
if self.raid_device:
log.info('Cleaning up %s instance', type(self).__name__)
try:
Command.run(
['mdadm', '--stop', self.raid_device]
)
except Exception:
log.warning(
'Shutdown of raid device failed, %s still busy',
self.raid_device
)
| gpl-3.0 | -406,493,287,530,871,200 | 30.096296 | 77 | 0.585279 | false | 4.111655 | false | false | false |
me-systeme/gsv8pypi | GSV6_FrameRouter.py | 1 | 5296 | # -*- coding: utf-8 -*-
__author__ = 'Dennis Rump'
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) 2015 Dennis Rump
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Hiermit wird unentgeltlich, jeder Person, die eine Kopie der Software
# und der zugehörigen Dokumentationen (die "Software") erhält, die
# Erlaubnis erteilt, uneingeschränkt zu benutzen, inklusive und ohne
# Ausnahme, dem Recht, sie zu verwenden, kopieren, ändern, fusionieren,
# verlegen, verbreiten, unter-lizenzieren und/oder zu verkaufen, und
# Personen, die diese Software erhalten, diese Rechte zu geben, unter
# den folgenden Bedingungen:
#
# Der obige Urheberrechtsvermerk und dieser Erlaubnisvermerk sind in
# alle Kopien oder Teilkopien der Software beizulegen.
#
# DIE SOFTWARE WIRD OHNE JEDE AUSDRÜCKLICHE ODER IMPLIZIERTE GARANTIE
# BEREITGESTELLT, EINSCHLIESSLICH DER GARANTIE ZUR BENUTZUNG FÜR DEN
# VORGESEHENEN ODER EINEM BESTIMMTEN ZWECK SOWIE JEGLICHER
# RECHTSVERLETZUNG, JEDOCH NICHT DARAUF BESCHRÄNKT. IN KEINEM FALL SIND
# DIE AUTOREN ODER COPYRIGHTINHABER FÜR JEGLICHEN SCHADEN ODER SONSTIGE
# ANSPRUCH HAFTBAR ZU MACHEN, OB INFOLGE DER ERFÜLLUNG VON EINEM
# VERTRAG, EINEM DELIKT ODER ANDERS IM ZUSAMMENHANG MIT DER BENUTZUNG
# ODER SONSTIGE VERWENDUNG DER SOFTWARE ENTSTANDEN.
#
###############################################################################
import logging
import threading
from Queue import Queue, Empty
from GSV6_MessFrameHandler import MessFrameHandler
class FrameRouter(threading.Thread):
lock = threading.Lock()
#def __init__(self, frameQueue, antwortQueue, messertRotatingQueue, gsv6Lib):
def __init__(self, frameQueue, antwortQueue, _lastMesswert, gsv6Lib):
threading.Thread.__init__(self)
self.frameQueue = frameQueue
self.antwortQueue = antwortQueue
# self.messertRotatingQueue = messertRotatingQueue
self.lastMesswert = _lastMesswert
self.gsv6 = gsv6Lib
self.running = False
# self.messFrameEventHandler = MessFrameHandler(self.messertRotatingQueue, self.gsv6)
self.messFrameEventHandler = MessFrameHandler(self.lastMesswert, self.gsv6)
# self.antwortFrameEventHandler = AntwortFrameHandler(self.gsv6, self.antwortQueue, self.messFrameEventHandler)
# fallback, this flag kills this thread if main thread killed
self.daemon = True
def run(self):
        # worker thread loop: route incoming AntwortFrames (responses) and MessFrames (measurements)
FrameRouter.lock.acquire()
self.running = True
FrameRouter.lock.release()
logging.getLogger('gsv8.FrameRouter').info('started')
# enter rooter loop
while self.running:
try:
# newFrame = self.frameQueue.popleft()
newFrame = self.frameQueue.get()
except IndexError:
pass
            except Empty:
pass
else:
logging.getLogger('gsv8.FrameRouter').debug('new Frame: ' + newFrame.toString())
if newFrame.getFrameType() == 0:
# MesswertFrame
                    logging.getLogger('gsv8.FrameRouter').debug('measurement frame received')
self.messFrameEventHandler.computeFrame(newFrame)
elif newFrame.getFrameType() == 1:
                    logging.getLogger('gsv8').debug("response frame received.")
# AntwortFrame
# self.antwortFrameEventHandler.computeFrame(newFrame)
self.antwortQueue.put(newFrame)
else:
# error
logging.getLogger('gsv8.FrameRouter').debug(
'nothing to do with an FrameType != Messwert/Antwort')
logging.getLogger('gsv8.FrameRouter').debug('exit')
def stop(self):
FrameRouter.lock.acquire()
self.running = False
FrameRouter.lock.release()
def startCSVRecording(self, csvFilepath, prefix):
self.messFrameEventHandler.startRecording(csvFilepath, prefix)
def stopCSVRecording(self):
self.messFrameEventHandler.stopRecording()
def isRecording(self):
return self.messFrameEventHandler.doRecording | mit | -4,542,068,685,750,619,600 | 43.066667 | 119 | 0.676187 | false | 3.527018 | false | false | false |
wq/wq.io | itertable/loaders.py | 1 | 4908 | from __future__ import print_function
import requests
try:
# Python 2 (uses str)
from StringIO import StringIO
except ImportError:
# Python 3 (Python 2 equivalent uses unicode)
from io import StringIO
from io import BytesIO
from .version import VERSION
from .exceptions import LoadFailed
from zipfile import ZipFile
class BaseLoader(object):
no_pickle_loader = ['file']
empty_file = None
def load(self):
raise NotImplementedError
class FileLoader(BaseLoader):
filename = None
@property
def read_mode(self):
return 'rb' if self.binary else 'r'
@property
def write_mode(self):
return 'wb+' if self.binary else 'w+'
def load(self):
try:
self.file = open(self.filename, self.read_mode)
self.empty_file = False
except IOError:
if self.binary:
self.file = BytesIO()
else:
self.file = StringIO()
self.empty_file = True
def save(self):
file = open(self.filename, self.write_mode)
self.dump(file)
file.close()
class Zipper(object):
inner_filename = None
inner_binary = False
def unzip_file(self):
zipfile = ZipFile(self.file)
inner_file = zipfile.read(
self.get_inner_filename(zipfile)
)
if self.inner_binary:
self.file = BytesIO(inner_file)
else:
self.file = StringIO(inner_file.decode('utf-8'))
zipfile.fp.close()
zipfile.close()
def get_inner_filename(self, zipfile):
if self.inner_filename:
return self.inner_filename
names = zipfile.namelist()
if len(names) == 1:
return names[0]
zipfile.fp.close()
zipfile.close()
raise LoadFailed("Multiple Inner Files!")
class ZipFileLoader(Zipper, FileLoader):
binary = True
def load(self):
super(ZipFileLoader, self).load()
self.unzip_file()
class StringLoader(BaseLoader):
string = ""
@property
def _io_class(self):
return BytesIO if self.binary else StringIO
def load(self):
if self.binary and not self.string:
self.string = b''
self.file = self._io_class(self.string)
def save(self):
file = self._io_class()
self.dump(file)
self.string = file.getvalue()
file.close()
class NetLoader(StringLoader):
"NetLoader: opens HTTP/REST resources for use in IterTable"
username = None
password = None
debug = False
url = None
client = requests
@property
def user_agent(self):
return "IterTable/%s (%s)" % (
VERSION,
requests.utils.default_user_agent()
)
@property
def headers(self):
return {
'User-Agent': self.user_agent,
}
def load(self, **kwargs):
result = self.GET()
self.file = self._io_class(result)
def req(self, url=None, method=None, params=None, body=None, headers={}):
if url is None:
url = self.url
if url is None:
raise LoadFailed("No URL provided")
if params is None:
params = getattr(self, 'params', None)
if isinstance(params, str):
url += '?' + params
params = None
if self.debug:
if params:
from requests.compat import urlencode
debug_url = url + '?' + urlencode(params, doseq=True)
else:
debug_url = url
self.debug_string = "%s: %s" % (method, debug_url)
print(self.debug_string)
if self.username is not None and self.password is not None:
auth = (self.username, self.password)
else:
auth = None
all_headers = self.headers.copy()
all_headers.update(headers)
resp = self.client.request(
method, url,
params=params,
headers=all_headers,
auth=auth,
data=body,
)
resp.connection.close()
if resp.status_code < 200 or resp.status_code > 299:
raise LoadFailed(
resp.text,
path=url,
code=resp.status_code,
)
if self.binary:
return resp.content
else:
return resp.text
def GET(self, **kwargs):
return self.req(method='GET', **kwargs)
def POST(self, **kwargs):
return self.req(method='POST', **kwargs)
def PUT(self, **kwargs):
return self.req(method='PUT', **kwargs)
def DELETE(self, **kwargs):
return self.req(method='DELETE', **kwargs)
class ZipNetLoader(Zipper, NetLoader):
binary = True
def load(self):
super(ZipNetLoader, self).load()
self.unzip_file()
| mit | -7,850,386,608,032,571,000 | 23.41791 | 77 | 0.556031 | false | 4.134794 | false | false | false |
ulikoehler/UliEngineering | UliEngineering/SignalProcessing/Resampling.py | 1 | 9018 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Utilities for selecting and finding specific attributes in datasets
"""
import math
import functools
import numpy as np
import bisect
import concurrent.futures
import scipy.interpolate
from UliEngineering.Utils.Concurrency import QueuedThreadExecutor
from .Utils import LinRange
__all__ = ["resample_discard", "resampled_timespace",
"parallel_resample", "signal_samplerate",
"serial_resample"]
def signal_samplerate(t, ignore_percentile=10, mean_method=np.mean):
"""
Compute the samplerate of a signal
using a quantile-based method to exclude
outliers (in the time delta domain) and
computes the by 1 / mean
Using a low ignore_percentile value is only
desirable if the dataset is small and therefore
does not average properly due to lack of samples.
In most cases, using a high ignore percentile
like 10 is recommended.
Returns a float (samplerate) [1/s].
If t is a LinRange() object, returns t.samplerate()
Parameters
----------
t : numpy array of datetime64 type (or LinRange)
Timestamps associated with the signal
ignore_percentile : number
This percentile of outliers is ignored
for the mean calculation at both the top
and the bottom end.
"5" means considering the 5th...95th percentile
for averaging.
mean_method : unary function
Used to compute the mean after excluding outliers.
Except for special usecases, arithmetic mean (np.mean)
is recommended.
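
    Example
    -------
    Illustrative only, assuming ``timestamps`` is a numpy datetime64 array
    belonging to a roughly 1 kHz signal::

        fs = signal_samplerate(timestamps)  # => approximately 1000.0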
"""
# Special rule for LinRange objects that have a defined samplerate
if isinstance(t, LinRange):
return t.samplerate()
tdelta = np.diff(t)
    lower = np.percentile(tdelta, ignore_percentile)
    upper = np.percentile(tdelta, 100 - ignore_percentile)
    filtered = tdelta[np.logical_and(tdelta >= lower, tdelta <= upper)]
# Filtered is too small if the sample periods are too uniform in the array
if len(filtered) < 0.1 * len(tdelta):
filtered = tdelta
mean_sample_period = mean_method(filtered)
mean_sample_period = mean_sample_period.astype("timedelta64[ns]").astype(np.int64)
return 1e9 / mean_sample_period # 1e9 : nanoseconds
def resample_discard(arr, divisor, ofs=0):
"""
Resample with an integral divisor, discarding all other samples.
Returns a view of the data.
Very fast as this doesn't need to read the data.
"""
return arr[ofs::divisor]
def resampled_timespace(t, new_samplerate, assume_sorted=True, time_factor=1e6):
"""
Compute the new timespace after resampling a input timestamp array
(not neccessarily lazy)
Parameters
----------
t : numpy array-like
The source timestamps.
If these are numbers, you must supply time_factor to
specify the resolution of the number.
        If they are of dtype datetime64, time_factor is ignored.
new_samplerate : float
The new datarate in Hz
assume_sorted : bool
If this is True, the code assumes the source
timestamp array is monotonically increasing, i.e.
the lowest timestamp comes first and the highest last.
If this is False, the code determines
the min/max value by reading the entire array.
time_factor : float
Ignored if t is of dtype datetime64
Defines what timestamps in the source (and result)
array means. This is required to interpret new_samplerate.
If time_factor=1e6, it means that a difference of 1.0
in two timestamps means a difference of 1/1e6 seconds.
Returns
-------
    A LinRange() (acts like a numpy array but doesn't consume any memory)
    that represents the new timespace.
"""
if len(t) == 0:
raise ValueError("Empty time array given - can not perform any resampling")
if len(t) == 1:
raise ValueError("Time array has only one value - can not perform any resampling")
# Handle numpy datetime64 input
if "datetime64" in t.dtype.name:
t = t.astype('datetime64[ns]').astype(np.int64)
time_factor = 1e9
# Compute time endpoints
dst_tdelta = time_factor / new_samplerate
startt, endt = (t[0], t[-1]) if assume_sorted else (np.min(t), np.max(t))
src_tdelta = endt - startt
if src_tdelta < dst_tdelta:
raise ValueError("The time delta is smaller than a single sample - can not perform resampling")
# Use a lazy linrange to represent time interval
return LinRange.range(startt, endt, dst_tdelta)
def __parallel_resample_worker(torig, tnew, y, out, i, chunksize, ovp_size, prefilter, fitkind):
# Find the time range in the target time
t_target = tnew[i:i + chunksize]
# Find the time range in the source time
srcstart = bisect.bisect_left(torig, t_target[0])
    srcend = bisect.bisect_right(torig, t_target[-1])
    # Compute start and end index with overprovisioning
    # This might be out of range of the src array but slicing will ignore that
    srcstart_ovp = max(0, srcstart - ovp_size)  # Must not get negative indices
    srcend_ovp = srcend + ovp_size
# Compute source slices
tsrc_chunk = torig[srcstart_ovp:srcend_ovp]
ysrc_chunk = y[srcstart_ovp:srcend_ovp]
# Perform prefilter
if prefilter is not None:
tsrc_chunk, ysrc_chunk = prefilter(tsrc_chunk, ysrc_chunk)
# Compute interpolating spline (might also be piecewise linear)...
    fit = scipy.interpolate.interp1d(tsrc_chunk, ysrc_chunk, kind=fitkind)
# ... and evaluate
out[i:i + chunksize] = fit(t_target)
def serial_resample(t, y, new_samplerate, out=None, prefilter=None,
time_factor=1e6,
fitkind='linear', chunksize=10000,
overprovisioning_factor=0.01):
"""
A resampler that uses scipy.interpolate.interp1d but splits the
input into chunks that can be processed.
The chunksize is applied to the output timebase.
The input x array is assumed to be sorted, facilitating binary search.
If the output array is not given, it is automatically allocated with the correct size.
The chunk workers are executed in parallel in a concurrent.futures thread pool.
In order to account for vector end effects, an overprovisioning factor
can be provided so that a fraction of the chunksize is added at both ends of
the source chunk.
    An overprovisioning factor of 0.01 means that 1% of the chunksize is added on the left
    and 1% is added on the right. This does not affect the leftmost and rightmost
    borders of the input array.
Returns the output array.
Applies an optional prefilter to the input data while resampling. If the timebase of
the input data is off significantly, this might produce unexpected results.
The prefilter must be a reentrant functor that takes (t, x) data and returns
a (t, x) tuple. The returned tuple can be of arbitrary size (assuming t and x
have the same length) but its t range must include the t range that is being interpolated.
Note that the prefilter is performed after overprovisioning, so setting a higher
overprovisioning factor (see below) might help dealing with prefilters that
return too small arrays, however at the start and the end of the input array,
no overprovisioning values can be added.
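
    Example (illustrative sketch; the values are made up and assume the default
    microsecond timebase, i.e. time_factor=1e6)::

        t = np.arange(0, 1000000, 500)          # 2 kHz source timebase, 1 s long
        y = np.sin(2 * np.pi * 50.0 * t / 1e6)  # 50 Hz sine
        out = serial_resample(t, y, new_samplerate=1000.0, chunksize=500)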
"""
new_t = resampled_timespace(t, new_samplerate, time_factor=time_factor)
# Lazily compute the new timespan
if out is None:
out = np.zeros(len(new_t))
ovp_size = int(math.floor(overprovisioning_factor * chunksize))
# How many chunks do we have to process?
    for chunk_index in range(len(new_t) // chunksize):
        __parallel_resample_worker(i=chunk_index * chunksize, torig=t, tnew=new_t,
                                   y=y, out=out, chunksize=chunksize,
                                   ovp_size=ovp_size, prefilter=prefilter,
                                   fitkind=fitkind)
return out
def parallel_resample(t, y, new_samplerate, out=None, prefilter=None,
executor=None, time_factor=1e6,
fitkind='linear', chunksize=10000,
overprovisioning_factor=0.01):
"""
Parallel variant of serial_resample
"""
new_t = resampled_timespace(t, new_samplerate, time_factor=time_factor)
# Lazily compute the new timespan
if out is None:
out = np.zeros(len(new_t))
if executor is None:
executor = QueuedThreadExecutor()
ovp_size = int(math.floor(overprovisioning_factor * chunksize))
# How many chunks do we have to process?
numchunks = len(new_t) // chunksize
# Bind constant arguments
f = functools.partial(__parallel_resample_worker, torig=t, tnew=new_t,
y=y, out=out, chunksize=chunksize,
ovp_size=ovp_size, prefilter=prefilter,
fitkind=fitkind)
    futures = [executor.submit(f, i=i * chunksize) for i in range(numchunks)]
# Wait for futures to finish
concurrent.futures.wait(futures)
return out
| apache-2.0 | 8,066,749,981,981,142,000 | 39.258929 | 103 | 0.675094 | false | 3.92599 | false | false | false |
Azure/azure-sdk-for-python | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2016_08_01/models/_models_py3.py | 1 | 295755 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Any, Dict, List, Optional, Union
import msrest.serialization
from ._web_site_management_client_enums import *
class ApiDefinitionInfo(msrest.serialization.Model):
"""Information about the formal API definition for the app.
:param url: The URL of the API definition.
:type url: str
"""
_attribute_map = {
'url': {'key': 'url', 'type': 'str'},
}
def __init__(
self,
*,
url: Optional[str] = None,
**kwargs
):
super(ApiDefinitionInfo, self).__init__(**kwargs)
self.url = url
class ApplicationLogsConfig(msrest.serialization.Model):
"""Application logs configuration.
:param file_system: Application logs to file system configuration.
:type file_system: ~azure.mgmt.web.v2016_08_01.models.FileSystemApplicationLogsConfig
:param azure_table_storage: Application logs to azure table storage configuration.
:type azure_table_storage:
~azure.mgmt.web.v2016_08_01.models.AzureTableStorageApplicationLogsConfig
:param azure_blob_storage: Application logs to blob storage configuration.
:type azure_blob_storage:
~azure.mgmt.web.v2016_08_01.models.AzureBlobStorageApplicationLogsConfig
"""
_attribute_map = {
'file_system': {'key': 'fileSystem', 'type': 'FileSystemApplicationLogsConfig'},
'azure_table_storage': {'key': 'azureTableStorage', 'type': 'AzureTableStorageApplicationLogsConfig'},
'azure_blob_storage': {'key': 'azureBlobStorage', 'type': 'AzureBlobStorageApplicationLogsConfig'},
}
def __init__(
self,
*,
file_system: Optional["FileSystemApplicationLogsConfig"] = None,
azure_table_storage: Optional["AzureTableStorageApplicationLogsConfig"] = None,
azure_blob_storage: Optional["AzureBlobStorageApplicationLogsConfig"] = None,
**kwargs
):
super(ApplicationLogsConfig, self).__init__(**kwargs)
self.file_system = file_system
self.azure_table_storage = azure_table_storage
self.azure_blob_storage = azure_blob_storage
class AutoHealActions(msrest.serialization.Model):
"""Actions which to take by the auto-heal module when a rule is triggered.
:param action_type: Predefined action to be taken. Possible values include: "Recycle",
"LogEvent", "CustomAction".
:type action_type: str or ~azure.mgmt.web.v2016_08_01.models.AutoHealActionType
:param custom_action: Custom action to be taken.
:type custom_action: ~azure.mgmt.web.v2016_08_01.models.AutoHealCustomAction
:param min_process_execution_time: Minimum time the process must execute
before taking the action.
:type min_process_execution_time: str
"""
_attribute_map = {
'action_type': {'key': 'actionType', 'type': 'str'},
'custom_action': {'key': 'customAction', 'type': 'AutoHealCustomAction'},
'min_process_execution_time': {'key': 'minProcessExecutionTime', 'type': 'str'},
}
def __init__(
self,
*,
action_type: Optional[Union[str, "AutoHealActionType"]] = None,
custom_action: Optional["AutoHealCustomAction"] = None,
min_process_execution_time: Optional[str] = None,
**kwargs
):
super(AutoHealActions, self).__init__(**kwargs)
self.action_type = action_type
self.custom_action = custom_action
self.min_process_execution_time = min_process_execution_time
class AutoHealCustomAction(msrest.serialization.Model):
"""Custom action to be executed
when an auto heal rule is triggered.
:param exe: Executable to be run.
:type exe: str
:param parameters: Parameters for the executable.
:type parameters: str
"""
_attribute_map = {
'exe': {'key': 'exe', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'str'},
}
def __init__(
self,
*,
exe: Optional[str] = None,
parameters: Optional[str] = None,
**kwargs
):
super(AutoHealCustomAction, self).__init__(**kwargs)
self.exe = exe
self.parameters = parameters
class AutoHealRules(msrest.serialization.Model):
"""Rules that can be defined for auto-heal.
:param triggers: Conditions that describe when to execute the auto-heal actions.
:type triggers: ~azure.mgmt.web.v2016_08_01.models.AutoHealTriggers
:param actions: Actions to be executed when a rule is triggered.
:type actions: ~azure.mgmt.web.v2016_08_01.models.AutoHealActions
"""
_attribute_map = {
'triggers': {'key': 'triggers', 'type': 'AutoHealTriggers'},
'actions': {'key': 'actions', 'type': 'AutoHealActions'},
}
def __init__(
self,
*,
triggers: Optional["AutoHealTriggers"] = None,
actions: Optional["AutoHealActions"] = None,
**kwargs
):
super(AutoHealRules, self).__init__(**kwargs)
self.triggers = triggers
self.actions = actions
class AutoHealTriggers(msrest.serialization.Model):
"""Triggers for auto-heal.
:param requests: A rule based on total requests.
:type requests: ~azure.mgmt.web.v2016_08_01.models.RequestsBasedTrigger
:param private_bytes_in_kb: A rule based on private bytes.
:type private_bytes_in_kb: int
:param status_codes: A rule based on status codes.
:type status_codes: list[~azure.mgmt.web.v2016_08_01.models.StatusCodesBasedTrigger]
:param slow_requests: A rule based on request execution time.
:type slow_requests: ~azure.mgmt.web.v2016_08_01.models.SlowRequestsBasedTrigger
"""
_attribute_map = {
'requests': {'key': 'requests', 'type': 'RequestsBasedTrigger'},
'private_bytes_in_kb': {'key': 'privateBytesInKB', 'type': 'int'},
'status_codes': {'key': 'statusCodes', 'type': '[StatusCodesBasedTrigger]'},
'slow_requests': {'key': 'slowRequests', 'type': 'SlowRequestsBasedTrigger'},
}
def __init__(
self,
*,
requests: Optional["RequestsBasedTrigger"] = None,
private_bytes_in_kb: Optional[int] = None,
status_codes: Optional[List["StatusCodesBasedTrigger"]] = None,
slow_requests: Optional["SlowRequestsBasedTrigger"] = None,
**kwargs
):
super(AutoHealTriggers, self).__init__(**kwargs)
self.requests = requests
self.private_bytes_in_kb = private_bytes_in_kb
self.status_codes = status_codes
self.slow_requests = slow_requests
class AzureBlobStorageApplicationLogsConfig(msrest.serialization.Model):
"""Application logs azure blob storage configuration.
:param level: Log level. Possible values include: "Off", "Verbose", "Information", "Warning",
"Error".
:type level: str or ~azure.mgmt.web.v2016_08_01.models.LogLevel
:param sas_url: SAS url to a azure blob container with read/write/list/delete permissions.
:type sas_url: str
:param retention_in_days: Retention in days.
Remove blobs older than X days.
0 or lower means no retention.
:type retention_in_days: int
"""
_attribute_map = {
'level': {'key': 'level', 'type': 'str'},
'sas_url': {'key': 'sasUrl', 'type': 'str'},
'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
}
def __init__(
self,
*,
level: Optional[Union[str, "LogLevel"]] = None,
sas_url: Optional[str] = None,
retention_in_days: Optional[int] = None,
**kwargs
):
super(AzureBlobStorageApplicationLogsConfig, self).__init__(**kwargs)
self.level = level
self.sas_url = sas_url
self.retention_in_days = retention_in_days
class AzureBlobStorageHttpLogsConfig(msrest.serialization.Model):
"""Http logs to azure blob storage configuration.
:param sas_url: SAS url to a azure blob container with read/write/list/delete permissions.
:type sas_url: str
:param retention_in_days: Retention in days.
Remove blobs older than X days.
0 or lower means no retention.
:type retention_in_days: int
:param enabled: True if configuration is enabled, false if it is disabled and null if
configuration is not set.
:type enabled: bool
"""
_attribute_map = {
'sas_url': {'key': 'sasUrl', 'type': 'str'},
'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(
self,
*,
sas_url: Optional[str] = None,
retention_in_days: Optional[int] = None,
enabled: Optional[bool] = None,
**kwargs
):
super(AzureBlobStorageHttpLogsConfig, self).__init__(**kwargs)
self.sas_url = sas_url
self.retention_in_days = retention_in_days
self.enabled = enabled
class AzureTableStorageApplicationLogsConfig(msrest.serialization.Model):
"""Application logs to Azure table storage configuration.
All required parameters must be populated in order to send to Azure.
:param level: Log level. Possible values include: "Off", "Verbose", "Information", "Warning",
"Error".
:type level: str or ~azure.mgmt.web.v2016_08_01.models.LogLevel
:param sas_url: Required. SAS URL to an Azure table with add/query/delete permissions.
:type sas_url: str
"""
_validation = {
'sas_url': {'required': True},
}
_attribute_map = {
'level': {'key': 'level', 'type': 'str'},
'sas_url': {'key': 'sasUrl', 'type': 'str'},
}
def __init__(
self,
*,
sas_url: str,
level: Optional[Union[str, "LogLevel"]] = None,
**kwargs
):
super(AzureTableStorageApplicationLogsConfig, self).__init__(**kwargs)
self.level = level
self.sas_url = sas_url
class ProxyOnlyResource(msrest.serialization.Model):
"""Azure proxy only resource. This resource is not tracked by Azure Resource Manager.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(ProxyOnlyResource, self).__init__(**kwargs)
self.id = None
self.name = None
self.kind = kind
self.type = None
class BackupItem(ProxyOnlyResource):
"""Backup description.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar backup_id: Id of the backup.
:vartype backup_id: int
:ivar storage_account_url: SAS URL for the storage account container which contains this
backup.
:vartype storage_account_url: str
:ivar blob_name: Name of the blob which contains data for this backup.
:vartype blob_name: str
:ivar name_properties_name: Name of this backup.
:vartype name_properties_name: str
:ivar status: Backup status. Possible values include: "InProgress", "Failed", "Succeeded",
"TimedOut", "Created", "Skipped", "PartiallySucceeded", "DeleteInProgress", "DeleteFailed",
"Deleted".
:vartype status: str or ~azure.mgmt.web.v2016_08_01.models.BackupItemStatus
:ivar size_in_bytes: Size of the backup in bytes.
:vartype size_in_bytes: long
:ivar created: Timestamp of the backup creation.
:vartype created: ~datetime.datetime
:ivar log: Details regarding this backup. Might contain an error message.
:vartype log: str
:ivar databases: List of databases included in the backup.
:vartype databases: list[~azure.mgmt.web.v2016_08_01.models.DatabaseBackupSetting]
:ivar scheduled: True if this backup has been created due to a schedule being triggered.
:vartype scheduled: bool
:ivar last_restore_time_stamp: Timestamp of a last restore operation which used this backup.
:vartype last_restore_time_stamp: ~datetime.datetime
:ivar finished_time_stamp: Timestamp when this backup finished.
:vartype finished_time_stamp: ~datetime.datetime
:ivar correlation_id: Unique correlation identifier. Please use this along with the timestamp
while communicating with Azure support.
:vartype correlation_id: str
:ivar website_size_in_bytes: Size of the original web app which has been backed up.
:vartype website_size_in_bytes: long
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'backup_id': {'readonly': True},
'storage_account_url': {'readonly': True},
'blob_name': {'readonly': True},
'name_properties_name': {'readonly': True},
'status': {'readonly': True},
'size_in_bytes': {'readonly': True},
'created': {'readonly': True},
'log': {'readonly': True},
'databases': {'readonly': True},
'scheduled': {'readonly': True},
'last_restore_time_stamp': {'readonly': True},
'finished_time_stamp': {'readonly': True},
'correlation_id': {'readonly': True},
'website_size_in_bytes': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'backup_id': {'key': 'properties.id', 'type': 'int'},
'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
'blob_name': {'key': 'properties.blobName', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'str'},
'size_in_bytes': {'key': 'properties.sizeInBytes', 'type': 'long'},
'created': {'key': 'properties.created', 'type': 'iso-8601'},
'log': {'key': 'properties.log', 'type': 'str'},
'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
'scheduled': {'key': 'properties.scheduled', 'type': 'bool'},
'last_restore_time_stamp': {'key': 'properties.lastRestoreTimeStamp', 'type': 'iso-8601'},
'finished_time_stamp': {'key': 'properties.finishedTimeStamp', 'type': 'iso-8601'},
'correlation_id': {'key': 'properties.correlationId', 'type': 'str'},
'website_size_in_bytes': {'key': 'properties.websiteSizeInBytes', 'type': 'long'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(BackupItem, self).__init__(kind=kind, **kwargs)
self.backup_id = None
self.storage_account_url = None
self.blob_name = None
self.name_properties_name = None
self.status = None
self.size_in_bytes = None
self.created = None
self.log = None
self.databases = None
self.scheduled = None
self.last_restore_time_stamp = None
self.finished_time_stamp = None
self.correlation_id = None
self.website_size_in_bytes = None
class BackupItemCollection(msrest.serialization.Model):
"""Collection of backup items.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.BackupItem]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[BackupItem]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["BackupItem"],
**kwargs
):
super(BackupItemCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class BackupRequest(ProxyOnlyResource):
"""Description of a backup which will be performed.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param backup_request_name: Name of the backup.
:type backup_request_name: str
:param enabled: True if the backup schedule is enabled (must be included in that case), false
if the backup schedule should be disabled.
:type enabled: bool
:param storage_account_url: SAS URL to the container.
:type storage_account_url: str
:param backup_schedule: Schedule for the backup if it is executed periodically.
:type backup_schedule: ~azure.mgmt.web.v2016_08_01.models.BackupSchedule
:param databases: Databases included in the backup.
:type databases: list[~azure.mgmt.web.v2016_08_01.models.DatabaseBackupSetting]
:param type_properties_type: Type of the backup. Possible values include: "Default", "Clone",
"Relocation", "Snapshot".
:type type_properties_type: str or
~azure.mgmt.web.v2016_08_01.models.BackupRestoreOperationType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'backup_request_name': {'key': 'properties.name', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
'backup_schedule': {'key': 'properties.backupSchedule', 'type': 'BackupSchedule'},
'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
'type_properties_type': {'key': 'properties.type', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
backup_request_name: Optional[str] = None,
enabled: Optional[bool] = None,
storage_account_url: Optional[str] = None,
backup_schedule: Optional["BackupSchedule"] = None,
databases: Optional[List["DatabaseBackupSetting"]] = None,
type_properties_type: Optional[Union[str, "BackupRestoreOperationType"]] = None,
**kwargs
):
super(BackupRequest, self).__init__(kind=kind, **kwargs)
self.backup_request_name = backup_request_name
self.enabled = enabled
self.storage_account_url = storage_account_url
self.backup_schedule = backup_schedule
self.databases = databases
self.type_properties_type = type_properties_type
class BackupSchedule(msrest.serialization.Model):
"""Description of a backup schedule. Describes how often should be the backup performed and what should be the retention policy.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param frequency_interval: Required. How often the backup should be executed (e.g. for weekly
backup, this should be set to 7 and FrequencyUnit should be set to Day).
:type frequency_interval: int
:param frequency_unit: Required. The unit of time for how often the backup should be executed
(e.g. for weekly backup, this should be set to Day and FrequencyInterval should be set to 7).
Possible values include: "Day", "Hour". Default value: "Day".
:type frequency_unit: str or ~azure.mgmt.web.v2016_08_01.models.FrequencyUnit
:param keep_at_least_one_backup: Required. True if the retention policy should always keep at
least one backup in the storage account, regardless how old it is; false otherwise.
:type keep_at_least_one_backup: bool
:param retention_period_in_days: Required. After how many days backups should be deleted.
:type retention_period_in_days: int
:param start_time: When the schedule should start working.
:type start_time: ~datetime.datetime
:ivar last_execution_time: Last time when this schedule was triggered.
:vartype last_execution_time: ~datetime.datetime
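
    Illustrative example (values simply mirror the documented defaults; not an
    official sample)::

        schedule = BackupSchedule(
            frequency_interval=7,
            frequency_unit="Day",
            keep_at_least_one_backup=True,
            retention_period_in_days=30
        )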
"""
_validation = {
'frequency_interval': {'required': True},
'frequency_unit': {'required': True},
'keep_at_least_one_backup': {'required': True},
'retention_period_in_days': {'required': True},
'last_execution_time': {'readonly': True},
}
_attribute_map = {
'frequency_interval': {'key': 'frequencyInterval', 'type': 'int'},
'frequency_unit': {'key': 'frequencyUnit', 'type': 'str'},
'keep_at_least_one_backup': {'key': 'keepAtLeastOneBackup', 'type': 'bool'},
'retention_period_in_days': {'key': 'retentionPeriodInDays', 'type': 'int'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'last_execution_time': {'key': 'lastExecutionTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
frequency_interval: int = 7,
frequency_unit: Union[str, "FrequencyUnit"] = "Day",
keep_at_least_one_backup: bool = True,
retention_period_in_days: int = 30,
start_time: Optional[datetime.datetime] = None,
**kwargs
):
super(BackupSchedule, self).__init__(**kwargs)
self.frequency_interval = frequency_interval
self.frequency_unit = frequency_unit
self.keep_at_least_one_backup = keep_at_least_one_backup
self.retention_period_in_days = retention_period_in_days
self.start_time = start_time
self.last_execution_time = None
class CloningInfo(msrest.serialization.Model):
"""Information needed for cloning operation.
All required parameters must be populated in order to send to Azure.
:param correlation_id: Correlation ID of cloning operation. This ID ties multiple cloning
operations
together to use the same snapshot.
:type correlation_id: str
:param overwrite: :code:`<code>true</code>` to overwrite destination app; otherwise,
:code:`<code>false</code>`.
:type overwrite: bool
:param clone_custom_host_names: :code:`<code>true</code>` to clone custom hostnames from source
app; otherwise, :code:`<code>false</code>`.
:type clone_custom_host_names: bool
:param clone_source_control: :code:`<code>true</code>` to clone source control from source app;
otherwise, :code:`<code>false</code>`.
:type clone_source_control: bool
:param source_web_app_id: Required. ARM resource ID of the source app. App resource ID is of
the form
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}
for production slots and
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slotName}
for other slots.
:type source_web_app_id: str
:param hosting_environment: App Service Environment.
:type hosting_environment: str
:param app_settings_overrides: Application setting overrides for cloned app. If specified,
these settings override the settings cloned
from source app. Otherwise, application settings from source app are retained.
:type app_settings_overrides: dict[str, str]
:param configure_load_balancing: :code:`<code>true</code>` to configure load balancing for
source and destination app.
:type configure_load_balancing: bool
:param traffic_manager_profile_id: ARM resource ID of the Traffic Manager profile to use, if it
exists. Traffic Manager resource ID is of the form
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficManagerProfiles/{profileName}.
:type traffic_manager_profile_id: str
:param traffic_manager_profile_name: Name of Traffic Manager profile to create. This is only
needed if Traffic Manager profile does not already exist.
:type traffic_manager_profile_name: str
:param ignore_quotas: :code:`<code>true</code>` if quotas should be ignored; otherwise,
:code:`<code>false</code>`.
:type ignore_quotas: bool
"""
_validation = {
'source_web_app_id': {'required': True},
}
_attribute_map = {
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'clone_custom_host_names': {'key': 'cloneCustomHostNames', 'type': 'bool'},
'clone_source_control': {'key': 'cloneSourceControl', 'type': 'bool'},
'source_web_app_id': {'key': 'sourceWebAppId', 'type': 'str'},
'hosting_environment': {'key': 'hostingEnvironment', 'type': 'str'},
'app_settings_overrides': {'key': 'appSettingsOverrides', 'type': '{str}'},
'configure_load_balancing': {'key': 'configureLoadBalancing', 'type': 'bool'},
'traffic_manager_profile_id': {'key': 'trafficManagerProfileId', 'type': 'str'},
'traffic_manager_profile_name': {'key': 'trafficManagerProfileName', 'type': 'str'},
'ignore_quotas': {'key': 'ignoreQuotas', 'type': 'bool'},
}
def __init__(
self,
*,
source_web_app_id: str,
correlation_id: Optional[str] = None,
overwrite: Optional[bool] = None,
clone_custom_host_names: Optional[bool] = None,
clone_source_control: Optional[bool] = None,
hosting_environment: Optional[str] = None,
app_settings_overrides: Optional[Dict[str, str]] = None,
configure_load_balancing: Optional[bool] = None,
traffic_manager_profile_id: Optional[str] = None,
traffic_manager_profile_name: Optional[str] = None,
ignore_quotas: Optional[bool] = None,
**kwargs
):
super(CloningInfo, self).__init__(**kwargs)
self.correlation_id = correlation_id
self.overwrite = overwrite
self.clone_custom_host_names = clone_custom_host_names
self.clone_source_control = clone_source_control
self.source_web_app_id = source_web_app_id
self.hosting_environment = hosting_environment
self.app_settings_overrides = app_settings_overrides
self.configure_load_balancing = configure_load_balancing
self.traffic_manager_profile_id = traffic_manager_profile_id
self.traffic_manager_profile_name = traffic_manager_profile_name
self.ignore_quotas = ignore_quotas
class ConnectionStringDictionary(ProxyOnlyResource):
"""String dictionary resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param properties: Connection strings.
:type properties: dict[str, ~azure.mgmt.web.v2016_08_01.models.ConnStringValueTypePair]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{ConnStringValueTypePair}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
properties: Optional[Dict[str, "ConnStringValueTypePair"]] = None,
**kwargs
):
super(ConnectionStringDictionary, self).__init__(kind=kind, **kwargs)
self.properties = properties
class ConnStringInfo(msrest.serialization.Model):
"""Database connection string information.
:param name: Name of connection string.
:type name: str
:param connection_string: Connection string value.
:type connection_string: str
:param type: Type of database. Possible values include: "MySql", "SQLServer", "SQLAzure",
"Custom", "NotificationHub", "ServiceBus", "EventHub", "ApiHub", "DocDb", "RedisCache",
"PostgreSQL".
:type type: str or ~azure.mgmt.web.v2016_08_01.models.ConnectionStringType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
connection_string: Optional[str] = None,
type: Optional[Union[str, "ConnectionStringType"]] = None,
**kwargs
):
super(ConnStringInfo, self).__init__(**kwargs)
self.name = name
self.connection_string = connection_string
self.type = type
class ConnStringValueTypePair(msrest.serialization.Model):
"""Database connection string value to type pair.
All required parameters must be populated in order to send to Azure.
:param value: Required. Value of pair.
:type value: str
:param type: Required. Type of database. Possible values include: "MySql", "SQLServer",
"SQLAzure", "Custom", "NotificationHub", "ServiceBus", "EventHub", "ApiHub", "DocDb",
"RedisCache", "PostgreSQL".
:type type: str or ~azure.mgmt.web.v2016_08_01.models.ConnectionStringType
"""
_validation = {
'value': {'required': True},
'type': {'required': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
value: str,
type: Union[str, "ConnectionStringType"],
**kwargs
):
super(ConnStringValueTypePair, self).__init__(**kwargs)
self.value = value
self.type = type
class ContinuousWebJob(ProxyOnlyResource):
"""Continuous Web Job Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param status: Job status. Possible values include: "Initializing", "Starting", "Running",
"PendingRestart", "Stopped".
:type status: str or ~azure.mgmt.web.v2016_08_01.models.ContinuousWebJobStatus
:param detailed_status: Detailed status.
:type detailed_status: str
:param log_url: Log URL.
:type log_url: str
:ivar name_properties_name: Job name. Used as job identifier in ARM resource URI.
:vartype name_properties_name: str
:param run_command: Run command.
:type run_command: str
:param url: Job URL.
:type url: str
:param extra_info_url: Extra Info URL.
:type extra_info_url: str
:param job_type: Job type. Possible values include: "Continuous", "Triggered".
:type job_type: str or ~azure.mgmt.web.v2016_08_01.models.WebJobType
:param error: Error information.
:type error: str
:param using_sdk: Using SDK?.
:type using_sdk: bool
:param settings: Job settings.
:type settings: dict[str, any]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'str'},
'detailed_status': {'key': 'properties.detailedStatus', 'type': 'str'},
'log_url': {'key': 'properties.logUrl', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'run_command': {'key': 'properties.runCommand', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'extra_info_url': {'key': 'properties.extraInfoUrl', 'type': 'str'},
'job_type': {'key': 'properties.jobType', 'type': 'str'},
'error': {'key': 'properties.error', 'type': 'str'},
'using_sdk': {'key': 'properties.usingSdk', 'type': 'bool'},
'settings': {'key': 'properties.settings', 'type': '{object}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
status: Optional[Union[str, "ContinuousWebJobStatus"]] = None,
detailed_status: Optional[str] = None,
log_url: Optional[str] = None,
run_command: Optional[str] = None,
url: Optional[str] = None,
extra_info_url: Optional[str] = None,
job_type: Optional[Union[str, "WebJobType"]] = None,
error: Optional[str] = None,
using_sdk: Optional[bool] = None,
settings: Optional[Dict[str, Any]] = None,
**kwargs
):
super(ContinuousWebJob, self).__init__(kind=kind, **kwargs)
self.status = status
self.detailed_status = detailed_status
self.log_url = log_url
self.name_properties_name = None
self.run_command = run_command
self.url = url
self.extra_info_url = extra_info_url
self.job_type = job_type
self.error = error
self.using_sdk = using_sdk
self.settings = settings
class ContinuousWebJobCollection(msrest.serialization.Model):
"""Collection of Kudu continuous web job information elements.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ContinuousWebJob]
:param next_link: Link to next page of resources.
:type next_link: str
"""
_validation = {
'value': {'required': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ContinuousWebJob]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ContinuousWebJob"],
next_link: Optional[str] = None,
**kwargs
):
super(ContinuousWebJobCollection, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class CorsSettings(msrest.serialization.Model):
"""Cross-Origin Resource Sharing (CORS) settings for the app.
:param allowed_origins: Gets or sets the list of origins that should be allowed to make
cross-origin
calls (for example: http://example.com:12345). Use "*" to allow all.
:type allowed_origins: list[str]
"""
_attribute_map = {
'allowed_origins': {'key': 'allowedOrigins', 'type': '[str]'},
}
def __init__(
self,
*,
allowed_origins: Optional[List[str]] = None,
**kwargs
):
super(CorsSettings, self).__init__(**kwargs)
self.allowed_origins = allowed_origins
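# Illustrative sketch: allowing two specific origins (or "*" to allow all). The origin
# URLs are placeholders, not values required by the service.
#
#   cors = CorsSettings(allowed_origins=["https://contoso.com", "https://fabrikam.com"])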
class CsmPublishingProfileOptions(msrest.serialization.Model):
"""Publishing options for requested profile.
:param format: Name of the format. Valid values are:
FileZilla3
WebDeploy -- default
Ftp. Possible values include: "FileZilla3", "WebDeploy", "Ftp".
:type format: str or ~azure.mgmt.web.v2016_08_01.models.PublishingProfileFormat
"""
_attribute_map = {
'format': {'key': 'format', 'type': 'str'},
}
def __init__(
self,
*,
format: Optional[Union[str, "PublishingProfileFormat"]] = None,
**kwargs
):
super(CsmPublishingProfileOptions, self).__init__(**kwargs)
self.format = format
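# Illustrative sketch: requesting a publishing profile in FileZilla3 format; "WebDeploy"
# is the default when no format is specified.
#
#   options = CsmPublishingProfileOptions(format="FileZilla3")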
class CsmSlotEntity(msrest.serialization.Model):
"""Deployment slot parameters.
All required parameters must be populated in order to send to Azure.
:param target_slot: Required. Destination deployment slot during swap operation.
:type target_slot: str
:param preserve_vnet: Required. :code:`<code>true</code>` to preserve Virtual Network to the
slot during swap; otherwise, :code:`<code>false</code>`.
:type preserve_vnet: bool
"""
_validation = {
'target_slot': {'required': True},
'preserve_vnet': {'required': True},
}
_attribute_map = {
'target_slot': {'key': 'targetSlot', 'type': 'str'},
'preserve_vnet': {'key': 'preserveVnet', 'type': 'bool'},
}
def __init__(
self,
*,
target_slot: str,
preserve_vnet: bool,
**kwargs
):
super(CsmSlotEntity, self).__init__(**kwargs)
self.target_slot = target_slot
self.preserve_vnet = preserve_vnet
class CsmUsageQuota(msrest.serialization.Model):
"""Usage of the quota resource.
:param unit: Units of measurement for the quota resource.
:type unit: str
:param next_reset_time: Next reset time for the resource counter.
:type next_reset_time: ~datetime.datetime
:param current_value: The current value of the resource counter.
:type current_value: long
:param limit: The resource limit.
:type limit: long
:param name: Quota name.
:type name: ~azure.mgmt.web.v2016_08_01.models.LocalizableString
"""
_attribute_map = {
'unit': {'key': 'unit', 'type': 'str'},
'next_reset_time': {'key': 'nextResetTime', 'type': 'iso-8601'},
'current_value': {'key': 'currentValue', 'type': 'long'},
'limit': {'key': 'limit', 'type': 'long'},
'name': {'key': 'name', 'type': 'LocalizableString'},
}
def __init__(
self,
*,
unit: Optional[str] = None,
next_reset_time: Optional[datetime.datetime] = None,
current_value: Optional[int] = None,
limit: Optional[int] = None,
name: Optional["LocalizableString"] = None,
**kwargs
):
super(CsmUsageQuota, self).__init__(**kwargs)
self.unit = unit
self.next_reset_time = next_reset_time
self.current_value = current_value
self.limit = limit
self.name = name
class CsmUsageQuotaCollection(msrest.serialization.Model):
"""Collection of CSM usage quotas.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.CsmUsageQuota]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[CsmUsageQuota]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["CsmUsageQuota"],
**kwargs
):
super(CsmUsageQuotaCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class CustomHostnameAnalysisResult(ProxyOnlyResource):
"""Custom domain analysis.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar is_hostname_already_verified: :code:`<code>true</code>` if hostname is already verified;
otherwise, :code:`<code>false</code>`.
:vartype is_hostname_already_verified: bool
:ivar custom_domain_verification_test: DNS verification test result. Possible values include:
"Passed", "Failed", "Skipped".
:vartype custom_domain_verification_test: str or
~azure.mgmt.web.v2016_08_01.models.DnsVerificationTestResult
:ivar custom_domain_verification_failure_info: Raw failure information if DNS verification
fails.
:vartype custom_domain_verification_failure_info:
~azure.mgmt.web.v2016_08_01.models.ErrorEntity
:ivar has_conflict_on_scale_unit: :code:`<code>true</code>` if there is a conflict on a scale
unit; otherwise, :code:`<code>false</code>`.
:vartype has_conflict_on_scale_unit: bool
:ivar has_conflict_across_subscription: :code:`<code>true</code>` if there is a conflict across
subscriptions; otherwise, :code:`<code>false</code>`.
:vartype has_conflict_across_subscription: bool
:ivar conflicting_app_resource_id: Name of the conflicting app on scale unit if it's within the
same subscription.
:vartype conflicting_app_resource_id: str
:param c_name_records: CName records controller can see for this hostname.
:type c_name_records: list[str]
:param txt_records: TXT records controller can see for this hostname.
:type txt_records: list[str]
:param a_records: A records controller can see for this hostname.
:type a_records: list[str]
:param alternate_c_name_records: Alternate CName records controller can see for this hostname.
:type alternate_c_name_records: list[str]
:param alternate_txt_records: Alternate TXT records controller can see for this hostname.
:type alternate_txt_records: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'is_hostname_already_verified': {'readonly': True},
'custom_domain_verification_test': {'readonly': True},
'custom_domain_verification_failure_info': {'readonly': True},
'has_conflict_on_scale_unit': {'readonly': True},
'has_conflict_across_subscription': {'readonly': True},
'conflicting_app_resource_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'is_hostname_already_verified': {'key': 'properties.isHostnameAlreadyVerified', 'type': 'bool'},
'custom_domain_verification_test': {'key': 'properties.customDomainVerificationTest', 'type': 'str'},
'custom_domain_verification_failure_info': {'key': 'properties.customDomainVerificationFailureInfo', 'type': 'ErrorEntity'},
'has_conflict_on_scale_unit': {'key': 'properties.hasConflictOnScaleUnit', 'type': 'bool'},
'has_conflict_across_subscription': {'key': 'properties.hasConflictAcrossSubscription', 'type': 'bool'},
'conflicting_app_resource_id': {'key': 'properties.conflictingAppResourceId', 'type': 'str'},
'c_name_records': {'key': 'properties.cNameRecords', 'type': '[str]'},
'txt_records': {'key': 'properties.txtRecords', 'type': '[str]'},
'a_records': {'key': 'properties.aRecords', 'type': '[str]'},
'alternate_c_name_records': {'key': 'properties.alternateCNameRecords', 'type': '[str]'},
'alternate_txt_records': {'key': 'properties.alternateTxtRecords', 'type': '[str]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
c_name_records: Optional[List[str]] = None,
txt_records: Optional[List[str]] = None,
a_records: Optional[List[str]] = None,
alternate_c_name_records: Optional[List[str]] = None,
alternate_txt_records: Optional[List[str]] = None,
**kwargs
):
super(CustomHostnameAnalysisResult, self).__init__(kind=kind, **kwargs)
self.is_hostname_already_verified = None
self.custom_domain_verification_test = None
self.custom_domain_verification_failure_info = None
self.has_conflict_on_scale_unit = None
self.has_conflict_across_subscription = None
self.conflicting_app_resource_id = None
self.c_name_records = c_name_records
self.txt_records = txt_records
self.a_records = a_records
self.alternate_c_name_records = alternate_c_name_records
self.alternate_txt_records = alternate_txt_records
class DatabaseBackupSetting(msrest.serialization.Model):
"""Database backup settings.
All required parameters must be populated in order to send to Azure.
:param database_type: Required. Database type (e.g. SqlAzure / MySql). Possible values include:
"SqlAzure", "MySql", "LocalMySql", "PostgreSql".
:type database_type: str or ~azure.mgmt.web.v2016_08_01.models.DatabaseType
:param name: Database name.
:type name: str
:param connection_string_name: Contains a connection string name that is linked to the
SiteConfig.ConnectionStrings.
This is used during restore with overwrite connection strings options.
:type connection_string_name: str
:param connection_string: Contains a connection string to a database which is being backed up
or restored. If the restore should happen to a new database, the database name inside is the
new one.
:type connection_string: str
"""
_validation = {
'database_type': {'required': True},
}
_attribute_map = {
'database_type': {'key': 'databaseType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'connection_string_name': {'key': 'connectionStringName', 'type': 'str'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
}
def __init__(
self,
*,
database_type: Union[str, "DatabaseType"],
name: Optional[str] = None,
connection_string_name: Optional[str] = None,
connection_string: Optional[str] = None,
**kwargs
):
super(DatabaseBackupSetting, self).__init__(**kwargs)
self.database_type = database_type
self.name = name
self.connection_string_name = connection_string_name
self.connection_string = connection_string
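# Illustrative sketch: only database_type is required; the name and connection string name
# shown here are placeholders, and connection_string_name would need to match an entry in
# SiteConfig.ConnectionStrings when restoring with the overwrite-connection-strings option.
#
#   db_setting = DatabaseBackupSetting(
#       database_type="SqlAzure",
#       name="mydb",
#       connection_string_name="DefaultConnection",
#   )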
class Deployment(ProxyOnlyResource):
"""Represents a deployment of an app, including status, author, and timing information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: Identifier for deployment.
:type id_properties_id: str
:param status: Deployment status.
:type status: int
:param message: Details about deployment status.
:type message: str
:param author: Who authored the deployment.
:type author: str
:param deployer: Who performed the deployment.
:type deployer: str
:param author_email: Author email.
:type author_email: str
:param start_time: Start time.
:type start_time: ~datetime.datetime
:param end_time: End time.
:type end_time: ~datetime.datetime
:param active: True if deployment is currently active, false if completed and null if not
started.
:type active: bool
:param details: Details on deployment.
:type details: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'int'},
'message': {'key': 'properties.message', 'type': 'str'},
'author': {'key': 'properties.author', 'type': 'str'},
'deployer': {'key': 'properties.deployer', 'type': 'str'},
'author_email': {'key': 'properties.authorEmail', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
'active': {'key': 'properties.active', 'type': 'bool'},
'details': {'key': 'properties.details', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[str] = None,
status: Optional[int] = None,
message: Optional[str] = None,
author: Optional[str] = None,
deployer: Optional[str] = None,
author_email: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
active: Optional[bool] = None,
details: Optional[str] = None,
**kwargs
):
super(Deployment, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.status = status
self.message = message
self.author = author
self.deployer = deployer
self.author_email = author_email
self.start_time = start_time
self.end_time = end_time
self.active = active
self.details = details
class DeploymentCollection(msrest.serialization.Model):
"""Collection of app deployments.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.Deployment]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Deployment]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["Deployment"],
**kwargs
):
super(DeploymentCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class EnabledConfig(msrest.serialization.Model):
"""Enabled configuration.
:param enabled: True if configuration is enabled, false if it is disabled and null if
configuration is not set.
:type enabled: bool
"""
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(
self,
*,
enabled: Optional[bool] = None,
**kwargs
):
super(EnabledConfig, self).__init__(**kwargs)
self.enabled = enabled
class ErrorEntity(msrest.serialization.Model):
"""Body of the error response returned from the API.
:param extended_code: Type of error.
:type extended_code: str
:param message_template: Message template.
:type message_template: str
:param parameters: Parameters for the template.
:type parameters: list[str]
:param inner_errors: Inner errors.
:type inner_errors: list[~azure.mgmt.web.v2016_08_01.models.ErrorEntity]
:param code: Basic error code.
:type code: str
:param message: Any details of the error.
:type message: str
"""
_attribute_map = {
'extended_code': {'key': 'extendedCode', 'type': 'str'},
'message_template': {'key': 'messageTemplate', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[str]'},
'inner_errors': {'key': 'innerErrors', 'type': '[ErrorEntity]'},
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
*,
extended_code: Optional[str] = None,
message_template: Optional[str] = None,
parameters: Optional[List[str]] = None,
inner_errors: Optional[List["ErrorEntity"]] = None,
code: Optional[str] = None,
message: Optional[str] = None,
**kwargs
):
super(ErrorEntity, self).__init__(**kwargs)
self.extended_code = extended_code
self.message_template = message_template
self.parameters = parameters
self.inner_errors = inner_errors
self.code = code
self.message = message
class Experiments(msrest.serialization.Model):
"""Routing rules in production experiments.
:param ramp_up_rules: List of ramp-up rules.
:type ramp_up_rules: list[~azure.mgmt.web.v2016_08_01.models.RampUpRule]
"""
_attribute_map = {
'ramp_up_rules': {'key': 'rampUpRules', 'type': '[RampUpRule]'},
}
def __init__(
self,
*,
ramp_up_rules: Optional[List["RampUpRule"]] = None,
**kwargs
):
super(Experiments, self).__init__(**kwargs)
self.ramp_up_rules = ramp_up_rules
class FileSystemApplicationLogsConfig(msrest.serialization.Model):
"""Application logs to file system configuration.
:param level: Log level. Possible values include: "Off", "Verbose", "Information", "Warning",
"Error".
:type level: str or ~azure.mgmt.web.v2016_08_01.models.LogLevel
"""
_attribute_map = {
'level': {'key': 'level', 'type': 'str'},
}
def __init__(
self,
*,
level: Optional[Union[str, "LogLevel"]] = None,
**kwargs
):
super(FileSystemApplicationLogsConfig, self).__init__(**kwargs)
self.level = level
class FileSystemHttpLogsConfig(msrest.serialization.Model):
"""Http logs to file system configuration.
:param retention_in_mb: Maximum size in megabytes that http log files can use.
When reached, old log files will be removed to make space for new ones.
Value can range between 25 and 100.
:type retention_in_mb: int
:param retention_in_days: Retention in days.
Remove files older than X days.
0 or lower means no retention.
:type retention_in_days: int
:param enabled: True if configuration is enabled, false if it is disabled and null if
configuration is not set.
:type enabled: bool
"""
_validation = {
'retention_in_mb': {'maximum': 100, 'minimum': 25},
}
_attribute_map = {
'retention_in_mb': {'key': 'retentionInMb', 'type': 'int'},
'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(
self,
*,
retention_in_mb: Optional[int] = None,
retention_in_days: Optional[int] = None,
enabled: Optional[bool] = None,
**kwargs
):
super(FileSystemHttpLogsConfig, self).__init__(**kwargs)
self.retention_in_mb = retention_in_mb
self.retention_in_days = retention_in_days
self.enabled = enabled
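# Illustrative sketch: retention_in_mb is validated to the 25-100 range, so a value such
# as 35 MB with a 7-day retention is accepted; the numbers are placeholders.
#
#   fs_http_logs = FileSystemHttpLogsConfig(retention_in_mb=35, retention_in_days=7, enabled=True)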
class FunctionEnvelope(ProxyOnlyResource):
"""Web Job Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar name_properties_name: Function name.
:vartype name_properties_name: str
:ivar function_app_id: Function App ID.
:vartype function_app_id: str
:param script_root_path_href: Script root path URI.
:type script_root_path_href: str
:param script_href: Script URI.
:type script_href: str
:param config_href: Config URI.
:type config_href: str
:param secrets_file_href: Secrets file URI.
:type secrets_file_href: str
:param href: Function URI.
:type href: str
:param config: Config information.
:type config: any
:param files: File list.
:type files: dict[str, str]
:param test_data: Test data used when testing via the Azure Portal.
:type test_data: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
'function_app_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'function_app_id': {'key': 'properties.functionAppId', 'type': 'str'},
'script_root_path_href': {'key': 'properties.scriptRootPathHref', 'type': 'str'},
'script_href': {'key': 'properties.scriptHref', 'type': 'str'},
'config_href': {'key': 'properties.configHref', 'type': 'str'},
'secrets_file_href': {'key': 'properties.secretsFileHref', 'type': 'str'},
'href': {'key': 'properties.href', 'type': 'str'},
'config': {'key': 'properties.config', 'type': 'object'},
'files': {'key': 'properties.files', 'type': '{str}'},
'test_data': {'key': 'properties.testData', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
script_root_path_href: Optional[str] = None,
script_href: Optional[str] = None,
config_href: Optional[str] = None,
secrets_file_href: Optional[str] = None,
href: Optional[str] = None,
config: Optional[Any] = None,
files: Optional[Dict[str, str]] = None,
test_data: Optional[str] = None,
**kwargs
):
super(FunctionEnvelope, self).__init__(kind=kind, **kwargs)
self.name_properties_name = None
self.function_app_id = None
self.script_root_path_href = script_root_path_href
self.script_href = script_href
self.config_href = config_href
self.secrets_file_href = secrets_file_href
self.href = href
self.config = config
self.files = files
self.test_data = test_data
class FunctionEnvelopeCollection(msrest.serialization.Model):
"""Collection of Kudu function information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.FunctionEnvelope]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[FunctionEnvelope]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["FunctionEnvelope"],
**kwargs
):
super(FunctionEnvelopeCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class FunctionSecrets(ProxyOnlyResource):
"""Function secrets.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param key: Secret key.
:type key: str
:param trigger_url: Trigger URL.
:type trigger_url: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'key': {'key': 'properties.key', 'type': 'str'},
'trigger_url': {'key': 'properties.triggerUrl', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
key: Optional[str] = None,
trigger_url: Optional[str] = None,
**kwargs
):
super(FunctionSecrets, self).__init__(kind=kind, **kwargs)
self.key = key
self.trigger_url = trigger_url
class HandlerMapping(msrest.serialization.Model):
"""The IIS handler mapping used to define which handler processes HTTP requests with a certain extension.
For example, it can be used to configure the php-cgi.exe process to handle all HTTP requests with the *.php extension.
:param extension: Requests with this extension will be handled using the specified FastCGI
application.
:type extension: str
:param script_processor: The absolute path to the FastCGI application.
:type script_processor: str
:param arguments: Command-line arguments to be passed to the script processor.
:type arguments: str
"""
_attribute_map = {
'extension': {'key': 'extension', 'type': 'str'},
'script_processor': {'key': 'scriptProcessor', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': 'str'},
}
def __init__(
self,
*,
extension: Optional[str] = None,
script_processor: Optional[str] = None,
arguments: Optional[str] = None,
**kwargs
):
super(HandlerMapping, self).__init__(**kwargs)
self.extension = extension
self.script_processor = script_processor
self.arguments = arguments
class HostingEnvironmentProfile(msrest.serialization.Model):
"""Specification for an App Service Environment to use for this resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID of the App Service Environment.
:type id: str
:ivar name: Name of the App Service Environment.
:vartype name: str
:ivar type: Resource type of the App Service Environment.
:vartype type: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
super(HostingEnvironmentProfile, self).__init__(**kwargs)
self.id = id
self.name = None
self.type = None
class HostNameBinding(ProxyOnlyResource):
"""A hostname binding object.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param site_name: App Service app name.
:type site_name: str
:param domain_id: Fully qualified ARM domain resource URI.
:type domain_id: str
:param azure_resource_name: Azure resource name.
:type azure_resource_name: str
:param azure_resource_type: Azure resource type. Possible values include: "Website",
"TrafficManager".
:type azure_resource_type: str or ~azure.mgmt.web.v2016_08_01.models.AzureResourceType
:param custom_host_name_dns_record_type: Custom DNS record type. Possible values include:
"CName", "A".
:type custom_host_name_dns_record_type: str or
~azure.mgmt.web.v2016_08_01.models.CustomHostNameDnsRecordType
:param host_name_type: Hostname type. Possible values include: "Verified", "Managed".
:type host_name_type: str or ~azure.mgmt.web.v2016_08_01.models.HostNameType
:param ssl_state: SSL type. Possible values include: "Disabled", "SniEnabled",
"IpBasedEnabled".
:type ssl_state: str or ~azure.mgmt.web.v2016_08_01.models.SslState
:param thumbprint: SSL certificate thumbprint.
:type thumbprint: str
:ivar virtual_ip: Virtual IP address assigned to the hostname if IP based SSL is enabled.
:vartype virtual_ip: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_ip': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'site_name': {'key': 'properties.siteName', 'type': 'str'},
'domain_id': {'key': 'properties.domainId', 'type': 'str'},
'azure_resource_name': {'key': 'properties.azureResourceName', 'type': 'str'},
'azure_resource_type': {'key': 'properties.azureResourceType', 'type': 'str'},
'custom_host_name_dns_record_type': {'key': 'properties.customHostNameDnsRecordType', 'type': 'str'},
'host_name_type': {'key': 'properties.hostNameType', 'type': 'str'},
'ssl_state': {'key': 'properties.sslState', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
'virtual_ip': {'key': 'properties.virtualIP', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
site_name: Optional[str] = None,
domain_id: Optional[str] = None,
azure_resource_name: Optional[str] = None,
azure_resource_type: Optional[Union[str, "AzureResourceType"]] = None,
custom_host_name_dns_record_type: Optional[Union[str, "CustomHostNameDnsRecordType"]] = None,
host_name_type: Optional[Union[str, "HostNameType"]] = None,
ssl_state: Optional[Union[str, "SslState"]] = None,
thumbprint: Optional[str] = None,
**kwargs
):
super(HostNameBinding, self).__init__(kind=kind, **kwargs)
self.site_name = site_name
self.domain_id = domain_id
self.azure_resource_name = azure_resource_name
self.azure_resource_type = azure_resource_type
self.custom_host_name_dns_record_type = custom_host_name_dns_record_type
self.host_name_type = host_name_type
self.ssl_state = ssl_state
self.thumbprint = thumbprint
self.virtual_ip = None
class HostNameBindingCollection(msrest.serialization.Model):
"""Collection of hostname bindings.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.HostNameBinding]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[HostNameBinding]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["HostNameBinding"],
**kwargs
):
super(HostNameBindingCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class HostNameSslState(msrest.serialization.Model):
"""SSL-enabled hostname.
:param name: Hostname.
:type name: str
:param ssl_state: SSL type. Possible values include: "Disabled", "SniEnabled",
"IpBasedEnabled".
:type ssl_state: str or ~azure.mgmt.web.v2016_08_01.models.SslState
:param virtual_ip: Virtual IP address assigned to the hostname if IP based SSL is enabled.
:type virtual_ip: str
:param thumbprint: SSL certificate thumbprint.
:type thumbprint: str
:param to_update: Set to :code:`<code>true</code>` to update existing hostname.
:type to_update: bool
:param host_type: Indicates whether the hostname is a standard or repository hostname. Possible
values include: "Standard", "Repository".
:type host_type: str or ~azure.mgmt.web.v2016_08_01.models.HostType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'ssl_state': {'key': 'sslState', 'type': 'str'},
'virtual_ip': {'key': 'virtualIP', 'type': 'str'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
'to_update': {'key': 'toUpdate', 'type': 'bool'},
'host_type': {'key': 'hostType', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
ssl_state: Optional[Union[str, "SslState"]] = None,
virtual_ip: Optional[str] = None,
thumbprint: Optional[str] = None,
to_update: Optional[bool] = None,
host_type: Optional[Union[str, "HostType"]] = None,
**kwargs
):
super(HostNameSslState, self).__init__(**kwargs)
self.name = name
self.ssl_state = ssl_state
self.virtual_ip = virtual_ip
self.thumbprint = thumbprint
self.to_update = to_update
self.host_type = host_type
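# Illustrative sketch: enabling SNI-based SSL for a custom hostname; the hostname and
# thumbprint values are placeholders.
#
#   ssl_binding = HostNameSslState(
#       name="www.contoso.com",
#       ssl_state="SniEnabled",
#       thumbprint="0123456789ABCDEF0123456789ABCDEF01234567",
#       to_update=True,
#   )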
class HttpLogsConfig(msrest.serialization.Model):
"""Http logs configuration.
:param file_system: Http logs to file system configuration.
:type file_system: ~azure.mgmt.web.v2016_08_01.models.FileSystemHttpLogsConfig
:param azure_blob_storage: Http logs to azure blob storage configuration.
:type azure_blob_storage: ~azure.mgmt.web.v2016_08_01.models.AzureBlobStorageHttpLogsConfig
"""
_attribute_map = {
'file_system': {'key': 'fileSystem', 'type': 'FileSystemHttpLogsConfig'},
'azure_blob_storage': {'key': 'azureBlobStorage', 'type': 'AzureBlobStorageHttpLogsConfig'},
}
def __init__(
self,
*,
file_system: Optional["FileSystemHttpLogsConfig"] = None,
azure_blob_storage: Optional["AzureBlobStorageHttpLogsConfig"] = None,
**kwargs
):
super(HttpLogsConfig, self).__init__(**kwargs)
self.file_system = file_system
self.azure_blob_storage = azure_blob_storage
class HybridConnection(ProxyOnlyResource):
"""Hybrid Connection contract. This is used to configure a Hybrid Connection.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param service_bus_namespace: The name of the Service Bus namespace.
:type service_bus_namespace: str
:param relay_name: The name of the Service Bus relay.
:type relay_name: str
:param relay_arm_uri: The ARM URI to the Service Bus relay.
:type relay_arm_uri: str
:param hostname: The hostname of the endpoint.
:type hostname: str
:param port: The port of the endpoint.
:type port: int
:param send_key_name: The name of the Service Bus key which has Send permissions. This is used
to authenticate to Service Bus.
:type send_key_name: str
:param send_key_value: The value of the Service Bus key. This is used to authenticate to
Service Bus. In ARM this key will not normally be returned; use the POST /listKeys API instead.
:type send_key_value: str
:param service_bus_suffix: The suffix for the service bus endpoint. By default this is
.servicebus.windows.net.
:type service_bus_suffix: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'},
'relay_name': {'key': 'properties.relayName', 'type': 'str'},
'relay_arm_uri': {'key': 'properties.relayArmUri', 'type': 'str'},
'hostname': {'key': 'properties.hostname', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'send_key_name': {'key': 'properties.sendKeyName', 'type': 'str'},
'send_key_value': {'key': 'properties.sendKeyValue', 'type': 'str'},
'service_bus_suffix': {'key': 'properties.serviceBusSuffix', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
service_bus_namespace: Optional[str] = None,
relay_name: Optional[str] = None,
relay_arm_uri: Optional[str] = None,
hostname: Optional[str] = None,
port: Optional[int] = None,
send_key_name: Optional[str] = None,
send_key_value: Optional[str] = None,
service_bus_suffix: Optional[str] = None,
**kwargs
):
super(HybridConnection, self).__init__(kind=kind, **kwargs)
self.service_bus_namespace = service_bus_namespace
self.relay_name = relay_name
self.relay_arm_uri = relay_arm_uri
self.hostname = hostname
self.port = port
self.send_key_name = send_key_name
self.send_key_value = send_key_value
self.service_bus_suffix = service_bus_suffix
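# Illustrative sketch: describing a Hybrid Connection endpoint; the namespace, relay, and
# endpoint values are placeholders. send_key_value is normally retrieved via the
# POST /listKeys API rather than returned by ARM.
#
#   hybrid = HybridConnection(
#       service_bus_namespace="my-relay-namespace",
#       relay_name="my-relay",
#       hostname="internal-db.contoso.local",
#       port=1433,
#   )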
class HybridConnectionKey(ProxyOnlyResource):
"""Hybrid Connection key contract. This has the send key name and value for a Hybrid Connection.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar send_key_name: The name of the send key.
:vartype send_key_name: str
:ivar send_key_value: The value of the send key.
:vartype send_key_value: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'send_key_name': {'readonly': True},
'send_key_value': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'send_key_name': {'key': 'properties.sendKeyName', 'type': 'str'},
'send_key_value': {'key': 'properties.sendKeyValue', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(HybridConnectionKey, self).__init__(kind=kind, **kwargs)
self.send_key_name = None
self.send_key_value = None
class Identifier(ProxyOnlyResource):
"""A domain specific resource identifier.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: String representation of the identity.
:type id_properties_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[str] = None,
**kwargs
):
super(Identifier, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
class IdentifierCollection(msrest.serialization.Model):
"""Collection of identifiers.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.Identifier]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Identifier]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["Identifier"],
**kwargs
):
super(IdentifierCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class IpSecurityRestriction(msrest.serialization.Model):
"""IP security restriction on an app.
All required parameters must be populated in order to send to Azure.
:param ip_address: Required. IP address the security restriction is valid for.
:type ip_address: str
:param subnet_mask: Subnet mask for the range of IP addresses the restriction is valid for.
:type subnet_mask: str
"""
_validation = {
'ip_address': {'required': True},
}
_attribute_map = {
'ip_address': {'key': 'ipAddress', 'type': 'str'},
'subnet_mask': {'key': 'subnetMask', 'type': 'str'},
}
def __init__(
self,
*,
ip_address: str,
subnet_mask: Optional[str] = None,
**kwargs
):
super(IpSecurityRestriction, self).__init__(**kwargs)
self.ip_address = ip_address
self.subnet_mask = subnet_mask
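# Illustrative sketch: ip_address is required; adding a subnet mask restricts a whole
# range rather than a single address. The addresses are placeholders.
#
#   restriction = IpSecurityRestriction(ip_address="192.168.1.0", subnet_mask="255.255.255.0")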
class LocalizableString(msrest.serialization.Model):
"""Localizable string object containing the name and a localized value.
:param value: Non-localized name.
:type value: str
:param localized_value: Localized name.
:type localized_value: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[str] = None,
localized_value: Optional[str] = None,
**kwargs
):
super(LocalizableString, self).__init__(**kwargs)
self.value = value
self.localized_value = localized_value
class ManagedServiceIdentity(msrest.serialization.Model):
"""Managed service identity.
Variables are only populated by the server, and will be ignored when sending a request.
:param type: Type of managed service identity. Possible values include: "SystemAssigned".
:type type: str or ~azure.mgmt.web.v2016_08_01.models.ManagedServiceIdentityType
:ivar tenant_id: Tenant of managed service identity.
:vartype tenant_id: str
:ivar principal_id: Principal Id of managed service identity.
:vartype principal_id: str
"""
_validation = {
'tenant_id': {'readonly': True},
'principal_id': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "ManagedServiceIdentityType"]] = None,
**kwargs
):
super(ManagedServiceIdentity, self).__init__(**kwargs)
self.type = type
self.tenant_id = None
self.principal_id = None
class MigrateMySqlRequest(ProxyOnlyResource):
"""MySQL migration request.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param connection_string: Connection string to the remote MySQL database.
:type connection_string: str
:param migration_type: The type of migration operation to be done. Possible values include:
"LocalToRemote", "RemoteToLocal".
:type migration_type: str or ~azure.mgmt.web.v2016_08_01.models.MySqlMigrationType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'connection_string': {'key': 'properties.connectionString', 'type': 'str'},
'migration_type': {'key': 'properties.migrationType', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
connection_string: Optional[str] = None,
migration_type: Optional[Union[str, "MySqlMigrationType"]] = None,
**kwargs
):
super(MigrateMySqlRequest, self).__init__(kind=kind, **kwargs)
self.connection_string = connection_string
self.migration_type = migration_type
class MigrateMySqlStatus(ProxyOnlyResource):
"""MySQL migration status.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar migration_operation_status: Status of the migration task. Possible values include:
"InProgress", "Failed", "Succeeded", "TimedOut", "Created".
:vartype migration_operation_status: str or ~azure.mgmt.web.v2016_08_01.models.OperationStatus
:ivar operation_id: Operation ID for the migration task.
:vartype operation_id: str
:ivar local_my_sql_enabled: True if the web app has in-app MySql enabled.
:vartype local_my_sql_enabled: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'migration_operation_status': {'readonly': True},
'operation_id': {'readonly': True},
'local_my_sql_enabled': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'migration_operation_status': {'key': 'properties.migrationOperationStatus', 'type': 'str'},
'operation_id': {'key': 'properties.operationId', 'type': 'str'},
'local_my_sql_enabled': {'key': 'properties.localMySqlEnabled', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(MigrateMySqlStatus, self).__init__(kind=kind, **kwargs)
self.migration_operation_status = None
self.operation_id = None
self.local_my_sql_enabled = None
class MSDeploy(ProxyOnlyResource):
"""MSDeploy ARM PUT information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param package_uri: Package URI.
:type package_uri: str
:param connection_string: SQL Connection String.
:type connection_string: str
:param db_type: Database Type.
:type db_type: str
:param set_parameters_xml_file_uri: URI of MSDeploy Parameters file. Must not be set if
SetParameters is used.
:type set_parameters_xml_file_uri: str
:param set_parameters: MSDeploy Parameters. Must not be set if SetParametersXmlFileUri is used.
:type set_parameters: dict[str, str]
:param skip_app_data: Controls whether the MSDeploy operation skips the App_Data directory.
If set to :code:`<code>true</code>`, the existing App_Data directory on the destination
will not be deleted, and any App_Data directory in the source will be ignored.
Setting is :code:`<code>false</code>` by default.
:type skip_app_data: bool
:param app_offline: Sets the AppOffline rule while the MSDeploy operation executes.
Setting is :code:`<code>false</code>` by default.
:type app_offline: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'package_uri': {'key': 'properties.packageUri', 'type': 'str'},
'connection_string': {'key': 'properties.connectionString', 'type': 'str'},
'db_type': {'key': 'properties.dbType', 'type': 'str'},
'set_parameters_xml_file_uri': {'key': 'properties.setParametersXmlFileUri', 'type': 'str'},
'set_parameters': {'key': 'properties.setParameters', 'type': '{str}'},
'skip_app_data': {'key': 'properties.skipAppData', 'type': 'bool'},
'app_offline': {'key': 'properties.appOffline', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
package_uri: Optional[str] = None,
connection_string: Optional[str] = None,
db_type: Optional[str] = None,
set_parameters_xml_file_uri: Optional[str] = None,
set_parameters: Optional[Dict[str, str]] = None,
skip_app_data: Optional[bool] = None,
app_offline: Optional[bool] = None,
**kwargs
):
super(MSDeploy, self).__init__(kind=kind, **kwargs)
self.package_uri = package_uri
self.connection_string = connection_string
self.db_type = db_type
self.set_parameters_xml_file_uri = set_parameters_xml_file_uri
self.set_parameters = set_parameters
self.skip_app_data = skip_app_data
self.app_offline = app_offline
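# Illustrative sketch: a minimal MSDeploy PUT body pointing at a deployment package; the
# package URI is a placeholder. SetParameters and SetParametersXmlFileUri must not both
# be set.
#
#   msdeploy = MSDeploy(
#       package_uri="https://example.blob.core.windows.net/packages/site.zip",
#       app_offline=True,
#   )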
class MSDeployLog(ProxyOnlyResource):
"""MSDeploy log.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar entries: List of log entry messages.
:vartype entries: list[~azure.mgmt.web.v2016_08_01.models.MSDeployLogEntry]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'entries': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'entries': {'key': 'properties.entries', 'type': '[MSDeployLogEntry]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(MSDeployLog, self).__init__(kind=kind, **kwargs)
self.entries = None
class MSDeployLogEntry(msrest.serialization.Model):
"""MSDeploy log entry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar time: Timestamp of log entry.
:vartype time: ~datetime.datetime
:ivar type: Log entry type. Possible values include: "Message", "Warning", "Error".
:vartype type: str or ~azure.mgmt.web.v2016_08_01.models.MSDeployLogEntryType
:ivar message: Log entry message.
:vartype message: str
"""
_validation = {
'time': {'readonly': True},
'type': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'time': {'key': 'time', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MSDeployLogEntry, self).__init__(**kwargs)
self.time = None
self.type = None
self.message = None
class MSDeployStatus(ProxyOnlyResource):
"""MSDeploy ARM response.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar deployer: Username of deployer.
:vartype deployer: str
:ivar provisioning_state: Provisioning state. Possible values include: "accepted", "running",
"succeeded", "failed", "canceled".
:vartype provisioning_state: str or
~azure.mgmt.web.v2016_08_01.models.MSDeployProvisioningState
:ivar start_time: Start time of deploy operation.
:vartype start_time: ~datetime.datetime
:ivar end_time: End time of deploy operation.
:vartype end_time: ~datetime.datetime
:ivar complete: Whether the deployment operation has completed.
:vartype complete: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'deployer': {'readonly': True},
'provisioning_state': {'readonly': True},
'start_time': {'readonly': True},
'end_time': {'readonly': True},
'complete': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'deployer': {'key': 'properties.deployer', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
'complete': {'key': 'properties.complete', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(MSDeployStatus, self).__init__(kind=kind, **kwargs)
self.deployer = None
self.provisioning_state = None
self.start_time = None
self.end_time = None
self.complete = None
class NameValuePair(msrest.serialization.Model):
"""Name value pair.
:param name: Pair name.
:type name: str
:param value: Pair value.
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
super(NameValuePair, self).__init__(**kwargs)
self.name = name
self.value = value
class NetworkFeatures(ProxyOnlyResource):
"""Full view of network features for an app (presently VNET integration and Hybrid Connections).
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar virtual_network_name: The Virtual Network name.
:vartype virtual_network_name: str
:ivar virtual_network_connection: The Virtual Network summary view.
:vartype virtual_network_connection: ~azure.mgmt.web.v2016_08_01.models.VnetInfo
:ivar hybrid_connections: The Hybrid Connections summary view.
:vartype hybrid_connections:
list[~azure.mgmt.web.v2016_08_01.models.RelayServiceConnectionEntity]
:ivar hybrid_connections_v2: The Hybrid Connection V2 (Service Bus) view.
:vartype hybrid_connections_v2: list[~azure.mgmt.web.v2016_08_01.models.HybridConnection]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_network_name': {'readonly': True},
'virtual_network_connection': {'readonly': True},
'hybrid_connections': {'readonly': True},
'hybrid_connections_v2': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'virtual_network_name': {'key': 'properties.virtualNetworkName', 'type': 'str'},
'virtual_network_connection': {'key': 'properties.virtualNetworkConnection', 'type': 'VnetInfo'},
'hybrid_connections': {'key': 'properties.hybridConnections', 'type': '[RelayServiceConnectionEntity]'},
'hybrid_connections_v2': {'key': 'properties.hybridConnectionsV2', 'type': '[HybridConnection]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(NetworkFeatures, self).__init__(kind=kind, **kwargs)
self.virtual_network_name = None
self.virtual_network_connection = None
self.hybrid_connections = None
self.hybrid_connections_v2 = None
class Operation(msrest.serialization.Model):
"""An operation on a resource.
:param id: Operation ID.
:type id: str
:param name: Operation name.
:type name: str
:param status: The current status of the operation. Possible values include: "InProgress",
"Failed", "Succeeded", "TimedOut", "Created".
:type status: str or ~azure.mgmt.web.v2016_08_01.models.OperationStatus
:param errors: Any errors associated with the operation.
:type errors: list[~azure.mgmt.web.v2016_08_01.models.ErrorEntity]
:param created_time: Time when the operation started.
:type created_time: ~datetime.datetime
:param modified_time: Time when the operation was last updated.
:type modified_time: ~datetime.datetime
:param expiration_time: Time when the operation will expire.
:type expiration_time: ~datetime.datetime
:param geo_master_operation_id: Applicable only for stamp operation ids.
:type geo_master_operation_id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'errors': {'key': 'errors', 'type': '[ErrorEntity]'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
'expiration_time': {'key': 'expirationTime', 'type': 'iso-8601'},
'geo_master_operation_id': {'key': 'geoMasterOperationId', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
status: Optional[Union[str, "OperationStatus"]] = None,
errors: Optional[List["ErrorEntity"]] = None,
created_time: Optional[datetime.datetime] = None,
modified_time: Optional[datetime.datetime] = None,
expiration_time: Optional[datetime.datetime] = None,
geo_master_operation_id: Optional[str] = None,
**kwargs
):
super(Operation, self).__init__(**kwargs)
self.id = id
self.name = name
self.status = status
self.errors = errors
self.created_time = created_time
self.modified_time = modified_time
self.expiration_time = expiration_time
self.geo_master_operation_id = geo_master_operation_id
class PerfMonCounterCollection(msrest.serialization.Model):
"""Collection of performance monitor counters.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.PerfMonResponse]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PerfMonResponse]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["PerfMonResponse"],
**kwargs
):
super(PerfMonCounterCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class PerfMonResponse(msrest.serialization.Model):
"""Performance monitor API response.
:param code: The response code.
:type code: str
:param message: The message.
:type message: str
:param data: The performance monitor counters.
:type data: ~azure.mgmt.web.v2016_08_01.models.PerfMonSet
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'data': {'key': 'data', 'type': 'PerfMonSet'},
}
def __init__(
self,
*,
code: Optional[str] = None,
message: Optional[str] = None,
data: Optional["PerfMonSet"] = None,
**kwargs
):
super(PerfMonResponse, self).__init__(**kwargs)
self.code = code
self.message = message
self.data = data
class PerfMonSample(msrest.serialization.Model):
"""Performance monitor sample in a set.
:param time: Point in time for which counter was measured.
:type time: ~datetime.datetime
:param instance_name: Name of the server on which the measurement is made.
:type instance_name: str
:param value: Value of counter at a certain time.
:type value: float
:param core_count: Core count of the worker. Not a data member.
:type core_count: int
"""
_attribute_map = {
'time': {'key': 'time', 'type': 'iso-8601'},
'instance_name': {'key': 'instanceName', 'type': 'str'},
'value': {'key': 'value', 'type': 'float'},
'core_count': {'key': 'coreCount', 'type': 'int'},
}
def __init__(
self,
*,
time: Optional[datetime.datetime] = None,
instance_name: Optional[str] = None,
value: Optional[float] = None,
core_count: Optional[int] = None,
**kwargs
):
super(PerfMonSample, self).__init__(**kwargs)
self.time = time
self.instance_name = instance_name
self.value = value
self.core_count = core_count
class PerfMonSet(msrest.serialization.Model):
"""Metric information.
:param name: Unique key name of the counter.
:type name: str
:param start_time: Start time of the period.
:type start_time: ~datetime.datetime
:param end_time: End time of the period.
:type end_time: ~datetime.datetime
:param time_grain: Presented time grain.
:type time_grain: str
:param values: Collection of workers that are active during this time.
:type values: list[~azure.mgmt.web.v2016_08_01.models.PerfMonSample]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'time_grain': {'key': 'timeGrain', 'type': 'str'},
'values': {'key': 'values', 'type': '[PerfMonSample]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
time_grain: Optional[str] = None,
values: Optional[List["PerfMonSample"]] = None,
**kwargs
):
super(PerfMonSet, self).__init__(**kwargs)
self.name = name
self.start_time = start_time
self.end_time = end_time
self.time_grain = time_grain
self.values = values
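# Illustrative sketch (not part of the generated models): walking a PerfMonResponse
# returned by the service down to its individual samples. PerfMonResponse.data is a
# PerfMonSet whose `values` list holds PerfMonSample entries; `response` is assumed
# to be an already-populated instance, nothing is fetched here.
def _example_read_perf_mon_response(response):
    rows = []
    if response.data is not None and response.data.values is not None:
        for sample in response.data.values:
            # Each PerfMonSample carries the measurement time, the instance it was
            # taken on, and the counter value at that point in time.
            rows.append((sample.time, sample.instance_name, sample.value))
    return rows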
class Resource(msrest.serialization.Model):
"""Azure resource. This resource is tracked in Azure Resource Manager.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Required. Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
location: str,
kind: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.kind = kind
self.location = location
self.type = None
self.tags = tags
class PremierAddOn(Resource):
"""Premier add-on.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Required. Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: Premier add on SKU.
:type sku: str
:param product: Premier add on Product.
:type product: str
:param vendor: Premier add on Vendor.
:type vendor: str
:param premier_add_on_name: Premier add on Name.
:type premier_add_on_name: str
:param location_properties_location: Premier add on Location.
:type location_properties_location: str
:param tags_properties_tags: Premier add on Tags.
:type tags_properties_tags: dict[str, str]
:param marketplace_publisher: Premier add on Marketplace publisher.
:type marketplace_publisher: str
:param marketplace_offer: Premier add on Marketplace offer.
:type marketplace_offer: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'properties.sku', 'type': 'str'},
'product': {'key': 'properties.product', 'type': 'str'},
'vendor': {'key': 'properties.vendor', 'type': 'str'},
'premier_add_on_name': {'key': 'properties.name', 'type': 'str'},
'location_properties_location': {'key': 'properties.location', 'type': 'str'},
'tags_properties_tags': {'key': 'properties.tags', 'type': '{str}'},
'marketplace_publisher': {'key': 'properties.marketplacePublisher', 'type': 'str'},
'marketplace_offer': {'key': 'properties.marketplaceOffer', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
kind: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
sku: Optional[str] = None,
product: Optional[str] = None,
vendor: Optional[str] = None,
premier_add_on_name: Optional[str] = None,
location_properties_location: Optional[str] = None,
tags_properties_tags: Optional[Dict[str, str]] = None,
marketplace_publisher: Optional[str] = None,
marketplace_offer: Optional[str] = None,
**kwargs
):
super(PremierAddOn, self).__init__(kind=kind, location=location, tags=tags, **kwargs)
self.sku = sku
self.product = product
self.vendor = vendor
self.premier_add_on_name = premier_add_on_name
self.location_properties_location = location_properties_location
self.tags_properties_tags = tags_properties_tags
self.marketplace_publisher = marketplace_publisher
self.marketplace_offer = marketplace_offer
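# Illustrative sketch (not part of the generated models): populating a PremierAddOn
# request body. `location` is the only required argument; the SKU, product, and vendor
# values below are placeholders and are not validated against the marketplace here.
def _example_premier_add_on():
    return PremierAddOn(
        location="West US",
        sku="free",
        product="SampleProduct",
        vendor="SampleVendor",
        premier_add_on_name="my-add-on",
    )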
class ProcessInfo(ProxyOnlyResource):
"""Process Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: ARM Identifier for deployment.
:type id_properties_id: int
:param name_properties_name: Deployment name.
:type name_properties_name: str
:param href: HRef URI.
:type href: str
:param mini_dump: Minidump URI.
:type mini_dump: str
:param is_profile_running: Is profile running?.
:type is_profile_running: bool
:param is_iis_profile_running: Is the IIS Profile running?.
:type is_iis_profile_running: bool
:param iis_profile_timeout_in_seconds: IIS Profile timeout (seconds).
:type iis_profile_timeout_in_seconds: float
:param parent: Parent process.
:type parent: str
:param children: Child process list.
:type children: list[str]
:param threads: Thread list.
:type threads: list[~azure.mgmt.web.v2016_08_01.models.ProcessThreadInfo]
:param open_file_handles: List of open files.
:type open_file_handles: list[str]
:param modules: List of modules.
:type modules: list[~azure.mgmt.web.v2016_08_01.models.ProcessModuleInfo]
:param file_name: File name of this process.
:type file_name: str
:param command_line: Command line.
:type command_line: str
:param user_name: User name.
:type user_name: str
:param handle_count: Handle count.
:type handle_count: int
:param module_count: Module count.
:type module_count: int
:param thread_count: Thread count.
:type thread_count: int
:param start_time: Start time.
:type start_time: ~datetime.datetime
:param total_processor_time: Total CPU time.
:type total_processor_time: str
:param user_processor_time: User CPU time.
:type user_processor_time: str
:param privileged_processor_time: Privileged CPU time.
:type privileged_processor_time: str
:param working_set64: Working set.
:type working_set64: long
:param peak_working_set64: Peak working set.
:type peak_working_set64: long
:param private_memory_size64: Private memory size.
:type private_memory_size64: long
:param virtual_memory_size64: Virtual memory size.
:type virtual_memory_size64: long
:param peak_virtual_memory_size64: Peak virtual memory usage.
:type peak_virtual_memory_size64: long
:param paged_system_memory_size64: Paged system memory.
:type paged_system_memory_size64: long
:param nonpaged_system_memory_size64: Non-paged system memory.
:type nonpaged_system_memory_size64: long
:param paged_memory_size64: Paged memory.
:type paged_memory_size64: long
:param peak_paged_memory_size64: Peak paged memory.
:type peak_paged_memory_size64: long
:param time_stamp: Time stamp.
:type time_stamp: ~datetime.datetime
:param environment_variables: List of environment variables.
:type environment_variables: dict[str, str]
:param is_scm_site: Is this the SCM site?.
:type is_scm_site: bool
:param is_web_job: Is this a Web Job?.
:type is_web_job: bool
:param description: Description of process.
:type description: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'int'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'href': {'key': 'properties.href', 'type': 'str'},
'mini_dump': {'key': 'properties.miniDump', 'type': 'str'},
'is_profile_running': {'key': 'properties.isProfileRunning', 'type': 'bool'},
'is_iis_profile_running': {'key': 'properties.isIisProfileRunning', 'type': 'bool'},
'iis_profile_timeout_in_seconds': {'key': 'properties.iisProfileTimeoutInSeconds', 'type': 'float'},
'parent': {'key': 'properties.parent', 'type': 'str'},
'children': {'key': 'properties.children', 'type': '[str]'},
'threads': {'key': 'properties.threads', 'type': '[ProcessThreadInfo]'},
'open_file_handles': {'key': 'properties.openFileHandles', 'type': '[str]'},
'modules': {'key': 'properties.modules', 'type': '[ProcessModuleInfo]'},
'file_name': {'key': 'properties.fileName', 'type': 'str'},
'command_line': {'key': 'properties.commandLine', 'type': 'str'},
'user_name': {'key': 'properties.userName', 'type': 'str'},
'handle_count': {'key': 'properties.handleCount', 'type': 'int'},
'module_count': {'key': 'properties.moduleCount', 'type': 'int'},
'thread_count': {'key': 'properties.threadCount', 'type': 'int'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'total_processor_time': {'key': 'properties.totalProcessorTime', 'type': 'str'},
'user_processor_time': {'key': 'properties.userProcessorTime', 'type': 'str'},
'privileged_processor_time': {'key': 'properties.privilegedProcessorTime', 'type': 'str'},
'working_set64': {'key': 'properties.workingSet64', 'type': 'long'},
'peak_working_set64': {'key': 'properties.peakWorkingSet64', 'type': 'long'},
'private_memory_size64': {'key': 'properties.privateMemorySize64', 'type': 'long'},
'virtual_memory_size64': {'key': 'properties.virtualMemorySize64', 'type': 'long'},
'peak_virtual_memory_size64': {'key': 'properties.peakVirtualMemorySize64', 'type': 'long'},
'paged_system_memory_size64': {'key': 'properties.pagedSystemMemorySize64', 'type': 'long'},
'nonpaged_system_memory_size64': {'key': 'properties.nonpagedSystemMemorySize64', 'type': 'long'},
'paged_memory_size64': {'key': 'properties.pagedMemorySize64', 'type': 'long'},
'peak_paged_memory_size64': {'key': 'properties.peakPagedMemorySize64', 'type': 'long'},
'time_stamp': {'key': 'properties.timeStamp', 'type': 'iso-8601'},
'environment_variables': {'key': 'properties.environmentVariables', 'type': '{str}'},
'is_scm_site': {'key': 'properties.isScmSite', 'type': 'bool'},
'is_web_job': {'key': 'properties.isWebJob', 'type': 'bool'},
'description': {'key': 'properties.description', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[int] = None,
name_properties_name: Optional[str] = None,
href: Optional[str] = None,
mini_dump: Optional[str] = None,
is_profile_running: Optional[bool] = None,
is_iis_profile_running: Optional[bool] = None,
iis_profile_timeout_in_seconds: Optional[float] = None,
parent: Optional[str] = None,
children: Optional[List[str]] = None,
threads: Optional[List["ProcessThreadInfo"]] = None,
open_file_handles: Optional[List[str]] = None,
modules: Optional[List["ProcessModuleInfo"]] = None,
file_name: Optional[str] = None,
command_line: Optional[str] = None,
user_name: Optional[str] = None,
handle_count: Optional[int] = None,
module_count: Optional[int] = None,
thread_count: Optional[int] = None,
start_time: Optional[datetime.datetime] = None,
total_processor_time: Optional[str] = None,
user_processor_time: Optional[str] = None,
privileged_processor_time: Optional[str] = None,
working_set64: Optional[int] = None,
peak_working_set64: Optional[int] = None,
private_memory_size64: Optional[int] = None,
virtual_memory_size64: Optional[int] = None,
peak_virtual_memory_size64: Optional[int] = None,
paged_system_memory_size64: Optional[int] = None,
nonpaged_system_memory_size64: Optional[int] = None,
paged_memory_size64: Optional[int] = None,
peak_paged_memory_size64: Optional[int] = None,
time_stamp: Optional[datetime.datetime] = None,
environment_variables: Optional[Dict[str, str]] = None,
is_scm_site: Optional[bool] = None,
is_web_job: Optional[bool] = None,
description: Optional[str] = None,
**kwargs
):
super(ProcessInfo, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.name_properties_name = name_properties_name
self.href = href
self.mini_dump = mini_dump
self.is_profile_running = is_profile_running
self.is_iis_profile_running = is_iis_profile_running
self.iis_profile_timeout_in_seconds = iis_profile_timeout_in_seconds
self.parent = parent
self.children = children
self.threads = threads
self.open_file_handles = open_file_handles
self.modules = modules
self.file_name = file_name
self.command_line = command_line
self.user_name = user_name
self.handle_count = handle_count
self.module_count = module_count
self.thread_count = thread_count
self.start_time = start_time
self.total_processor_time = total_processor_time
self.user_processor_time = user_processor_time
self.privileged_processor_time = privileged_processor_time
self.working_set64 = working_set64
self.peak_working_set64 = peak_working_set64
self.private_memory_size64 = private_memory_size64
self.virtual_memory_size64 = virtual_memory_size64
self.peak_virtual_memory_size64 = peak_virtual_memory_size64
self.paged_system_memory_size64 = paged_system_memory_size64
self.nonpaged_system_memory_size64 = nonpaged_system_memory_size64
self.paged_memory_size64 = paged_memory_size64
self.peak_paged_memory_size64 = peak_paged_memory_size64
self.time_stamp = time_stamp
self.environment_variables = environment_variables
self.is_scm_site = is_scm_site
self.is_web_job = is_web_job
self.description = description
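# Illustrative sketch (not part of the generated models): summarizing a ProcessInfo
# instance returned by the service. ProcessInfo objects are populated server-side and
# are not normally constructed by callers; `process` is assumed to be such an instance.
def _example_summarize_process(process):
    return {
        "file_name": process.file_name,
        "threads": process.thread_count,
        "handles": process.handle_count,
        "working_set_bytes": process.working_set64,
        "is_scm_site": process.is_scm_site,
    }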
class ProcessInfoCollection(msrest.serialization.Model):
"""Collection of Kudu process information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ProcessInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ProcessInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ProcessInfo"],
**kwargs
):
super(ProcessInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ProcessModuleInfo(ProxyOnlyResource):
"""Process Module Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param base_address: Base address. Used as module identifier in ARM resource URI.
:type base_address: str
:param file_name: File name.
:type file_name: str
:param href: HRef URI.
:type href: str
:param file_path: File path.
:type file_path: str
:param module_memory_size: Module memory size.
:type module_memory_size: int
:param file_version: File version.
:type file_version: str
:param file_description: File description.
:type file_description: str
:param product: Product name.
:type product: str
:param product_version: Product version.
:type product_version: str
:param is_debug: Is debug?.
:type is_debug: bool
:param language: Module language (locale).
:type language: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'base_address': {'key': 'properties.baseAddress', 'type': 'str'},
'file_name': {'key': 'properties.fileName', 'type': 'str'},
'href': {'key': 'properties.href', 'type': 'str'},
'file_path': {'key': 'properties.filePath', 'type': 'str'},
'module_memory_size': {'key': 'properties.moduleMemorySize', 'type': 'int'},
'file_version': {'key': 'properties.fileVersion', 'type': 'str'},
'file_description': {'key': 'properties.fileDescription', 'type': 'str'},
'product': {'key': 'properties.product', 'type': 'str'},
'product_version': {'key': 'properties.productVersion', 'type': 'str'},
'is_debug': {'key': 'properties.isDebug', 'type': 'bool'},
'language': {'key': 'properties.language', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
base_address: Optional[str] = None,
file_name: Optional[str] = None,
href: Optional[str] = None,
file_path: Optional[str] = None,
module_memory_size: Optional[int] = None,
file_version: Optional[str] = None,
file_description: Optional[str] = None,
product: Optional[str] = None,
product_version: Optional[str] = None,
is_debug: Optional[bool] = None,
language: Optional[str] = None,
**kwargs
):
super(ProcessModuleInfo, self).__init__(kind=kind, **kwargs)
self.base_address = base_address
self.file_name = file_name
self.href = href
self.file_path = file_path
self.module_memory_size = module_memory_size
self.file_version = file_version
self.file_description = file_description
self.product = product
self.product_version = product_version
self.is_debug = is_debug
self.language = language
class ProcessModuleInfoCollection(msrest.serialization.Model):
"""Collection of Kudu thread information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ProcessModuleInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ProcessModuleInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ProcessModuleInfo"],
**kwargs
):
super(ProcessModuleInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ProcessThreadInfo(ProxyOnlyResource):
"""Process Thread Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: ARM Identifier for deployment.
:type id_properties_id: int
:param href: HRef URI.
:type href: str
:param process: Process URI.
:type process: str
:param start_address: Start address.
:type start_address: str
:param current_priority: Current thread priority.
:type current_priority: int
:param priority_level: Thread priority level.
:type priority_level: str
:param base_priority: Base priority.
:type base_priority: int
:param start_time: Start time.
:type start_time: ~datetime.datetime
:param total_processor_time: Total processor time.
:type total_processor_time: str
:param user_processor_time: User processor time.
:type user_processor_time: str
:param priviledged_processor_time: Privileged processor time.
:type priviledged_processor_time: str
:param state: Thread state.
:type state: str
:param wait_reason: Wait reason.
:type wait_reason: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'int'},
'href': {'key': 'properties.href', 'type': 'str'},
'process': {'key': 'properties.process', 'type': 'str'},
'start_address': {'key': 'properties.startAddress', 'type': 'str'},
'current_priority': {'key': 'properties.currentPriority', 'type': 'int'},
'priority_level': {'key': 'properties.priorityLevel', 'type': 'str'},
'base_priority': {'key': 'properties.basePriority', 'type': 'int'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'total_processor_time': {'key': 'properties.totalProcessorTime', 'type': 'str'},
'user_processor_time': {'key': 'properties.userProcessorTime', 'type': 'str'},
'priviledged_processor_time': {'key': 'properties.priviledgedProcessorTime', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'wait_reason': {'key': 'properties.waitReason', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[int] = None,
href: Optional[str] = None,
process: Optional[str] = None,
start_address: Optional[str] = None,
current_priority: Optional[int] = None,
priority_level: Optional[str] = None,
base_priority: Optional[int] = None,
start_time: Optional[datetime.datetime] = None,
total_processor_time: Optional[str] = None,
user_processor_time: Optional[str] = None,
priviledged_processor_time: Optional[str] = None,
state: Optional[str] = None,
wait_reason: Optional[str] = None,
**kwargs
):
super(ProcessThreadInfo, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.href = href
self.process = process
self.start_address = start_address
self.current_priority = current_priority
self.priority_level = priority_level
self.base_priority = base_priority
self.start_time = start_time
self.total_processor_time = total_processor_time
self.user_processor_time = user_processor_time
self.priviledged_processor_time = priviledged_processor_time
self.state = state
self.wait_reason = wait_reason
class ProcessThreadInfoCollection(msrest.serialization.Model):
"""Collection of Kudu thread information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ProcessThreadInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ProcessThreadInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ProcessThreadInfo"],
**kwargs
):
super(ProcessThreadInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class PublicCertificate(ProxyOnlyResource):
"""Public certificate object.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param blob: Public Certificate byte array.
:type blob: bytearray
:param public_certificate_location: Public Certificate Location. Possible values include:
"CurrentUserMy", "LocalMachineMy", "Unknown".
:type public_certificate_location: str or
~azure.mgmt.web.v2016_08_01.models.PublicCertificateLocation
:ivar thumbprint: Certificate Thumbprint.
:vartype thumbprint: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'thumbprint': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'blob': {'key': 'properties.blob', 'type': 'bytearray'},
'public_certificate_location': {'key': 'properties.publicCertificateLocation', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
blob: Optional[bytearray] = None,
public_certificate_location: Optional[Union[str, "PublicCertificateLocation"]] = None,
**kwargs
):
super(PublicCertificate, self).__init__(kind=kind, **kwargs)
self.blob = blob
self.public_certificate_location = public_certificate_location
self.thumbprint = None
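# Illustrative sketch (not part of the generated models): building a PublicCertificate
# payload from a local certificate file. The file path is a placeholder, and the
# location string is one of the documented PublicCertificateLocation values; the
# thumbprint is read-only and filled in by the service.
def _example_public_certificate(cer_path="my_cert.cer"):
    with open(cer_path, "rb") as cert_file:
        return PublicCertificate(
            blob=bytearray(cert_file.read()),
            public_certificate_location="CurrentUserMy",
        )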
class PublicCertificateCollection(msrest.serialization.Model):
"""Collection of public certificates.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.PublicCertificate]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PublicCertificate]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["PublicCertificate"],
**kwargs
):
super(PublicCertificateCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class PushSettings(ProxyOnlyResource):
"""Push settings for the App.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param is_push_enabled: Gets or sets a flag indicating whether the Push endpoint is enabled.
:type is_push_enabled: bool
:param tag_whitelist_json: Gets or sets a JSON string containing a list of tags that are
whitelisted for use by the push registration endpoint.
:type tag_whitelist_json: str
:param tags_requiring_auth: Gets or sets a JSON string containing a list of tags that require
user authentication to be used in the push registration endpoint.
Tags can consist of alphanumeric characters and the following:
'_', '@', '#', '.', ':', '-'.
Validation should be performed at the PushRequestHandler.
:type tags_requiring_auth: str
:param dynamic_tags_json: Gets or sets a JSON string containing a list of dynamic tags that
will be evaluated from user claims in the push registration endpoint.
:type dynamic_tags_json: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'is_push_enabled': {'key': 'properties.isPushEnabled', 'type': 'bool'},
'tag_whitelist_json': {'key': 'properties.tagWhitelistJson', 'type': 'str'},
'tags_requiring_auth': {'key': 'properties.tagsRequiringAuth', 'type': 'str'},
'dynamic_tags_json': {'key': 'properties.dynamicTagsJson', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
is_push_enabled: Optional[bool] = None,
tag_whitelist_json: Optional[str] = None,
tags_requiring_auth: Optional[str] = None,
dynamic_tags_json: Optional[str] = None,
**kwargs
):
super(PushSettings, self).__init__(kind=kind, **kwargs)
self.is_push_enabled = is_push_enabled
self.tag_whitelist_json = tag_whitelist_json
self.tags_requiring_auth = tags_requiring_auth
self.dynamic_tags_json = dynamic_tags_json
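# Illustrative sketch (not part of the generated models): enabling push notifications
# and supplying the tag lists as JSON-encoded strings, as the *_json property names
# suggest. The tag names below are placeholders.
def _example_push_settings():
    return PushSettings(
        is_push_enabled=True,
        tag_whitelist_json='["news", "sports"]',
        tags_requiring_auth='["vip"]',
        dynamic_tags_json='["user_id"]',
    )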
class RampUpRule(msrest.serialization.Model):
"""Routing rules for ramp up testing. This rule allows to redirect static traffic % to a slot or to gradually change routing % based on performance.
:param action_host_name: Hostname of a slot to which the traffic will be redirected if decided
to. E.g. myapp-stage.azurewebsites.net.
:type action_host_name: str
:param reroute_percentage: Percentage of the traffic which will be redirected to
:code:`<code>ActionHostName</code>`.
:type reroute_percentage: float
:param change_step: In auto ramp up scenario this is the step to add/remove from
:code:`<code>ReroutePercentage</code>` until it reaches
:code:`<code>MinReroutePercentage</code>` or :code:`<code>MaxReroutePercentage</code>`. Site
metrics are checked every N minutes specified in :code:`<code>ChangeIntervalInMinutes</code>`.
     A custom decision algorithm can be provided in the TiPCallback site extension; its URL can be
     specified in :code:`<code>ChangeDecisionCallbackUrl</code>`.
:type change_step: float
:param change_interval_in_minutes: Specifies interval in minutes to reevaluate
ReroutePercentage.
:type change_interval_in_minutes: int
:param min_reroute_percentage: Specifies lower boundary above which ReroutePercentage will
stay.
:type min_reroute_percentage: float
:param max_reroute_percentage: Specifies upper boundary below which ReroutePercentage will
stay.
:type max_reroute_percentage: float
    :param change_decision_callback_url: A custom decision algorithm can be provided in the
     TiPCallback site extension, in which case its URL can be specified here. See the TiPCallback
     site extension for the scaffold and contracts.
     https://www.siteextensions.net/packages/TiPCallback/.
:type change_decision_callback_url: str
    :param name: Name of the routing rule. The recommended name is the name of the slot that will
     receive the traffic in the experiment.
:type name: str
"""
_attribute_map = {
'action_host_name': {'key': 'actionHostName', 'type': 'str'},
'reroute_percentage': {'key': 'reroutePercentage', 'type': 'float'},
'change_step': {'key': 'changeStep', 'type': 'float'},
'change_interval_in_minutes': {'key': 'changeIntervalInMinutes', 'type': 'int'},
'min_reroute_percentage': {'key': 'minReroutePercentage', 'type': 'float'},
'max_reroute_percentage': {'key': 'maxReroutePercentage', 'type': 'float'},
'change_decision_callback_url': {'key': 'changeDecisionCallbackUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
action_host_name: Optional[str] = None,
reroute_percentage: Optional[float] = None,
change_step: Optional[float] = None,
change_interval_in_minutes: Optional[int] = None,
min_reroute_percentage: Optional[float] = None,
max_reroute_percentage: Optional[float] = None,
change_decision_callback_url: Optional[str] = None,
name: Optional[str] = None,
**kwargs
):
super(RampUpRule, self).__init__(**kwargs)
self.action_host_name = action_host_name
self.reroute_percentage = reroute_percentage
self.change_step = change_step
self.change_interval_in_minutes = change_interval_in_minutes
self.min_reroute_percentage = min_reroute_percentage
self.max_reroute_percentage = max_reroute_percentage
self.change_decision_callback_url = change_decision_callback_url
self.name = name
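# Illustrative sketch (not part of the generated models): a ramp-up rule that starts by
# sending 5% of traffic to a staging slot and lets the platform adjust the percentage
# in 5% steps every 10 minutes, within the given bounds. The host name and all numeric
# values are placeholders chosen for illustration.
def _example_ramp_up_rule():
    return RampUpRule(
        action_host_name="myapp-stage.azurewebsites.net",
        reroute_percentage=5.0,
        change_step=5.0,
        change_interval_in_minutes=10,
        min_reroute_percentage=1.0,
        max_reroute_percentage=50.0,
        name="stage",
    )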
class RelayServiceConnectionEntity(ProxyOnlyResource):
"""Hybrid Connection for an App Service app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param entity_name:
:type entity_name: str
:param entity_connection_string:
:type entity_connection_string: str
:param resource_type:
:type resource_type: str
:param resource_connection_string:
:type resource_connection_string: str
:param hostname:
:type hostname: str
:param port:
:type port: int
:param biztalk_uri:
:type biztalk_uri: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'entity_name': {'key': 'properties.entityName', 'type': 'str'},
'entity_connection_string': {'key': 'properties.entityConnectionString', 'type': 'str'},
'resource_type': {'key': 'properties.resourceType', 'type': 'str'},
'resource_connection_string': {'key': 'properties.resourceConnectionString', 'type': 'str'},
'hostname': {'key': 'properties.hostname', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'biztalk_uri': {'key': 'properties.biztalkUri', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
entity_name: Optional[str] = None,
entity_connection_string: Optional[str] = None,
resource_type: Optional[str] = None,
resource_connection_string: Optional[str] = None,
hostname: Optional[str] = None,
port: Optional[int] = None,
biztalk_uri: Optional[str] = None,
**kwargs
):
super(RelayServiceConnectionEntity, self).__init__(kind=kind, **kwargs)
self.entity_name = entity_name
self.entity_connection_string = entity_connection_string
self.resource_type = resource_type
self.resource_connection_string = resource_connection_string
self.hostname = hostname
self.port = port
self.biztalk_uri = biztalk_uri
class RequestsBasedTrigger(msrest.serialization.Model):
"""Trigger based on total requests.
:param count: Request Count.
:type count: int
:param time_interval: Time interval.
:type time_interval: str
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'int'},
'time_interval': {'key': 'timeInterval', 'type': 'str'},
}
def __init__(
self,
*,
count: Optional[int] = None,
time_interval: Optional[str] = None,
**kwargs
):
super(RequestsBasedTrigger, self).__init__(**kwargs)
self.count = count
self.time_interval = time_interval
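# Illustrative sketch (not part of the generated models): a trigger that fires once
# 1000 requests are seen within a five-minute window. The hh:mm:ss time span format
# used for `time_interval` is an assumption, not something defined by this model.
def _example_requests_trigger():
    return RequestsBasedTrigger(count=1000, time_interval="00:05:00")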
class ResourceMetric(msrest.serialization.Model):
"""Object representing a metric for any resource .
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: Name of metric.
:vartype name: ~azure.mgmt.web.v2016_08_01.models.ResourceMetricName
:ivar unit: Metric unit.
:vartype unit: str
    :ivar time_grain: Metric granularity. E.g. PT1H, PT5M, P1D.
:vartype time_grain: str
:ivar start_time: Metric start time.
:vartype start_time: ~datetime.datetime
:ivar end_time: Metric end time.
:vartype end_time: ~datetime.datetime
:ivar resource_id: Metric resource Id.
:vartype resource_id: str
:ivar id: Resource Id.
:vartype id: str
:ivar metric_values: Metric values.
:vartype metric_values: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricValue]
:ivar properties: Resource metric properties collection.
:vartype properties: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricProperty]
"""
_validation = {
'name': {'readonly': True},
'unit': {'readonly': True},
'time_grain': {'readonly': True},
'start_time': {'readonly': True},
'end_time': {'readonly': True},
'resource_id': {'readonly': True},
'id': {'readonly': True},
'metric_values': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'ResourceMetricName'},
'unit': {'key': 'unit', 'type': 'str'},
'time_grain': {'key': 'timeGrain', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'metric_values': {'key': 'metricValues', 'type': '[ResourceMetricValue]'},
'properties': {'key': 'properties', 'type': '[ResourceMetricProperty]'},
}
def __init__(
self,
**kwargs
):
super(ResourceMetric, self).__init__(**kwargs)
self.name = None
self.unit = None
self.time_grain = None
self.start_time = None
self.end_time = None
self.resource_id = None
self.id = None
self.metric_values = None
self.properties = None
class ResourceMetricAvailability(msrest.serialization.Model):
"""Metrics availability and retention.
Variables are only populated by the server, and will be ignored when sending a request.
    :ivar time_grain: Time grain.
:vartype time_grain: str
:ivar retention: Retention period for the current time grain.
:vartype retention: str
"""
_validation = {
'time_grain': {'readonly': True},
'retention': {'readonly': True},
}
_attribute_map = {
'time_grain': {'key': 'timeGrain', 'type': 'str'},
'retention': {'key': 'retention', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceMetricAvailability, self).__init__(**kwargs)
self.time_grain = None
self.retention = None
class ResourceMetricCollection(msrest.serialization.Model):
"""Collection of metric responses.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetric]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceMetric]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ResourceMetric"],
**kwargs
):
super(ResourceMetricCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ResourceMetricDefinition(ProxyOnlyResource):
"""Metadata for the metrics.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar name_properties_name: Name of the metric.
:vartype name_properties_name: ~azure.mgmt.web.v2016_08_01.models.ResourceMetricName
:ivar unit: Unit of the metric.
:vartype unit: str
:ivar primary_aggregation_type: Primary aggregation type.
:vartype primary_aggregation_type: str
:ivar metric_availabilities: List of time grains supported for the metric together with
retention period.
:vartype metric_availabilities:
list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricAvailability]
:ivar resource_uri: Resource URI.
:vartype resource_uri: str
:ivar id_properties_id: Resource ID.
:vartype id_properties_id: str
:ivar properties: Resource metric definition properties.
:vartype properties: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
'unit': {'readonly': True},
'primary_aggregation_type': {'readonly': True},
'metric_availabilities': {'readonly': True},
'resource_uri': {'readonly': True},
'id_properties_id': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'ResourceMetricName'},
'unit': {'key': 'properties.unit', 'type': 'str'},
'primary_aggregation_type': {'key': 'properties.primaryAggregationType', 'type': 'str'},
'metric_availabilities': {'key': 'properties.metricAvailabilities', 'type': '[ResourceMetricAvailability]'},
'resource_uri': {'key': 'properties.resourceUri', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
'properties': {'key': 'properties.properties', 'type': '{str}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(ResourceMetricDefinition, self).__init__(kind=kind, **kwargs)
self.name_properties_name = None
self.unit = None
self.primary_aggregation_type = None
self.metric_availabilities = None
self.resource_uri = None
self.id_properties_id = None
self.properties = None
class ResourceMetricDefinitionCollection(msrest.serialization.Model):
"""Collection of metric definitions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricDefinition]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceMetricDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ResourceMetricDefinition"],
**kwargs
):
super(ResourceMetricDefinitionCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ResourceMetricName(msrest.serialization.Model):
"""Name of a metric for any resource .
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: metric name value.
:vartype value: str
:ivar localized_value: Localized metric name value.
:vartype localized_value: str
"""
_validation = {
'value': {'readonly': True},
'localized_value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceMetricName, self).__init__(**kwargs)
self.value = None
self.localized_value = None
class ResourceMetricProperty(msrest.serialization.Model):
"""Resource metric property.
:param key: Key for resource metric property.
:type key: str
:param value: Value of pair.
:type value: str
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
key: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
super(ResourceMetricProperty, self).__init__(**kwargs)
self.key = key
self.value = value
class ResourceMetricValue(msrest.serialization.Model):
"""Value of resource metric.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar timestamp: Value timestamp.
:vartype timestamp: str
:ivar average: Value average.
:vartype average: float
:ivar minimum: Value minimum.
:vartype minimum: float
:ivar maximum: Value maximum.
:vartype maximum: float
:ivar total: Value total.
:vartype total: float
:ivar count: Value count.
:vartype count: float
:ivar properties: Resource metric properties collection.
:vartype properties: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricProperty]
"""
_validation = {
'timestamp': {'readonly': True},
'average': {'readonly': True},
'minimum': {'readonly': True},
'maximum': {'readonly': True},
'total': {'readonly': True},
'count': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'str'},
'average': {'key': 'average', 'type': 'float'},
'minimum': {'key': 'minimum', 'type': 'float'},
'maximum': {'key': 'maximum', 'type': 'float'},
'total': {'key': 'total', 'type': 'float'},
'count': {'key': 'count', 'type': 'float'},
'properties': {'key': 'properties', 'type': '[ResourceMetricProperty]'},
}
def __init__(
self,
**kwargs
):
super(ResourceMetricValue, self).__init__(**kwargs)
self.timestamp = None
self.average = None
self.minimum = None
self.maximum = None
self.total = None
self.count = None
self.properties = None
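# Illustrative sketch (not part of the generated models): flattening a ResourceMetric
# returned by the service into (timestamp, average) pairs. ResourceMetric and
# ResourceMetricValue are read-only and populated server-side; `metric` is assumed to
# be such an instance.
def _example_metric_averages(metric):
    if not metric.metric_values:
        return []
    return [(value.timestamp, value.average) for value in metric.metric_values]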
class RestoreRequest(ProxyOnlyResource):
"""Description of a restore request.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param storage_account_url: SAS URL to the container.
:type storage_account_url: str
:param blob_name: Name of a blob which contains the backup.
:type blob_name: str
:param overwrite: :code:`<code>true</code>` if the restore operation can overwrite target app;
otherwise, :code:`<code>false</code>`. :code:`<code>true</code>` is needed if trying to restore
over an existing app.
:type overwrite: bool
:param site_name: Name of an app.
:type site_name: str
:param databases: Collection of databases which should be restored. This list has to match the
list of databases included in the backup.
:type databases: list[~azure.mgmt.web.v2016_08_01.models.DatabaseBackupSetting]
    :param ignore_conflicting_host_names: Changes the logic when restoring an app with custom
domains. :code:`<code>true</code>` to remove custom domains automatically. If
:code:`<code>false</code>`, custom domains are added to
the app's object when it is being restored, but that might fail due to conflicts during the
operation.
:type ignore_conflicting_host_names: bool
:param ignore_databases: Ignore the databases and only restore the site content.
:type ignore_databases: bool
    :param app_service_plan: Specify the App Service plan that will own the restored site.
:type app_service_plan: str
:param operation_type: Operation type. Possible values include: "Default", "Clone",
"Relocation", "Snapshot".
:type operation_type: str or ~azure.mgmt.web.v2016_08_01.models.BackupRestoreOperationType
:param adjust_connection_strings: :code:`<code>true</code>` if SiteConfig.ConnectionStrings
should be set in new app; otherwise, :code:`<code>false</code>`.
:type adjust_connection_strings: bool
:param hosting_environment: App Service Environment name, if needed (only when restoring an app
to an App Service Environment).
:type hosting_environment: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
'blob_name': {'key': 'properties.blobName', 'type': 'str'},
'overwrite': {'key': 'properties.overwrite', 'type': 'bool'},
'site_name': {'key': 'properties.siteName', 'type': 'str'},
'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
'ignore_conflicting_host_names': {'key': 'properties.ignoreConflictingHostNames', 'type': 'bool'},
'ignore_databases': {'key': 'properties.ignoreDatabases', 'type': 'bool'},
'app_service_plan': {'key': 'properties.appServicePlan', 'type': 'str'},
'operation_type': {'key': 'properties.operationType', 'type': 'str'},
'adjust_connection_strings': {'key': 'properties.adjustConnectionStrings', 'type': 'bool'},
'hosting_environment': {'key': 'properties.hostingEnvironment', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
storage_account_url: Optional[str] = None,
blob_name: Optional[str] = None,
overwrite: Optional[bool] = None,
site_name: Optional[str] = None,
databases: Optional[List["DatabaseBackupSetting"]] = None,
ignore_conflicting_host_names: Optional[bool] = False,
ignore_databases: Optional[bool] = False,
app_service_plan: Optional[str] = None,
operation_type: Optional[Union[str, "BackupRestoreOperationType"]] = None,
adjust_connection_strings: Optional[bool] = None,
hosting_environment: Optional[str] = None,
**kwargs
):
super(RestoreRequest, self).__init__(kind=kind, **kwargs)
self.storage_account_url = storage_account_url
self.blob_name = blob_name
self.overwrite = overwrite
self.site_name = site_name
self.databases = databases
self.ignore_conflicting_host_names = ignore_conflicting_host_names
self.ignore_databases = ignore_databases
self.app_service_plan = app_service_plan
self.operation_type = operation_type
self.adjust_connection_strings = adjust_connection_strings
self.hosting_environment = hosting_environment
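# Illustrative sketch (not part of the generated models): a restore request that
# overwrites the existing app from a named backup blob while skipping databases. The
# SAS URL and blob name are placeholders; `operation_type` uses one of the documented
# BackupRestoreOperationType values.
def _example_restore_request():
    return RestoreRequest(
        storage_account_url="https://mystorage.blob.core.windows.net/backups?sv=...",
        blob_name="myapp_backup.zip",
        overwrite=True,
        site_name="myapp",
        operation_type="Default",
        ignore_databases=True,
    )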
class RestoreResponse(ProxyOnlyResource):
"""Response for an app restore request.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
    :ivar operation_id: When the server starts the restore process, it will return an operation ID
identifying that particular restore operation.
:vartype operation_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'operation_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'operation_id': {'key': 'properties.operationId', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(RestoreResponse, self).__init__(kind=kind, **kwargs)
self.operation_id = None
class Site(Resource):
"""A web app, a mobile app backend, or an API app.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Required. Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param identity: Managed service identity.
:type identity: ~azure.mgmt.web.v2016_08_01.models.ManagedServiceIdentity
:ivar state: Current state of the app.
:vartype state: str
:ivar host_names: Hostnames associated with the app.
:vartype host_names: list[str]
:ivar repository_site_name: Name of the repository site.
:vartype repository_site_name: str
:ivar usage_state: State indicating whether the app has exceeded its quota usage. Read-only.
Possible values include: "Normal", "Exceeded".
:vartype usage_state: str or ~azure.mgmt.web.v2016_08_01.models.UsageState
:param enabled: :code:`<code>true</code>` if the app is enabled; otherwise,
:code:`<code>false</code>`. Setting this value to false disables the app (takes the app
offline).
:type enabled: bool
    :ivar enabled_host_names: Enabled hostnames for the app. Hostnames need to be assigned (see
HostNames) AND enabled. Otherwise,
the app is not served on those hostnames.
:vartype enabled_host_names: list[str]
:ivar availability_state: Management information availability state for the app. Possible
values include: "Normal", "Limited", "DisasterRecoveryMode".
:vartype availability_state: str or ~azure.mgmt.web.v2016_08_01.models.SiteAvailabilityState
:param host_name_ssl_states: Hostname SSL states are used to manage the SSL bindings for app's
hostnames.
:type host_name_ssl_states: list[~azure.mgmt.web.v2016_08_01.models.HostNameSslState]
:param server_farm_id: Resource ID of the associated App Service plan, formatted as:
"/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
:type server_farm_id: str
:param reserved: :code:`<code>true</code>` if reserved; otherwise, :code:`<code>false</code>`.
:type reserved: bool
:ivar last_modified_time_utc: Last time the app was modified, in UTC. Read-only.
:vartype last_modified_time_utc: ~datetime.datetime
:param site_config: Configuration of the app.
:type site_config: ~azure.mgmt.web.v2016_08_01.models.SiteConfig
:ivar traffic_manager_host_names: Azure Traffic Manager hostnames associated with the app.
Read-only.
:vartype traffic_manager_host_names: list[str]
:param scm_site_also_stopped: :code:`<code>true</code>` to stop SCM (KUDU) site when the app is
stopped; otherwise, :code:`<code>false</code>`. The default is :code:`<code>false</code>`.
:type scm_site_also_stopped: bool
:ivar target_swap_slot: Specifies which deployment slot this app will swap into. Read-only.
:vartype target_swap_slot: str
:param hosting_environment_profile: App Service Environment to use for the app.
:type hosting_environment_profile: ~azure.mgmt.web.v2016_08_01.models.HostingEnvironmentProfile
:param client_affinity_enabled: :code:`<code>true</code>` to enable client affinity;
:code:`<code>false</code>` to stop sending session affinity cookies, which route client
requests in the same session to the same instance. Default is :code:`<code>true</code>`.
:type client_affinity_enabled: bool
:param client_cert_enabled: :code:`<code>true</code>` to enable client certificate
authentication (TLS mutual authentication); otherwise, :code:`<code>false</code>`. Default is
:code:`<code>false</code>`.
:type client_cert_enabled: bool
:param host_names_disabled: :code:`<code>true</code>` to disable the public hostnames of the
app; otherwise, :code:`<code>false</code>`.
If :code:`<code>true</code>`, the app is only accessible via API management process.
:type host_names_disabled: bool
:ivar outbound_ip_addresses: List of IP addresses that the app uses for outbound connections
(e.g. database access). Includes VIPs from tenants that site can be hosted with current
settings. Read-only.
:vartype outbound_ip_addresses: str
:ivar possible_outbound_ip_addresses: List of IP addresses that the app uses for outbound
connections (e.g. database access). Includes VIPs from all tenants. Read-only.
:vartype possible_outbound_ip_addresses: str
:param container_size: Size of the function container.
:type container_size: int
:param daily_memory_time_quota: Maximum allowed daily memory-time quota (applicable on dynamic
apps only).
:type daily_memory_time_quota: int
    :ivar suspended_till: Time until which the app is suspended when the memory-time quota is exceeded.
:vartype suspended_till: ~datetime.datetime
:ivar max_number_of_workers: Maximum number of workers.
This only applies to Functions container.
:vartype max_number_of_workers: int
:param cloning_info: If specified during app creation, the app is cloned from a source app.
:type cloning_info: ~azure.mgmt.web.v2016_08_01.models.CloningInfo
:param snapshot_info: If specified during app creation, the app is created from a previous
snapshot.
:type snapshot_info: ~azure.mgmt.web.v2016_08_01.models.SnapshotRecoveryRequest
:ivar resource_group: Name of the resource group the app belongs to. Read-only.
:vartype resource_group: str
:ivar is_default_container: :code:`<code>true</code>` if the app is a default container;
otherwise, :code:`<code>false</code>`.
:vartype is_default_container: bool
:ivar default_host_name: Default hostname of the app. Read-only.
:vartype default_host_name: str
:ivar slot_swap_status: Status of the last deployment slot swap operation.
:vartype slot_swap_status: ~azure.mgmt.web.v2016_08_01.models.SlotSwapStatus
    :param https_only: HttpsOnly: configures a web site to accept only https requests. Issues a
     redirect for http requests.
:type https_only: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
'state': {'readonly': True},
'host_names': {'readonly': True},
'repository_site_name': {'readonly': True},
'usage_state': {'readonly': True},
'enabled_host_names': {'readonly': True},
'availability_state': {'readonly': True},
'last_modified_time_utc': {'readonly': True},
'traffic_manager_host_names': {'readonly': True},
'target_swap_slot': {'readonly': True},
'outbound_ip_addresses': {'readonly': True},
'possible_outbound_ip_addresses': {'readonly': True},
'suspended_till': {'readonly': True},
'max_number_of_workers': {'readonly': True},
'resource_group': {'readonly': True},
'is_default_container': {'readonly': True},
'default_host_name': {'readonly': True},
'slot_swap_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'state': {'key': 'properties.state', 'type': 'str'},
'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
'repository_site_name': {'key': 'properties.repositorySiteName', 'type': 'str'},
'usage_state': {'key': 'properties.usageState', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'enabled_host_names': {'key': 'properties.enabledHostNames', 'type': '[str]'},
'availability_state': {'key': 'properties.availabilityState', 'type': 'str'},
'host_name_ssl_states': {'key': 'properties.hostNameSslStates', 'type': '[HostNameSslState]'},
'server_farm_id': {'key': 'properties.serverFarmId', 'type': 'str'},
'reserved': {'key': 'properties.reserved', 'type': 'bool'},
'last_modified_time_utc': {'key': 'properties.lastModifiedTimeUtc', 'type': 'iso-8601'},
'site_config': {'key': 'properties.siteConfig', 'type': 'SiteConfig'},
'traffic_manager_host_names': {'key': 'properties.trafficManagerHostNames', 'type': '[str]'},
'scm_site_also_stopped': {'key': 'properties.scmSiteAlsoStopped', 'type': 'bool'},
'target_swap_slot': {'key': 'properties.targetSwapSlot', 'type': 'str'},
'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
'client_affinity_enabled': {'key': 'properties.clientAffinityEnabled', 'type': 'bool'},
'client_cert_enabled': {'key': 'properties.clientCertEnabled', 'type': 'bool'},
'host_names_disabled': {'key': 'properties.hostNamesDisabled', 'type': 'bool'},
'outbound_ip_addresses': {'key': 'properties.outboundIpAddresses', 'type': 'str'},
'possible_outbound_ip_addresses': {'key': 'properties.possibleOutboundIpAddresses', 'type': 'str'},
'container_size': {'key': 'properties.containerSize', 'type': 'int'},
'daily_memory_time_quota': {'key': 'properties.dailyMemoryTimeQuota', 'type': 'int'},
'suspended_till': {'key': 'properties.suspendedTill', 'type': 'iso-8601'},
'max_number_of_workers': {'key': 'properties.maxNumberOfWorkers', 'type': 'int'},
'cloning_info': {'key': 'properties.cloningInfo', 'type': 'CloningInfo'},
'snapshot_info': {'key': 'properties.snapshotInfo', 'type': 'SnapshotRecoveryRequest'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'is_default_container': {'key': 'properties.isDefaultContainer', 'type': 'bool'},
'default_host_name': {'key': 'properties.defaultHostName', 'type': 'str'},
'slot_swap_status': {'key': 'properties.slotSwapStatus', 'type': 'SlotSwapStatus'},
'https_only': {'key': 'properties.httpsOnly', 'type': 'bool'},
}
def __init__(
self,
*,
location: str,
kind: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
identity: Optional["ManagedServiceIdentity"] = None,
enabled: Optional[bool] = None,
host_name_ssl_states: Optional[List["HostNameSslState"]] = None,
server_farm_id: Optional[str] = None,
reserved: Optional[bool] = False,
site_config: Optional["SiteConfig"] = None,
scm_site_also_stopped: Optional[bool] = False,
hosting_environment_profile: Optional["HostingEnvironmentProfile"] = None,
client_affinity_enabled: Optional[bool] = None,
client_cert_enabled: Optional[bool] = None,
host_names_disabled: Optional[bool] = None,
container_size: Optional[int] = None,
daily_memory_time_quota: Optional[int] = None,
cloning_info: Optional["CloningInfo"] = None,
snapshot_info: Optional["SnapshotRecoveryRequest"] = None,
https_only: Optional[bool] = None,
**kwargs
):
super(Site, self).__init__(kind=kind, location=location, tags=tags, **kwargs)
self.identity = identity
self.state = None
self.host_names = None
self.repository_site_name = None
self.usage_state = None
self.enabled = enabled
self.enabled_host_names = None
self.availability_state = None
self.host_name_ssl_states = host_name_ssl_states
self.server_farm_id = server_farm_id
self.reserved = reserved
self.last_modified_time_utc = None
self.site_config = site_config
self.traffic_manager_host_names = None
self.scm_site_also_stopped = scm_site_also_stopped
self.target_swap_slot = None
self.hosting_environment_profile = hosting_environment_profile
self.client_affinity_enabled = client_affinity_enabled
self.client_cert_enabled = client_cert_enabled
self.host_names_disabled = host_names_disabled
self.outbound_ip_addresses = None
self.possible_outbound_ip_addresses = None
self.container_size = container_size
self.daily_memory_time_quota = daily_memory_time_quota
self.suspended_till = None
self.max_number_of_workers = None
self.cloning_info = cloning_info
self.snapshot_info = snapshot_info
self.resource_group = None
self.is_default_container = None
self.default_host_name = None
self.slot_swap_status = None
self.https_only = https_only
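# Example (illustrative sketch, not part of the generated code): building a ``Site``
# payload for a create-or-update call. Only ``location`` is required by this model; the
# subscription, resource group and plan identifiers below are hypothetical placeholders.
#
#     site = Site(
#         location="West Europe",
#         server_farm_id=(
#             "/subscriptions/<subscription-id>/resourceGroups/<group-name>"
#             "/providers/Microsoft.Web/serverfarms/<plan-name>"
#         ),
#         https_only=True,
#         site_config=SiteConfig(always_on=True, min_tls_version="1.2"),
#     )
#
# Read-only attributes such as ``state`` and ``default_host_name`` stay ``None`` until the
# model is returned by the service.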
class SiteAuthSettings(ProxyOnlyResource):
"""Configuration settings for the Azure App Service Authentication / Authorization feature.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param enabled: :code:`<code>true</code>` if the Authentication / Authorization feature is
enabled for the current app; otherwise, :code:`<code>false</code>`.
:type enabled: bool
:param runtime_version: The RuntimeVersion of the Authentication / Authorization feature in use
for the current app.
The setting in this value can control the behavior of certain features in the Authentication /
Authorization module.
:type runtime_version: str
:param unauthenticated_client_action: The action to take when an unauthenticated client
attempts to access the app. Possible values include: "RedirectToLoginPage", "AllowAnonymous".
:type unauthenticated_client_action: str or
~azure.mgmt.web.v2016_08_01.models.UnauthenticatedClientAction
:param token_store_enabled: :code:`<code>true</code>` to durably store platform-specific
security tokens that are obtained during login flows; otherwise, :code:`<code>false</code>`.
The default is :code:`<code>false</code>`.
:type token_store_enabled: bool
:param allowed_external_redirect_urls: External URLs that can be redirected to as part of
logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
:type allowed_external_redirect_urls: list[str]
:param default_provider: The default authentication provider to use when multiple providers are
configured.
This setting is only needed if multiple providers are configured and the unauthenticated
client action is set to "RedirectToLoginPage". Possible values include:
"AzureActiveDirectory", "Facebook", "Google", "MicrosoftAccount", "Twitter".
:type default_provider: str or ~azure.mgmt.web.v2016_08_01.models.BuiltInAuthenticationProvider
:param token_refresh_extension_hours: The number of hours after session token expiration that a
session token can be used to
call the token refresh API. The default is 72 hours.
:type token_refresh_extension_hours: float
:param client_id: The Client ID of this relying party application, known as the client_id.
This setting is required for enabling OpenID Connect authentication with Azure Active
Directory or other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html.
:type client_id: str
:param client_secret: The Client Secret of this relying party application (in Azure Active
Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth
flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html.
:type client_secret: str
:param issuer: The OpenID Connect Issuer URI that represents the entity which issues access
tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g.
https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery:
http://openid.net/specs/openid-connect-discovery-1_0.html.
:type issuer: str
:param allowed_audiences: Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the :code:`<code>ClientID</code>` value is always considered
an
allowed audience, regardless of this setting.
:type allowed_audiences: list[str]
:param additional_login_params: Login parameters to send to the OpenID Connect authorization
endpoint when
a user logs in. Each parameter must be in the form "key=value".
:type additional_login_params: list[str]
:param google_client_id: The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/.
:type google_client_id: str
:param google_client_secret: The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/.
:type google_client_secret: str
:param google_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Google
Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as
default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/.
:type google_o_auth_scopes: list[str]
:param facebook_app_id: The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login.
:type facebook_app_id: str
:param facebook_app_secret: The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login.
:type facebook_app_secret: str
:param facebook_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Facebook
Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login.
:type facebook_o_auth_scopes: list[str]
:param twitter_consumer_key: The OAuth 1.0a consumer key of the Twitter application used for
sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in.
:type twitter_consumer_key: str
:param twitter_consumer_secret: The OAuth 1.0a consumer secret of the Twitter application used
for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in.
:type twitter_consumer_secret: str
:param microsoft_account_client_id: The OAuth 2.0 client ID that was created for the app used
for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm.
:type microsoft_account_client_id: str
:param microsoft_account_client_secret: The OAuth 2.0 client secret that was created for the
app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm.
:type microsoft_account_client_secret: str
:param microsoft_account_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of
Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation:
https://msdn.microsoft.com/en-us/library/dn631845.aspx.
:type microsoft_account_o_auth_scopes: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'runtime_version': {'key': 'properties.runtimeVersion', 'type': 'str'},
'unauthenticated_client_action': {'key': 'properties.unauthenticatedClientAction', 'type': 'str'},
'token_store_enabled': {'key': 'properties.tokenStoreEnabled', 'type': 'bool'},
'allowed_external_redirect_urls': {'key': 'properties.allowedExternalRedirectUrls', 'type': '[str]'},
'default_provider': {'key': 'properties.defaultProvider', 'type': 'str'},
'token_refresh_extension_hours': {'key': 'properties.tokenRefreshExtensionHours', 'type': 'float'},
'client_id': {'key': 'properties.clientId', 'type': 'str'},
'client_secret': {'key': 'properties.clientSecret', 'type': 'str'},
'issuer': {'key': 'properties.issuer', 'type': 'str'},
'allowed_audiences': {'key': 'properties.allowedAudiences', 'type': '[str]'},
'additional_login_params': {'key': 'properties.additionalLoginParams', 'type': '[str]'},
'google_client_id': {'key': 'properties.googleClientId', 'type': 'str'},
'google_client_secret': {'key': 'properties.googleClientSecret', 'type': 'str'},
'google_o_auth_scopes': {'key': 'properties.googleOAuthScopes', 'type': '[str]'},
'facebook_app_id': {'key': 'properties.facebookAppId', 'type': 'str'},
'facebook_app_secret': {'key': 'properties.facebookAppSecret', 'type': 'str'},
'facebook_o_auth_scopes': {'key': 'properties.facebookOAuthScopes', 'type': '[str]'},
'twitter_consumer_key': {'key': 'properties.twitterConsumerKey', 'type': 'str'},
'twitter_consumer_secret': {'key': 'properties.twitterConsumerSecret', 'type': 'str'},
'microsoft_account_client_id': {'key': 'properties.microsoftAccountClientId', 'type': 'str'},
'microsoft_account_client_secret': {'key': 'properties.microsoftAccountClientSecret', 'type': 'str'},
'microsoft_account_o_auth_scopes': {'key': 'properties.microsoftAccountOAuthScopes', 'type': '[str]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
enabled: Optional[bool] = None,
runtime_version: Optional[str] = None,
unauthenticated_client_action: Optional[Union[str, "UnauthenticatedClientAction"]] = None,
token_store_enabled: Optional[bool] = None,
allowed_external_redirect_urls: Optional[List[str]] = None,
default_provider: Optional[Union[str, "BuiltInAuthenticationProvider"]] = None,
token_refresh_extension_hours: Optional[float] = None,
client_id: Optional[str] = None,
client_secret: Optional[str] = None,
issuer: Optional[str] = None,
allowed_audiences: Optional[List[str]] = None,
additional_login_params: Optional[List[str]] = None,
google_client_id: Optional[str] = None,
google_client_secret: Optional[str] = None,
google_o_auth_scopes: Optional[List[str]] = None,
facebook_app_id: Optional[str] = None,
facebook_app_secret: Optional[str] = None,
facebook_o_auth_scopes: Optional[List[str]] = None,
twitter_consumer_key: Optional[str] = None,
twitter_consumer_secret: Optional[str] = None,
microsoft_account_client_id: Optional[str] = None,
microsoft_account_client_secret: Optional[str] = None,
microsoft_account_o_auth_scopes: Optional[List[str]] = None,
**kwargs
):
super(SiteAuthSettings, self).__init__(kind=kind, **kwargs)
self.enabled = enabled
self.runtime_version = runtime_version
self.unauthenticated_client_action = unauthenticated_client_action
self.token_store_enabled = token_store_enabled
self.allowed_external_redirect_urls = allowed_external_redirect_urls
self.default_provider = default_provider
self.token_refresh_extension_hours = token_refresh_extension_hours
self.client_id = client_id
self.client_secret = client_secret
self.issuer = issuer
self.allowed_audiences = allowed_audiences
self.additional_login_params = additional_login_params
self.google_client_id = google_client_id
self.google_client_secret = google_client_secret
self.google_o_auth_scopes = google_o_auth_scopes
self.facebook_app_id = facebook_app_id
self.facebook_app_secret = facebook_app_secret
self.facebook_o_auth_scopes = facebook_o_auth_scopes
self.twitter_consumer_key = twitter_consumer_key
self.twitter_consumer_secret = twitter_consumer_secret
self.microsoft_account_client_id = microsoft_account_client_id
self.microsoft_account_client_secret = microsoft_account_client_secret
self.microsoft_account_o_auth_scopes = microsoft_account_o_auth_scopes
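# Example (illustrative sketch): enabling App Service Authentication with Azure Active
# Directory as the default provider. The client ID, secret and tenant GUID are hypothetical
# placeholders; the enum-like values are the string forms listed in the docstring above.
#
#     auth_settings = SiteAuthSettings(
#         enabled=True,
#         default_provider="AzureActiveDirectory",
#         unauthenticated_client_action="RedirectToLoginPage",
#         client_id="<aad-application-client-id>",
#         client_secret="<aad-application-client-secret>",
#         issuer="https://sts.windows.net/<tenant-guid>/",
#         token_store_enabled=True,
#     )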
class SiteCloneability(msrest.serialization.Model):
"""Represents whether or not an app is cloneable.
:param result: Result of the cloneability check. Possible values include: "Cloneable",
"PartiallyCloneable", "NotCloneable".
:type result: str or ~azure.mgmt.web.v2016_08_01.models.CloneAbilityResult
:param blocking_features: List of features enabled on app that prevent cloning.
:type blocking_features: list[~azure.mgmt.web.v2016_08_01.models.SiteCloneabilityCriterion]
:param unsupported_features: List of features enabled on the app that are non-blocking but
cannot be cloned. The app can still be cloned, but the features in this list will not be set
up on the cloned app.
:type unsupported_features: list[~azure.mgmt.web.v2016_08_01.models.SiteCloneabilityCriterion]
:param blocking_characteristics: List of blocking application characteristics.
:type blocking_characteristics:
list[~azure.mgmt.web.v2016_08_01.models.SiteCloneabilityCriterion]
"""
_attribute_map = {
'result': {'key': 'result', 'type': 'str'},
'blocking_features': {'key': 'blockingFeatures', 'type': '[SiteCloneabilityCriterion]'},
'unsupported_features': {'key': 'unsupportedFeatures', 'type': '[SiteCloneabilityCriterion]'},
'blocking_characteristics': {'key': 'blockingCharacteristics', 'type': '[SiteCloneabilityCriterion]'},
}
def __init__(
self,
*,
result: Optional[Union[str, "CloneAbilityResult"]] = None,
blocking_features: Optional[List["SiteCloneabilityCriterion"]] = None,
unsupported_features: Optional[List["SiteCloneabilityCriterion"]] = None,
blocking_characteristics: Optional[List["SiteCloneabilityCriterion"]] = None,
**kwargs
):
super(SiteCloneability, self).__init__(**kwargs)
self.result = result
self.blocking_features = blocking_features
self.unsupported_features = unsupported_features
self.blocking_characteristics = blocking_characteristics
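# Example (illustrative sketch): ``SiteCloneability`` is normally returned by the service
# rather than constructed by callers. Assuming ``cloneability`` came back from such a call,
# a typical check might look like this:
#
#     if cloneability.result == "Cloneable":
#         pass  # safe to clone as-is
#     elif cloneability.result == "PartiallyCloneable":
#         skipped = [c.name for c in (cloneability.unsupported_features or [])]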
class SiteCloneabilityCriterion(msrest.serialization.Model):
"""An app cloneability criterion.
:param name: Name of criterion.
:type name: str
:param description: Description of criterion.
:type description: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
**kwargs
):
super(SiteCloneabilityCriterion, self).__init__(**kwargs)
self.name = name
self.description = description
class SiteConfig(msrest.serialization.Model):
"""Configuration of an App Service app.
Variables are only populated by the server, and will be ignored when sending a request.
:param number_of_workers: Number of workers.
:type number_of_workers: int
:param default_documents: Default documents.
:type default_documents: list[str]
:param net_framework_version: .NET Framework version.
:type net_framework_version: str
:param php_version: Version of PHP.
:type php_version: str
:param python_version: Version of Python.
:type python_version: str
:param node_version: Version of Node.js.
:type node_version: str
:param linux_fx_version: Linux App Framework and version.
:type linux_fx_version: str
:param request_tracing_enabled: :code:`<code>true</code>` if request tracing is enabled;
otherwise, :code:`<code>false</code>`.
:type request_tracing_enabled: bool
:param request_tracing_expiration_time: Request tracing expiration time.
:type request_tracing_expiration_time: ~datetime.datetime
:param remote_debugging_enabled: :code:`<code>true</code>` if remote debugging is enabled;
otherwise, :code:`<code>false</code>`.
:type remote_debugging_enabled: bool
:param remote_debugging_version: Remote debugging version.
:type remote_debugging_version: str
:param http_logging_enabled: :code:`<code>true</code>` if HTTP logging is enabled; otherwise,
:code:`<code>false</code>`.
:type http_logging_enabled: bool
:param logs_directory_size_limit: HTTP logs directory size limit.
:type logs_directory_size_limit: int
:param detailed_error_logging_enabled: :code:`<code>true</code>` if detailed error logging is
enabled; otherwise, :code:`<code>false</code>`.
:type detailed_error_logging_enabled: bool
:param publishing_username: Publishing user name.
:type publishing_username: str
:param app_settings: Application settings.
:type app_settings: list[~azure.mgmt.web.v2016_08_01.models.NameValuePair]
:param connection_strings: Connection strings.
:type connection_strings: list[~azure.mgmt.web.v2016_08_01.models.ConnStringInfo]
:ivar machine_key: Site MachineKey.
:vartype machine_key: ~azure.mgmt.web.v2016_08_01.models.SiteMachineKey
:param handler_mappings: Handler mappings.
:type handler_mappings: list[~azure.mgmt.web.v2016_08_01.models.HandlerMapping]
:param document_root: Document root.
:type document_root: str
:param scm_type: SCM type. Possible values include: "None", "Dropbox", "Tfs", "LocalGit",
"GitHub", "CodePlexGit", "CodePlexHg", "BitbucketGit", "BitbucketHg", "ExternalGit",
"ExternalHg", "OneDrive", "VSO".
:type scm_type: str or ~azure.mgmt.web.v2016_08_01.models.ScmType
:param use32_bit_worker_process: :code:`<code>true</code>` to use 32-bit worker process;
otherwise, :code:`<code>false</code>`.
:type use32_bit_worker_process: bool
:param web_sockets_enabled: :code:`<code>true</code>` if WebSocket is enabled; otherwise,
:code:`<code>false</code>`.
:type web_sockets_enabled: bool
:param always_on: :code:`<code>true</code>` if Always On is enabled; otherwise,
:code:`<code>false</code>`.
:type always_on: bool
:param java_version: Java version.
:type java_version: str
:param java_container: Java container.
:type java_container: str
:param java_container_version: Java container version.
:type java_container_version: str
:param app_command_line: App command line to launch.
:type app_command_line: str
:param managed_pipeline_mode: Managed pipeline mode. Possible values include: "Integrated",
"Classic".
:type managed_pipeline_mode: str or ~azure.mgmt.web.v2016_08_01.models.ManagedPipelineMode
:param virtual_applications: Virtual applications.
:type virtual_applications: list[~azure.mgmt.web.v2016_08_01.models.VirtualApplication]
:param load_balancing: Site load balancing. Possible values include: "WeightedRoundRobin",
"LeastRequests", "LeastResponseTime", "WeightedTotalTraffic", "RequestHash".
:type load_balancing: str or ~azure.mgmt.web.v2016_08_01.models.SiteLoadBalancing
:param experiments: This is a workaround for polymorphic types.
:type experiments: ~azure.mgmt.web.v2016_08_01.models.Experiments
:param limits: Site limits.
:type limits: ~azure.mgmt.web.v2016_08_01.models.SiteLimits
:param auto_heal_enabled: :code:`<code>true</code>` if Auto Heal is enabled; otherwise,
:code:`<code>false</code>`.
:type auto_heal_enabled: bool
:param auto_heal_rules: Auto Heal rules.
:type auto_heal_rules: ~azure.mgmt.web.v2016_08_01.models.AutoHealRules
:param tracing_options: Tracing options.
:type tracing_options: str
:param vnet_name: Virtual Network name.
:type vnet_name: str
:param cors: Cross-Origin Resource Sharing (CORS) settings.
:type cors: ~azure.mgmt.web.v2016_08_01.models.CorsSettings
:param push: Push endpoint settings.
:type push: ~azure.mgmt.web.v2016_08_01.models.PushSettings
:param api_definition: Information about the formal API definition for the app.
:type api_definition: ~azure.mgmt.web.v2016_08_01.models.ApiDefinitionInfo
:param auto_swap_slot_name: Auto-swap slot name.
:type auto_swap_slot_name: str
:param local_my_sql_enabled: :code:`<code>true</code>` to enable local MySQL; otherwise,
:code:`<code>false</code>`.
:type local_my_sql_enabled: bool
:param ip_security_restrictions: IP security restrictions.
:type ip_security_restrictions: list[~azure.mgmt.web.v2016_08_01.models.IpSecurityRestriction]
:param http20_enabled: Http20Enabled: configures a web site to allow clients to connect over
HTTP/2.0.
:type http20_enabled: bool
:param min_tls_version: MinTlsVersion: configures the minimum version of TLS required for SSL
requests. Possible values include: "1.0", "1.1", "1.2".
:type min_tls_version: str or ~azure.mgmt.web.v2016_08_01.models.SupportedTlsVersions
"""
_validation = {
'machine_key': {'readonly': True},
}
_attribute_map = {
'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'},
'default_documents': {'key': 'defaultDocuments', 'type': '[str]'},
'net_framework_version': {'key': 'netFrameworkVersion', 'type': 'str'},
'php_version': {'key': 'phpVersion', 'type': 'str'},
'python_version': {'key': 'pythonVersion', 'type': 'str'},
'node_version': {'key': 'nodeVersion', 'type': 'str'},
'linux_fx_version': {'key': 'linuxFxVersion', 'type': 'str'},
'request_tracing_enabled': {'key': 'requestTracingEnabled', 'type': 'bool'},
'request_tracing_expiration_time': {'key': 'requestTracingExpirationTime', 'type': 'iso-8601'},
'remote_debugging_enabled': {'key': 'remoteDebuggingEnabled', 'type': 'bool'},
'remote_debugging_version': {'key': 'remoteDebuggingVersion', 'type': 'str'},
'http_logging_enabled': {'key': 'httpLoggingEnabled', 'type': 'bool'},
'logs_directory_size_limit': {'key': 'logsDirectorySizeLimit', 'type': 'int'},
'detailed_error_logging_enabled': {'key': 'detailedErrorLoggingEnabled', 'type': 'bool'},
'publishing_username': {'key': 'publishingUsername', 'type': 'str'},
'app_settings': {'key': 'appSettings', 'type': '[NameValuePair]'},
'connection_strings': {'key': 'connectionStrings', 'type': '[ConnStringInfo]'},
'machine_key': {'key': 'machineKey', 'type': 'SiteMachineKey'},
'handler_mappings': {'key': 'handlerMappings', 'type': '[HandlerMapping]'},
'document_root': {'key': 'documentRoot', 'type': 'str'},
'scm_type': {'key': 'scmType', 'type': 'str'},
'use32_bit_worker_process': {'key': 'use32BitWorkerProcess', 'type': 'bool'},
'web_sockets_enabled': {'key': 'webSocketsEnabled', 'type': 'bool'},
'always_on': {'key': 'alwaysOn', 'type': 'bool'},
'java_version': {'key': 'javaVersion', 'type': 'str'},
'java_container': {'key': 'javaContainer', 'type': 'str'},
'java_container_version': {'key': 'javaContainerVersion', 'type': 'str'},
'app_command_line': {'key': 'appCommandLine', 'type': 'str'},
'managed_pipeline_mode': {'key': 'managedPipelineMode', 'type': 'str'},
'virtual_applications': {'key': 'virtualApplications', 'type': '[VirtualApplication]'},
'load_balancing': {'key': 'loadBalancing', 'type': 'str'},
'experiments': {'key': 'experiments', 'type': 'Experiments'},
'limits': {'key': 'limits', 'type': 'SiteLimits'},
'auto_heal_enabled': {'key': 'autoHealEnabled', 'type': 'bool'},
'auto_heal_rules': {'key': 'autoHealRules', 'type': 'AutoHealRules'},
'tracing_options': {'key': 'tracingOptions', 'type': 'str'},
'vnet_name': {'key': 'vnetName', 'type': 'str'},
'cors': {'key': 'cors', 'type': 'CorsSettings'},
'push': {'key': 'push', 'type': 'PushSettings'},
'api_definition': {'key': 'apiDefinition', 'type': 'ApiDefinitionInfo'},
'auto_swap_slot_name': {'key': 'autoSwapSlotName', 'type': 'str'},
'local_my_sql_enabled': {'key': 'localMySqlEnabled', 'type': 'bool'},
'ip_security_restrictions': {'key': 'ipSecurityRestrictions', 'type': '[IpSecurityRestriction]'},
'http20_enabled': {'key': 'http20Enabled', 'type': 'bool'},
'min_tls_version': {'key': 'minTlsVersion', 'type': 'str'},
}
def __init__(
self,
*,
number_of_workers: Optional[int] = None,
default_documents: Optional[List[str]] = None,
net_framework_version: Optional[str] = "v4.6",
php_version: Optional[str] = None,
python_version: Optional[str] = None,
node_version: Optional[str] = None,
linux_fx_version: Optional[str] = None,
request_tracing_enabled: Optional[bool] = None,
request_tracing_expiration_time: Optional[datetime.datetime] = None,
remote_debugging_enabled: Optional[bool] = None,
remote_debugging_version: Optional[str] = None,
http_logging_enabled: Optional[bool] = None,
logs_directory_size_limit: Optional[int] = None,
detailed_error_logging_enabled: Optional[bool] = None,
publishing_username: Optional[str] = None,
app_settings: Optional[List["NameValuePair"]] = None,
connection_strings: Optional[List["ConnStringInfo"]] = None,
handler_mappings: Optional[List["HandlerMapping"]] = None,
document_root: Optional[str] = None,
scm_type: Optional[Union[str, "ScmType"]] = None,
use32_bit_worker_process: Optional[bool] = None,
web_sockets_enabled: Optional[bool] = None,
always_on: Optional[bool] = None,
java_version: Optional[str] = None,
java_container: Optional[str] = None,
java_container_version: Optional[str] = None,
app_command_line: Optional[str] = None,
managed_pipeline_mode: Optional[Union[str, "ManagedPipelineMode"]] = None,
virtual_applications: Optional[List["VirtualApplication"]] = None,
load_balancing: Optional[Union[str, "SiteLoadBalancing"]] = None,
experiments: Optional["Experiments"] = None,
limits: Optional["SiteLimits"] = None,
auto_heal_enabled: Optional[bool] = None,
auto_heal_rules: Optional["AutoHealRules"] = None,
tracing_options: Optional[str] = None,
vnet_name: Optional[str] = None,
cors: Optional["CorsSettings"] = None,
push: Optional["PushSettings"] = None,
api_definition: Optional["ApiDefinitionInfo"] = None,
auto_swap_slot_name: Optional[str] = None,
local_my_sql_enabled: Optional[bool] = False,
ip_security_restrictions: Optional[List["IpSecurityRestriction"]] = None,
http20_enabled: Optional[bool] = True,
min_tls_version: Optional[Union[str, "SupportedTlsVersions"]] = None,
**kwargs
):
super(SiteConfig, self).__init__(**kwargs)
self.number_of_workers = number_of_workers
self.default_documents = default_documents
self.net_framework_version = net_framework_version
self.php_version = php_version
self.python_version = python_version
self.node_version = node_version
self.linux_fx_version = linux_fx_version
self.request_tracing_enabled = request_tracing_enabled
self.request_tracing_expiration_time = request_tracing_expiration_time
self.remote_debugging_enabled = remote_debugging_enabled
self.remote_debugging_version = remote_debugging_version
self.http_logging_enabled = http_logging_enabled
self.logs_directory_size_limit = logs_directory_size_limit
self.detailed_error_logging_enabled = detailed_error_logging_enabled
self.publishing_username = publishing_username
self.app_settings = app_settings
self.connection_strings = connection_strings
self.machine_key = None
self.handler_mappings = handler_mappings
self.document_root = document_root
self.scm_type = scm_type
self.use32_bit_worker_process = use32_bit_worker_process
self.web_sockets_enabled = web_sockets_enabled
self.always_on = always_on
self.java_version = java_version
self.java_container = java_container
self.java_container_version = java_container_version
self.app_command_line = app_command_line
self.managed_pipeline_mode = managed_pipeline_mode
self.virtual_applications = virtual_applications
self.load_balancing = load_balancing
self.experiments = experiments
self.limits = limits
self.auto_heal_enabled = auto_heal_enabled
self.auto_heal_rules = auto_heal_rules
self.tracing_options = tracing_options
self.vnet_name = vnet_name
self.cors = cors
self.push = push
self.api_definition = api_definition
self.auto_swap_slot_name = auto_swap_slot_name
self.local_my_sql_enabled = local_my_sql_enabled
self.ip_security_restrictions = ip_security_restrictions
self.http20_enabled = http20_enabled
self.min_tls_version = min_tls_version
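# Example (illustrative sketch): a ``SiteConfig`` for a small Python app with a few common
# settings. ``NameValuePair`` and ``CorsSettings`` are other models in this module; all
# concrete values shown are hypothetical.
#
#     config = SiteConfig(
#         number_of_workers=2,
#         python_version="3.4",
#         always_on=True,
#         min_tls_version="1.2",
#         app_settings=[NameValuePair(name="ENVIRONMENT", value="staging")],
#         cors=CorsSettings(allowed_origins=["https://contoso.example"]),
#     )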
class SiteConfigResource(ProxyOnlyResource):
"""Web app configuration ARM resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param number_of_workers: Number of workers.
:type number_of_workers: int
:param default_documents: Default documents.
:type default_documents: list[str]
:param net_framework_version: .NET Framework version.
:type net_framework_version: str
:param php_version: Version of PHP.
:type php_version: str
:param python_version: Version of Python.
:type python_version: str
:param node_version: Version of Node.js.
:type node_version: str
:param linux_fx_version: Linux App Framework and version.
:type linux_fx_version: str
:param request_tracing_enabled: :code:`<code>true</code>` if request tracing is enabled;
otherwise, :code:`<code>false</code>`.
:type request_tracing_enabled: bool
:param request_tracing_expiration_time: Request tracing expiration time.
:type request_tracing_expiration_time: ~datetime.datetime
:param remote_debugging_enabled: :code:`<code>true</code>` if remote debugging is enabled;
otherwise, :code:`<code>false</code>`.
:type remote_debugging_enabled: bool
:param remote_debugging_version: Remote debugging version.
:type remote_debugging_version: str
:param http_logging_enabled: :code:`<code>true</code>` if HTTP logging is enabled; otherwise,
:code:`<code>false</code>`.
:type http_logging_enabled: bool
:param logs_directory_size_limit: HTTP logs directory size limit.
:type logs_directory_size_limit: int
:param detailed_error_logging_enabled: :code:`<code>true</code>` if detailed error logging is
enabled; otherwise, :code:`<code>false</code>`.
:type detailed_error_logging_enabled: bool
:param publishing_username: Publishing user name.
:type publishing_username: str
:param app_settings: Application settings.
:type app_settings: list[~azure.mgmt.web.v2016_08_01.models.NameValuePair]
:param connection_strings: Connection strings.
:type connection_strings: list[~azure.mgmt.web.v2016_08_01.models.ConnStringInfo]
:ivar machine_key: Site MachineKey.
:vartype machine_key: ~azure.mgmt.web.v2016_08_01.models.SiteMachineKey
:param handler_mappings: Handler mappings.
:type handler_mappings: list[~azure.mgmt.web.v2016_08_01.models.HandlerMapping]
:param document_root: Document root.
:type document_root: str
:param scm_type: SCM type. Possible values include: "None", "Dropbox", "Tfs", "LocalGit",
"GitHub", "CodePlexGit", "CodePlexHg", "BitbucketGit", "BitbucketHg", "ExternalGit",
"ExternalHg", "OneDrive", "VSO".
:type scm_type: str or ~azure.mgmt.web.v2016_08_01.models.ScmType
:param use32_bit_worker_process: :code:`<code>true</code>` to use 32-bit worker process;
otherwise, :code:`<code>false</code>`.
:type use32_bit_worker_process: bool
:param web_sockets_enabled: :code:`<code>true</code>` if WebSocket is enabled; otherwise,
:code:`<code>false</code>`.
:type web_sockets_enabled: bool
:param always_on: :code:`<code>true</code>` if Always On is enabled; otherwise,
:code:`<code>false</code>`.
:type always_on: bool
:param java_version: Java version.
:type java_version: str
:param java_container: Java container.
:type java_container: str
:param java_container_version: Java container version.
:type java_container_version: str
:param app_command_line: App command line to launch.
:type app_command_line: str
:param managed_pipeline_mode: Managed pipeline mode. Possible values include: "Integrated",
"Classic".
:type managed_pipeline_mode: str or ~azure.mgmt.web.v2016_08_01.models.ManagedPipelineMode
:param virtual_applications: Virtual applications.
:type virtual_applications: list[~azure.mgmt.web.v2016_08_01.models.VirtualApplication]
:param load_balancing: Site load balancing. Possible values include: "WeightedRoundRobin",
"LeastRequests", "LeastResponseTime", "WeightedTotalTraffic", "RequestHash".
:type load_balancing: str or ~azure.mgmt.web.v2016_08_01.models.SiteLoadBalancing
:param experiments: This is a workaround for polymorphic types.
:type experiments: ~azure.mgmt.web.v2016_08_01.models.Experiments
:param limits: Site limits.
:type limits: ~azure.mgmt.web.v2016_08_01.models.SiteLimits
:param auto_heal_enabled: :code:`<code>true</code>` if Auto Heal is enabled; otherwise,
:code:`<code>false</code>`.
:type auto_heal_enabled: bool
:param auto_heal_rules: Auto Heal rules.
:type auto_heal_rules: ~azure.mgmt.web.v2016_08_01.models.AutoHealRules
:param tracing_options: Tracing options.
:type tracing_options: str
:param vnet_name: Virtual Network name.
:type vnet_name: str
:param cors: Cross-Origin Resource Sharing (CORS) settings.
:type cors: ~azure.mgmt.web.v2016_08_01.models.CorsSettings
:param push: Push endpoint settings.
:type push: ~azure.mgmt.web.v2016_08_01.models.PushSettings
:param api_definition: Information about the formal API definition for the app.
:type api_definition: ~azure.mgmt.web.v2016_08_01.models.ApiDefinitionInfo
:param auto_swap_slot_name: Auto-swap slot name.
:type auto_swap_slot_name: str
:param local_my_sql_enabled: :code:`<code>true</code>` to enable local MySQL; otherwise,
:code:`<code>false</code>`.
:type local_my_sql_enabled: bool
:param ip_security_restrictions: IP security restrictions.
:type ip_security_restrictions: list[~azure.mgmt.web.v2016_08_01.models.IpSecurityRestriction]
:param http20_enabled: Http20Enabled: configures a web site to allow clients to connect over
HTTP/2.0.
:type http20_enabled: bool
:param min_tls_version: MinTlsVersion: configures the minimum version of TLS required for SSL
requests. Possible values include: "1.0", "1.1", "1.2".
:type min_tls_version: str or ~azure.mgmt.web.v2016_08_01.models.SupportedTlsVersions
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'machine_key': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'number_of_workers': {'key': 'properties.numberOfWorkers', 'type': 'int'},
'default_documents': {'key': 'properties.defaultDocuments', 'type': '[str]'},
'net_framework_version': {'key': 'properties.netFrameworkVersion', 'type': 'str'},
'php_version': {'key': 'properties.phpVersion', 'type': 'str'},
'python_version': {'key': 'properties.pythonVersion', 'type': 'str'},
'node_version': {'key': 'properties.nodeVersion', 'type': 'str'},
'linux_fx_version': {'key': 'properties.linuxFxVersion', 'type': 'str'},
'request_tracing_enabled': {'key': 'properties.requestTracingEnabled', 'type': 'bool'},
'request_tracing_expiration_time': {'key': 'properties.requestTracingExpirationTime', 'type': 'iso-8601'},
'remote_debugging_enabled': {'key': 'properties.remoteDebuggingEnabled', 'type': 'bool'},
'remote_debugging_version': {'key': 'properties.remoteDebuggingVersion', 'type': 'str'},
'http_logging_enabled': {'key': 'properties.httpLoggingEnabled', 'type': 'bool'},
'logs_directory_size_limit': {'key': 'properties.logsDirectorySizeLimit', 'type': 'int'},
'detailed_error_logging_enabled': {'key': 'properties.detailedErrorLoggingEnabled', 'type': 'bool'},
'publishing_username': {'key': 'properties.publishingUsername', 'type': 'str'},
'app_settings': {'key': 'properties.appSettings', 'type': '[NameValuePair]'},
'connection_strings': {'key': 'properties.connectionStrings', 'type': '[ConnStringInfo]'},
'machine_key': {'key': 'properties.machineKey', 'type': 'SiteMachineKey'},
'handler_mappings': {'key': 'properties.handlerMappings', 'type': '[HandlerMapping]'},
'document_root': {'key': 'properties.documentRoot', 'type': 'str'},
'scm_type': {'key': 'properties.scmType', 'type': 'str'},
'use32_bit_worker_process': {'key': 'properties.use32BitWorkerProcess', 'type': 'bool'},
'web_sockets_enabled': {'key': 'properties.webSocketsEnabled', 'type': 'bool'},
'always_on': {'key': 'properties.alwaysOn', 'type': 'bool'},
'java_version': {'key': 'properties.javaVersion', 'type': 'str'},
'java_container': {'key': 'properties.javaContainer', 'type': 'str'},
'java_container_version': {'key': 'properties.javaContainerVersion', 'type': 'str'},
'app_command_line': {'key': 'properties.appCommandLine', 'type': 'str'},
'managed_pipeline_mode': {'key': 'properties.managedPipelineMode', 'type': 'str'},
'virtual_applications': {'key': 'properties.virtualApplications', 'type': '[VirtualApplication]'},
'load_balancing': {'key': 'properties.loadBalancing', 'type': 'str'},
'experiments': {'key': 'properties.experiments', 'type': 'Experiments'},
'limits': {'key': 'properties.limits', 'type': 'SiteLimits'},
'auto_heal_enabled': {'key': 'properties.autoHealEnabled', 'type': 'bool'},
'auto_heal_rules': {'key': 'properties.autoHealRules', 'type': 'AutoHealRules'},
'tracing_options': {'key': 'properties.tracingOptions', 'type': 'str'},
'vnet_name': {'key': 'properties.vnetName', 'type': 'str'},
'cors': {'key': 'properties.cors', 'type': 'CorsSettings'},
'push': {'key': 'properties.push', 'type': 'PushSettings'},
'api_definition': {'key': 'properties.apiDefinition', 'type': 'ApiDefinitionInfo'},
'auto_swap_slot_name': {'key': 'properties.autoSwapSlotName', 'type': 'str'},
'local_my_sql_enabled': {'key': 'properties.localMySqlEnabled', 'type': 'bool'},
'ip_security_restrictions': {'key': 'properties.ipSecurityRestrictions', 'type': '[IpSecurityRestriction]'},
'http20_enabled': {'key': 'properties.http20Enabled', 'type': 'bool'},
'min_tls_version': {'key': 'properties.minTlsVersion', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
number_of_workers: Optional[int] = None,
default_documents: Optional[List[str]] = None,
net_framework_version: Optional[str] = "v4.6",
php_version: Optional[str] = None,
python_version: Optional[str] = None,
node_version: Optional[str] = None,
linux_fx_version: Optional[str] = None,
request_tracing_enabled: Optional[bool] = None,
request_tracing_expiration_time: Optional[datetime.datetime] = None,
remote_debugging_enabled: Optional[bool] = None,
remote_debugging_version: Optional[str] = None,
http_logging_enabled: Optional[bool] = None,
logs_directory_size_limit: Optional[int] = None,
detailed_error_logging_enabled: Optional[bool] = None,
publishing_username: Optional[str] = None,
app_settings: Optional[List["NameValuePair"]] = None,
connection_strings: Optional[List["ConnStringInfo"]] = None,
handler_mappings: Optional[List["HandlerMapping"]] = None,
document_root: Optional[str] = None,
scm_type: Optional[Union[str, "ScmType"]] = None,
use32_bit_worker_process: Optional[bool] = None,
web_sockets_enabled: Optional[bool] = None,
always_on: Optional[bool] = None,
java_version: Optional[str] = None,
java_container: Optional[str] = None,
java_container_version: Optional[str] = None,
app_command_line: Optional[str] = None,
managed_pipeline_mode: Optional[Union[str, "ManagedPipelineMode"]] = None,
virtual_applications: Optional[List["VirtualApplication"]] = None,
load_balancing: Optional[Union[str, "SiteLoadBalancing"]] = None,
experiments: Optional["Experiments"] = None,
limits: Optional["SiteLimits"] = None,
auto_heal_enabled: Optional[bool] = None,
auto_heal_rules: Optional["AutoHealRules"] = None,
tracing_options: Optional[str] = None,
vnet_name: Optional[str] = None,
cors: Optional["CorsSettings"] = None,
push: Optional["PushSettings"] = None,
api_definition: Optional["ApiDefinitionInfo"] = None,
auto_swap_slot_name: Optional[str] = None,
local_my_sql_enabled: Optional[bool] = False,
ip_security_restrictions: Optional[List["IpSecurityRestriction"]] = None,
http20_enabled: Optional[bool] = True,
min_tls_version: Optional[Union[str, "SupportedTlsVersions"]] = None,
**kwargs
):
super(SiteConfigResource, self).__init__(kind=kind, **kwargs)
self.number_of_workers = number_of_workers
self.default_documents = default_documents
self.net_framework_version = net_framework_version
self.php_version = php_version
self.python_version = python_version
self.node_version = node_version
self.linux_fx_version = linux_fx_version
self.request_tracing_enabled = request_tracing_enabled
self.request_tracing_expiration_time = request_tracing_expiration_time
self.remote_debugging_enabled = remote_debugging_enabled
self.remote_debugging_version = remote_debugging_version
self.http_logging_enabled = http_logging_enabled
self.logs_directory_size_limit = logs_directory_size_limit
self.detailed_error_logging_enabled = detailed_error_logging_enabled
self.publishing_username = publishing_username
self.app_settings = app_settings
self.connection_strings = connection_strings
self.machine_key = None
self.handler_mappings = handler_mappings
self.document_root = document_root
self.scm_type = scm_type
self.use32_bit_worker_process = use32_bit_worker_process
self.web_sockets_enabled = web_sockets_enabled
self.always_on = always_on
self.java_version = java_version
self.java_container = java_container
self.java_container_version = java_container_version
self.app_command_line = app_command_line
self.managed_pipeline_mode = managed_pipeline_mode
self.virtual_applications = virtual_applications
self.load_balancing = load_balancing
self.experiments = experiments
self.limits = limits
self.auto_heal_enabled = auto_heal_enabled
self.auto_heal_rules = auto_heal_rules
self.tracing_options = tracing_options
self.vnet_name = vnet_name
self.cors = cors
self.push = push
self.api_definition = api_definition
self.auto_swap_slot_name = auto_swap_slot_name
self.local_my_sql_enabled = local_my_sql_enabled
self.ip_security_restrictions = ip_security_restrictions
self.http20_enabled = http20_enabled
self.min_tls_version = min_tls_version
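# Example (illustrative sketch): ``SiteConfigResource`` carries the same settings as
# ``SiteConfig`` but wrapped as an ARM proxy resource, which is the shape the per-app
# configuration operations expect. A minimal update payload might look like this:
#
#     config_resource = SiteConfigResource(
#         always_on=True,
#         use32_bit_worker_process=False,
#         min_tls_version="1.2",
#     )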
class SiteConfigResourceCollection(msrest.serialization.Model):
"""Collection of site configurations.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SiteConfigResource]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SiteConfigResource]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SiteConfigResource"],
**kwargs
):
super(SiteConfigResourceCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SiteConfigurationSnapshotInfo(ProxyOnlyResource):
"""A snapshot of a web app configuration.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar time: The time the snapshot was taken.
:vartype time: ~datetime.datetime
:ivar id_properties_id: The id of the snapshot.
:vartype id_properties_id: int
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'time': {'readonly': True},
'id_properties_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'time': {'key': 'properties.time', 'type': 'iso-8601'},
'id_properties_id': {'key': 'properties.id', 'type': 'int'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(SiteConfigurationSnapshotInfo, self).__init__(kind=kind, **kwargs)
self.time = None
self.id_properties_id = None
class SiteConfigurationSnapshotInfoCollection(msrest.serialization.Model):
"""Collection of metadata for the app configuration snapshots that can be restored.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SiteConfigurationSnapshotInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SiteConfigurationSnapshotInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SiteConfigurationSnapshotInfo"],
**kwargs
):
super(SiteConfigurationSnapshotInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SiteExtensionInfo(ProxyOnlyResource):
"""Site Extension Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: Site extension ID.
:type id_properties_id: str
:param title: Site extension title.
:type title: str
:param type_properties_type: Site extension type. Possible values include: "Gallery",
"WebRoot".
:type type_properties_type: str or ~azure.mgmt.web.v2016_08_01.models.SiteExtensionType
:param summary: Summary description.
:type summary: str
:param description: Detailed description.
:type description: str
:param version: Version information.
:type version: str
:param extension_url: Extension URL.
:type extension_url: str
:param project_url: Project URL.
:type project_url: str
:param icon_url: Icon URL.
:type icon_url: str
:param license_url: License URL.
:type license_url: str
:param feed_url: Feed URL.
:type feed_url: str
:param authors: List of authors.
:type authors: list[str]
:param installation_args: Installer command line parameters.
:type installation_args: str
:param published_date_time: Published timestamp.
:type published_date_time: ~datetime.datetime
:param download_count: Count of downloads.
:type download_count: int
:param local_is_latest_version: :code:`<code>true</code>` if the local version is the latest
version; :code:`<code>false</code>` otherwise.
:type local_is_latest_version: bool
:param local_path: Local path.
:type local_path: str
:param installed_date_time: Installed timestamp.
:type installed_date_time: ~datetime.datetime
:param provisioning_state: Provisioning state.
:type provisioning_state: str
:param comment: Site Extension comment.
:type comment: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
'title': {'key': 'properties.title', 'type': 'str'},
'type_properties_type': {'key': 'properties.type', 'type': 'str'},
'summary': {'key': 'properties.summary', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'version': {'key': 'properties.version', 'type': 'str'},
'extension_url': {'key': 'properties.extensionUrl', 'type': 'str'},
'project_url': {'key': 'properties.projectUrl', 'type': 'str'},
'icon_url': {'key': 'properties.iconUrl', 'type': 'str'},
'license_url': {'key': 'properties.licenseUrl', 'type': 'str'},
'feed_url': {'key': 'properties.feedUrl', 'type': 'str'},
'authors': {'key': 'properties.authors', 'type': '[str]'},
'installation_args': {'key': 'properties.installationArgs', 'type': 'str'},
'published_date_time': {'key': 'properties.publishedDateTime', 'type': 'iso-8601'},
'download_count': {'key': 'properties.downloadCount', 'type': 'int'},
'local_is_latest_version': {'key': 'properties.localIsLatestVersion', 'type': 'bool'},
'local_path': {'key': 'properties.localPath', 'type': 'str'},
'installed_date_time': {'key': 'properties.installedDateTime', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'comment': {'key': 'properties.comment', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[str] = None,
title: Optional[str] = None,
type_properties_type: Optional[Union[str, "SiteExtensionType"]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
version: Optional[str] = None,
extension_url: Optional[str] = None,
project_url: Optional[str] = None,
icon_url: Optional[str] = None,
license_url: Optional[str] = None,
feed_url: Optional[str] = None,
authors: Optional[List[str]] = None,
installation_args: Optional[str] = None,
published_date_time: Optional[datetime.datetime] = None,
download_count: Optional[int] = None,
local_is_latest_version: Optional[bool] = None,
local_path: Optional[str] = None,
installed_date_time: Optional[datetime.datetime] = None,
provisioning_state: Optional[str] = None,
comment: Optional[str] = None,
**kwargs
):
super(SiteExtensionInfo, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.title = title
self.type_properties_type = type_properties_type
self.summary = summary
self.description = description
self.version = version
self.extension_url = extension_url
self.project_url = project_url
self.icon_url = icon_url
self.license_url = license_url
self.feed_url = feed_url
self.authors = authors
self.installation_args = installation_args
self.published_date_time = published_date_time
self.download_count = download_count
self.local_is_latest_version = local_is_latest_version
self.local_path = local_path
self.installed_date_time = installed_date_time
self.provisioning_state = provisioning_state
self.comment = comment
class SiteExtensionInfoCollection(msrest.serialization.Model):
"""Collection of Kudu site extension information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SiteExtensionInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SiteExtensionInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SiteExtensionInfo"],
**kwargs
):
super(SiteExtensionInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SiteInstance(ProxyOnlyResource):
"""Instance of an app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar name_properties_name: Name of instance.
:vartype name_properties_name: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(SiteInstance, self).__init__(kind=kind, **kwargs)
self.name_properties_name = None
class SiteLimits(msrest.serialization.Model):
"""Metric limits set on an app.
:param max_percentage_cpu: Maximum allowed CPU usage percentage.
:type max_percentage_cpu: float
:param max_memory_in_mb: Maximum allowed memory usage in MB.
:type max_memory_in_mb: long
:param max_disk_size_in_mb: Maximum allowed disk size usage in MB.
:type max_disk_size_in_mb: long
"""
_attribute_map = {
'max_percentage_cpu': {'key': 'maxPercentageCpu', 'type': 'float'},
'max_memory_in_mb': {'key': 'maxMemoryInMb', 'type': 'long'},
'max_disk_size_in_mb': {'key': 'maxDiskSizeInMb', 'type': 'long'},
}
def __init__(
self,
*,
max_percentage_cpu: Optional[float] = None,
max_memory_in_mb: Optional[int] = None,
max_disk_size_in_mb: Optional[int] = None,
**kwargs
):
super(SiteLimits, self).__init__(**kwargs)
self.max_percentage_cpu = max_percentage_cpu
self.max_memory_in_mb = max_memory_in_mb
self.max_disk_size_in_mb = max_disk_size_in_mb
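# Example (illustrative sketch): limits are attached to an app through ``SiteConfig.limits``;
# the figures below are hypothetical.
#
#     limits = SiteLimits(
#         max_percentage_cpu=80.0,
#         max_memory_in_mb=1024,
#         max_disk_size_in_mb=2048,
#     )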
class SiteLogsConfig(ProxyOnlyResource):
"""Configuration of App Service site logs.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param application_logs: Application logs configuration.
:type application_logs: ~azure.mgmt.web.v2016_08_01.models.ApplicationLogsConfig
:param http_logs: HTTP logs configuration.
:type http_logs: ~azure.mgmt.web.v2016_08_01.models.HttpLogsConfig
:param failed_requests_tracing: Failed requests tracing configuration.
:type failed_requests_tracing: ~azure.mgmt.web.v2016_08_01.models.EnabledConfig
:param detailed_error_messages: Detailed error messages configuration.
:type detailed_error_messages: ~azure.mgmt.web.v2016_08_01.models.EnabledConfig
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'application_logs': {'key': 'properties.applicationLogs', 'type': 'ApplicationLogsConfig'},
'http_logs': {'key': 'properties.httpLogs', 'type': 'HttpLogsConfig'},
'failed_requests_tracing': {'key': 'properties.failedRequestsTracing', 'type': 'EnabledConfig'},
'detailed_error_messages': {'key': 'properties.detailedErrorMessages', 'type': 'EnabledConfig'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
application_logs: Optional["ApplicationLogsConfig"] = None,
http_logs: Optional["HttpLogsConfig"] = None,
failed_requests_tracing: Optional["EnabledConfig"] = None,
detailed_error_messages: Optional["EnabledConfig"] = None,
**kwargs
):
super(SiteLogsConfig, self).__init__(kind=kind, **kwargs)
self.application_logs = application_logs
self.http_logs = http_logs
self.failed_requests_tracing = failed_requests_tracing
self.detailed_error_messages = detailed_error_messages
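# Illustrative sketch (not part of the generated SDK): a minimal SiteLogsConfig
# that only toggles the EnabledConfig-based settings. The nested application/HTTP
# log models are left out to keep the example short; EnabledConfig is assumed to
# be the model of that name defined earlier in this module.
def _example_site_logs_config() -> "SiteLogsConfig":
    return SiteLogsConfig(
        failed_requests_tracing=EnabledConfig(enabled=True),
        detailed_error_messages=EnabledConfig(enabled=False),
    )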
class SiteMachineKey(msrest.serialization.Model):
"""MachineKey of an app.
:param validation: MachineKey validation.
:type validation: str
:param validation_key: Validation key.
:type validation_key: str
:param decryption: Algorithm used for decryption.
:type decryption: str
:param decryption_key: Decryption key.
:type decryption_key: str
"""
_attribute_map = {
'validation': {'key': 'validation', 'type': 'str'},
'validation_key': {'key': 'validationKey', 'type': 'str'},
'decryption': {'key': 'decryption', 'type': 'str'},
'decryption_key': {'key': 'decryptionKey', 'type': 'str'},
}
def __init__(
self,
*,
validation: Optional[str] = None,
validation_key: Optional[str] = None,
decryption: Optional[str] = None,
decryption_key: Optional[str] = None,
**kwargs
):
super(SiteMachineKey, self).__init__(**kwargs)
self.validation = validation
self.validation_key = validation_key
self.decryption = decryption
self.decryption_key = decryption_key
class SitePatchResource(ProxyOnlyResource):
"""ARM resource for a site.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar state: Current state of the app.
:vartype state: str
:ivar host_names: Hostnames associated with the app.
:vartype host_names: list[str]
:ivar repository_site_name: Name of the repository site.
:vartype repository_site_name: str
:ivar usage_state: State indicating whether the app has exceeded its quota usage. Read-only.
Possible values include: "Normal", "Exceeded".
:vartype usage_state: str or ~azure.mgmt.web.v2016_08_01.models.UsageState
:param enabled: :code:`<code>true</code>` if the app is enabled; otherwise,
:code:`<code>false</code>`. Setting this value to false disables the app (takes the app
offline).
:type enabled: bool
    :ivar enabled_host_names: Enabled hostnames for the app. Hostnames need to be assigned (see
     HostNames) AND enabled. Otherwise, the app is not served on those hostnames.
:vartype enabled_host_names: list[str]
:ivar availability_state: Management information availability state for the app. Possible
values include: "Normal", "Limited", "DisasterRecoveryMode".
:vartype availability_state: str or ~azure.mgmt.web.v2016_08_01.models.SiteAvailabilityState
:param host_name_ssl_states: Hostname SSL states are used to manage the SSL bindings for app's
hostnames.
:type host_name_ssl_states: list[~azure.mgmt.web.v2016_08_01.models.HostNameSslState]
:param server_farm_id: Resource ID of the associated App Service plan, formatted as:
"/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
:type server_farm_id: str
:param reserved: :code:`<code>true</code>` if reserved; otherwise, :code:`<code>false</code>`.
:type reserved: bool
:ivar last_modified_time_utc: Last time the app was modified, in UTC. Read-only.
:vartype last_modified_time_utc: ~datetime.datetime
:param site_config: Configuration of the app.
:type site_config: ~azure.mgmt.web.v2016_08_01.models.SiteConfig
:ivar traffic_manager_host_names: Azure Traffic Manager hostnames associated with the app.
Read-only.
:vartype traffic_manager_host_names: list[str]
:param scm_site_also_stopped: :code:`<code>true</code>` to stop SCM (KUDU) site when the app is
stopped; otherwise, :code:`<code>false</code>`. The default is :code:`<code>false</code>`.
:type scm_site_also_stopped: bool
:ivar target_swap_slot: Specifies which deployment slot this app will swap into. Read-only.
:vartype target_swap_slot: str
:param hosting_environment_profile: App Service Environment to use for the app.
:type hosting_environment_profile: ~azure.mgmt.web.v2016_08_01.models.HostingEnvironmentProfile
:param client_affinity_enabled: :code:`<code>true</code>` to enable client affinity;
:code:`<code>false</code>` to stop sending session affinity cookies, which route client
requests in the same session to the same instance. Default is :code:`<code>true</code>`.
:type client_affinity_enabled: bool
:param client_cert_enabled: :code:`<code>true</code>` to enable client certificate
authentication (TLS mutual authentication); otherwise, :code:`<code>false</code>`. Default is
:code:`<code>false</code>`.
:type client_cert_enabled: bool
:param host_names_disabled: :code:`<code>true</code>` to disable the public hostnames of the
app; otherwise, :code:`<code>false</code>`.
If :code:`<code>true</code>`, the app is only accessible via API management process.
:type host_names_disabled: bool
:ivar outbound_ip_addresses: List of IP addresses that the app uses for outbound connections
(e.g. database access). Includes VIPs from tenants that site can be hosted with current
settings. Read-only.
:vartype outbound_ip_addresses: str
:ivar possible_outbound_ip_addresses: List of IP addresses that the app uses for outbound
connections (e.g. database access). Includes VIPs from all tenants. Read-only.
:vartype possible_outbound_ip_addresses: str
:param container_size: Size of the function container.
:type container_size: int
:param daily_memory_time_quota: Maximum allowed daily memory-time quota (applicable on dynamic
apps only).
:type daily_memory_time_quota: int
:ivar suspended_till: App suspended till in case memory-time quota is exceeded.
:vartype suspended_till: ~datetime.datetime
:ivar max_number_of_workers: Maximum number of workers.
This only applies to Functions container.
:vartype max_number_of_workers: int
:param cloning_info: If specified during app creation, the app is cloned from a source app.
:type cloning_info: ~azure.mgmt.web.v2016_08_01.models.CloningInfo
:param snapshot_info: If specified during app creation, the app is created from a previous
snapshot.
:type snapshot_info: ~azure.mgmt.web.v2016_08_01.models.SnapshotRecoveryRequest
:ivar resource_group: Name of the resource group the app belongs to. Read-only.
:vartype resource_group: str
:ivar is_default_container: :code:`<code>true</code>` if the app is a default container;
otherwise, :code:`<code>false</code>`.
:vartype is_default_container: bool
:ivar default_host_name: Default hostname of the app. Read-only.
:vartype default_host_name: str
:ivar slot_swap_status: Status of the last deployment slot swap operation.
:vartype slot_swap_status: ~azure.mgmt.web.v2016_08_01.models.SlotSwapStatus
    :param https_only: HttpsOnly: configures a web site to accept only https requests. Issues a
     redirect for http requests.
:type https_only: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'state': {'readonly': True},
'host_names': {'readonly': True},
'repository_site_name': {'readonly': True},
'usage_state': {'readonly': True},
'enabled_host_names': {'readonly': True},
'availability_state': {'readonly': True},
'last_modified_time_utc': {'readonly': True},
'traffic_manager_host_names': {'readonly': True},
'target_swap_slot': {'readonly': True},
'outbound_ip_addresses': {'readonly': True},
'possible_outbound_ip_addresses': {'readonly': True},
'suspended_till': {'readonly': True},
'max_number_of_workers': {'readonly': True},
'resource_group': {'readonly': True},
'is_default_container': {'readonly': True},
'default_host_name': {'readonly': True},
'slot_swap_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
'repository_site_name': {'key': 'properties.repositorySiteName', 'type': 'str'},
'usage_state': {'key': 'properties.usageState', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'enabled_host_names': {'key': 'properties.enabledHostNames', 'type': '[str]'},
'availability_state': {'key': 'properties.availabilityState', 'type': 'str'},
'host_name_ssl_states': {'key': 'properties.hostNameSslStates', 'type': '[HostNameSslState]'},
'server_farm_id': {'key': 'properties.serverFarmId', 'type': 'str'},
'reserved': {'key': 'properties.reserved', 'type': 'bool'},
'last_modified_time_utc': {'key': 'properties.lastModifiedTimeUtc', 'type': 'iso-8601'},
'site_config': {'key': 'properties.siteConfig', 'type': 'SiteConfig'},
'traffic_manager_host_names': {'key': 'properties.trafficManagerHostNames', 'type': '[str]'},
'scm_site_also_stopped': {'key': 'properties.scmSiteAlsoStopped', 'type': 'bool'},
'target_swap_slot': {'key': 'properties.targetSwapSlot', 'type': 'str'},
'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
'client_affinity_enabled': {'key': 'properties.clientAffinityEnabled', 'type': 'bool'},
'client_cert_enabled': {'key': 'properties.clientCertEnabled', 'type': 'bool'},
'host_names_disabled': {'key': 'properties.hostNamesDisabled', 'type': 'bool'},
'outbound_ip_addresses': {'key': 'properties.outboundIpAddresses', 'type': 'str'},
'possible_outbound_ip_addresses': {'key': 'properties.possibleOutboundIpAddresses', 'type': 'str'},
'container_size': {'key': 'properties.containerSize', 'type': 'int'},
'daily_memory_time_quota': {'key': 'properties.dailyMemoryTimeQuota', 'type': 'int'},
'suspended_till': {'key': 'properties.suspendedTill', 'type': 'iso-8601'},
'max_number_of_workers': {'key': 'properties.maxNumberOfWorkers', 'type': 'int'},
'cloning_info': {'key': 'properties.cloningInfo', 'type': 'CloningInfo'},
'snapshot_info': {'key': 'properties.snapshotInfo', 'type': 'SnapshotRecoveryRequest'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'is_default_container': {'key': 'properties.isDefaultContainer', 'type': 'bool'},
'default_host_name': {'key': 'properties.defaultHostName', 'type': 'str'},
'slot_swap_status': {'key': 'properties.slotSwapStatus', 'type': 'SlotSwapStatus'},
'https_only': {'key': 'properties.httpsOnly', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
enabled: Optional[bool] = None,
host_name_ssl_states: Optional[List["HostNameSslState"]] = None,
server_farm_id: Optional[str] = None,
reserved: Optional[bool] = False,
site_config: Optional["SiteConfig"] = None,
scm_site_also_stopped: Optional[bool] = False,
hosting_environment_profile: Optional["HostingEnvironmentProfile"] = None,
client_affinity_enabled: Optional[bool] = None,
client_cert_enabled: Optional[bool] = None,
host_names_disabled: Optional[bool] = None,
container_size: Optional[int] = None,
daily_memory_time_quota: Optional[int] = None,
cloning_info: Optional["CloningInfo"] = None,
snapshot_info: Optional["SnapshotRecoveryRequest"] = None,
https_only: Optional[bool] = None,
**kwargs
):
super(SitePatchResource, self).__init__(kind=kind, **kwargs)
self.state = None
self.host_names = None
self.repository_site_name = None
self.usage_state = None
self.enabled = enabled
self.enabled_host_names = None
self.availability_state = None
self.host_name_ssl_states = host_name_ssl_states
self.server_farm_id = server_farm_id
self.reserved = reserved
self.last_modified_time_utc = None
self.site_config = site_config
self.traffic_manager_host_names = None
self.scm_site_also_stopped = scm_site_also_stopped
self.target_swap_slot = None
self.hosting_environment_profile = hosting_environment_profile
self.client_affinity_enabled = client_affinity_enabled
self.client_cert_enabled = client_cert_enabled
self.host_names_disabled = host_names_disabled
self.outbound_ip_addresses = None
self.possible_outbound_ip_addresses = None
self.container_size = container_size
self.daily_memory_time_quota = daily_memory_time_quota
self.suspended_till = None
self.max_number_of_workers = None
self.cloning_info = cloning_info
self.snapshot_info = snapshot_info
self.resource_group = None
self.is_default_container = None
self.default_host_name = None
self.slot_swap_status = None
self.https_only = https_only
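# Illustrative sketch (not part of the generated SDK): a SitePatchResource as it
# might be sent to update a site in place. Only writable properties are populated;
# read-only attributes stay server-managed. The subscription, resource group and
# plan names below are placeholders, not real resources.
def _example_site_patch() -> "SitePatchResource":
    return SitePatchResource(
        enabled=True,
        https_only=True,
        client_affinity_enabled=False,
        server_farm_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000"
            "/resourceGroups/example-rg/providers/Microsoft.Web"
            "/serverfarms/example-plan"
        ),
    )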
class SitePhpErrorLogFlag(ProxyOnlyResource):
"""Used for getting PHP error logging flag.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param local_log_errors: Local log_errors setting.
:type local_log_errors: str
:param master_log_errors: Master log_errors setting.
:type master_log_errors: str
:param local_log_errors_max_length: Local log_errors_max_len setting.
:type local_log_errors_max_length: str
:param master_log_errors_max_length: Master log_errors_max_len setting.
:type master_log_errors_max_length: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'local_log_errors': {'key': 'properties.localLogErrors', 'type': 'str'},
'master_log_errors': {'key': 'properties.masterLogErrors', 'type': 'str'},
'local_log_errors_max_length': {'key': 'properties.localLogErrorsMaxLength', 'type': 'str'},
'master_log_errors_max_length': {'key': 'properties.masterLogErrorsMaxLength', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
local_log_errors: Optional[str] = None,
master_log_errors: Optional[str] = None,
local_log_errors_max_length: Optional[str] = None,
master_log_errors_max_length: Optional[str] = None,
**kwargs
):
super(SitePhpErrorLogFlag, self).__init__(kind=kind, **kwargs)
self.local_log_errors = local_log_errors
self.master_log_errors = master_log_errors
self.local_log_errors_max_length = local_log_errors_max_length
self.master_log_errors_max_length = master_log_errors_max_length
class SiteSourceControl(ProxyOnlyResource):
"""Source control configuration for an app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param repo_url: Repository or source control URL.
:type repo_url: str
:param branch: Name of branch to use for deployment.
:type branch: str
:param is_manual_integration: :code:`<code>true</code>` to limit to manual integration;
:code:`<code>false</code>` to enable continuous integration (which configures webhooks into
online repos like GitHub).
:type is_manual_integration: bool
:param deployment_rollback_enabled: :code:`<code>true</code>` to enable deployment rollback;
otherwise, :code:`<code>false</code>`.
:type deployment_rollback_enabled: bool
:param is_mercurial: :code:`<code>true</code>` for a Mercurial repository;
:code:`<code>false</code>` for a Git repository.
:type is_mercurial: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'repo_url': {'key': 'properties.repoUrl', 'type': 'str'},
'branch': {'key': 'properties.branch', 'type': 'str'},
'is_manual_integration': {'key': 'properties.isManualIntegration', 'type': 'bool'},
'deployment_rollback_enabled': {'key': 'properties.deploymentRollbackEnabled', 'type': 'bool'},
'is_mercurial': {'key': 'properties.isMercurial', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
repo_url: Optional[str] = None,
branch: Optional[str] = None,
is_manual_integration: Optional[bool] = None,
deployment_rollback_enabled: Optional[bool] = None,
is_mercurial: Optional[bool] = None,
**kwargs
):
super(SiteSourceControl, self).__init__(kind=kind, **kwargs)
self.repo_url = repo_url
self.branch = branch
self.is_manual_integration = is_manual_integration
self.deployment_rollback_enabled = deployment_rollback_enabled
self.is_mercurial = is_mercurial
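# Illustrative sketch (not part of the generated SDK): a Git-based source control
# binding with continuous integration enabled. The repository URL and branch are
# placeholders.
def _example_site_source_control() -> "SiteSourceControl":
    return SiteSourceControl(
        repo_url="https://github.com/example-org/example-app",
        branch="main",
        is_manual_integration=False,   # let the service configure webhooks
        deployment_rollback_enabled=False,
        is_mercurial=False,            # Git repository, not Mercurial
    )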
class SlotConfigNamesResource(ProxyOnlyResource):
"""Slot Config names azure resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param connection_string_names: List of connection string names.
:type connection_string_names: list[str]
:param app_setting_names: List of application settings names.
:type app_setting_names: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'connection_string_names': {'key': 'properties.connectionStringNames', 'type': '[str]'},
'app_setting_names': {'key': 'properties.appSettingNames', 'type': '[str]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
connection_string_names: Optional[List[str]] = None,
app_setting_names: Optional[List[str]] = None,
**kwargs
):
super(SlotConfigNamesResource, self).__init__(kind=kind, **kwargs)
self.connection_string_names = connection_string_names
self.app_setting_names = app_setting_names
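# Illustrative sketch (not part of the generated SDK): marking settings as
# "sticky" to a deployment slot so they are not swapped. The setting and
# connection string names are placeholders for whatever the app defines.
def _example_slot_config_names() -> "SlotConfigNamesResource":
    return SlotConfigNamesResource(
        connection_string_names=["MainDb"],
        app_setting_names=["ENVIRONMENT", "FEATURE_FLAGS"],
    )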
class SlotDifference(ProxyOnlyResource):
"""A setting difference between two deployment slots of an app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar type_properties_type: Type of the difference: Information, Warning or Error.
:vartype type_properties_type: str
:ivar setting_type: The type of the setting: General, AppSetting or ConnectionString.
:vartype setting_type: str
:ivar diff_rule: Rule that describes how to process the setting difference during a slot swap.
:vartype diff_rule: str
:ivar setting_name: Name of the setting.
:vartype setting_name: str
:ivar value_in_current_slot: Value of the setting in the current slot.
:vartype value_in_current_slot: str
:ivar value_in_target_slot: Value of the setting in the target slot.
:vartype value_in_target_slot: str
:ivar description: Description of the setting difference.
:vartype description: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'type_properties_type': {'readonly': True},
'setting_type': {'readonly': True},
'diff_rule': {'readonly': True},
'setting_name': {'readonly': True},
'value_in_current_slot': {'readonly': True},
'value_in_target_slot': {'readonly': True},
'description': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'type_properties_type': {'key': 'properties.type', 'type': 'str'},
'setting_type': {'key': 'properties.settingType', 'type': 'str'},
'diff_rule': {'key': 'properties.diffRule', 'type': 'str'},
'setting_name': {'key': 'properties.settingName', 'type': 'str'},
'value_in_current_slot': {'key': 'properties.valueInCurrentSlot', 'type': 'str'},
'value_in_target_slot': {'key': 'properties.valueInTargetSlot', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(SlotDifference, self).__init__(kind=kind, **kwargs)
self.type_properties_type = None
self.setting_type = None
self.diff_rule = None
self.setting_name = None
self.value_in_current_slot = None
self.value_in_target_slot = None
self.description = None
class SlotDifferenceCollection(msrest.serialization.Model):
"""Collection of slot differences.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SlotDifference]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SlotDifference]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SlotDifference"],
**kwargs
):
super(SlotDifferenceCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SlotSwapStatus(msrest.serialization.Model):
"""The status of the last successful slot swap operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar timestamp_utc: The time the last successful slot swap completed.
:vartype timestamp_utc: ~datetime.datetime
:ivar source_slot_name: The source slot of the last swap operation.
:vartype source_slot_name: str
:ivar destination_slot_name: The destination slot of the last swap operation.
:vartype destination_slot_name: str
"""
_validation = {
'timestamp_utc': {'readonly': True},
'source_slot_name': {'readonly': True},
'destination_slot_name': {'readonly': True},
}
_attribute_map = {
'timestamp_utc': {'key': 'timestampUtc', 'type': 'iso-8601'},
'source_slot_name': {'key': 'sourceSlotName', 'type': 'str'},
'destination_slot_name': {'key': 'destinationSlotName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SlotSwapStatus, self).__init__(**kwargs)
self.timestamp_utc = None
self.source_slot_name = None
self.destination_slot_name = None
class SlowRequestsBasedTrigger(msrest.serialization.Model):
"""Trigger based on request execution time.
:param time_taken: Time taken.
:type time_taken: str
:param count: Request Count.
:type count: int
:param time_interval: Time interval.
:type time_interval: str
"""
_attribute_map = {
'time_taken': {'key': 'timeTaken', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
'time_interval': {'key': 'timeInterval', 'type': 'str'},
}
def __init__(
self,
*,
time_taken: Optional[str] = None,
count: Optional[int] = None,
time_interval: Optional[str] = None,
**kwargs
):
super(SlowRequestsBasedTrigger, self).__init__(**kwargs)
self.time_taken = time_taken
self.count = count
self.time_interval = time_interval
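# Illustrative sketch (not part of the generated SDK): an auto-heal trigger that
# fires when 10 requests take longer than 30 seconds within a 5 minute window.
# The hh:mm:ss timespan strings are an assumption about the expected format.
def _example_slow_requests_trigger() -> "SlowRequestsBasedTrigger":
    return SlowRequestsBasedTrigger(
        time_taken="00:00:30",
        count=10,
        time_interval="00:05:00",
    )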
class Snapshot(ProxyOnlyResource):
"""A snapshot of an app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar time: The time the snapshot was taken.
:vartype time: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'time': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'time': {'key': 'properties.time', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(Snapshot, self).__init__(kind=kind, **kwargs)
self.time = None
class SnapshotCollection(msrest.serialization.Model):
"""Collection of snapshots which can be used to revert an app to a previous time.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.Snapshot]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Snapshot]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["Snapshot"],
**kwargs
):
super(SnapshotCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SnapshotRecoveryRequest(ProxyOnlyResource):
"""Details about app recovery operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param snapshot_time: Point in time in which the app recovery should be attempted, formatted as
a DateTime string.
:type snapshot_time: str
:param recovery_target: Specifies the web app that snapshot contents will be written to.
:type recovery_target: ~azure.mgmt.web.v2016_08_01.models.SnapshotRecoveryTarget
    :param overwrite: If :code:`<code>true</code>`, the recovery operation can overwrite the source
     app; otherwise, :code:`<code>false</code>`.
:type overwrite: bool
:param recover_configuration: If true, site configuration, in addition to content, will be
reverted.
:type recover_configuration: bool
:param ignore_conflicting_host_names: If true, custom hostname conflicts will be ignored when
recovering to a target web app.
This setting is only necessary when RecoverConfiguration is enabled.
:type ignore_conflicting_host_names: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'snapshot_time': {'key': 'properties.snapshotTime', 'type': 'str'},
'recovery_target': {'key': 'properties.recoveryTarget', 'type': 'SnapshotRecoveryTarget'},
'overwrite': {'key': 'properties.overwrite', 'type': 'bool'},
'recover_configuration': {'key': 'properties.recoverConfiguration', 'type': 'bool'},
'ignore_conflicting_host_names': {'key': 'properties.ignoreConflictingHostNames', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
snapshot_time: Optional[str] = None,
recovery_target: Optional["SnapshotRecoveryTarget"] = None,
overwrite: Optional[bool] = None,
recover_configuration: Optional[bool] = None,
ignore_conflicting_host_names: Optional[bool] = None,
**kwargs
):
super(SnapshotRecoveryRequest, self).__init__(kind=kind, **kwargs)
self.snapshot_time = snapshot_time
self.recovery_target = recovery_target
self.overwrite = overwrite
self.recover_configuration = recover_configuration
self.ignore_conflicting_host_names = ignore_conflicting_host_names
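# Illustrative sketch (not part of the generated SDK): restoring a snapshot into
# a separate target slot rather than over the live app. The subscription,
# resource group, site and slot names are placeholders, as is the snapshot time.
def _example_snapshot_recovery_request() -> "SnapshotRecoveryRequest":
    target = SnapshotRecoveryTarget(
        location="WestUS",
        id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000"
            "/resourceGroups/example-rg/providers/Microsoft.Web"
            "/sites/example-site/slots/staging"
        ),
    )
    return SnapshotRecoveryRequest(
        snapshot_time="2016-08-01T00:00:00Z",
        recovery_target=target,
        overwrite=False,
        recover_configuration=True,
        ignore_conflicting_host_names=True,
    )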
class SnapshotRecoveryTarget(msrest.serialization.Model):
"""Specifies the web app that snapshot contents will be written to.
:param location: Geographical location of the target web app, e.g. SouthEastAsia,
SouthCentralUS.
:type location: str
:param id: ARM resource ID of the target app.
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}
for production slots and
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slotName}
for other slots.
:type id: str
"""
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
location: Optional[str] = None,
id: Optional[str] = None,
**kwargs
):
super(SnapshotRecoveryTarget, self).__init__(**kwargs)
self.location = location
self.id = id
class StatusCodesBasedTrigger(msrest.serialization.Model):
"""Trigger based on status code.
:param status: HTTP status code.
:type status: int
:param sub_status: Request Sub Status.
:type sub_status: int
:param win32_status: Win32 error code.
:type win32_status: int
:param count: Request Count.
:type count: int
:param time_interval: Time interval.
:type time_interval: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'int'},
'sub_status': {'key': 'subStatus', 'type': 'int'},
'win32_status': {'key': 'win32Status', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'time_interval': {'key': 'timeInterval', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[int] = None,
sub_status: Optional[int] = None,
win32_status: Optional[int] = None,
count: Optional[int] = None,
time_interval: Optional[str] = None,
**kwargs
):
super(StatusCodesBasedTrigger, self).__init__(**kwargs)
self.status = status
self.sub_status = sub_status
self.win32_status = win32_status
self.count = count
self.time_interval = time_interval
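# Illustrative sketch (not part of the generated SDK): an auto-heal trigger keyed
# on HTTP 500 responses. The counts are placeholders; the time interval is assumed
# to use the same hh:mm:ss timespan format as other trigger fields.
def _example_status_codes_trigger() -> "StatusCodesBasedTrigger":
    return StatusCodesBasedTrigger(
        status=500,
        sub_status=0,
        win32_status=0,
        count=20,
        time_interval="00:10:00",
    )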
class StorageMigrationOptions(ProxyOnlyResource):
"""Options for app content migration.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param azurefiles_connection_string: AzureFiles connection string.
:type azurefiles_connection_string: str
:param azurefiles_share: AzureFiles share.
:type azurefiles_share: str
    :param switch_site_after_migration: :code:`<code>true</code>` if the app should be switched
     over; otherwise, :code:`<code>false</code>`.
:type switch_site_after_migration: bool
:param block_write_access_to_site: :code:`<code>true</code>` if the app should be read only
during copy operation; otherwise, :code:`<code>false</code>`.
:type block_write_access_to_site: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'azurefiles_connection_string': {'key': 'properties.azurefilesConnectionString', 'type': 'str'},
'azurefiles_share': {'key': 'properties.azurefilesShare', 'type': 'str'},
'switch_site_after_migration': {'key': 'properties.switchSiteAfterMigration', 'type': 'bool'},
'block_write_access_to_site': {'key': 'properties.blockWriteAccessToSite', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
azurefiles_connection_string: Optional[str] = None,
azurefiles_share: Optional[str] = None,
switch_site_after_migration: Optional[bool] = False,
block_write_access_to_site: Optional[bool] = False,
**kwargs
):
super(StorageMigrationOptions, self).__init__(kind=kind, **kwargs)
self.azurefiles_connection_string = azurefiles_connection_string
self.azurefiles_share = azurefiles_share
self.switch_site_after_migration = switch_site_after_migration
self.block_write_access_to_site = block_write_access_to_site
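# Illustrative sketch (not part of the generated SDK): options for migrating app
# content into an Azure Files share. The connection string and share name are
# placeholders and would come from the target storage account.
def _example_storage_migration_options() -> "StorageMigrationOptions":
    return StorageMigrationOptions(
        azurefiles_connection_string=(
            "DefaultEndpointsProtocol=https;AccountName=example;AccountKey=<key>"
        ),
        azurefiles_share="example-share",
        switch_site_after_migration=True,
        block_write_access_to_site=True,
    )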
class StorageMigrationResponse(ProxyOnlyResource):
"""Response for a migration of app content request.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
    :ivar operation_id: When the server starts the migration process, it will return an operation ID
     identifying that particular migration operation.
:vartype operation_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'operation_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'operation_id': {'key': 'properties.operationId', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(StorageMigrationResponse, self).__init__(kind=kind, **kwargs)
self.operation_id = None
class StringDictionary(ProxyOnlyResource):
"""String dictionary resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param properties: Settings.
:type properties: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
super(StringDictionary, self).__init__(kind=kind, **kwargs)
self.properties = properties
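# Illustrative sketch (not part of the generated SDK): StringDictionary is the
# shape used for app settings and similar name/value payloads. The keys and
# values below are placeholders.
def _example_app_settings() -> "StringDictionary":
    return StringDictionary(
        properties={
            "ENVIRONMENT": "staging",
            "FEATURE_FLAGS": "beta",
        }
    )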
class TriggeredJobHistory(ProxyOnlyResource):
"""Triggered Web Job History. List of Triggered Web Job Run Information elements.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param triggered_job_runs: List of triggered web job runs.
:type triggered_job_runs: list[~azure.mgmt.web.v2016_08_01.models.TriggeredJobRun]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'triggered_job_runs': {'key': 'properties.triggeredJobRuns', 'type': '[TriggeredJobRun]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
triggered_job_runs: Optional[List["TriggeredJobRun"]] = None,
**kwargs
):
super(TriggeredJobHistory, self).__init__(kind=kind, **kwargs)
self.triggered_job_runs = triggered_job_runs
class TriggeredJobHistoryCollection(msrest.serialization.Model):
"""Collection of Kudu continuous web job information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.TriggeredJobHistory]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[TriggeredJobHistory]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["TriggeredJobHistory"],
**kwargs
):
super(TriggeredJobHistoryCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class TriggeredJobRun(ProxyOnlyResource):
"""Triggered Web Job Run Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: Job ID.
:type id_properties_id: str
:ivar name_properties_name: Job name.
:vartype name_properties_name: str
:param status: Job status. Possible values include: "Success", "Failed", "Error".
:type status: str or ~azure.mgmt.web.v2016_08_01.models.TriggeredWebJobStatus
:param start_time: Start time.
:type start_time: ~datetime.datetime
:param end_time: End time.
:type end_time: ~datetime.datetime
:param duration: Job duration.
:type duration: str
:param output_url: Output URL.
:type output_url: str
:param error_url: Error URL.
:type error_url: str
:param url: Job URL.
:type url: str
:param job_name: Job name.
:type job_name: str
:param trigger: Job trigger.
:type trigger: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
'duration': {'key': 'properties.duration', 'type': 'str'},
'output_url': {'key': 'properties.outputUrl', 'type': 'str'},
'error_url': {'key': 'properties.errorUrl', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'job_name': {'key': 'properties.jobName', 'type': 'str'},
'trigger': {'key': 'properties.trigger', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[str] = None,
status: Optional[Union[str, "TriggeredWebJobStatus"]] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
duration: Optional[str] = None,
output_url: Optional[str] = None,
error_url: Optional[str] = None,
url: Optional[str] = None,
job_name: Optional[str] = None,
trigger: Optional[str] = None,
**kwargs
):
super(TriggeredJobRun, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.name_properties_name = None
self.status = status
self.start_time = start_time
self.end_time = end_time
self.duration = duration
self.output_url = output_url
self.error_url = error_url
self.url = url
self.job_name = job_name
self.trigger = trigger
class TriggeredWebJob(ProxyOnlyResource):
"""Triggered Web Job Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param latest_run: Latest job run information.
:type latest_run: ~azure.mgmt.web.v2016_08_01.models.TriggeredJobRun
:param history_url: History URL.
:type history_url: str
:param scheduler_logs_url: Scheduler Logs URL.
:type scheduler_logs_url: str
:ivar name_properties_name: Job name. Used as job identifier in ARM resource URI.
:vartype name_properties_name: str
:param run_command: Run command.
:type run_command: str
:param url: Job URL.
:type url: str
:param extra_info_url: Extra Info URL.
:type extra_info_url: str
:param job_type: Job type. Possible values include: "Continuous", "Triggered".
:type job_type: str or ~azure.mgmt.web.v2016_08_01.models.WebJobType
:param error: Error information.
:type error: str
:param using_sdk: Using SDK?.
:type using_sdk: bool
:param settings: Job settings.
:type settings: dict[str, any]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'latest_run': {'key': 'properties.latestRun', 'type': 'TriggeredJobRun'},
'history_url': {'key': 'properties.historyUrl', 'type': 'str'},
'scheduler_logs_url': {'key': 'properties.schedulerLogsUrl', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'run_command': {'key': 'properties.runCommand', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'extra_info_url': {'key': 'properties.extraInfoUrl', 'type': 'str'},
'job_type': {'key': 'properties.jobType', 'type': 'str'},
'error': {'key': 'properties.error', 'type': 'str'},
'using_sdk': {'key': 'properties.usingSdk', 'type': 'bool'},
'settings': {'key': 'properties.settings', 'type': '{object}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
latest_run: Optional["TriggeredJobRun"] = None,
history_url: Optional[str] = None,
scheduler_logs_url: Optional[str] = None,
run_command: Optional[str] = None,
url: Optional[str] = None,
extra_info_url: Optional[str] = None,
job_type: Optional[Union[str, "WebJobType"]] = None,
error: Optional[str] = None,
using_sdk: Optional[bool] = None,
settings: Optional[Dict[str, Any]] = None,
**kwargs
):
super(TriggeredWebJob, self).__init__(kind=kind, **kwargs)
self.latest_run = latest_run
self.history_url = history_url
self.scheduler_logs_url = scheduler_logs_url
self.name_properties_name = None
self.run_command = run_command
self.url = url
self.extra_info_url = extra_info_url
self.job_type = job_type
self.error = error
self.using_sdk = using_sdk
self.settings = settings
class TriggeredWebJobCollection(msrest.serialization.Model):
"""Collection of Kudu continuous web job information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.TriggeredWebJob]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[TriggeredWebJob]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["TriggeredWebJob"],
**kwargs
):
super(TriggeredWebJobCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class User(ProxyOnlyResource):
"""User credentials used for publishing activity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param user_name: Username.
:type user_name: str
:param publishing_user_name: Username used for publishing.
:type publishing_user_name: str
:param publishing_password: Password used for publishing.
:type publishing_password: str
:param publishing_password_hash: Password hash used for publishing.
:type publishing_password_hash: str
:param publishing_password_hash_salt: Password hash salt used for publishing.
:type publishing_password_hash_salt: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'user_name': {'key': 'properties.name', 'type': 'str'},
'publishing_user_name': {'key': 'properties.publishingUserName', 'type': 'str'},
'publishing_password': {'key': 'properties.publishingPassword', 'type': 'str'},
'publishing_password_hash': {'key': 'properties.publishingPasswordHash', 'type': 'str'},
'publishing_password_hash_salt': {'key': 'properties.publishingPasswordHashSalt', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
user_name: Optional[str] = None,
publishing_user_name: Optional[str] = None,
publishing_password: Optional[str] = None,
publishing_password_hash: Optional[str] = None,
publishing_password_hash_salt: Optional[str] = None,
**kwargs
):
super(User, self).__init__(kind=kind, **kwargs)
self.user_name = user_name
self.publishing_user_name = publishing_user_name
self.publishing_password = publishing_password
self.publishing_password_hash = publishing_password_hash
self.publishing_password_hash_salt = publishing_password_hash_salt
class VirtualApplication(msrest.serialization.Model):
"""Virtual application in an app.
:param virtual_path: Virtual path.
:type virtual_path: str
:param physical_path: Physical path.
:type physical_path: str
:param preload_enabled: :code:`<code>true</code>` if preloading is enabled; otherwise,
:code:`<code>false</code>`.
:type preload_enabled: bool
:param virtual_directories: Virtual directories for virtual application.
:type virtual_directories: list[~azure.mgmt.web.v2016_08_01.models.VirtualDirectory]
"""
_attribute_map = {
'virtual_path': {'key': 'virtualPath', 'type': 'str'},
'physical_path': {'key': 'physicalPath', 'type': 'str'},
'preload_enabled': {'key': 'preloadEnabled', 'type': 'bool'},
'virtual_directories': {'key': 'virtualDirectories', 'type': '[VirtualDirectory]'},
}
def __init__(
self,
*,
virtual_path: Optional[str] = None,
physical_path: Optional[str] = None,
preload_enabled: Optional[bool] = None,
virtual_directories: Optional[List["VirtualDirectory"]] = None,
**kwargs
):
super(VirtualApplication, self).__init__(**kwargs)
self.virtual_path = virtual_path
self.physical_path = physical_path
self.preload_enabled = preload_enabled
self.virtual_directories = virtual_directories
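# Illustrative sketch (not part of the generated SDK): a root virtual application
# with one nested virtual directory. The paths are placeholders.
def _example_virtual_application() -> "VirtualApplication":
    return VirtualApplication(
        virtual_path="/",
        physical_path="site\\wwwroot",
        preload_enabled=True,
        virtual_directories=[
            VirtualDirectory(
                virtual_path="/static",
                physical_path="site\\wwwroot\\static",
            )
        ],
    )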
class VirtualDirectory(msrest.serialization.Model):
"""Directory for virtual application.
:param virtual_path: Path to virtual application.
:type virtual_path: str
:param physical_path: Physical path.
:type physical_path: str
"""
_attribute_map = {
'virtual_path': {'key': 'virtualPath', 'type': 'str'},
'physical_path': {'key': 'physicalPath', 'type': 'str'},
}
def __init__(
self,
*,
virtual_path: Optional[str] = None,
physical_path: Optional[str] = None,
**kwargs
):
super(VirtualDirectory, self).__init__(**kwargs)
self.virtual_path = virtual_path
self.physical_path = physical_path
class VnetGateway(ProxyOnlyResource):
"""The Virtual Network gateway contract. This is used to give the Virtual Network gateway access to the VPN package.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param vnet_name: The Virtual Network name.
:type vnet_name: str
:param vpn_package_uri: The URI where the VPN package can be downloaded.
:type vpn_package_uri: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vnet_name': {'key': 'properties.vnetName', 'type': 'str'},
'vpn_package_uri': {'key': 'properties.vpnPackageUri', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
vnet_name: Optional[str] = None,
vpn_package_uri: Optional[str] = None,
**kwargs
):
super(VnetGateway, self).__init__(kind=kind, **kwargs)
self.vnet_name = vnet_name
self.vpn_package_uri = vpn_package_uri
class VnetInfo(ProxyOnlyResource):
"""Virtual Network information contract.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param vnet_resource_id: The Virtual Network's resource ID.
:type vnet_resource_id: str
:ivar cert_thumbprint: The client certificate thumbprint.
:vartype cert_thumbprint: str
:param cert_blob: A certificate file (.cer) blob containing the public key of the private key
used to authenticate a
Point-To-Site VPN connection.
:type cert_blob: bytearray
:ivar routes: The routes that this Virtual Network connection uses.
:vartype routes: list[~azure.mgmt.web.v2016_08_01.models.VnetRoute]
:ivar resync_required: :code:`<code>true</code>` if a resync is required; otherwise,
:code:`<code>false</code>`.
:vartype resync_required: bool
:param dns_servers: DNS servers to be used by this Virtual Network. This should be a
comma-separated list of IP addresses.
:type dns_servers: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'cert_thumbprint': {'readonly': True},
'routes': {'readonly': True},
'resync_required': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vnet_resource_id': {'key': 'properties.vnetResourceId', 'type': 'str'},
'cert_thumbprint': {'key': 'properties.certThumbprint', 'type': 'str'},
'cert_blob': {'key': 'properties.certBlob', 'type': 'bytearray'},
'routes': {'key': 'properties.routes', 'type': '[VnetRoute]'},
'resync_required': {'key': 'properties.resyncRequired', 'type': 'bool'},
'dns_servers': {'key': 'properties.dnsServers', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
vnet_resource_id: Optional[str] = None,
cert_blob: Optional[bytearray] = None,
dns_servers: Optional[str] = None,
**kwargs
):
super(VnetInfo, self).__init__(kind=kind, **kwargs)
self.vnet_resource_id = vnet_resource_id
self.cert_thumbprint = None
self.cert_blob = cert_blob
self.routes = None
self.resync_required = None
self.dns_servers = dns_servers
class VnetRoute(ProxyOnlyResource):
"""Virtual Network route contract used to pass routing information for a Virtual Network.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param vnet_route_name: The name of this route. This is only returned by the server and does
not need to be set by the client.
:type vnet_route_name: str
:param start_address: The starting address for this route. This may also include a CIDR
notation, in which case the end address must not be specified.
:type start_address: str
:param end_address: The ending address for this route. If the start address is specified in
CIDR notation, this must be omitted.
:type end_address: str
:param route_type: The type of route this is:
DEFAULT - By default, every app has routes to the local address ranges specified by RFC1918
INHERITED - Routes inherited from the real Virtual Network routes
STATIC - Static route set on the app only
These values will be used for syncing an app's routes with those from a Virtual Network.
Possible values include: "DEFAULT", "INHERITED", "STATIC".
:type route_type: str or ~azure.mgmt.web.v2016_08_01.models.RouteType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vnet_route_name': {'key': 'properties.name', 'type': 'str'},
'start_address': {'key': 'properties.startAddress', 'type': 'str'},
'end_address': {'key': 'properties.endAddress', 'type': 'str'},
'route_type': {'key': 'properties.routeType', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
vnet_route_name: Optional[str] = None,
start_address: Optional[str] = None,
end_address: Optional[str] = None,
route_type: Optional[Union[str, "RouteType"]] = None,
**kwargs
):
super(VnetRoute, self).__init__(kind=kind, **kwargs)
self.vnet_route_name = vnet_route_name
self.start_address = start_address
self.end_address = end_address
self.route_type = route_type
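# Illustrative sketch (not part of the generated SDK): a static route given in
# CIDR notation, so end_address is omitted as the docstring above requires.
# The address range and route name are placeholders.
def _example_vnet_route() -> "VnetRoute":
    return VnetRoute(
        vnet_route_name="on-prem-range",
        start_address="10.1.0.0/16",
        route_type="STATIC",
    )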
class WebAppCollection(msrest.serialization.Model):
"""Collection of App Service apps.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.Site]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Site]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["Site"],
**kwargs
):
super(WebAppCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class WebAppInstanceCollection(msrest.serialization.Model):
"""Collection of app instances.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SiteInstance]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SiteInstance]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SiteInstance"],
**kwargs
):
super(WebAppInstanceCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class WebJob(ProxyOnlyResource):
"""Web Job Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar name_properties_name: Job name. Used as job identifier in ARM resource URI.
:vartype name_properties_name: str
:param run_command: Run command.
:type run_command: str
:param url: Job URL.
:type url: str
:param extra_info_url: Extra Info URL.
:type extra_info_url: str
:param job_type: Job type. Possible values include: "Continuous", "Triggered".
:type job_type: str or ~azure.mgmt.web.v2016_08_01.models.WebJobType
:param error: Error information.
:type error: str
:param using_sdk: Using SDK?.
:type using_sdk: bool
:param settings: Job settings.
:type settings: dict[str, any]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'run_command': {'key': 'properties.runCommand', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'extra_info_url': {'key': 'properties.extraInfoUrl', 'type': 'str'},
'job_type': {'key': 'properties.jobType', 'type': 'str'},
'error': {'key': 'properties.error', 'type': 'str'},
'using_sdk': {'key': 'properties.usingSdk', 'type': 'bool'},
'settings': {'key': 'properties.settings', 'type': '{object}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
run_command: Optional[str] = None,
url: Optional[str] = None,
extra_info_url: Optional[str] = None,
job_type: Optional[Union[str, "WebJobType"]] = None,
error: Optional[str] = None,
using_sdk: Optional[bool] = None,
settings: Optional[Dict[str, Any]] = None,
**kwargs
):
super(WebJob, self).__init__(kind=kind, **kwargs)
self.name_properties_name = None
self.run_command = run_command
self.url = url
self.extra_info_url = extra_info_url
self.job_type = job_type
self.error = error
self.using_sdk = using_sdk
self.settings = settings
class WebJobCollection(msrest.serialization.Model):
"""Collection of Kudu web job information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.WebJob]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[WebJob]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["WebJob"],
**kwargs
):
super(WebJobCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
| mit | 6,072,001,635,998,820,000 | 38.26125 | 152 | 0.622387 | false | 3.792703 | true | false | false |
GbalsaC/bitnamiP | venv/src/edx-submissions/submissions/tests/test_models.py | 1 | 5673 | """
Tests for submission models.
"""
from django.test import TestCase
from submissions.models import Submission, Score, ScoreSummary, StudentItem
class TestScoreSummary(TestCase):
"""
Test selection of options from a rubric.
"""
def test_latest(self):
item = StudentItem.objects.create(
student_id="score_test_student",
course_id="score_test_course",
item_id="i4x://mycourse/class_participation.section_attendance"
)
first_score = Score.objects.create(
student_item=item,
submission=None,
points_earned=8,
points_possible=10,
)
second_score = Score.objects.create(
student_item=item,
submission=None,
points_earned=5,
points_possible=10,
)
latest_score = ScoreSummary.objects.get(student_item=item).latest
self.assertEqual(second_score, latest_score)
def test_highest(self):
item = StudentItem.objects.create(
student_id="score_test_student",
course_id="score_test_course",
item_id="i4x://mycourse/special_presentation"
)
# Low score is higher than no score...
low_score = Score.objects.create(
student_item=item,
points_earned=0,
points_possible=0,
)
self.assertEqual(
low_score,
ScoreSummary.objects.get(student_item=item).highest
)
# Medium score should supplant low score
med_score = Score.objects.create(
student_item=item,
points_earned=8,
points_possible=10,
)
self.assertEqual(
med_score,
ScoreSummary.objects.get(student_item=item).highest
)
# Even though the points_earned is higher in the med_score, high_score
# should win because it's 4/4 as opposed to 8/10.
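        # (Illustrative arithmetic: 8/10 = 0.8 while 4/4 = 1.0, so the higher ratio wins.)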
high_score = Score.objects.create(
student_item=item,
points_earned=4,
points_possible=4,
)
self.assertEqual(
high_score,
ScoreSummary.objects.get(student_item=item).highest
)
# Put another medium score to make sure it doesn't get set back down
med_score2 = Score.objects.create(
student_item=item,
points_earned=5,
points_possible=10,
)
self.assertEqual(
high_score,
ScoreSummary.objects.get(student_item=item).highest
)
self.assertEqual(
med_score2,
ScoreSummary.objects.get(student_item=item).latest
)
def test_reset_score_highest(self):
item = StudentItem.objects.create(
student_id="score_test_student",
course_id="score_test_course",
item_id="i4x://mycourse/special_presentation"
)
# Reset score with no score
Score.create_reset_score(item)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 0)
self.assertEqual(highest.points_possible, 0)
# Non-reset score after a reset score
submission = Submission.objects.create(student_item=item, attempt_number=1)
Score.objects.create(
student_item=item,
submission=submission,
points_earned=2,
points_possible=3,
)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 2)
self.assertEqual(highest.points_possible, 3)
# Reset score after a non-reset score
Score.create_reset_score(item)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 0)
self.assertEqual(highest.points_possible, 0)
def test_highest_score_hidden(self):
item = StudentItem.objects.create(
student_id="score_test_student",
course_id="score_test_course",
item_id="i4x://mycourse/special_presentation"
)
# Score with points possible set to 0
# (by convention a "hidden" score)
submission = Submission.objects.create(student_item=item, attempt_number=1)
Score.objects.create(
student_item=item,
submission=submission,
points_earned=0,
points_possible=0,
)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 0)
self.assertEqual(highest.points_possible, 0)
# Score with points
submission = Submission.objects.create(student_item=item, attempt_number=1)
Score.objects.create(
student_item=item,
submission=submission,
points_earned=1,
points_possible=2,
)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 1)
self.assertEqual(highest.points_possible, 2)
# Another score with points possible set to 0
# The previous score should remain the highest score.
submission = Submission.objects.create(student_item=item, attempt_number=1)
Score.objects.create(
student_item=item,
submission=submission,
points_earned=0,
points_possible=0,
)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 1)
self.assertEqual(highest.points_possible, 2)
| agpl-3.0 | -71,096,119,413,147,070 | 33.174699 | 83 | 0.600212 | false | 4.165198 | true | false | false |
mgraffg/simplegp | examples/simplify.py | 1 | 2421 | from SimpleGP import GP
import numpy as np
seed = 0 # if len(sys.argv) == 1 else int(sys.argv[1])
x = np.linspace(0, 1, 100)
pol = np.array([0.2, -0.3, 0.2])
X = np.vstack((x**2, x, np.ones(x.shape[0])))
y = (X.T * pol).sum(axis=1)
gp = GP(popsize=10,
generations=100000,
verbose=True,
verbose_nind=1000,
min_length=1,
do_simplify=True,
func=["+", "-", "*", "/", 'abs', 'exp', 'sqrt',
'sin', 'cos', 'sigmoid', 'if', 'max', 'min',
'ln', 'sq'],
min_depth=0, fname_best='regression.npy',
seed=seed, nrandom=100, pxo=0.2, pgrow=0.5, walltime=None)
gp.create_random_constants()
x = x[:, np.newaxis]
gp.train(x, y)
gp.create_population()
nvar = gp._nop.shape[0]
ind = np.array([2, 3, 0, 0, nvar, nvar, 1, nvar, nvar,
0, 1, nvar, nvar, 2, nvar, nvar, 1, 3,
nvar, nvar, 3, nvar, nvar], dtype=np.int)
print gp.print_infix(ind)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
ind = np.array([1, 0, 3, nvar, nvar, 1, nvar, nvar,
3, 2, nvar, nvar, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
print ind2
ind = np.array([13, 5, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
ind = np.array([5, 13, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
ind = np.array([5, 13, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
gp._p[0] = np.array([0, 2, nvar, nvar+2, nvar+1], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0) == "(X0 * 1.4)"
gp._p[0] = np.array([0, nvar+1, 2, nvar, nvar+2], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0) == "(X0 * 1.4)"
gp._p[0] = np.array([1, 0, 2, nvar, nvar+2, nvar+1,
2, nvar, nvar+2], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0)
| apache-2.0 | 7,469,544,312,858,264,000 | 31.716216 | 66 | 0.608013 | false | 2.359649 | false | true | false |
karcio/checkSumValidatorGUI | checkSumVal/src/checkSumGui.py | 1 | 2950 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'checkSumGui.ui'
#
# Created: Thu Jan 8 02:22:42 2015
# by: PyQt5 UI code generator 5.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(390, 210)
self.label = QtWidgets.QLabel(Form)
self.label.setGeometry(QtCore.QRect(20, 20, 76, 15))
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(Form)
self.label_2.setGeometry(QtCore.QRect(20, 70, 76, 15))
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(Form)
self.label_3.setGeometry(QtCore.QRect(20, 120, 36, 15))
self.label_3.setObjectName("label_3")
self.pushButton = QtWidgets.QPushButton(Form)
self.pushButton.setGeometry(QtCore.QRect(280, 160, 92, 27))
self.pushButton.setObjectName("pushButton")
self.lineEdit = QtWidgets.QLineEdit(Form)
self.lineEdit.setGeometry(QtCore.QRect(120, 20, 250, 25))
self.lineEdit.setMaxLength(32)
self.lineEdit.setObjectName("lineEdit")
self.lineEdit_2 = QtWidgets.QLineEdit(Form)
self.lineEdit_2.setGeometry(QtCore.QRect(120, 70, 250, 25))
self.lineEdit_2.setMaxLength(32)
self.lineEdit_2.setObjectName("lineEdit_2")
self.label_4 = QtWidgets.QLabel(Form)
self.label_4.setGeometry(QtCore.QRect(120, 120, 251, 16))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4.setFont(font)
self.label_4.setText("")
self.label_4.setObjectName("label_4")
self.pushButton_2 = QtWidgets.QPushButton(Form)
self.pushButton_2.setGeometry(QtCore.QRect(170, 160, 92, 27))
self.pushButton_2.setObjectName("pushButton_2")
self.retranslateUi(Form)
self.pushButton_2.clicked.connect(Form.close)
QtCore.QMetaObject.connectSlotsByName(Form)
self.pushButton.clicked.connect(self.validation_b)
def validation_b(self):
text1 = self.lineEdit.text()
text2 = self.lineEdit_2.text()
if text1 == text2:
result = "True - identical"
else:
result = "False - NOT identical"
self.label_4.setText(repr(result))
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Check Sum validator v 0.2"))
self.label.setText(_translate("Form", "insert string"))
self.label_2.setText(_translate("Form", "insert string"))
self.label_3.setText(_translate("Form", "result"))
self.pushButton.setText(_translate("Form", "&validate"))
self.pushButton_2.setText(_translate("Form", "&exit"))
| gpl-3.0 | -6,674,606,078,571,181,000 | 36.820513 | 76 | 0.647119 | false | 3.710692 | false | false | false |
mkobos/tree_crawler | concurrent_tree_crawler/multithreaded_crawler.py | 1 | 5649 | import os
import logging
import time
import datetime
from concurrent_tree_crawler.common.file_helper import lenient_makedir
from concurrent_tree_crawler.common.logger import Logger
from concurrent_tree_crawler.common.activity_schedule import AlwaysActiveSchedule
from concurrent_tree_crawler.crawlers_manager import CrawlersManager
from concurrent_tree_crawler.rw_lock_tree_accessor import RWLockTreeAccessor
from concurrent_tree_crawler.navigator_tree_wrapper import NavigatorTreeWrapper
from concurrent_tree_crawler.tree_saver_thread import TreeSaverThread
from concurrent_tree_crawler.abstract_node import NodeState
from concurrent_tree_crawler.xml_tree_serialization import XMLTreeReader
class MultithreadedCrawler:
"""
Runs several threads to crawl the tree.
It is also responsible for all the ancillary stuff:
makes sure that the state of the tree is saved to disk,
sets up the logging level etc.
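	A minimal usage sketch (illustrative only; the navigator and sentinel
	objects are assumed to be built elsewhere with the project's classes):
		crawler = MultithreadedCrawler(navigators, sentinel,
			state_file_path="state.xml", save_period=60)
		sentinel = crawler.run()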
"""
def __init__(self, navigators, sentinel, activity_schedule=None,
log_file_path=None, state_file_path=None, save_period=None,
logging_level=logging.ERROR):
"""
@param navigators: list of navigators to be used by the crawler.
Each navigator will be run in a separate thread, thus the
number of the threads is equal to the number of navigators.
@type navigators: list of L{AbstractTreeNavigator}s
@param sentinel: a technical node which will be made parent of the
root node.
@type sentinel: L{AbstractNode}
@param activity_schedule: if C{None}, no schedule is used and the
program works until it finishes crawling.
@type activity_schedule: L{AbstractActivitySchedule}
@param log_file_path: path to the log file. If C{None}, no log file
will be used.
@param state_file_path: path to the file where the state of the
program will be saved. If C{None}, the state will not be saved.
@param save_period: time between saving the tree state. If
C{state_file_path} is C{None}, this value is ignored.
@param logging_level: one of the logging level constants from C{logging}
"""
if log_file_path is not None:
lenient_makedir(os.path.dirname(log_file_path))
if state_file_path is not None:
if os.path.exists(state_file_path):
print "State file already exists. Loading the tree from this "\
"file and changing nodes with state PROCESSING to OPEN ... ",
self.__load_state_file(state_file_path, sentinel)
print "Done."
else:
lenient_makedir(os.path.dirname(state_file_path))
self.__tree = RWLockTreeAccessor(sentinel)
self.__navigators = navigators
self.__manager = None
self.__state_file_path = state_file_path
self.__save_period = save_period
self.__activity_schedule = activity_schedule
if activity_schedule is None:
self.__activity_schedule = AlwaysActiveSchedule()
self.__logging_level = logging_level
self.__log_file_path = log_file_path
def run(self):
"""
@return: sentinel node
@rtype: L{AbstractNode}
"""
self.__manager = self._create_crawlers_manager(
self.__tree, self.__navigators)
if self.__log_file_path is not None:
Logger.start(file_path=self.__log_file_path,
logging_level=self.__logging_level)
while True:
activity_time = self.__sleep_until_activity_period()
saver_thread = None
if self.__state_file_path is not None:
saver_thread = self.__start_tree_saver_thread()
self.__manager.start()
threads_finished = \
self.__manager.wait_until_finish(timeout=activity_time)
if self.__state_file_path is not None:
saver_thread.stop_activity()
saver_thread.join()
if threads_finished:
break
if self.__log_file_path is not None:
Logger.stop()
return self.__tree.get_sentinel()
def _create_crawlers_manager(self, tree, navigators):
navigator_wrappers = []
for navigator in navigators:
navigator_wrapper = NavigatorTreeWrapper(navigator, tree)
navigator_wrappers.append(navigator_wrapper)
return CrawlersManager(tree, navigator_wrappers)
def __start_tree_saver_thread(self):
t = TreeSaverThread(
self.__state_file_path, self.__tree, self.__save_period)
t.daemon = True
t.start()
return t
def __sleep_until_activity_period(self):
"""
Sleep (stop program execution) until there's a time to wake up.
@return: activity time, i.e. time until the start of the next
sleep period, C{None} if such time point cannot be determined
(as in case when the activity time will not stop in future).
@rtype: number of seconds
"""
while True:
now = datetime.datetime.now()
info = self.__activity_schedule.get_activity_info(now)
if info.future_mode_change is None:
if info.is_in_activity_period:
return None
else:
raise Exception("Going to sleep forever?")
mode_change_time = (info.future_mode_change - now).total_seconds()
if not info.is_in_activity_period:
logging.info("Going to sleep for {:.1f} seconds "
"(according to schedule)".format(
mode_change_time))
time.sleep(mode_change_time)
logging.info("Awaken")
else:
logging.info("Starting activity for {:.1f} seconds "
"(according to schedule)".format(
mode_change_time))
return mode_change_time
@staticmethod
def __load_state_file(file_path, sentinel):
with open(file_path) as f:
reader = XMLTreeReader(f)
reader.read(sentinel)
MultithreadedCrawler.__change_state_from_PROCESSING_to_OPEN(
sentinel.get_child("root"))
@staticmethod
def __change_state_from_PROCESSING_to_OPEN(node):
if node.get_state() == NodeState.PROCESSING:
node.set_state(NodeState.OPEN)
for child in node.get_children():
MultithreadedCrawler.__change_state_from_PROCESSING_to_OPEN(child)
| mit | -2,767,789,249,155,980,300 | 36.164474 | 81 | 0.725084 | false | 3.311254 | false | false | false |
elli0ttB/problems | sorting/quicksort.py | 1 | 1524 | #!/usr/bin/env python
def quicksort(arr, partition):
if (partition == "hoare"):
quicksort_hoare(arr, 0, len(arr) -1)
elif (partition == "lomuto"):
quicksort_lomuto(arr, 0, len(arr) -1)
else:
raise ValueError()
def quicksort_hoare(arr, lo, hi):
    # lo and hi are both inclusive bounds (the top-level call passes len(arr) - 1).
    """Quicksort arr[lo..hi] in place using the Hoare partition scheme."""
if lo < hi:
p = hoare(arr, lo, hi)
quicksort_hoare(arr, lo, p)
quicksort_hoare(arr, p+1, hi)
def quicksort_lomuto(arr, lo, hi):
    # lo and hi are both inclusive bounds (the top-level call passes len(arr) - 1).
    """Quicksort arr[lo..hi] in place using the Lomuto partition scheme."""
if lo < hi:
p = lomuto(arr, lo, hi)
quicksort_lomuto(arr, lo, p-1)
quicksort_lomuto(arr, p+1, hi)
def lomuto(arr, lo, hi):
pivot = arr[hi]
i = lo - 1
for j in range(lo, hi + 1):
if arr[j] <= pivot:
i += 1
arr[i], arr[j] = arr[j], arr[i]
return i # we know that arr[i] = p
def hoare(arr, lo, hi):
pivot = arr[lo]
i = lo - 1
j = hi + 1
while True:
i, j = i+1, j-1
while arr[j] > pivot:
j -= 1
while arr[i] < pivot:
i += 1
if i < j:
arr[i], arr[j] = arr[j], arr[i]
else:
return j
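# Illustrative usage of the two variants above (not part of the original module):
#   data = [5, 2, 9, 1]
#   quicksort(data, "hoare")   # sorts in place -> [1, 2, 5, 9]
#   quicksort(data, "lomuto")  # same result via the Lomuto partition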
def main():
import sort_test
    # `lom` and `hor` were undefined; wrap each variant as a single-argument
    # callable (assuming sort_test.test expects a sorting function).
    sort_test.test(lambda arr: quicksort(arr, "lomuto"))
    sort_test.test(lambda arr: quicksort(arr, "hoare"))
if __name__ == "__main__":
main()
| mit | -8,108,625,509,353,319,000 | 25.736842 | 94 | 0.532152 | false | 3.017822 | false | false | false |
Turgon37/OpenVPN_UAM | OpenVPNUAM/pki/pki_filetree.py | 1 | 6143 | # -*- coding: utf8 -*-
# This file is a part of OpenVPN-UAM
#
# Copyright (c) 2015 Thomas PAJON, Pierre GINDRAUD
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""PKI - Public Key Infrastructure File Tree program class
This class is responsible for the management of all SSL files
"""
# System imports
import logging
import os
import OpenSSL
from OpenSSL import crypto
from OpenSSL.crypto import (_lib as lib, _ffi as ffi)
# Project imports
from ..config import Error
# Global project declarations
g_sys_log = logging.getLogger('openvpn-uam.pki.file')
class PKIFileTree(object):
"""Build an instance of the pki model class
This instance must be called in the openvpn uam program class
"""
def __init__(self, confparser):
"""Constructor : Build a new PKI API instance
"""
self.__cp = confparser
# the root path of file tree
self.__new_cert_directory = "certificates/"
# the cipher to use for private key encryption
self.__cipher = "DES3"
def load(self):
"""Return a boolean indicates if PKI is ready to work or not
This function check things required by PKI working and return a boolean
that indicates if the PKI is ready to work with certificate or not
@return [bool] The ready status
"""
# check PKI section in configuration file
if not self.__cp.has_section(self.__cp.PKI_SECTION):
g_sys_log.error('Missing pki section in configuration file')
return False
sec = self.__cp.getItems(self.__cp.PKI_SECTION)
# read the new cert directory path from config file
self.__new_cert_directory = self.__cp.get(
self.__cp.PKI_SECTION,
'cert_directory',
fallback=self.__new_cert_directory).rstrip('/') + '/'
self.__cipher = self.__cp.get(
self.__cp.PKI_SECTION,
'cert_key_cipher',
fallback=self.__cipher)
# BAD USAGE but no other solution
if lib.EVP_get_cipherbyname(self.__cipher.encode()) == ffi.NULL:
g_sys_log.fatal("Invalid cipher name")
return False
if not self.makePath(self.__new_cert_directory):
g_sys_log.fatal("Certificate directory is invalid")
return False
return True
# Tools
def makePath(self, path):
"""Ensure that the given path is builded on the file system
@param path [str] the path to check for
@return [bool] True if the entire path is existing on the FS
False if an error happen
"""
p = ""
for folder in path.split('/'):
if len(folder) == 0:
continue
p += folder + '/'
if not os.path.exists(p):
# create it
g_sys_log.info("Creating directory '%s'", p)
try:
os.mkdir(p)
except OSError as e:
g_sys_log.error("File '%s' already exist", p)
return False
# if cert path already exist
else:
# check if it is a valid directory
if not os.path.isdir(p):
g_sys_log.error("File '%s' is not a directory", p)
return False
return True
# API
def storeBytesToFile(self, content, path):
"""Write a list of bytes into a file
@param content [bytes/str] the content to write into the file
@param path [str] the path to the file into
"""
f = None
if os.path.exists(path):
g_sys_log.error("Error during export of file '%s'.", path)
return
if isinstance(content, bytes):
# open output file in binary mode
f = open(path, "wb")
elif isinstance(content, str):
# open output file in text mode
f = open(path, "wt")
assert f is not None
f.write(content)
f.close()
def storePKIUserCertificate(self, user, hostname, certificate, obj,
password=None):
"""Store a given PKI object into a file
@param user [User] the user to which the certificate is associated
@param hostname [Hostname] the hostname to which the certificate is
associated
@param certificate [Certificate] the Certificate instance associated with
the file
@param obj [X509/PKey] The object that will be dump to the file
    @param password [str] OPTIONAL: an optional passphrase used to encrypt
    the output (if available)
"""
path = (self.__new_cert_directory + str(user.id) + "/" + str(hostname.id) +
"/")
self.makePath(path)
bytes_ = None
if isinstance(obj, OpenSSL.crypto.X509):
bytes_ = crypto.dump_certificate(crypto.FILETYPE_PEM, obj)
path += str(certificate.id) + ".crt"
if isinstance(obj, OpenSSL.crypto.X509Req):
bytes_ = crypto.dump_certificate_request(crypto.FILETYPE_PEM, obj)
path += str(certificate.id) + ".csr"
elif isinstance(obj, OpenSSL.crypto.PKey):
if isinstance(password, str):
bytes_ = crypto.dump_privatekey(crypto.FILETYPE_PEM, obj,
self.__cipher, password.encode())
else:
bytes_ = crypto.dump_privatekey(crypto.FILETYPE_PEM, obj)
path += str(certificate.id) + ".key"
assert bytes_ is not None
self.storeBytesToFile(bytes_, path)
| gpl-3.0 | -2,542,147,906,521,464,000 | 33.318436 | 80 | 0.654892 | false | 3.988961 | false | false | false |
RNAcentral/rnacentral-import-pipeline | rnacentral_pipeline/databases/pirbase/fetch.py | 1 | 1316 | # -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import urllib
from pathlib import Path
import typing as ty
from furl import furl
import requests
from bs4 import BeautifulSoup
def base_url(url: furl) -> furl:
base = furl(url)
base.path.segments = base.path.segments[:-1]
return base
def extract_urls(base: furl, document: str) -> ty.List[furl]:
soup = BeautifulSoup(document)
urls = []
links = soup.find("table").find_all("a")
for link in links:
href = link.get("href")
if href.endswith("json.gz"):
urls.append(base / href)
return urls
def find_urls(url: furl):
response = requests.get(url.url)
response.raise_for_status()
return extract_urls(base_url(url), response.text)
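# Illustrative call (the URL is a made-up placeholder, not a real piRBase page):
#   urls = find_urls(furl("https://example.org/pirbase/download.html"))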
| apache-2.0 | -4,344,957,713,363,397,600 | 28.244444 | 72 | 0.713526 | false | 3.665738 | false | false | false |
acutesoftware/worldbuild | scripts/minecraft/go_minecraft.py | 1 | 2260 | # go_minecraft.py
import sys
import time
import aikif.toolbox.interface_windows_tools as mod_tool
players = ['DynamiteBuilder', 'craftandstore']
#server = '1.9'
#server = '1.10'
server = '1.11.2'
seed = 0
if server == '1.11.2':
seed = -7560993781265470572
locations = [
{'name':'home', 'loc':'61 64 239'},
{'name':'Woodlands Mansion', 'loc':'4473 66 5773'},
{'name':'Stronghold', 'loc':'-184 67 1736'},
{'name':'Village', 'loc':'-710 87 548'},
]
elif server == '1.10':
seed = 8239770600742919613
locations = [
{'name':'home', 'loc':'248 66 -61'},
{'name':'farm', 'loc':'960 77 -260' },
{'name':'floating-garden', 'loc':'685 107 -588' },
{'name':'floating-castle', 'loc':'-202 105 -655' },
{'name':'stronghold', 'loc':'415 72 -2198' },
{'name':'village', 'loc':'121 77 -2019' },
{'name':'overhang-lookout/evil storm and zoo / garage', 'loc':'-449 110 -1830' },
{'name':'rock-island / harbour', 'loc':'154 98 384' },
{'name':'enchanted-village','loc':'1082 87 -1297' },
{'name':'flower-garden','loc':'1254 105 -1807' },
]
else:
seed = 2677023417700615710
locations = [
{'name':'v1-home', 'loc':'151 103 736'},
{'name':'v1-treehouse', 'loc':'120 72 662' },
{'name':'v1-castle', 'loc':'-132 68 388' },
{'name':'v1-village', 'loc':'-298 82 946' },
{'name':'v1-stables', 'loc':'-602 82 951' },
{'name':'v1-desert', 'loc':'-1524 97 1580' },
]
print('Minecraft Teleport Service for players ' + str(players))
print('(server version=' + server + ', seed = ' + str(seed) + ' )')
for num, l in enumerate(locations):
print(str(num+1) + ' = ' + l['name'])
loc = locations[int(input('Enter Location ')) - 1]
mod_tool.app_activate('Minecraft server')
for p in players:
print('Teleporting ' + p + ' to ' + loc['name'] + ' (' + loc['loc'] + ')')
mod_tool.send_keys('/tp ' + p + ' ' + loc['loc'])
mod_tool.send_keys("{ENTER}") # needs Enter key
time.sleep(0.1)
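# Example of the chat command sent for the 'home' location on the 1.11.2 server:
#   /tp DynamiteBuilder 61 64 239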
| gpl-2.0 | -5,863,558,261,222,107,000 | 31.285714 | 89 | 0.494248 | false | 3.033557 | false | false | false |
keenondrums/sovrin-node | sovrin_client/agent/walleted.py | 1 | 42903 | import asyncio
import collections
import inspect
import json
import time
from datetime import datetime
from typing import Dict, List, Union
from base58 import b58decode
from common.serializers.serialization import serialize_msg_for_signing
from stp_core.common.log import getlogger
from plenum.common.signer_did import DidSigner
from plenum.common.constants import TYPE, DATA, NONCE, IDENTIFIER, NAME, VERSION, \
TARGET_NYM, ATTRIBUTES, VERKEY, VERIFIABLE_ATTRIBUTES, PREDICATES
from plenum.common.types import f
from plenum.common.util import getTimeBasedId, getCryptonym, \
isMaxCheckTimeExpired, convertTimeBasedReqIdToMillis, friendlyToRaw
from plenum.common.verifier import DidVerifier
from anoncreds.protocol.issuer import Issuer
from anoncreds.protocol.prover import Prover
from anoncreds.protocol.verifier import Verifier
from anoncreds.protocol.globals import TYPE_CL
from anoncreds.protocol.types import AttribDef, ID, ProofRequest, AvailableClaim
from plenum.common.exceptions import NotConnectedToAny
from sovrin_client.agent.agent_issuer import AgentIssuer
from sovrin_client.agent.backend import BackendSystem
from sovrin_client.agent.agent_prover import AgentProver
from sovrin_client.agent.agent_verifier import AgentVerifier
from sovrin_client.agent.constants import ALREADY_ACCEPTED_FIELD, CLAIMS_LIST_FIELD, \
REQ_MSG, PING, ERROR, EVENT, EVENT_NAME, EVENT_NOTIFY_MSG, \
EVENT_POST_ACCEPT_INVITE, PONG, EVENT_NOT_CONNECTED_TO_ANY_ENV
from sovrin_client.agent.exception import NonceNotFound, SignatureRejected
from sovrin_client.agent.helper import friendlyVerkeyToPubkey, rawVerkeyToPubkey
from sovrin_client.agent.msg_constants import ACCEPT_INVITE, CLAIM_REQUEST, \
PROOF, AVAIL_CLAIM_LIST, CLAIM, PROOF_STATUS, NEW_AVAILABLE_CLAIMS, \
REF_REQUEST_ID, REQ_AVAIL_CLAIMS, INVITE_ACCEPTED, PROOF_REQUEST
from sovrin_client.client.wallet.attribute import Attribute, LedgerStore
from sovrin_client.client.wallet.connection import Connection, constant
from sovrin_client.client.wallet.wallet import Wallet
from sovrin_common.exceptions import ConnectionNotFound, ConnectionAlreadyExists, \
NotConnectedToNetwork, LinkNotReady, VerkeyNotFound, RemoteEndpointNotFound
from sovrin_common.identity import Identity
from sovrin_common.constants import ENDPOINT
from sovrin_common.util import ensureReqCompleted
from sovrin_common.config import agentLoggingLevel
from sovrin_common.exceptions import InvalidConnectionException
from plenum.common.constants import PUBKEY
from sovrin_common.util import getNonceForProof
logger = getlogger()
logger.setLevel(agentLoggingLevel)
class Walleted(AgentIssuer, AgentProver, AgentVerifier):
"""
An agent with a self-contained wallet.
Normally, other logic acts upon a remote agent. That other logic holds keys
and signs messages and transactions that the Agent then forwards. In this
case, the agent holds a wallet.
"""
def __init__(self,
issuer: Issuer = None,
prover: Prover = None,
verifier: Verifier = None):
AgentIssuer.__init__(self, issuer)
AgentProver.__init__(self, prover)
AgentVerifier.__init__(self, verifier)
# TODO Why are we syncing the client here?
if self.client:
self.syncClient()
self.rcvdMsgStore = {} # type: Dict[reqId, [reqMsg]]
self.msgHandlers = {
ERROR: self._handleError,
EVENT: self._eventHandler,
PING: self._handlePing,
ACCEPT_INVITE: self._handleAcceptance,
REQ_AVAIL_CLAIMS: self.processReqAvailClaims,
CLAIM_REQUEST: self.processReqClaim,
CLAIM: self.handleReqClaimResponse,
PROOF: self.verifyProof,
PROOF_STATUS: self.handleProofStatusResponse,
PROOF_REQUEST: self.handleProofRequest,
PONG: self._handlePong,
INVITE_ACCEPTED: self._handleAcceptInviteResponse,
AVAIL_CLAIM_LIST: self._handleAvailableClaimsResponse,
NEW_AVAILABLE_CLAIMS: self._handleNewAvailableClaimsDataResponse
}
self.logger = logger
self.issuer_backend = None
self._invites = {} # type: Dict[Nonce, Tuple(InternalId, str)]
self._attribDefs = {} # type: Dict[str, AttribDef]
self.defined_claims = [] # type: List[Dict[str, Any]
# dict for proof request schema Dict[str, Dict[str, any]]
self._proofRequestsSchema = {}
def syncClient(self):
obs = self._wallet.handleIncomingReply
if not self.client.hasObserver(obs):
self.client.registerObserver(obs)
self._wallet.pendSyncRequests()
prepared = self._wallet.preparePending()
self.client.submitReqs(*prepared)
@property
def wallet(self) -> Wallet:
return self._wallet
@wallet.setter
def wallet(self, wallet):
self._wallet = wallet
@property
def lockedMsgs(self):
# Msgs for which signature verification is required
return ACCEPT_INVITE, CLAIM_REQUEST, PROOF, \
CLAIM, AVAIL_CLAIM_LIST, EVENT, PONG, REQ_AVAIL_CLAIMS
async def postProofVerif(self, claimName, link, frm):
raise NotImplementedError
def is_claim_available(self, link, claim_name):
return any(
ac[NAME] == claim_name for ac in self._get_available_claim_list_by_internal_id(
link.internalId))
async def _postProofVerif(self, claimName, link, frm):
link.verifiedClaimProofs.append(claimName)
await self.postProofVerif(claimName, link, frm)
async def _set_available_claim_by_internal_id(self, internal_id, schema_id):
sd = await self.schema_dict_from_id(schema_id)
try:
if not any(
d == sd for d in self.issuer.wallet.availableClaimsByInternalId[internal_id]):
self.issuer.wallet.availableClaimsByInternalId[internal_id].append(
sd)
except KeyError:
self.issuer.wallet.availableClaimsByInternalId[internal_id] = [sd]
def _get_available_claim_list_by_internal_id(self, internal_id):
return self.issuer.wallet.availableClaimsByInternalId.get(
internal_id, set())
def get_available_claim_list(self, link):
li = self.wallet.getConnectionBy(remote=link.remoteIdentifier)
# TODO: Need to return set instead of list, but if we return set,
# stack communication fails as set is not json serializable,
# need to work on that.
if li is None:
return list()
return list(
self._get_available_claim_list_by_internal_id(li.internalId))
def getErrorResponse(self, reqBody, errorMsg="Error"):
invalidSigResp = {
TYPE: ERROR,
DATA: errorMsg,
REQ_MSG: reqBody,
}
return invalidSigResp
def logAndSendErrorResp(self, to, reqBody, respMsg, logMsg):
logger.warning(logMsg)
self.signAndSend(msg=self.getErrorResponse(reqBody, respMsg),
signingIdr=self.wallet.defaultId, name=to)
# TODO: Verification needs to be moved out of it,
# use `verifySignature` instead
def verifyAndGetLink(self, msg):
body, (frm, ha) = msg
nonce = body.get(NONCE)
try:
kwargs = dict(nonce=nonce, remoteIdr=body.get(
f.IDENTIFIER.nm), remoteHa=ha)
if ha is None:
                # In the case of ZStack, the sender's public key is used.
kwargs.update(remotePubkey=frm)
return self.linkFromNonce(**kwargs)
except NonceNotFound:
self.logAndSendErrorResp(frm, body,
"Nonce not found",
"Nonce not found for msg: {}".format(msg))
return None
def linkFromNonce(self, nonce, remoteIdr, remoteHa=None,
remotePubkey=None):
internalId = self.get_internal_id_by_nonce(nonce)
linkName = self.get_link_name_by_internal_id(internalId)
link = self.wallet.getConnectionBy(internalId=internalId)
if not link:
# QUESTION: We use wallet.defaultId as the local identifier,
# this looks ok for test code, but not production code
link = Connection(linkName,
self.wallet.defaultId,
self.wallet.getVerkey(),
request_nonce=nonce,
remoteIdentifier=remoteIdr,
remoteEndPoint=remoteHa,
internalId=internalId,
remotePubkey=remotePubkey)
self.wallet.addConnection(link)
else:
link.remoteIdentifier = remoteIdr
link.remoteEndPoint = remoteHa
return link
def get_internal_id_by_nonce(self, nonce):
if nonce in self._invites:
return self._invites[nonce][0]
else:
raise NonceNotFound
def get_link_name_by_internal_id(self, internalId):
for invite in self._invites.values():
if invite[0] == internalId:
return invite[1]
def set_issuer_backend(self, backend: BackendSystem):
self.issuer_backend = backend
async def publish_issuer_keys(self, schema_id, p_prime, q_prime):
keys = await self.issuer.genKeys(schema_id,
p_prime=p_prime,
q_prime=q_prime)
await self.add_to_available_claims(schema_id)
return keys
async def schema_dict_from_id(self, schema_id):
schema = await self.issuer.wallet.getSchema(schema_id)
return self.schema_dict(schema)
async def publish_revocation_registry(self, schema_id, rev_reg_id='110', size=5):
return await self.issuer.issueAccumulator(schemaId=schema_id,
iA=rev_reg_id,
L=size)
def schema_dict(self, schema):
return {
NAME: schema.name,
VERSION: schema.version,
"schemaSeqNo": schema.seqId
}
async def add_to_available_claims(self, schema_id):
schema = await self.issuer.wallet.getSchema(schema_id)
self.defined_claims.append(self.schema_dict(schema))
async def publish_schema(self,
attrib_def_name,
schema_name,
schema_version):
attribDef = self._attribDefs[attrib_def_name]
schema = await self.issuer.genSchema(schema_name,
schema_version,
attribDef.attribNames())
schema_id = ID(schemaKey=schema.getKey(), schemaId=schema.seqId)
return schema_id
def add_attribute_definition(self, attr_def: AttribDef):
self._attribDefs[attr_def.name] = attr_def
async def get_claim(self, schema_id: ID):
return await self.prover.wallet.getClaimAttributes(schema_id)
def new_identifier(self, seed=None):
idr, _ = self.wallet.addIdentifier(seed=seed)
verkey = self.wallet.getVerkey(idr)
return idr, verkey
def get_link_by_name(self, name):
return self.wallet.getConnection(str(name))
def signAndSendToLink(self, msg, linkName, origReqId=None):
link = self.wallet.getConnection(linkName, required=True)
if not link.localIdentifier:
raise LinkNotReady('connection is not yet established, '
'send/accept request first')
ha = link.getRemoteEndpoint(required=False)
name = link.name
if not ha:
            # If no remote address is present, then this is an incoming connection, so we may
            # have no explicit connection (we work in listener mode).
            # The public key is used as the name in this case.
name = link.remotePubkey
if ha:
self.connectTo(link=link)
return self.signAndSend(msg=msg, signingIdr=link.localIdentifier,
name=name, ha=ha, origReqId=origReqId)
def signAndSend(self, msg, signingIdr, name=None, ha=None, origReqId=None):
msg[f.REQ_ID.nm] = getTimeBasedId()
if origReqId:
msg[REF_REQUEST_ID] = origReqId
msg[IDENTIFIER] = signingIdr
signature = self.wallet.signMsg(msg, signingIdr)
msg[f.SIG.nm] = signature
self.sendMessage(msg, name=name, ha=ha)
return msg[f.REQ_ID.nm]
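    # Illustrative result of signAndSend above: the caller's msg (which already
    # carries TYPE) gains f.REQ_ID.nm, REF_REQUEST_ID (only when origReqId is
    # given), IDENTIFIER and f.SIG.nm before being sent; the values below are
    # made-up placeholders:
    #   {TYPE: 'ping', f.REQ_ID.nm: 1500000000000000, IDENTIFIER: '<signing DID>',
    #    f.SIG.nm: '<base58 signature from wallet.signMsg>'}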
@staticmethod
def getCommonMsg(typ, data):
msg = {
TYPE: typ,
DATA: data
}
return msg
@classmethod
def createInviteAcceptedMsg(cls, claimLists, alreadyAccepted=False):
data = {
CLAIMS_LIST_FIELD: claimLists
}
if alreadyAccepted:
data[ALREADY_ACCEPTED_FIELD] = alreadyAccepted
return cls.getCommonMsg(INVITE_ACCEPTED, data)
@classmethod
def createNewAvailableClaimsMsg(cls, claimLists):
data = {
CLAIMS_LIST_FIELD: claimLists
}
return cls.getCommonMsg(NEW_AVAILABLE_CLAIMS, data)
@classmethod
def createClaimMsg(cls, claim):
return cls.getCommonMsg(CLAIM, claim)
def _eventHandler(self, msg):
body, _ = msg
eventName = body[EVENT_NAME]
data = body[DATA]
self.notifyEventListeners(eventName, **data)
def notifyEventListeners(self, eventName, **data):
for el in self._eventListeners.get(eventName, []):
el(notifier=self, **data)
def notifyMsgListener(self, msg):
self.notifyEventListeners(EVENT_NOTIFY_MSG, msg=msg)
def isSignatureVerifRespRequired(self, typ):
return typ in self.lockedMsgs and typ not in [EVENT, PING, PONG]
def sendSigVerifResponseMsg(self, respMsg, to, reqMsgTyp, identifier):
if self.isSignatureVerifRespRequired(reqMsgTyp):
self.notifyToRemoteCaller(EVENT_NOTIFY_MSG,
respMsg, identifier, to)
def handleEndpointMessage(self, msg):
body, frm = msg
logger.debug("Message received (from -> {}): {}".format(frm, body))
if isinstance(frm, bytes):
frm = frm.decode()
for reqFieldName in (TYPE, f.REQ_ID.nm):
reqFieldValue = body.get(reqFieldName)
if not reqFieldValue:
errorMsg = "{} not specified in message: {}".format(
reqFieldName, body)
self.notifyToRemoteCaller(EVENT_NOTIFY_MSG,
errorMsg, self.wallet.defaultId, frm)
logger.warning("{}".format(errorMsg))
return
typ = body.get(TYPE)
link = self.wallet.getConnectionBy(remote=body.get(f.IDENTIFIER.nm))
# If accept invite is coming the first time, then use the default
# identifier of the wallet since link wont be created
if typ == ACCEPT_INVITE and link is None:
localIdr = self.wallet.defaultId
else:
# if accept invite is not the message type
# and we are still missing link, then return the error
if link is None:
linkNotCreated = ' Error processing {}. ' \
'Connection is not yet created.'.format(typ)
self.notifyToRemoteCaller(EVENT_NOTIFY_MSG,
linkNotCreated,
self.wallet.defaultId,
frm)
return
localIdr = link.localIdentifier
if typ in self.lockedMsgs:
try:
self.verifySignature(body)
except SignatureRejected:
self.sendSigVerifResponseMsg("\nSignature rejected.",
frm, typ, localIdr)
return
reqId = body.get(f.REQ_ID.nm)
oldResps = self.rcvdMsgStore.get(reqId)
if oldResps:
oldResps.append(msg)
else:
self.rcvdMsgStore[reqId] = [msg]
# TODO: Question: Should we sending an acknowledgement for every message?
# We are sending, ACKs for "signature accepted" messages too
self.sendSigVerifResponseMsg("\nSignature accepted.",
frm, typ, localIdr)
handler = self.msgHandlers.get(typ)
if handler:
# TODO we should verify signature here
frmHa = self.endpoint.getHa(frm)
# `frmHa` can be None
res = handler((body, (frm, frmHa)))
if inspect.isawaitable(res):
self.loop.call_soon(asyncio.ensure_future, res)
else:
raise NotImplementedError("No type handle found for {} message".
format(typ))
def _handleError(self, msg):
body, _ = msg
self.notifyMsgListener("Error ({}) occurred while processing this "
"msg: {}".format(body[DATA], body[REQ_MSG]))
def _handlePing(self, msg):
body, (frm, ha) = msg
link = self.wallet.getConnectionBy(nonce=body.get(NONCE))
if link:
self.logger.info('Ping sent to %s', link.remoteIdentifier)
self.signAndSend({TYPE: 'pong'}, self.wallet.defaultId, frm,
origReqId=body.get(f.REQ_ID.nm))
def _handlePong(self, msg):
body, (frm, ha) = msg
identifier = body.get(IDENTIFIER)
if identifier:
li = self._getLinkByTarget(getCryptonym(identifier))
if li:
self.logger.info('Pong received from %s', li.remoteIdentifier)
self.notifyMsgListener(" Pong received.")
else:
self.notifyMsgListener(
" Pong received from unknown endpoint.")
else:
self.notifyMsgListener(' Identifier is not yet set.')
def _handleNewAvailableClaimsDataResponse(self, msg):
body, _ = msg
isVerified = self.verifySignature(body)
if isVerified:
identifier = body.get(IDENTIFIER)
li = self._getLinkByTarget(getCryptonym(identifier))
if li:
self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm))
rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD]
newAvailableClaims = self._getNewAvailableClaims(
li, rcvdAvailableClaims)
if newAvailableClaims:
li.availableClaims.extend(newAvailableClaims)
claimNames = ", ".join(
[n for n, _, _ in newAvailableClaims])
self.notifyMsgListener(
" Available Claim(s): {}\n".format(claimNames))
else:
self.notifyMsgListener("No matching connection found")
@staticmethod
def _getNewAvailableClaims(
li, rcvdAvailableClaims) -> List[AvailableClaim]:
receivedClaims = [AvailableClaim(cl[NAME],
cl[VERSION],
li.remoteIdentifier)
for cl in rcvdAvailableClaims]
existingAvailableClaims = set(li.availableClaims)
newReceivedClaims = set(receivedClaims)
return list(newReceivedClaims - existingAvailableClaims)
def _handleAvailableClaimsResponse(self, msg):
body, _ = msg
identifier = body.get(IDENTIFIER)
li = self._getLinkByTarget(getCryptonym(identifier))
if li:
rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD]
if len(rcvdAvailableClaims) > 0:
self.notifyMsgListener(" Available Claim(s): {}". format(
",".join([rc.get(NAME) for rc in rcvdAvailableClaims])))
else:
self.notifyMsgListener(" Available Claim(s): "
"No available claims found")
def _handleAcceptInviteResponse(self, msg):
body, _ = msg
identifier = body.get(IDENTIFIER)
li = self._getLinkByTarget(getCryptonym(identifier))
if li:
# TODO: Show seconds took to respond
self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm))
self.notifyMsgListener(" Trust established.")
alreadyAccepted = body[DATA].get(ALREADY_ACCEPTED_FIELD)
if alreadyAccepted:
self.notifyMsgListener(" Already accepted.")
else:
self.notifyMsgListener(" DID created in Sovrin.")
li.connection_status = constant.CONNECTION_STATUS_ACCEPTED
rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD]
newAvailableClaims = self._getNewAvailableClaims(
li, rcvdAvailableClaims)
if newAvailableClaims:
li.availableClaims.extend(newAvailableClaims)
self.notifyMsgListener(" Available Claim(s): {}". format(
",".join([rc.get(NAME) for rc in rcvdAvailableClaims])))
try:
self._checkIfLinkIdentifierWrittenToSovrin(
li, newAvailableClaims)
except NotConnectedToAny:
self.notifyEventListeners(
EVENT_NOT_CONNECTED_TO_ANY_ENV,
msg="Cannot check if identifier is written to Sovrin.")
else:
self.notifyMsgListener("No matching connection found")
def getVerkeyForLink(self, link):
# TODO: Get latest verkey for this link's remote identifier from Sovrin
if link.remoteVerkey:
return link.remoteVerkey
else:
raise VerkeyNotFound("verkey not set in connection")
def getLinkForMsg(self, msg):
nonce = msg.get(NONCE)
identifier = msg.get(f.IDENTIFIER.nm)
link = self.wallet.getConnectionBy(nonce=nonce, remote=identifier)
if link:
return link
else:
raise ConnectionNotFound
def verifySignature(self, msg: Dict[str, str]):
signature = msg.get(f.SIG.nm)
identifier = msg.get(IDENTIFIER)
msgWithoutSig = {k: v for k, v in msg.items() if k != f.SIG.nm}
# TODO This assumes the current key is the cryptonym. This is a BAD
# ASSUMPTION!!! Sovrin needs to provide the current key.
ser = serialize_msg_for_signing(msgWithoutSig)
signature = b58decode(signature.encode())
typ = msg.get(TYPE)
# TODO: Maybe keeping ACCEPT_INVITE open is a better option than keeping
# an if condition here?
if typ == ACCEPT_INVITE:
verkey = msg.get(VERKEY)
else:
try:
link = self.getLinkForMsg(msg)
verkey = self.getVerkeyForLink(link)
except (ConnectionNotFound, VerkeyNotFound):
# This is for verification of `NOTIFY` events
link = self.wallet.getConnectionBy(remote=identifier)
# TODO: If verkey is None, it should be fetched from Sovrin.
# Assuming CID for now.
verkey = link.remoteVerkey
v = DidVerifier(verkey, identifier=identifier)
if not v.verify(signature, ser):
raise SignatureRejected
else:
if typ == ACCEPT_INVITE:
self.logger.info('Signature accepted.')
return True
def _getLinkByTarget(self, target) -> Connection:
return self.wallet.getConnectionBy(remote=target)
def _checkIfLinkIdentifierWrittenToSovrin(
self, li: Connection, availableClaims):
req = self.getIdentity(li.localIdentifier)
self.notifyMsgListener("\nSynchronizing...")
def getNymReply(reply, err, availableClaims, li: Connection):
if reply.get(DATA) and json.loads(reply[DATA])[TARGET_NYM] == \
li.localIdentifier:
self.notifyMsgListener(
" Confirmed DID written to Sovrin.")
self.notifyEventListeners(
EVENT_POST_ACCEPT_INVITE, connection=li)
else:
self.notifyMsgListener(
" DID is not yet written to Sovrin")
self.loop.call_later(.2, ensureReqCompleted, self.loop, req.key,
self.client, getNymReply, (availableClaims, li))
def notifyResponseFromMsg(self, linkName, reqId=None):
if reqId:
# TODO: This logic assumes that the req id is time based
curTimeBasedId = getTimeBasedId()
timeTakenInMillis = convertTimeBasedReqIdToMillis(
curTimeBasedId - reqId)
if timeTakenInMillis >= 1000:
responseTime = ' ({} sec)'.format(
round(timeTakenInMillis / 1000, 2))
else:
responseTime = ' ({} ms)'.format(round(timeTakenInMillis, 2))
else:
responseTime = ''
self.notifyMsgListener("\nResponse from {}{}:".format(linkName,
responseTime))
def notifyToRemoteCaller(self, event, msg, signingIdr, to, origReqId=None):
resp = {
TYPE: EVENT,
EVENT_NAME: event,
DATA: {'msg': msg}
}
self.signAndSend(resp, signingIdr, to, origReqId=origReqId)
def _handleAcceptance(self, msg):
body, (frm, ha) = msg
link = self.verifyAndGetLink(msg)
# TODO this is really kludgy code... needs refactoring
# exception handling, separation of concerns, etc.
if not link:
return
logger.debug("proceeding with connection: {}".format(link.name))
identifier = body.get(f.IDENTIFIER.nm)
verkey = body.get(VERKEY)
idy = Identity(identifier, verkey=verkey)
link.remoteVerkey = verkey
try:
pendingCount = self.wallet.addTrustAnchoredIdentity(idy)
logger.debug("pending request count {}".format(pendingCount))
alreadyAdded = False
except Exception as e:
if e.args[0] in ['identifier already added']:
alreadyAdded = True
else:
logger.warning("Exception raised while adding nym, "
"error was: {}".format(e.args[0]))
raise e
def send_claims(reply=None, error=None):
return self.sendClaimList(link=link,
alreadyAdded=alreadyAdded,
sender=frm,
reqId=body.get(f.REQ_ID.nm),
reply=reply,
error=error)
if alreadyAdded:
send_claims()
logger.debug("already accepted, "
"so directly sending available claims")
self.logger.info('Already added identifier [{}] in sovrin'
.format(identifier))
# self.notifyToRemoteCaller(EVENT_NOTIFY_MSG,
# " Already accepted",
# link.verkey, frm)
else:
logger.debug(
"not added to the ledger, so add nym to the ledger "
"and then will send available claims")
reqs = self.wallet.preparePending()
# Assuming there was only one pending request
logger.debug("sending to sovrin {}".format(reqs[0]))
# Need to think through
# how to provide separate logging for each agent
# anyhow this class should be implemented by each agent
# so we might not even need to add it as a separate logic
self.logger.info('Creating identifier [{}] in sovrin'
.format(identifier))
self._sendToSovrinAndDo(reqs[0], clbk=send_claims)
# TODO: If I have the below exception thrown, somehow the
# error msg which is sent in verifyAndGetLink is not being received
# on the other end, so for now, commented, need to come back to this
# else:
# raise NotImplementedError
def sendClaimList(self, link, alreadyAdded, sender,
reqId, reply=None, error=None):
logger.debug("sending available claims to {}".format(
link.remoteIdentifier))
resp = self.createInviteAcceptedMsg(
self.get_available_claim_list(link),
alreadyAccepted=alreadyAdded)
self.signAndSend(resp, link.localIdentifier, sender,
origReqId=reqId)
def _sendToSovrinAndDo(self, req, clbk=None, *args, **kwargs):
self.client.submitReqs(req)
ensureReqCompleted(self.loop, req.key, self.client,
clbk, *args, **kwargs)
def newAvailableClaimsPostClaimVerif(self, claimName):
raise NotImplementedError
def sendNewAvailableClaimsData(self, nac, frm, link):
if len(nac) > 0:
resp = self.createNewAvailableClaimsMsg(nac)
self.signAndSend(resp, link.localIdentifier, frm)
def sendPing(self, linkName):
link = self.wallet.getConnection(linkName, required=True)
self.connectTo(link=link)
ha = link.getRemoteEndpoint(required=True)
params = dict(ha=ha)
msg = {
TYPE: 'ping',
NONCE: link.request_nonce,
f.REQ_ID.nm: getTimeBasedId(),
f.IDENTIFIER.nm: link.localIdentifier
}
reqId = self.sendMessage(msg, **params)
self.notifyMsgListener(" Ping sent.")
return reqId
def connectTo(self, linkName=None, link=None):
assert linkName or link
if link is None:
link = self.wallet.getConnection(linkName, required=True)
ha = link.getRemoteEndpoint(required=True)
verKeyRaw = friendlyToRaw(
link.full_remote_verkey) if link.full_remote_verkey else None
publicKeyRaw = friendlyToRaw(
link.remotePubkey) if link.remotePubkey else None
if verKeyRaw is None and publicKeyRaw is None:
raise InvalidConnectionException(
"verkey or publicKey is required for connection.")
if publicKeyRaw is None:
publicKeyRaw = rawVerkeyToPubkey(verKeyRaw)
self.endpoint.connectIfNotConnected(
name=link.name,
ha=ha,
verKeyRaw=verKeyRaw,
publicKeyRaw=publicKeyRaw)
# duplicate function
# def loadInvitationFile(self, filePath):
# with open(filePath) as data_file:
# request = json.load(
# data_file, object_pairs_hook=collections.OrderedDict)
# return self.load_request_dict(request)
def load_request_str(self, json_str):
request = json.loads(
json_str, object_pairs_hook=collections.OrderedDict)
return self.load_request_dict(request)
def load_request_dict(self, request_dict):
link_request = request_dict.get("connection-request")
if not link_request:
raise ConnectionNotFound
linkName = link_request["name"]
existingLinkInvites = self.wallet. \
getMatchingConnections(linkName)
if len(existingLinkInvites) >= 1:
return self._merge_request(request_dict)
Connection.validate(request_dict)
link = self.load_request(request_dict)
return link
def load_request(self, request_data):
link_request = request_data["connection-request"]
remoteIdentifier = link_request[f.IDENTIFIER.nm]
# TODO signature should be validated!
# signature = request_data["sig"]
link_request_name = link_request[NAME]
remoteEndPoint = link_request.get("endpoint", None)
remote_verkey = link_request.get("verkey", None)
linkNonce = link_request[NONCE]
proofRequestsJson = request_data.get("proof-requests", None)
proofRequests = []
if proofRequestsJson:
for cr in proofRequestsJson:
proofRequests.append(
ProofRequest(
cr[NAME],
cr[VERSION],
getNonceForProof(linkNonce),
cr[ATTRIBUTES],
cr[VERIFIABLE_ATTRIBUTES] if VERIFIABLE_ATTRIBUTES in cr else [],
cr[PREDICATES] if PREDICATES in cr else []))
self.notifyMsgListener("1 connection request found for {}.".
format(link_request_name))
self.notifyMsgListener("Creating connection for {}.".
format(link_request_name))
# TODO: Would we always have a trust anchor corresponding to a link?
li = Connection(name=link_request_name,
trustAnchor=link_request_name,
remoteIdentifier=remoteIdentifier,
remoteEndPoint=remoteEndPoint,
request_nonce=linkNonce,
proofRequests=proofRequests,
remote_verkey=remote_verkey)
self.wallet.addConnection(li)
return li
def load_request_file(self, filePath):
with open(filePath) as data_file:
request_data = json.load(
data_file, object_pairs_hook=collections.OrderedDict)
link_request = request_data.get("connection-request")
if not link_request:
raise ConnectionNotFound
linkName = link_request["name"]
existingLinkInvites = self.wallet. \
getMatchingConnections(linkName)
if len(existingLinkInvites) >= 1:
return self._merge_request(request_data)
Connection.validate(request_data)
link = self.load_request(request_data)
return link
def _merge_request(self, request_data):
link_request = request_data.get('connection-request')
linkName = link_request['name']
link = self.wallet.getConnection(linkName)
request_proof_requests = request_data.get('proof-requests',
None)
nonce = link_request.get(NONCE)
if request_proof_requests:
for icr in request_proof_requests:
# match is found if name and version are same
matchedProofRequest = next(
(cr for cr in link.proofRequests
if (cr.name == icr[NAME] and cr.version == icr[VERSION])),
None
)
# if link.requestedProofs contains any claim request
if matchedProofRequest:
# merge 'attributes' and 'verifiableAttributes'
matchedProofRequest.attributes = {
**matchedProofRequest.attributes,
**icr[ATTRIBUTES]
}
matchedProofRequest.verifiableAttributes = dict(
matchedProofRequest.verifiableAttributes, **icr[VERIFIABLE_ATTRIBUTES])
else:
# otherwise append proof request to link
link.proofRequests.append(
ProofRequest(
icr[NAME],
icr[VERSION],
getNonceForProof(nonce),
attributes=icr[ATTRIBUTES],
verifiableAttributes=icr[VERIFIABLE_ATTRIBUTES]))
return link
else:
raise ConnectionAlreadyExists
def accept_request(self, link: Union[str, Connection]):
if isinstance(link, str):
link = self.wallet.getConnection(link, required=True)
elif isinstance(link, Connection):
pass
else:
raise TypeError(
"Type of connection must be either string or Link but "
"provided {}".format(
type(link)))
# TODO should move to wallet in a method like accept(link)
if not link.localIdentifier:
self.create_identifier_for_link(link)
msg = {
TYPE: ACCEPT_INVITE,
# TODO should not send this... because origin should be the sender
NONCE: link.request_nonce,
VERKEY: self.wallet.getVerkey(link.localIdentifier)
}
logger.debug("{} accepting request from {} with id {}".
format(self.name, link.name, link.remoteIdentifier))
self.logger.info('Accepting request with nonce {} from id {}'
.format(link.request_nonce, link.remoteIdentifier))
self.signAndSendToLink(msg, link.name)
# def _handleSyncNymResp(self, link, additionalCallback):
# def _(reply, err):
# if err:
# raise RuntimeError(err)
# reqId = self._updateLinkWithLatestInfo(link, reply)
# if reqId:
# self.loop.call_later(.2,
# self.executeWhenResponseRcvd,
# time.time(), 8000,
# self.loop, reqId, PONG, True,
# additionalCallback, reply, err)
# else:
# additionalCallback(reply, err)
#
# return _
def create_identifier_for_link(self, link):
signer = DidSigner()
self.wallet.addIdentifier(signer=signer)
link.localIdentifier = signer.identifier
link.localVerkey = signer.verkey
def _handleSyncResp(self, link, additionalCallback):
def _(reply, err):
if err:
raise RuntimeError(err)
reqId = self._updateLinkWithLatestInfo(link, reply)
if reqId:
self.loop.call_later(.2,
self.executeWhenResponseRcvd,
time.time(), 8000,
self.loop, reqId, PONG, True,
additionalCallback, reply, err)
else:
if callable(additionalCallback):
additionalCallback(reply, err)
return _
def _updateLinkWithLatestInfo(self, link: Connection, reply):
if DATA in reply and reply[DATA]:
data = json.loads(reply[DATA])
verkey = data.get(VERKEY)
if verkey is not None:
link.remoteVerkey = data[VERKEY]
ep = data.get(ENDPOINT)
if isinstance(ep, dict):
# TODO: Validate its an IP port pair or a malicious entity
# can crash the code
if 'ha' in ep:
ip, port = ep['ha'].split(":")
link.remoteEndPoint = (ip, int(port))
if PUBKEY in ep:
link.remotePubkey = ep[PUBKEY]
else:
link.remotePubkey = friendlyVerkeyToPubkey(
link.full_remote_verkey) if link.full_remote_verkey else None
link.connection_last_synced = datetime.now()
self.notifyMsgListener(
" Connection {} synced".format(link.name))
def _pingToEndpoint(self, name, endpoint):
self.notifyMsgListener("\nPinging target endpoint: {}".
format(endpoint))
reqId = self.sendPing(linkName=name)
return reqId
def sync(self, linkName, doneCallback=None):
if not self.client.isReady():
raise NotConnectedToNetwork
link = self.wallet.getConnection(linkName, required=True)
identifier = link.remoteIdentifier
identity = Identity(identifier=identifier)
req = self.wallet.requestIdentity(identity,
sender=self.wallet.defaultId)
self.client.submitReqs(req)
self.loop.call_later(.2,
ensureReqCompleted,
self.loop,
req.key,
self.client,
self._handleSyncResp(link, None))
attrib = Attribute(name=ENDPOINT,
value=None,
dest=identifier,
ledgerStore=LedgerStore.RAW)
req = self.wallet.requestAttribute(
attrib, sender=self.wallet.defaultId)
self.client.submitReqs(req)
self.loop.call_later(.2,
ensureReqCompleted,
self.loop,
req.key,
self.client,
self._handleSyncResp(link, doneCallback))
def executeWhenResponseRcvd(self, startTime, maxCheckForMillis,
loop, reqId, respType,
checkIfLinkExists, clbk, *args):
if isMaxCheckTimeExpired(startTime, maxCheckForMillis):
clbk(
None, "No response received within specified time ({} mills). "
"Retry the command and see if that works.\n". format(maxCheckForMillis))
else:
found = False
rcvdResponses = self.rcvdMsgStore.get(reqId)
if rcvdResponses:
for msg in rcvdResponses:
body, frm = msg
if body.get(TYPE) == respType:
if checkIfLinkExists:
identifier = body.get(IDENTIFIER)
li = self._getLinkByTarget(
getCryptonym(identifier))
linkCheckOk = li is not None
else:
linkCheckOk = True
if linkCheckOk:
found = True
break
if found:
clbk(*args)
else:
loop.call_later(.2, self.executeWhenResponseRcvd,
startTime, maxCheckForMillis, loop,
reqId, respType, checkIfLinkExists, clbk, *args)
| apache-2.0 | -2,601,363,187,121,016,000 | 40.016252 | 98 | 0.572687 | false | 4.309694 | false | false | false |
derks/cement | cement/core/arg.py | 1 | 3461 | """
Cement core argument module.
"""
from ..core import backend, exc, interface, handler
Log = backend.minimal_logger(__name__)
def argument_validator(klass, obj):
"""Validates a handler implementation against the IArgument interface."""
members = [
'_setup',
'parse',
'parsed_args',
'add_argument',
]
interface.validate(IArgument, obj, members)
class IArgument(interface.Interface):
"""
This class defines the Argument Handler Interface. Classes that
implement this handler must provide the methods and attributes defined
below. Implementations do *not* subclass from interfaces.
Example:
.. code-block:: python
from cement.core import interface, arg
class MyArgumentHandler(arg.CementArgumentHandler):
class Meta:
interface = arg.IArgument
label = 'my_argument_handler'
"""
class IMeta:
label = 'argument'
validator = argument_validator
# Must be provided by the implementation
Meta = interface.Attribute('Handler Meta-data')
parsed_args = interface.Attribute('Parsed args object')
def _setup(app_obj):
"""
The _setup function is called during application initialization and
must 'setup' the handler object making it ready for the framework
or the application to make further calls to it.
Required Arguments:
app_obj
The application object.
Return: None
"""
def add_argument(self, *args, **kw):
"""
Add arguments for parsing. This should be -o/--option or positional.
Positional Arguments:
args
List of option arguments. Generally something like
['-h', '--help'].
Optional Arguments
dest
The destination name (var). Default: arg[0]'s string.
help
The help text for --help output (for that argument).
action
Must support: ['store', 'store_true', 'store_false',
'store_const']
const
The value stored if action == 'store_const'.
default
The default value.
Return: None
"""
def parse(self, arg_list):
"""
Parse the argument list (i.e. sys.argv). Can return any object as
        long as its members contain those of the added arguments. For
example, if adding a '-v/--version' option that stores to the dest of
'version', then the member must be callable as 'Object().version'.
Must also set self.parsed_args to what is being returned.
Required Arguments:
arg_list
A list of command line arguments.
Return: Callable
"""
class CementArgumentHandler(handler.CementBaseHandler):
"""
Base class that all Argument Handlers should sub-class from.
"""
class Meta:
label = None
interface = IArgument
def __init__(self, *args, **kw):
super(CementArgumentHandler, self).__init__(*args, **kw)
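# Illustrative sketch (added; not part of cement): a minimal argparse-backed
# implementation of the IArgument interface documented above. The class name
# and label are hypothetical; cement ships its own argument handler, and this
# only shows how _setup(), add_argument() and parse() fit together.
import argparse
class ExampleArgParseHandler(CementArgumentHandler):
    class Meta:
        interface = IArgument
        label = 'example_argparse'
    def __init__(self, *args, **kw):
        super(ExampleArgParseHandler, self).__init__(*args, **kw)
        self._parser = argparse.ArgumentParser()
        self.parsed_args = None
    def _setup(self, app_obj):
        # Keep a reference to the application object for later use.
        self.app = app_obj
    def add_argument(self, *args, **kw):
        # Delegate -o/--option and positional definitions to argparse.
        self._parser.add_argument(*args, **kw)
    def parse(self, arg_list):
        # Parse the given list (e.g. sys.argv[1:]) and expose the result both
        # as the return value and as self.parsed_args.
        self.parsed_args = self._parser.parse_args(arg_list)
        return self.parsed_args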
| bsd-3-clause | -439,128,202,592,356,900 | 27.138211 | 79 | 0.540306 | false | 5.196697 | false | false | false |
synnick/roboronya | roboronya/plugins/cholify.py | 1 | 1871 | import random
from roboronya.plugins.plugin import Plugin
class Cholificator(Plugin):
description = 'Roboronya will use her *Automated Cholification Algorithm* (Patent Pending) to translate your text to a more sophisticated language.'
name = 'cholify'
@Plugin.requires_args
def run(roboronya, conv, cmd_args, **kwargs):
def _cholify(words):
choloWords = []
for word in words:
choloWord = ''
oldChar = ''
for char in word.lower():
if char == 'y':
choloWord += 'ii'
elif char == 't':
choloWord += 'th'
elif char == 'u' and (oldChar == 'q'):
choloWord += random.choice(['kh', 'k'])
elif (char == 'i' or char == 'e') and oldChar == 'c':
choloWord = choloWord[:-1]
choloWord += random.choice(['s', 'z']) + char
elif char == 'h' and oldChar == 'c':
choloWord = choloWord[:-1]
choloWord += random.choice(['zh', 'sh'])
elif char == 'c':
choloWord += 'k'
elif char == 's':
choloWord += 'z'
elif char == 'v':
choloWord += 'b'
elif char == 'b':
choloWord += 'v'
elif char == 'q':
pass
else:
choloWord += char
oldChar = char
choloWords.append(choloWord)
return choloWords
return roboronya.send_message(
conv,
' '.join(_cholify(cmd_args)),
**kwargs
)
| mit | 2,323,757,040,166,518,300 | 36.42 | 152 | 0.405665 | false | 4.552311 | false | false | false |
geeag/kafka | tests/kafkatest/tests/core/reassign_partitions_test.py | 4 | 5559 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.mark import parametrize
from ducktape.utils.util import wait_until
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int
import random
class ReassignPartitionsTest(ProduceConsumeValidateTest):
"""
These tests validate partition reassignment.
Create a topic with few partitions, load some data, trigger partition re-assignment with and without broker failure,
check that partition re-assignment can complete and there is no data loss.
"""
def __init__(self, test_context):
""":type test_context: ducktape.tests.test.TestContext"""
super(ReassignPartitionsTest, self).__init__(test_context=test_context)
self.topic = "test_topic"
self.zk = ZookeeperService(test_context, num_nodes=1)
self.kafka = KafkaService(test_context, num_nodes=4, zk=self.zk, topics={self.topic: {
"partitions": 20,
"replication-factor": 3,
'configs': {"min.insync.replicas": 2}}
})
self.num_partitions = 20
self.timeout_sec = 60
self.producer_throughput = 1000
self.num_producers = 1
self.num_consumers = 1
def setUp(self):
self.zk.start()
def min_cluster_size(self):
# Override this since we're adding services outside of the constructor
return super(ReassignPartitionsTest, self).min_cluster_size() + self.num_producers + self.num_consumers
def clean_bounce_some_brokers(self):
"""Bounce every other broker"""
for node in self.kafka.nodes[::2]:
self.kafka.restart_node(node, clean_shutdown=True)
def reassign_partitions(self, bounce_brokers):
partition_info = self.kafka.parse_describe_topic(self.kafka.describe_topic(self.topic))
self.logger.debug("Partitions before reassignment:" + str(partition_info))
# jumble partition assignment in dictionary
seed = random.randint(0, 2 ** 31 - 1)
self.logger.debug("Jumble partition assignment with seed " + str(seed))
random.seed(seed)
# The list may still be in order, but that's ok
shuffled_list = range(0, self.num_partitions)
random.shuffle(shuffled_list)
for i in range(0, self.num_partitions):
partition_info["partitions"][i]["partition"] = shuffled_list[i]
self.logger.debug("Jumbled partitions: " + str(partition_info))
# send reassign partitions command
self.kafka.execute_reassign_partitions(partition_info)
if bounce_brokers:
# bounce a few brokers at the same time
self.clean_bounce_some_brokers()
# Wait until finished or timeout
wait_until(lambda: self.kafka.verify_reassign_partitions(partition_info), timeout_sec=self.timeout_sec, backoff_sec=.5)
@parametrize(security_protocol="PLAINTEXT", bounce_brokers=True)
@parametrize(security_protocol="PLAINTEXT", bounce_brokers=False)
def test_reassign_partitions(self, bounce_brokers, security_protocol):
"""Reassign partitions tests.
Setup: 1 zk, 3 kafka nodes, 1 topic with partitions=3, replication-factor=3, and min.insync.replicas=2
- Produce messages in the background
- Consume messages in the background
- Reassign partitions
- If bounce_brokers is True, also bounce a few brokers while partition re-assignment is in progress
- When done reassigning partitions and bouncing brokers, stop producing, and finish consuming
- Validate that every acked message was consumed
"""
self.kafka.security_protocol = security_protocol
self.kafka.interbroker_security_protocol = security_protocol
new_consumer = False if self.kafka.security_protocol == "PLAINTEXT" else True
self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic, throughput=self.producer_throughput)
self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic, new_consumer=new_consumer, consumer_timeout_ms=60000, message_validator=is_int)
self.kafka.start()
self.run_produce_consume_validate(core_test_action=lambda: self.reassign_partitions(bounce_brokers))
| apache-2.0 | 136,403,551,534,332,140 | 49.536364 | 182 | 0.679799 | false | 4.09352 | true | false | false |
h4ck3rm1k3/gcc_py_introspector | gcc/tree/attic/query_function_example.py | 1 | 42931 |
import prefix
import types
import json
import pprint
from graphviz import Digraph
from SPARQLWrapper import SPARQLWrapper, XML, N3, JSONLD, JSON, POST, GET, SELECT, CONSTRUCT, ASK, DESCRIBE
from SPARQLWrapper.Wrapper import _SPARQL_DEFAULT, _SPARQL_XML, _SPARQL_JSON, _SPARQL_POSSIBLE, _RDF_XML, _RDF_N3, _RDF_JSONLD, _RDF_POSSIBLE
from SPARQLWrapper.SPARQLExceptions import QueryBadFormed
# special tree, name only
fdecl = {
'name' : 'function decl tree',
'exprs' : {
'node:function_decl': {
'fld:body': {'skip': 'yes'},
'fld:args': {'node:parm_decl': '45'},
'fld:mngl': {'node:identifier_node': '528'},
'fld:name': {'node:identifier_node': '3082'},
},
}
}
just_vals = {
'name' : 'just values tree',
'exprs' : {
'node:function_decl': {
'fld:body': {'skip': 'yes'},
'fld:args': {'node:parm_decl': '45'},
'fld:mngl': {'node:identifier_node': '528'},
'fld:name': {'node:identifier_node': '3082'},
},
}
}
stree = {
'name' : 'addr expr tree',
'exprs':
{
'node:addr_expr': {
'fld:type': {
'node:function_decl': fdecl, #this could contain an entire function
}
}
}
}
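# Added commentary: these spec dicts drive recurse_ref() below. Under 'exprs'
# the mapping goes node type -> field name -> expected child node types; the
# numeric strings look like occurrence counts from a sample dump, and only key
# presence matters to the traversal. Fields that are absent, or marked with
# {'skip': 'yes'}, fall back to the shallow 'just_vals' lookup instead of a
# full recursion.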
tree = {
'name' : 'main tree',
'exprs':
{
'node:addr_expr': {
'fld:OP0': {
'node:pointer_type': '90'
},
'fld:type': {
#u'node:function_decl': u'78', this could contain an entire function
'node:string_cst': '9',
'node:var_decl': '3'
}
},
'node:array_ref': {'fld:OP0': {'node:component_ref': '3'},
'fld:OP1': {'node:var_decl': '3'}},
'node:bind_expr': {'fld:body': {'node:return_expr': '30',
'node:statement_list': '24'},
'fld:vars': {'node:var_decl': '21'}},
'node:bit_and_expr': {'fld:OP0': {'node:array_ref': '1',
'node:component_ref': '2',
'node:convert_expr': '4',
'node:nop_expr': '3',
'node:parm_decl': '2',
'node:plus_expr': '3'},
'fld:OP1': {'node:bit_not_expr': '1',
'node:integer_cst': '13',
'node:var_decl': '1'}},
'node:bit_ior_expr': {'fld:OP0': {'node:array_ref': '1',
'node:bit_and_expr': '3',
'node:bit_ior_expr': '1',
'node:nop_expr': '1'},
'fld:OP1': {'node:bit_and_expr': '2',
'node:lshift_expr': '3',
'node:var_decl': '1'}},
'node:bit_not_expr': {'fld:OP0': {'node:var_decl': '1'}},
'node:call_expr': {'fld:E0': {'node:ge_expr': '6',
'node:integer_cst': '10',
'node:nop_expr': '23',
'node:parm_decl': '18',
'node:var_decl': '7'},
'fld:E1': {'node:integer_cst': '12',
'node:nop_expr': '13',
'node:parm_decl': '8',
'node:var_decl': '2'},
'fld:E2': {'node:integer_cst': '8',
'node:parm_decl': '6',
'node:var_decl': '2'},
'fld:E3': {'node:integer_cst': '5',
'node:parm_decl': '2'},
'fld:fn': {'node:addr_expr': '76',
'node:parm_decl': '1'}},
'node:case_label_expr': {'fld:low': {'node:integer_cst': '4'},
'fld:name': {'node:label_decl': '5'}},
'node:component_ref': {'fld:OP0': {'node:indirect_ref': '25',
'node:var_decl': '1'},
'fld:OP1': {'node:field_decl': '26'}},
'node:compound_expr': {'fld:OP0': {'node:modify_expr': '2'},
'fld:OP1': {'node:integer_cst': '2'}},
'node:cond_expr': {'fld:OP0': {'node:eq_expr': '12',
'node:gt_expr': '2',
'node:le_expr': '2',
'node:lt_expr': '2',
'node:ne_expr': '28',
'node:truth_andif_expr': '14',
'node:truth_orif_expr': '4'},
'fld:OP1': {'node:bind_expr': '2',
'node:call_expr': '16',
'node:cond_expr': '1',
'node:convert_expr': '2',
'node:goto_expr': '12',
'node:modify_expr': '9',
'node:nop_expr': '5',
'node:statement_list': '17'},
'fld:OP2': {'node:call_expr': '4',
'node:cond_expr': '3',
'node:goto_expr': '12',
'node:integer_cst': '2',
'node:nop_expr': '6',
'node:parm_decl': '2',
'node:return_expr': '1'}},
'node:const_decl': {#u'fld:chain': {u'node:const_decl': u'462',
# u'node:type_decl': u'26'},
'fld:cnst': {'node:integer_cst': '488'},
'fld:name': {'node:identifier_node': '488'},
#u'fld:scpe': {u'node:translation_unit_decl': u'488'}
},
'node:convert_expr': {'fld:OP0': {'node:addr_expr': '1',
'node:call_expr': '1',
'node:parm_decl': '9',
'node:rshift_expr': '3'}},
'node:eq_expr': {'fld:OP0': {'node:call_expr': '2',
'node:nop_expr': '16',
'node:parm_decl': '1',
'node:var_decl': '6'},
'fld:OP1': {'node:integer_cst': '12',
'node:nop_expr': '7',
'node:parm_decl': '6'}},
'node:field_decl': {
#u'fld:bpos': {u'node:integer_cst': u'562'},
#u'fld:chain': {u'node:field_decl': u'427'},
'fld:name': {'node:identifier_node': '545'},
'fld:orig': {'node:field_decl': '2'},
#u'fld:size': {u'node:integer_cst': u'562'}
},
'node:function_decl': {'fld:args': {'node:parm_decl': '45'},
'fld:body': {'node:bind_expr': '51'},
#u'fld:chain': {u'node:function_decl': u'3059',
# u'node:type_decl': u'3',
# u'node:var_decl': u'19'},
'fld:mngl': {'node:identifier_node': '528'},
'fld:name': {'node:identifier_node': '3082'},
#u'fld:scpe': {u'node:translation_unit_decl': u'2767'}
},
'node:ge_expr': {'fld:OP0': {'node:component_ref': '6'},
'fld:OP1': {'node:component_ref': '6'}},
'node:goto_expr': {'fld:labl': {'node:label_decl': '46'}},
'node:gt_expr': {'fld:OP0': {'node:var_decl': '2'},
'fld:OP1': {'node:integer_cst': '2'}},
'node:indirect_ref': {'fld:OP0': {'node:call_expr': '2',
'node:nop_expr': '3',
'node:parm_decl': '38',
'node:pointer_plus_expr': '18',
'node:postincrement_expr': '7',
'node:var_decl': '15'}},
'node:label_decl': {'fld:name': {'node:identifier_node': '1'},
#u'fld:scpe': {u'node:function_decl': u'47'}
},
'node:label_expr': {'fld:name': {'node:label_decl': '42'}},
'node:le_expr': {'fld:OP0': {'node:nop_expr': '1',
'node:parm_decl': '1',
'node:plus_expr': '2'},
'fld:OP1': {'node:integer_cst': '4'}},
'node:lshift_expr': {'fld:OP0': {'node:bit_and_expr': '3',
'node:integer_cst': '3'},
'fld:OP1': {'node:bit_and_expr': '3',
'node:integer_cst': '3'}},
'node:lt_expr': {'fld:OP0': {'node:var_decl': '2'},
'fld:OP1': {'node:integer_cst': '1',
'node:var_decl': '1'}},
'node:modify_expr': {'fld:OP0': {'node:array_ref': '2',
'node:indirect_ref': '11',
'node:parm_decl': '1',
'node:result_decl': '50',
'node:var_decl': '49'},
'fld:OP1': {'node:bit_and_expr': '1',
'node:bit_ior_expr': '4',
'node:call_expr': '18',
'node:compound_expr': '2',
'node:cond_expr': '14',
'node:convert_expr': '4',
'node:indirect_ref': '1',
'node:integer_cst': '34',
'node:modify_expr': '1',
'node:ne_expr': '3',
'node:nop_expr': '6',
'node:parm_decl': '2',
'node:plus_expr': '1',
'node:pointer_plus_expr': '1',
'node:postincrement_expr': '1',
'node:preincrement_expr': '1',
'node:trunc_div_expr': '1',
'node:var_decl': '18'}},
'node:mult_expr': {'fld:OP0': {'node:nop_expr': '2',
'node:var_decl': '1'},
'fld:OP1': {'node:integer_cst': '2',
'node:parm_decl': '1'}},
'node:ne_expr': {'fld:OP0': {'node:bit_and_expr': '3',
'node:call_expr': '9',
'node:component_ref': '1',
'node:modify_expr': '2',
'node:nop_expr': '25',
'node:parm_decl': '1',
'node:var_decl': '18'},
'fld:OP1': {'node:integer_cst': '48',
'node:parm_decl': '11'}},
'node:nop_expr': {'fld:OP0': {'node:addr_expr': '13',
'node:array_ref': '1',
'node:bit_ior_expr': '1',
'node:call_expr': '7',
'node:component_ref': '2',
'node:convert_expr': '3',
'node:indirect_ref': '40',
'node:modify_expr': '3',
'node:mult_expr': '3',
'node:nop_expr': '3',
'node:parm_decl': '24',
'node:plus_expr': '3',
'node:postincrement_expr': '3',
'node:var_decl': '31'}},
'node:parm_decl': {'fld:chain': {'node:parm_decl': '48'},
'fld:name': {'node:identifier_node': '93'},
#u'fld:scpe': {u'node:function_decl': u'93'},
#u'fld:size': {u'node:integer_cst': u'93'}
}
,
'node:plus_expr': {'fld:OP0': {'node:nop_expr': '2',
'node:parm_decl': '6',
'node:var_decl': '2'},
'fld:OP1': {'node:integer_cst': '9',
'node:var_decl': '1'}},
'node:pointer_plus_expr': {'fld:OP0': {'node:indirect_ref': '2',
'node:parm_decl': '17'},
'fld:OP1': {'node:integer_cst': '1',
'node:nop_expr': '18'}},
'node:postdecrement_expr': {'fld:OP0': {'node:var_decl': '1'},
'fld:OP1': {'node:integer_cst': '1'}},
'node:postincrement_expr': {'fld:OP0': {'node:component_ref': '6',
'node:indirect_ref': '1',
'node:parm_decl': '2',
'node:var_decl': '3'},
'fld:OP1': {'node:integer_cst': '12'}},
'node:preincrement_expr': {'fld:OP0': {'node:parm_decl': '3',
'node:var_decl': '9'},
'fld:OP1': {'node:integer_cst': '12'}},
'node:result_decl': {
#u'fld:scpe': {u'node:function_decl': u'49'},
# u'fld:size': {u'node:integer_cst': u'49'}
},
'node:return_expr': {'fld:expr': {'node:modify_expr': '50'}},
'node:rshift_expr': {'fld:OP0': {'node:parm_decl': '3'},
'fld:OP1': {'node:integer_cst': '3'}},
'node:statement_list': {'fld:E0': {'node:call_expr': '4',
'node:case_label_expr': '1',
'node:decl_expr': '21',
'node:goto_expr': '2',
'node:modify_expr': '14'},
'fld:E1': {'node:call_expr': '4',
'node:case_label_expr': '1',
'node:cond_expr': '7',
'node:decl_expr': '8',
'node:goto_expr': '12',
'node:label_expr': '4',
'node:modify_expr': '4',
'node:postincrement_expr': '1',
'node:switch_expr': '1'},
'fld:E10': {'node:cond_expr': '2',
'node:label_expr': '1',
'node:modify_expr': '2'},
'fld:E11': {'node:call_expr': '1',
'node:cond_expr': '1',
'node:modify_expr': '1',
'node:postdecrement_expr': '1',
'node:return_expr': '1'},
'fld:E12': {'node:cond_expr': '1',
'node:goto_expr': '1',
'node:modify_expr': '1',
'node:return_expr': '1'},
'fld:E13': {'node:case_label_expr': '1',
'node:label_expr': '1',
'node:modify_expr': '1'},
'fld:E14': {'node:call_expr': '1',
'node:cond_expr': '2'},
'fld:E15': {'node:label_expr': '1',
'node:return_expr': '1'},
'fld:E16': {'node:return_expr': '1'},
'fld:E2': {'node:call_expr': '2',
'node:case_label_expr': '1',
'node:cond_expr': '3',
'node:convert_expr': '1',
'node:decl_expr': '2',
'node:goto_expr': '2',
'node:label_expr': '8',
'node:modify_expr': '4',
'node:preincrement_expr': '2',
'node:return_expr': '6'},
'fld:E3': {'node:call_expr': '2',
'node:cond_expr': '4',
'node:decl_expr': '2',
'node:label_expr': '3',
'node:modify_expr': '4',
'node:preincrement_expr': '6'},
'fld:E4': {'node:call_expr': '2',
'node:cond_expr': '6',
'node:decl_expr': '1',
'node:label_expr': '7',
'node:modify_expr': '1',
'node:preincrement_expr': '3',
'node:return_expr': '1'},
'fld:E5': {'node:call_expr': '1',
'node:cond_expr': '7',
'node:goto_expr': '3',
'node:label_expr': '4',
'node:modify_expr': '5'},
'fld:E6': {'node:call_expr': '1',
'node:cond_expr': '3',
'node:goto_expr': '1',
'node:label_expr': '10',
'node:modify_expr': '3',
'node:return_expr': '2'},
'fld:E7': {'node:bind_expr': '1',
'node:case_label_expr': '1',
'node:cond_expr': '3',
'node:goto_expr': '1',
'node:label_expr': '1',
'node:modify_expr': '3',
'node:return_expr': '6'},
'fld:E8': {'node:cond_expr': '3',
'node:label_expr': '2',
'node:modify_expr': '2',
'node:return_expr': '1'},
'fld:E9': {'node:cond_expr': '4',
'node:modify_expr': '1'}},
'node:switch_expr': {'fld:body': {'node:statement_list': '1'},
'fld:cond': {'node:var_decl': '1'}},
'node:tree_list': {'fld:chan': {'node:tree_list': '2714'},
'fld:purp': {'node:identifier_node': '488'},
'fld:valu': {'node:integer_cst': '488'}},
'node:trunc_div_expr': {'fld:OP0': {'node:nop_expr': '3',
'node:plus_expr': '1'},
'fld:OP1': {'node:integer_cst': '4'}},
'node:truth_andif_expr': {'fld:OP0': {'node:eq_expr': '1',
'node:ne_expr': '13',
'node:truth_andif_expr': '6'},
'fld:OP1': {'node:eq_expr': '2',
'node:le_expr': '2',
'node:ne_expr': '15',
'node:truth_and_expr': '1'}},
'node:truth_orif_expr': {'fld:OP0': {'node:eq_expr': '4',
'node:truth_orif_expr': '2'},
'fld:OP1': {'node:eq_expr': '6'}},
'node:type_decl': {#u'fld:chain': {u'node:const_decl': u'26',
# u'node:function_decl': u'5',
# u'node:type_decl': u'460'},
'fld:name': {'node:identifier_node': '318'},
#u'fld:scpe': {u'node:translation_unit_decl': u'449'}
},
'node:var_decl': {#u'fld:chain': {u'node:function_decl': u'18',
# u'node:label_decl': u'1',
# u'node:var_decl': u'106'},
'fld:init': {'node:indirect_ref': '3',
'node:integer_cst': '6',
'node:lshift_expr': '3',
'node:trunc_div_expr': '3',
'node:var_decl': '2'},
'fld:name': {'node:identifier_node': '146'},
#u'fld:scpe': {u'node:function_decl': u'34',
# u'node:translation_unit_decl': u'112'},
#u'fld:size': {u'node:integer_cst': u'134'}
},
'node:enumeral_type': {
#{u'fld:csts': {u'node:tree_list': u'31'},
'fld:max': {'node:integer_cst': '31'},
'fld:min': {'node:integer_cst': '31'},
'fld:name': {'node:identifier_node': '9',
'node:type_decl': '5'},
'fld:size': {'node:integer_cst': '31'},
#u'fld:unql': {u'node:enumeral_type': u'5'}
},
'node:integer_type': {'fld:max': {'node:integer_cst': '188'},
'fld:min': {'node:integer_cst': '188'},
'fld:name': {'node:identifier_node': '2',
'node:type_decl': '157'},
'fld:size': {'node:integer_cst': '188'},
#u'fld:unql': {u'node:integer_type': u'144'}
},
'node:pointer_type': {'fld:name': {'node:type_decl': '17'},
'fld:ptd': {'node:array_type': '7',
'node:function_type': '77',
'node:integer_type': '40',
'node:pointer_type': '18',
'node:real_type': '6',
'node:record_type': '129',
'node:union_type': '2',
'node:vector_type': '3',
'node:void_type': '9'},
'fld:size': {'node:integer_cst': '291'},
'fld:unql': {'node:pointer_type': '62'}},
},
# here are the types of objects that are ignored
'types': {
'node:array_ref': {'fld:type': {'node:integer_type': '3'}},
'node:array_type': {'fld:domn': {'node:integer_type': '49'},
'fld:elts': {'node:integer_type': '36',
'node:pointer_type': '7',
'node:record_type': '14'},
'fld:name': {'node:type_decl': '8'},
#u'fld:size': {u'node:integer_cst': u'49'},
'fld:unql': {'node:array_type': '12'}},
'node:bind_expr': {'fld:type': {'node:void_type': '54'}},
'node:bit_and_expr': {'fld:type': {'node:integer_type': '15'}},
'node:bit_ior_expr': {'fld:type': {'node:integer_type': '6'}},
'node:bit_not_expr': {'fld:type': {'node:integer_type': '1'}},
'node:boolean_type': {'fld:name': {'node:type_decl': '1'},
'fld:size': {'node:integer_cst': '1'}},
'node:call_expr': {'fld:type': {'node:integer_type': '46',
'node:pointer_type': '12',
'node:real_type': '1',
'node:void_type': '18'}},
'node:case_label_expr': {'fld:type': {'node:void_type': '5'}},
'node:complex_type': {'fld:name': {'node:type_decl': '4'},
'fld:size': {'node:integer_cst': '5'}},
'node:component_ref': {'fld:type': {'node:array_type': '3',
'node:enumeral_type': '1',
'node:integer_type': '2',
'node:pointer_type': '20'}},
'node:compound_expr': {'fld:type': {'node:integer_type': '2'}},
'node:cond_expr': {'fld:type': {'node:integer_type': '11',
'node:pointer_type': '3',
'node:void_type': '50'}},
'node:const_decl': {'fld:type': {'node:enumeral_type': '488'}},
'node:convert_expr': {'fld:type': {'node:integer_type': '11',
'node:pointer_type': '2',
'node:void_type': '1'}},
'node:decl_expr': {'fld:type': {'node:void_type': '34'}},
'node:enumeral_type': {'fld:csts': {'node:tree_list': '31'},
#u'fld:max': {u'node:integer_cst': u'31'},
#u'fld:min': {u'node:integer_cst': u'31'},
#u'fld:name': {u'node:identifier_node': u'9',
# u'node:type_decl': u'5'},
#u'fld:size': {u'node:integer_cst': u'31'},
'fld:unql': {'node:enumeral_type': '5'}},
'node:eq_expr': {'fld:type': {'node:integer_type': '25'}},
'node:pointer_type': {
'fld:name': {'node:type_decl': '17'},
'fld:ptd': {'node:array_type': '7',
'node:function_type': '77',
'node:integer_type': '40',
'node:pointer_type': '18',
'node:real_type': '6',
'node:record_type': '129',
'node:union_type': '2',
'node:vector_type': '3',
'node:void_type': '9'},
'fld:size': {'node:integer_cst': '291'},
'fld:unql': {'node:pointer_type': '62'}},
'node:field_decl': {
#u'fld:scpe': {u'node:record_type': u'459',
# u'node:union_type': u'103'},
'fld:type': {'node:array_type': '42',
'node:enumeral_type': '4',
'node:integer_type': '290',
'node:pointer_type': '169',
'node:real_type': '2',
'node:record_type': '29',
'node:union_type': '26'}},
'node:function_decl': {'fld:type': {'node:function_type': '3082'}},
'node:function_type': {'fld:name': {'node:type_decl': '45'},
'fld:prms': {'node:tree_list': '1102'},
'fld:retn': {'node:boolean_type': '22',
'node:complex_type': '13',
'node:integer_type': '487',
'node:pointer_type': '310',
'node:real_type': '66',
'node:record_type': '4',
'node:vector_type': '58',
'node:void_type': '154'},
'fld:size': {'node:integer_cst': '1114'},
'fld:unql': {'node:function_type': '51'}},
'node:ge_expr': {'fld:type': {'node:integer_type': '6'}},
'node:goto_expr': {'fld:type': {'node:void_type': '46'}},
'node:gt_expr': {'fld:type': {'node:integer_type': '2'}},
'node:indirect_ref': {'fld:type': {'node:integer_type': '47',
'node:pointer_type': '11',
'node:record_type': '25'}},
'node:integer_cst': {'fld:type': {'node:integer_type': '455',
'node:pointer_type': '12'}},
'node:integer_type': {'fld:max': {'node:integer_cst': '188'},
'fld:min': {'node:integer_cst': '188'},
'fld:name': {'node:identifier_node': '2',
'node:type_decl': '157'},
'fld:size': {'node:integer_cst': '188'},
'fld:unql': {'node:integer_type': '144'}},
'node:label_decl': {'fld:type': {'node:void_type': '47'}},
'node:label_expr': {'fld:type': {'node:void_type': '42'}},
'node:le_expr': {'fld:type': {'node:integer_type': '4'}},
'node:lshift_expr': {'fld:type': {'node:integer_type': '6'}},
'node:lt_expr': {'fld:type': {'node:integer_type': '2'}},
'node:modify_expr': {'fld:type': {'node:integer_type': '76',
'node:pointer_type': '36',
'node:real_type': '1'}},
'node:mult_expr': {'fld:type': {'node:integer_type': '3'}},
'node:ne_expr': {'fld:type': {'node:integer_type': '59'}},
'node:nop_expr': {'fld:type': {'node:integer_type': '103',
'node:pointer_type': '34'}},
'node:parm_decl': {'fld:argt': {'node:integer_type': '49',
'node:pointer_type': '44'},
'fld:type': {'node:integer_type': '49',
'node:pointer_type': '44'}},
'node:plus_expr': {'fld:type': {'node:integer_type': '10'}},
'node:pointer_plus_expr': {'fld:type': {'node:pointer_type': '19'}},
'node:postdecrement_expr': {'fld:type': {'node:integer_type': '1'}},
'node:postincrement_expr': {'fld:type': {'node:integer_type': '1',
'node:pointer_type': '11'}},
'node:preincrement_expr': {'fld:type': {'node:integer_type': '7',
'node:pointer_type': '5'}},
'node:real_type': {'fld:name': {'node:type_decl': '9'},
'fld:size': {'node:integer_cst': '9'},
'fld:unql': {'node:real_type': '2'}},
'node:record_type': {'fld:flds': {'node:field_decl': '177'},
'fld:name': {'node:identifier_node': '89',
'node:type_decl': '69'},
'fld:size': {'node:integer_cst': '177'},
'fld:unql': {'node:record_type': '79'}},
'node:reference_type': {'fld:refd': {'node:pointer_type': '1'},
'fld:size': {'node:integer_cst': '1'}},
'node:result_decl': {'fld:type': {'node:integer_type': '41',
'node:pointer_type': '7',
'node:real_type': '1'}},
'node:return_expr': {'fld:type': {'node:void_type': '51'}},
'node:rshift_expr': {'fld:type': {'node:integer_type': '3'}},
'node:string_cst': {'fld:type': {'node:array_type': '9'}},
'node:switch_expr': {'fld:type': {'node:integer_type': '1'}},
'node:tree_list': {'fld:valu': {'node:boolean_type': '9',
'node:complex_type': '12',
'node:enumeral_type': '15',
'node:integer_type': '811',
'node:pointer_type': '1227',
'node:real_type': '89',
'node:record_type': '3',
'node:reference_type': '3',
'node:union_type': '6',
'node:vector_type': '105',
'node:void_type': '4'}},
'node:trunc_div_expr': {'fld:type': {'node:integer_type': '4'}},
'node:truth_and_expr': {'fld:type': {'node:integer_type': '1'}},
'node:truth_andif_expr': {'fld:type': {'node:integer_type': '20'}},
'node:truth_orif_expr': {'fld:type': {'node:integer_type': '6'}},
'node:type_decl': {'fld:type': {'node:array_type': '8',
'node:boolean_type': '1',
'node:complex_type': '5',
'node:enumeral_type': '31',
'node:function_type': '45',
'node:integer_type': '161',
'node:pointer_type': '17',
'node:real_type': '8',
'node:record_type': '167',
'node:union_type': '48',
'node:void_type': '2'}},
'node:union_type': {'fld:flds': {'node:field_decl': '50'},
'fld:name': {'node:identifier_node': '5',
'node:type_decl': '13'},
'fld:size': {'node:integer_cst': '50'},
'fld:unql': {'node:union_type': '14'}},
'node:var_decl': {'fld:type': {'node:array_type': '14',
'node:integer_type': '95',
'node:pointer_type': '30',
'node:record_type': '7'}},
'node:vector_type': {'fld:size': {'node:integer_cst': '12'},
'fld:unql': {'node:vector_type': '1'}},
'node:void_type': {'fld:name': {'node:type_decl': '5'},
'fld:unql': {'node:void_type': '4'}}}}
f = {}
skip= {
    'fld:source_file': 1  # don't need this in the document
}
def query(s):
results = prefix.q( """
SELECT ?a ?p ?o ?t WHERE {
<%s> ?p ?o.
optional {
?o rdf:type ?t.
}
}
""" % s)
d={
'node_id' : prefix.clean(s)
}
dt={
'node_id' : None # literal has no type...
}
#pprint.pprint(results)
for x in results['results']['bindings']:
v = prefix.clean(x['o']['value'])
t = None
if 't' in x:
t = prefix.clean(x['t']['value'])
else:
#pprint.pprint(x)
pass # have no node type
k = x['p']['value']
k = prefix.clean(k)
if k not in d:
if k not in skip:
d[k]=v # the value of the field
dt[k]=t # the domain type of the field object
else:
#d[k]=[d[k],v]
raise Exception("duplicate")
pprint.pprint({'query_results':d}, depth=2)
return d, dt
import types
def recurse_ref(s, subtree):
print("RECURSE for %s\n" % s)
print("using subtree : %s" % subtree['name'])
d,dt = query(s)
pprint.pprint({"Got from db":d})
if 'rdf:type' not in d:
return d
st = d['rdf:type']
#print "st" + str(st)
#pprint.pprint(dt)
found = False
if not 'exprs' in subtree:
pprint.pprint({"bad subtree": subtree}, depth=2)
raise Exception()
lookup = subtree['exprs']
for k in d:
r = None # result of the field
ot = dt[k]
v = d[k]
u = prefix.tg +v
if type(st) is dict:
            print('skip ' + str(st))
pprint.pprint({
'case': 'is type',
'k' :k,
'ot' :ot,
'st' : st
}, depth=2)
#pprint.pprint(dt)
#pass # no type
elif not ot : # no type, a literal
if k.startswith('fld:'):
r = prefix.clean(v) # just a literal
pprint.pprint({
'case': 'is literal',
'k' :k,
'dt': dt,
'ot' :ot,
'st' : st
}, depth=2)
found = True
else:
pprint.pprint({
'case': 'is no field',
'k' :k,
'ot' :ot,
'st' : st,
'r' : r,
'v' : v,
}, depth=2)
r = v # we need to store the type field
found = True
elif st in lookup:
if k in lookup[st]:
if ot in lookup[st][k]:
subtree = lookup[st][k]
if type(subtree) is dict:
if 'exprs' in subtree:
r = recurse_ref(u, subtree)
pprint.pprint({"Subtree":r}, depth=2)
else:
r = recurse_ref(u, tree)
pprint.pprint({"tree":r}, depth=2)
else:
r = recurse_ref(u, tree)
pprint.pprint({"tree2":r}, depth=2)
found = True
else:
pass # skip
if not found:
r = recurse_ref(u, just_vals ) # just get one level of info for types and such
pprint.pprint({
"missing" : True,
'k' :k,
'ot' :ot,
'st' : st,
'u' :u,
'r' :r
}, depth=2)
d[k]=r
pprint.pprint({"rec found":d}, depth=2)
return (d)
# print out what field types occur
def start():
t = {}
results = prefix.q( """
SELECT ?a WHERE {
?a fld:srcp 'eval.c:216'.
?a fld:name [ fld:string 'parse_command'].
?a rdf:type nt:function_decl.
}
""")
for x in results['results']['bindings']:
print(x['a']['value'])
r= recurse_ref(x['a']['value'],tree)
o = open("data/body2.py","w")
o.write("deep={v2}".format(v2=pprint.pformat(r)))
o.close()
start()
| gpl-2.0 | 5,027,496,312,322,403,000 | 53.828863 | 141 | 0.313759 | false | 4.432731 | false | false | false |
ollej/shoutbridge | src/bridges/XmppPyBridge.py | 1 | 7001 | # -*- coding: utf-8 -*-
"""
The MIT License
Copyright (c) 2010 Olle Johansson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
import os
import xmpp
import time
import re
from bridges.XmppBridge import *
from utils.utilities import *
class XmppPyBridge(XmppBridge):
login = ""
passwd = ""
room = ""
host = ""
port = 5222
discoName = "Shoutbridge"
shoutbox = None
roster = []
def __init__(self, sbox, cfg):
"""
Instantiate an XMPP bridge using XMPP login details and a shoutbox object.
"""
self.shoutbox = sbox
self.login = cfg.xmpp_login
self.passwd = cfg.xmpp_pass
self.host = cfg.xmpp_host
if cfg.xmpp_port:
self.port = cfg.xmpp_port
self.room = cfg.xmpp_room
# Make an XMPP connection
self.make_connection()
# Register handlers
self.register_handlers()
def __del__(self):
if self.cl:
self.cl.disconnect()
def make_connection(self):
"""
Make an XMPP connection and authorize the user.
"""
self.jid = xmpp.protocol.JID(self.login)
debug = xmpp.debug.Debug() #['always', 'nodebuilder']
self.cl = xmpp.Client(self.jid.getDomain(), debug=debug)
self.con = self.cl.connect()
#self.cl = xmpp.client.Component(self.jid.getDomain(), self.port, debug=debug)
#self.con = self.cl.connect((self.jid.getDomain(), self.port))
if not self.con:
raise BridgeConnectionError
print 'Connected with', self.con
self.auth = self.cl.auth(self.jid.getNode(), self.passwd, resource=self.jid.getResource())
if not self.auth:
raise BridgeAuthenticationError
print 'Authenticated using', self.auth
def register_handlers(self):
"""
Register message handlers
"""
self.cl.RegisterHandler('iq', self.handle_iq)
self.cl.RegisterHandler('presence', self.handle_presence)
self.cl.RegisterHandler('message', self.handle_message)
self.disco = xmpp.browser.Browser()
self.disco.PlugIn(self.cl)
self.disco.setDiscoHandler(self.xmpp_base_disco,node='', jid=self.login)
# Disco Handlers
def xmpp_base_disco(self, con, event, type):
fromjid = event.getFrom().__str__()
to = event.getTo()
node = event.getQuerynode();
#Type is either 'info' or 'items'
if to == self.login:
if node == None:
if type == 'info':
return {
'ids': [
{'category': 'gateway', 'type': 'smtp', 'name': self.discoName}],
'features': [NS_VERSION, NS_COMMANDS]}
if type == 'items':
return []
else:
self.cl.send(Error(event, ERR_ITEM_NOT_FOUND))
raise NodeProcessed
else:
self.cl.send(Error(event, MALFORMED_JID))
raise NodeProcessed
def handle_iq(self, conn, iq_node):
"""
Handler for processing some "get" query from custom namespace
"""
print "Iq stanza received:", iq_node.getType(), iq_node.getFrom().getResource()
reply = iq_node.buildReply('result')
# ... put some content into reply node
conn.send(reply)
raise NodeProcessed # This stanza is fully processed
def handle_presence(self, conn, pres):
nick = pres.getFrom().getResource()
type = pres.getType()
print "Presence stanza received:", nick, type
if type == 'unavailable':
if nick in self.roster:
self.roster.remove(nick)
print "Adding to roster:", nick
else:
if nick not in self.roster:
self.roster.append(nick)
print "Removing from roster:", nick
def handle_message(self, conn, mess):
"""
Handle an XMPP message.
"""
type = mess.getType()
fromjid = mess.getFrom().getStripped()
nick = mess.getFrom().getResource()
print "Message stanza received:", fromjid, '/', nick, type
if type in ['message', 'chat', None]:
# and fromjid == self.remotejid:
text = mess.getBody()
try:
user = self.shoutbox.getUserByLogin(fromjid)
except ShoutboxUserNotFoundError:
# Default to anonymous user with JID as username
user = User(1, nick, '', '')
self.shoutbox.sendShout(user, text)
def clean_message(self, text):
"""
Clean text of unwanted content.
"""
text = strip_tags(text)
return text
def send_message(self, tojid, text):
"""
Send an text as XMPP message to tojid
"""
try:
id = self.cl.send(xmpp.protocol.Message(tojid, text))
print 'Sent message with id', id
except UnicodeDecodeError:
print "Unicode Decode Error: " + text
def process_shoutbox_messages(self):
msgs = self.shoutbox.readShouts()
for m in msgs:
text = self.clean_message(m.text)
text = "%s <%s> %s" % (m.time, m.name, text)
self.send_message(self.room, text)
def listen(self):
"""
Start listening on XMPP and Shoutbox, relaying messages.
"""
try:
while 1:
print "Loop..."
# Process incoming XMPP messages.
self.cl.Process(5)
# Process shoutbox messages.
self.process_shoutbox_messages()
# Sleep before next loop iteration.
#time.sleep(1)
# Reconnect to XMPP if necessary.
if not self.cl.isConnected():
self.cl.reconnectAndReauth()
except KeyboardInterrupt:
print "Exiting..."
| mit | 4,904,962,323,956,999,000 | 32.821256 | 98 | 0.584202 | false | 4.108568 | false | false | false |
Diti24/python-ivi | ivi/agilent/agilent437B.py | 1 | 14103 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2014-2016 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .. import ivi
from .. import pwrmeter
import time
Units = set(['dBm', 'Watts'])
class agilent437B(ivi.Driver, pwrmeter.Base, pwrmeter.ManualRange,
pwrmeter.DutyCycleCorrection, pwrmeter.AveragingCount,
pwrmeter.ZeroCorrection, pwrmeter.Calibration,
pwrmeter.ReferenceOscillator):
"Agilent 437B RF power meter"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', '437B')
super(agilent437B, self).__init__(*args, **kwargs)
self._channel_count = 1
self._identity_description = "Agilent 437B RF power meter driver"
self._identity_identifier = ""
self._identity_revision = ""
self._identity_vendor = ""
self._identity_instrument_manufacturer = "Agilent Technologies"
self._identity_instrument_model = ""
self._identity_instrument_firmware_revision = ""
self._identity_specification_major_version = 3
self._identity_specification_minor_version = 0
self._identity_supported_instrument_models = ['437B']
self._init_channels()
def _initialize(self, resource = None, id_query = False, reset = False, **keywargs):
"Opens an I/O session to the instrument."
super(agilent437B, self)._initialize(resource, id_query, reset, **keywargs)
# interface clear
if not self._driver_operation_simulate:
self._clear()
# check ID
if id_query and not self._driver_operation_simulate:
id = self.identity.instrument_model
id_check = self._instrument_id
id_short = id[:len(id_check)]
if id_short != id_check:
raise Exception("Instrument ID mismatch, expecting %s, got %s", id_check, id_short)
# reset
if reset:
self.utility_reset()
def _load_id_string(self):
if self._driver_operation_simulate:
self._identity_instrument_manufacturer = "Not available while simulating"
self._identity_instrument_model = "Not available while simulating"
self._identity_instrument_firmware_revision = "Not available while simulating"
else:
lst = self._ask("*IDN?").split(",")
self._identity_instrument_manufacturer = lst[0]
self._identity_instrument_model = lst[1]
self._identity_instrument_firmware_revision = lst[3]
self._set_cache_valid(True, 'identity_instrument_manufacturer')
self._set_cache_valid(True, 'identity_instrument_model')
self._set_cache_valid(True, 'identity_instrument_firmware_revision')
def _get_identity_instrument_manufacturer(self):
if self._get_cache_valid():
return self._identity_instrument_manufacturer
self._load_id_string()
return self._identity_instrument_manufacturer
def _get_identity_instrument_model(self):
if self._get_cache_valid():
return self._identity_instrument_model
self._load_id_string()
return self._identity_instrument_model
def _get_identity_instrument_firmware_revision(self):
if self._get_cache_valid():
return self._identity_instrument_firmware_revision
self._load_id_string()
return self._identity_instrument_firmware_revision
def _utility_disable(self):
pass
def _utility_error_query(self):
error_code = 0
error_message = "No error"
#if not self._driver_operation_simulate:
# error_code, error_message = self._ask(":system:error?").split(',')
# error_code = int(error_code)
# error_message = error_message.strip(' "')
return (error_code, error_message)
def _utility_lock_object(self):
pass
def _utility_reset(self):
if not self._driver_operation_simulate:
self._write("*RST")
self._clear()
self.driver_operation.invalidate_all_attributes()
def _utility_reset_with_defaults(self):
self._utility_reset()
def _utility_self_test(self):
code = 0
message = "Self test passed"
if not self._driver_operation_simulate:
code = int(self._ask("*TST?"))
if code != 0:
message = "Self test failed"
return (code, message)
raise ivi.OperationNotSupportedException()
def _utility_unlock_object(self):
pass
def _init_channels(self):
try:
super(agilent437B, self)._init_channels()
except AttributeError:
pass
self._channel_name = list()
self._channel_averaging_count_auto = list()
self._channel_correction_frequency = list()
self._channel_offset = list()
self._channel_range_auto = list()
self._channel_units = list()
for i in range(self._channel_count):
self._channel_name.append("channel%d" % (i+1))
self._channel_averaging_count_auto.append(True)
self._channel_correction_frequency.append(50e6)
self._channel_offset.append(0.0)
self._channel_range_auto.append(True)
self._channel_units.append('dBm')
self.channels._set_list(self._channel_name)
def _get_channel_averaging_count_auto(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_averaging_count_auto[index]
def _set_channel_averaging_count_auto(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = bool(value)
if not value:
raise ivi.ValueNotSupportedException()
self._channel_averaging_count_auto[index] = value
def _get_channel_correction_frequency(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_correction_frequency[index]
def _set_channel_correction_frequency(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = float(value)
if not self._driver_operation_simulate:
self._write("FR%eEN" % (value))
self._channel_correction_frequency[index] = value
self._set_cache_valid(index=index)
def _get_channel_offset(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_offset[index]
def _set_channel_offset(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = float(value)
if not self._driver_operation_simulate:
self._write("OS%eEN" % (value))
self._channel_offset[index] = value
self._set_cache_valid(index=index)
def _get_channel_range_auto(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_range_auto[index]
def _set_channel_range_auto(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = bool(value)
self._channel_range_auto[index] = value
def _get_channel_units(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_units[index]
def _set_channel_units(self, index, value):
index = ivi.get_index(self._channel_name, index)
if value not in Units:
raise ivi.ValueNotSupportedException()
if not self._driver_operation_simulate:
if value == 'dBm':
self._write("LG")
elif value == 'Watts':
self._write("LN")
self._channel_units[index] = value
self._set_cache_valid(index=index)
def _get_measurement_measurement_state(self):
return self._measurement_measurement_state
def _measurement_abort(self):
self._clear()
pass
def _measurement_configure(self, operator, operand1, operand2):
pass
def _measurement_fetch(self):
if self._driver_operation_simulate:
return
val = self._read()
return float(val)
def _measurement_initiate(self):
if self._driver_operation_simulate:
return
self._write("TR1")
def _measurement_read(self, maximum_time):
self._measurement_initiate()
return self._measurement_fetch()
def _get_channel_range_lower(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_range_lower[index]
def _set_channel_range_lower(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = float(value)
self._channel_range_lower[index] = value
def _get_channel_range_upper(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_range_upper[index]
def _set_channel_range_upper(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = float(value)
self._channel_range_upper[index] = value
def _get_channel_duty_cycle_enabled(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_duty_cycle_enabled[index]
def _set_channel_duty_cycle_enabled(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = bool(value)
if not self._driver_operation_simulate:
self._write("DC%d" % int(value))
self._channel_duty_cycle_enabled[index] = value
self._set_cache_valid(index=index)
def _get_channel_duty_cycle_value(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_duty_cycle_value[index]
def _set_channel_duty_cycle_value(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = float(value)
if not self._driver_operation_simulate:
self._write("DY%eEN" % (value))
self._channel_duty_cycle_value[index] = value
self._set_cache_valid(index=index)
def _get_channel_averaging_count(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_averaging_count[index]
def _set_channel_averaging_count(self, index, value):
index = ivi.get_index(self._channel_name, index)
value = int(value)
if not self._driver_operation_simulate:
self._write("FM%eEN" % (value))
self._channel_averaging_count[index] = value
self._set_cache_valid(index=index)
def _get_channel_zero_state(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_zero_state[index]
def _channel_zero(self, index):
index = ivi.get_index(self._channel_name, index)
if self._driver_operation_simulate:
return
self._write("CS")
self._write("ZE")
it = 0
while True:
val = self._read_stb()
if val & 2:
break
if val & 8 or it > 20:
return
time.sleep(0.5)
self._channel_zero_state[index] = 'complete'
def _get_channel_calibration_state(self, index):
index = ivi.get_index(self._channel_name, index)
return self._channel_calibration_state[index]
def _channel_calibrate(self, index):
index = ivi.get_index(self._channel_name, index)
if self._driver_operation_simulate:
return
self._write("CS")
self._write("CLEN")
it = 0
while True:
val = self._read_stb()
if val & 2:
break
if val & 8 or it > 20:
return
time.sleep(0.5)
self._channel_calibration_state[index] = 'complete'
def _get_reference_oscillator_enabled(self):
return self._reference_oscillator_enabled
def _set_reference_oscillator_enabled(self, value):
value = bool(value)
if not self._driver_operation_simulate:
self._write("OC%d" % int(value))
self._reference_oscillator_enabled = value
self._set_cache_valid()
def _get_reference_oscillator_frequency(self):
return self._reference_oscillator_frequency
def _set_reference_oscillator_frequency(self, value):
value = float(value)
value = 50e6 # fixed at 50 MHz
self._reference_oscillator_frequency = value
def _get_reference_oscillator_level(self):
return self._reference_oscillator_level
def _set_reference_oscillator_level(self, value):
value = float(value)
value = 0.0 # fixed at 1.00 mW (0 dBm)
self._reference_oscillator_level = value
| mit | -4,515,614,092,387,864,600 | 35.726563 | 99 | 0.612139 | false | 3.941587 | false | false | false |
CSysTeam/SecurityPackage | MainAlgorithms/Monoalphabetics/Monoalphabetic.py | 1 | 3288 |
class Monoalphabetic:
""" Frequency Information:
E 12.51%
T 9.25
A 8.04
O 7.60
I 7.26
N 7.09
S 6.54
R 6.12
H 5.49
L 4.14
D 3.99
C 3.06
U 2.71
M 2.53
F 2.30
P 2.00
G 1.96
W 1.92
Y 1.73
B 1.54
V 0.99
K 0.67
X 0.19
J 0.16
Q 0.11
Z 0.09
"""
def analyse(self, plainText: str, cipherText: str) -> str:
LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        mainKey = "-" * len(LETTERS)  # '-' marks key letters not yet determined
temp = plainText.upper()
i = 0
while i < len(plainText):
index = LETTERS.index(temp[i])
mainKey = mainKey[:index] + cipherText[i] + mainKey[index+1:]
i = i + 1
temp = mainKey
temp = temp.upper()
i = 0
while i < len(temp):
if temp[i] == '-':
index = LETTERS.index(temp[i - 1])
if index == 25:
index = -1
temp = temp[:i] + LETTERS[index + 1] + temp[i + 1:]
i = i + 1
temp = temp.lower()
return temp
def decrypt(self, cipherText: str, key: str) -> str:
LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
temp = cipherText.upper()
KEY = key.upper()
plain = ""
i = 0
while i < len(cipherText):
index = KEY.index(temp[i])
plain += LETTERS[index]
i = i + 1
plain = plain.lower()
return plain
def encrypt(self, plainText: str, key: str) -> str:
LETTERS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
temp = plainText.upper()
i = 0
EncryptedText = ""
while i < len(plainText):
index = LETTERS.index(temp[i])
EncryptedText += key[index]
i = i + 1
EncryptedText = EncryptedText.upper()
return EncryptedText
def analyseUsingCharFrequency(self, cipher: str) -> str:
freqInfo = "ETAOINSRHLDCUMFPGWYBVKXJQZ"
newTemp = "-" * len(cipher)
temp = cipher.upper()
dictionary = {}
for letters in temp:
dictionary[letters] = 0
for letters in temp:
dictionary[letters] += 1
dictionary = sorted(dictionary.items(), reverse=True, key=lambda x: x[1])
#print("len: ", len(temp))
for position in range(0, len(temp)):
#print("position: ", position)
#print(dictionary[position])
if position >= len(dictionary) - 1:
break
#print("dict: ", dictionary[1][0])
i = 0
while i < len(dictionary):
#print(len(dictionary))
#print(dictionary[i][0])
j = 0
while j < len(temp):
#print("temp: ", temp[j],"dict: ", dictionary[i][0])
if temp[j] == dictionary[i][0]:
#print("..", temp[j:])
newTemp = newTemp[:j] + freqInfo[i] + newTemp[j + 1:]
#print("tmp: ", temp)
j = j + 1
i = i + 1
return newTemp
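# Illustrative usage sketch (added; not part of the original class). The
# substitution key below is hypothetical; any permutation of the 26 letters
# behaves the same way.
if __name__ == "__main__":
    mono = Monoalphabetic()
    key = "QWERTYUIOPASDFGHJKLZXCVBNM"
    cipher_text = mono.encrypt("attackatdawn", key)    # -> "QZZQEAQZRQVF"
    plain_text = mono.decrypt(cipher_text, key)        # -> "attackatdawn"
    key_guess = mono.analyse(plain_text, cipher_text)  # partial key, gaps auto-filled
    freq_guess = mono.analyseUsingCharFrequency(cipher_text)
    print(cipher_text, plain_text, key_guess, freq_guess)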
| gpl-3.0 | 1,636,525,540,505,321,200 | 27.097345 | 81 | 0.439173 | false | 3.841121 | false | false | false |
averainy/averainy | python/wechat_test.py | 1 | 7797 | #!/usr/bin/python
#coding=utf-8
import xml.dom.minidom
def get_tagname(input_xml_string):
    # Helper (unused below): parse an XML string and return its root tag name.
    doc = xml.dom.minidom.parseString(input_xml_string)
    return doc.documentElement.tagName
class msg_parse:
def __init__(self,msg):
self.doc = xml.dom.minidom.parseString(msg)
def _getData(self,tagName):
nodes=self.doc.getElementsByTagName(tagName)
if nodes:
return nodes[0].childNodes[0].data
else:
return None
def getFromUserName(self):
return self._getData("FromUserName")
def getToUserName(self):
return self._getData("ToUserName")
def getCreateTime(self):
return self._getData("CreateTime")
def getMsgType(self):
return self._getData("MsgType")
def getContent(self):
return self._getData("Content")
def getMsgId(self):
return self._getData("MsgId")
def getPicUrl(self):
return self._getData("PicUrl")
def getMediaId(self):
return self._getData("MediaId")
def getFormat(self):
return self._getData("Format")
def getMediaId(self):
return self._getData("MediaId")
def getThumbMediaId(self):
return self._getData("ThumbMediaId")
def getLocation_X(self):
return self._getData("Location_X")
def getLocation_Y(self):
return self._getData("Location_Y")
def getScale(self):
return self._getData("Scale")
def getLabel(self):
return self._getData("Label")
def getTitle(self):
return self._getData("Title")
def getDescription(self):
return self._getData("Description")
def getUrl(self):
return self._getData("Url")
def getEvent(self):
return self._getData("Event")
def getEventKey(self):
return self._getData("EventKey")
def getTicket(self):
return self._getData("Ticket")
def getLatitude(self):
return self._getData("Latitude")
def getLongitude(self):
return self._getData("Longitude")
def getPrecision(self):
return self._getData("Precision")
def getTicket(self):
return self._getData("Ticket")
def getTicket(self):
return self._getData("Ticket")
if __name__ == "__main__":
    # Text message
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[ffdfdromUser]]></FromUserName>
<CreateTime>1348831860</CreateTime>
<MsgType><![CDATA[text]]></MsgType>
<Content><![CDATA[this is a test]]></Content>
<MsgId>1234567890123456</MsgId>
</xml>"""
abc=msg_parse(res)
print abc.getFromUserName()
print abc.getToUserName()
print abc.getCreateTime()
print abc.getMsgType()
print abc.getContent()
print abc.getMsgId()
    # Image message
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1348831860</CreateTime>
<MsgType><![CDATA[image]]></MsgType>
<PicUrl><![CDATA[this is a url]]></PicUrl>
<MediaId><![CDATA[media_id]]></MediaId>
<MsgId>1234567890123456</MsgId>
</xml>"""
abc=msg_parse(res)
print abc.getPicUrl()
print abc.getMediaId()
    # Voice message
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1357290913</CreateTime>
<MsgType><![CDATA[voice]]></MsgType>
<MediaId><![CDATA[media_id]]></MediaId>
<Format><![CDATA[Format]]></Format>
<MsgId>1234567890123456</MsgId>
</xml>"""
abc=msg_parse(res)
print abc.getFormat()
    # Video message
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1357290913</CreateTime>
<MsgType><![CDATA[video]]></MsgType>
<MediaId><![CDATA[media_id]]></MediaId>
<ThumbMediaId><![CDATA[thumb_media_id]]></ThumbMediaId>
<MsgId>1234567890123456</MsgId>
</xml>"""
abc=msg_parse(res)
print abc.getThumbMediaId()
    # Location message
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1351776360</CreateTime>
<MsgType><![CDATA[location]]></MsgType>
<Location_X>23.134521</Location_X>
<Location_Y>113.358803</Location_Y>
<Scale>20</Scale>
<Label><![CDATA[位置信息]]></Label>
<MsgId>1234567890123456</MsgId>
</xml> """
abc=msg_parse(res)
print abc.getLocation_X()
print abc.getLocation_Y()
print abc.getScale()
print abc.getLabel()
    # Link message
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1351776360</CreateTime>
<MsgType><![CDATA[link]]></MsgType>
<Title><![CDATA[公众平台官网链接]]></Title>
<Description><![CDATA[公众平台官网链接]]></Description>
<Url><![CDATA[url]]></Url>
<MsgId>1234567890123456</MsgId>
</xml> """
abc=msg_parse(res)
print abc.getTitle()
print abc.getDescription()
print abc.getUrl()
    # Subscribe/unsubscribe event
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>123456789</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[subscribe]]></Event>
</xml>"""
abc=msg_parse(res)
print abc.getEvent()
    # Scan event for a QR code carrying a scene parameter
    # Event pushed when a previously unsubscribed user subscribes after scanning
res="""<xml><ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>123456789</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[subscribe]]></Event>
<EventKey><![CDATA[qrscene_123123]]></EventKey>
<Ticket><![CDATA[TICKET]]></Ticket>
</xml>"""
abc=msg_parse(res)
print abc.getEventKey()
print abc.getTicket()
    # Event pushed when the user is already subscribed
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>123456789</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[SCAN]]></Event>
<EventKey><![CDATA[SCENE_VALUE]]></EventKey>
<Ticket><![CDATA[TICKET]]></Ticket>
</xml>"""
abc=msg_parse(res)
print abc.getEventKey()
print abc.getTicket()
    # Location report event
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>123456789</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[LOCATION]]></Event>
<Latitude>23.137466</Latitude>
<Longitude>113.352425</Longitude>
<Precision>119.385040</Precision>
</xml>"""
abc=msg_parse(res)
print abc.getLatitude()
print abc.getLongitude()
print abc.getPrecision()
    # Event pushed when a menu item is clicked to pull a message
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>123456789</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[CLICK]]></Event>
<EventKey><![CDATA[EVENTKEY]]></EventKey>
</xml>"""
abc=msg_parse(res)
print abc.getMsgType()
print abc.getEvent()
print abc.getEventKey()
# Event pushed when a menu click redirects to a URL
res="""<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>123456789</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[VIEW]]></Event>
<EventKey><![CDATA[www.qq.com]]></EventKey>
</xml>"""
abc=msg_parse(res)
print abc.getMsgType()
print abc.getEvent()
print abc.getEventKey()
| gpl-2.0 | -3,903,680,879,921,830,400 | 31.786957 | 59 | 0.618486 | false | 3.112258 | false | false | false |
mjordan/pkppln | server.py | 1 | 1777 | #!/usr/bin/env python
import sys
import bottle
from bottle import Bottle, request, error, response, Response
from os.path import abspath, dirname
import logging
sys.path.append(dirname(abspath(__file__)))
import pkppln
from webapp.admin.terms_server import TermsApp
from webapp.sword.sword_server import SwordServer
from webapp.static.static_server import StaticApp
from webapp.feeds.feed_server import FeedsApp
from webapp.admin.journal_server import JournalsApp
def after_request():
if request.path.startswith('/static'):
return
try:
route_name = request.route.callback.__name__
except:
route_name = '(unknown)'
try:
pkppln.log_message(" - ".join([
'finished', request.get('REMOTE_ADDR'),
request.method, request.path,
type(request.app).__name__ + "#" + route_name
]), logging.INFO)
except:
pass
def before_request():
# pkppln.log_message(" - ".join([
# 'starting', request.get('REMOTE_ADDR'),
# request.method, request.path]))
pkppln.initialize()
static_path = dirname(abspath(__file__)) + '/static'
application = bottle.default_app()
application.add_hook('before_request', before_request)
application.add_hook('after_request', after_request)
application.mount('/static/', StaticApp('Static', static_path))
application.mount('/admin/terms/', TermsApp('Terms'))
application.mount('/admin/journals/', JournalsApp('JournalsApp'))
application.mount('/feeds/', FeedsApp('Feeds'))
application.mount('/api/sword/2.0/', SwordServer('SWORD'))
if __name__ == '__main__':
if len(sys.argv) == 2:
pkppln.config_file_name = sys.argv[1]
bottle.debug(True)
application.run(host='127.0.0.1', port=9999, reloader=True)
| gpl-3.0 | 2,497,417,321,384,419,300 | 28.616667 | 65 | 0.66798 | false | 3.525794 | false | false | false |
marshallmcdonnell/interactive_plotting | matplotlib/draggable_legend_code.py | 1 | 3140 | #!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as _plt
class DraggableLegend:
def __init__(self, legend):
self.legend = legend
self.gotLegend = False
legend.figure.canvas.mpl_connect('motion_notify_event', self.on_motion)
legend.figure.canvas.mpl_connect('pick_event', self.on_picker)
legend.figure.canvas.mpl_connect('button_release_event', self.on_release)
legend.set_picker(self.my_legend_picker)
#----------------------------------------------------#
# Connected event handlers
def on_motion(self, event):
if self.gotLegend:
dx = event.x - self.mouse_x
dy = event.y - self.mouse_y
loc_in_canvas = self.legend_x + dx, self.legend_y + dy
loc_in_norm_axes = self.legend.parent.transAxes.inverted().transform_point(loc_in_canvas)
self.legend._loc = tuple(loc_in_norm_axes)
self.legend.figure.canvas.draw()
def my_legend_picker(self, legend, event):
return self.legend.legendPatch.contains(event)
def on_picker(self, event):
if event.artist == self.legend:
# left-click
if event.mouseevent.button == 1:
self._move_legend(event)
            # middle-click (no action)
if event.mouseevent.button == 2:
pass
# right-click
if event.mouseevent.button == 3:
self._hideLegend()
            # scroll wheel up
if event.mouseevent.button == 'up':
self._scaleUpLegendFont()
            # scroll wheel down
if event.mouseevent.button == 'down':
self._scaleDownLegendFont()
def on_release(self, event):
if self.gotLegend:
self.gotLegend = False
#----------------------------------------------------#
# Utility functions
def _move_legend(self,event):
bbox = self.legend.get_window_extent()
self.mouse_x = event.mouseevent.x
self.mouse_y = event.mouseevent.y
self.legend_x = bbox.xmin
self.legend_y = bbox.ymin
self.gotLegend = 1
def _scaleUpLegendFont(self,size_step=4):
size = self.legend.get_texts()[0].get_fontsize()
size += size_step
_plt.setp(self.legend.get_texts(), fontsize=size) #legend 'list' fontsize
self.legend.figure.canvas.draw()
def _scaleDownLegendFont(self,size_step=4):
size = self.legend.get_texts()[0].get_fontsize()
size -= size_step
_plt.setp(self.legend.get_texts(), fontsize=size) #legend 'list' fontsize
self.legend.figure.canvas.draw()
def _hideLegend(self):
if self.legend.get_visible():
self.legend.set_visible(False)
else:
self.legend.set_visible(True)
self.legend.figure.canvas.draw()
figure = _plt.figure()
ax = figure.add_subplot(111)
scatter = ax.scatter(np.random.randn(100), np.random.randn(100), label='hi')
legend = ax.legend()
legend = DraggableLegend(legend)
_plt.show()
| mit | -4,343,959,866,343,381,500 | 31.040816 | 101 | 0.560828 | false | 3.833944 | false | false | false |
gromitsun/sim-xrf | python/snr/pysnip.py | 1 | 2468 | import numpy as np
from scipy.optimize import curve_fit
def FWHM(x, noise=100, fano=0.114):
sigma = np.sqrt((noise / 2.3548) ** 2 + 3.58 * fano * x)
return 2.3548 * sigma
def fit_FWHM(x, F):
def _FWHM(x, noise, fano):
return (noise / 2.3548) ** 2 + 3.58 * fano * x
popt, pcov = curve_fit(_FWHM, x, (F / 2.3548) ** 2, p0=[100, 0.114])
return popt
def energy_to_channel(energy, offset=2.97, gain=12.26952):
return 1. * (energy - offset) / gain
# # # Low statistics digital filter
def lsdf(E, y, FWHM=FWHM,
f=1.5,
A=75,
M=10,
r=1.3):
def _reduce(x, length_start):
for i in range(length_start):
length = length_start - i
if x < length:
raise IndexError
L = y[x - length:x].sum()
R = y[x + 1:x + length + 1].sum()
S = y[x] + L + R
slope = (R + 1.) / (L + 1.)
if S < M or S < A * np.sqrt(y[x]) or (1. / r <= slope <= r):
return S / (2. * length + 1)
print 'Not found for x = %d!' % x
return y[x]
y_out = y.copy()
for x in range(len(E)):
try:
len_0 = int(energy_to_channel(f * FWHM(E[x]), E[0], E[1] - E[0]))
y_out[x] = _reduce(x, len_0)
except IndexError:
pass
return y_out
# # # Peak-clipping
def snip(E, y, FWHM=FWHM, offset=0., gain=10., **kwargs):
det = kwargs.get('detector')
loops = kwargs.get('loops', 24)
end_loops = kwargs.get('end_loops', 8)
reduce_factor = kwargs.get('reduce_factor', np.sqrt(2))
factor = kwargs.get('factor', 2)
if det is not None:
FWHM = det.response.FWHM
offset = det.channel.offset
gain = det.channel.gain
def G(y):
return np.log(np.log(y + 1) + 1)
def w(x, factor=2):
return energy_to_channel(factor * FWHM(E[x]), offset=offset, gain=gain)
def G_inv(z):
return np.exp(np.exp(z) - 1) - 1
z_out = G(y)
for i in range(loops):
if i >= loops - end_loops:
factor /= 1. * reduce_factor
z = z_out.copy()
for x in range(len(E)):
try:
_w = w(x, factor=factor)
if _w > x:
raise IndexError
z_bar = (z[x + _w] + z[x - _w]) / 2.
z_out[x] = min(z[x], z_bar)
except IndexError:
pass
return G_inv(z_out)
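# ---------------------------------------------------------------------------
# Hedged usage sketch (added; not part of the original module).  It shows how
# FWHM/fit_FWHM and snip() might be exercised on a synthetic spectrum; the
# energy axis, peak shape and SNIP settings are illustrative assumptions, and
# snip() is used exactly as defined above (which itself assumes a NumPy
# version that still accepts float channel offsets as indices).
if __name__ == '__main__':
    E_demo = np.linspace(1.0, 20.0, 512)                 # assumed energy axis
    F_demo = FWHM(E_demo, noise=120, fano=0.12)          # synthetic peak widths
    noise_est, fano_est = fit_FWHM(E_demo, F_demo)       # recover both parameters
    peak = 500.0 * np.exp(-(E_demo - 6.4) ** 2 / 0.05)   # fake fluorescence line
    background = 50.0 + 2.0 * E_demo                     # slowly varying baseline
    estimated_bg = snip(E_demo, peak + background)       # SNIP background estimate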
| mit | 2,221,596,909,784,448,800 | 25.537634 | 79 | 0.480146 | false | 2.924171 | false | false | false |
wesm/statsmodels | scikits/statsmodels/sandbox/tsa/examples/ex_mle_garch.py | 1 | 10649 | # -*- coding: utf-8 -*-
"""
Created on Fri Feb 12 01:01:50 2010
Author: josef-pktd
latest result
-------------
all are very close
garch0 has different parameterization of constant
ordering of parameters is different
seed 2780185
h.shape (2000,)
Optimization terminated successfully.
Current function value: 2093.813397
Iterations: 387
Function evaluations: 676
ggres.params [-0.6146253 0.1914537 0.01039355 0.78802188]
Optimization terminated successfully.
Current function value: 2093.972953
Iterations: 201
Function evaluations: 372
ggres0.params [-0.61537527 0.19635128 4.00706058]
Warning: Desired error not necessarily achieved due to precision loss
Current function value: 2093.972953
Iterations: 51
Function evaluations: 551
Gradient evaluations: 110
ggres0.params [-0.61537855 0.19635265 4.00694669]
Optimization terminated successfully.
Current function value: 2093.751420
Iterations: 103
Function evaluations: 187
[ 0.78671519 0.19692222 0.61457171]
-2093.75141963
Final Estimate:
LLH: 2093.750 norm LLH: 2.093750
omega alpha1 beta1
0.7867438 0.1970437 0.6145467
long run variance comparison
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
R
>>> 0.7867438/(1- 0.1970437- 0.6145467)
4.1757097302897526
Garch (gjr) asymmetric, long-run var ?
>>> 1/(1-0.6146253 - 0.1914537 - 0.01039355) * 0.78802188
4.2937548579245242
>>> 1/(1-0.6146253 - 0.1914537 + 0.01039355) * 0.78802188
3.8569053452140345
Garch0
>>> (1-0.61537855 - 0.19635265) * 4.00694669
0.7543830449902722
>>> errgjr4.var() #for different random seed
4.0924199964716106
todo: add code and verify, check for longer lagpolys
"""
import numpy as np
from numpy.testing import assert_almost_equal
import matplotlib.pyplot as plt
import numdifftools as ndt
import scikits.statsmodels.api as sm
from scikits.statsmodels.sandbox import tsa
from scikits.statsmodels.sandbox.tsa.garch import * # local import
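# Hedged helper (added for clarity; not part of the original script).  The
# "long run variance" figures quoted in the module docstring follow the usual
# GARCH(1,1) identity omega / (1 - alpha - beta), e.g.
# 0.7867438 / (1 - 0.1970437 - 0.6145467) ~= 4.176.
def longrun_variance(omega, alpha, beta):
    """Unconditional variance of a GARCH(1,1) process (requires alpha + beta < 1)."""
    return omega / (1.0 - alpha - beta)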
nobs = 1000
examples = ['garch', 'rpyfit']
if 'garch' in examples:
err,h = generate_kindofgarch(nobs, [1.0, -0.95], [1.0, 0.1], mu=0.5)
plt.figure()
plt.subplot(211)
plt.plot(err)
plt.subplot(212)
plt.plot(h)
#plt.show()
seed = 3842774 #91234 #8837708
seed = np.random.randint(9999999)
print 'seed', seed
np.random.seed(seed)
ar1 = -0.9
err,h = generate_garch(nobs, [1.0, ar1], [1.0, 0.50], mu=0.0,scale=0.1)
# plt.figure()
# plt.subplot(211)
# plt.plot(err)
# plt.subplot(212)
# plt.plot(h)
# plt.figure()
# plt.subplot(211)
# plt.plot(err[-400:])
# plt.subplot(212)
# plt.plot(h[-400:])
#plt.show()
garchplot(err, h)
garchplot(err[-400:], h[-400:])
np.random.seed(seed)
errgjr,hgjr, etax = generate_gjrgarch(nobs, [1.0, ar1],
[[1,0],[0.5,0]], mu=0.0,scale=0.1)
garchplot(errgjr[:nobs], hgjr[:nobs], 'GJR-GARCH(1,1) Simulation - symmetric')
garchplot(errgjr[-400:nobs], hgjr[-400:nobs], 'GJR-GARCH(1,1) Simulation - symmetric')
np.random.seed(seed)
errgjr2,hgjr2, etax = generate_gjrgarch(nobs, [1.0, ar1],
[[1,0],[0.1,0.9]], mu=0.0,scale=0.1)
garchplot(errgjr2[:nobs], hgjr2[:nobs], 'GJR-GARCH(1,1) Simulation')
garchplot(errgjr2[-400:nobs], hgjr2[-400:nobs], 'GJR-GARCH(1,1) Simulation')
np.random.seed(seed)
errgjr3,hgjr3, etax3 = generate_gjrgarch(nobs, [1.0, ar1],
[[1,0],[0.1,0.9],[0.1,0.9],[0.1,0.9]], mu=0.0,scale=0.1)
garchplot(errgjr3[:nobs], hgjr3[:nobs], 'GJR-GARCH(1,3) Simulation')
garchplot(errgjr3[-400:nobs], hgjr3[-400:nobs], 'GJR-GARCH(1,3) Simulation')
np.random.seed(seed)
errgjr4,hgjr4, etax4 = generate_gjrgarch(nobs, [1.0, ar1],
[[1., 1,0],[0, 0.1,0.9],[0, 0.1,0.9],[0, 0.1,0.9]],
mu=0.0,scale=0.1)
garchplot(errgjr4[:nobs], hgjr4[:nobs], 'GJR-GARCH(1,3) Simulation')
garchplot(errgjr4[-400:nobs], hgjr4[-400:nobs], 'GJR-GARCH(1,3) Simulation')
varinno = np.zeros(100)
varinno[0] = 1.
errgjr5,hgjr5, etax5 = generate_gjrgarch(100, [1.0, -0.],
[[1., 1,0],[0, 0.1,0.8],[0, 0.05,0.7],[0, 0.01,0.6]],
mu=0.0,scale=0.1, varinnovation=varinno)
garchplot(errgjr5[:20], hgjr5[:20], 'GJR-GARCH(1,3) Simulation')
#garchplot(errgjr4[-400:nobs], hgjr4[-400:nobs], 'GJR-GARCH(1,3) Simulation')
#plt.show()
seed = np.random.randint(9999999) # 9188410
print 'seed', seed
x = np.arange(20).reshape(10,2)
x3 = np.column_stack((np.ones((x.shape[0],1)),x))
y, inp = miso_lfilter([1., 0],np.array([[-2.0,3,1],[0.0,0.0,0]]),x3)
nobs = 1000
warmup = 1000
np.random.seed(seed)
ar = [1.0, -0.7]#7, -0.16, -0.1]
#ma = [[1., 1, 0],[0, 0.6,0.1],[0, 0.1,0.1],[0, 0.1,0.1]]
ma = [[1., 0, 0],[0, 0.8,0.0]] #,[0, 0.9,0.0]]
# errgjr4,hgjr4, etax4 = generate_gjrgarch(warmup+nobs, [1.0, -0.99],
# [[1., 1, 0],[0, 0.6,0.1],[0, 0.1,0.1],[0, 0.1,0.1]],
# mu=0.2, scale=0.25)
errgjr4,hgjr4, etax4 = generate_gjrgarch(warmup+nobs, ar, ma,
mu=0.4, scale=1.01)
errgjr4,hgjr4, etax4 = errgjr4[warmup:], hgjr4[warmup:], etax4[warmup:]
garchplot(errgjr4[:nobs], hgjr4[:nobs], 'GJR-GARCH(1,3) Simulation - DGP')
ggmod = Garch(errgjr4-errgjr4.mean())#hgjr4[:nobs])#-hgjr4.mean()) #errgjr4)
ggmod.nar = 1
ggmod.nma = 1
ggmod._start_params = np.array([-0.6, 0.1, 0.2, 0.0])
ggres = ggmod.fit(start_params=np.array([-0.6, 0.1, 0.2, 0.0]), maxiter=1000)
print 'ggres.params', ggres.params
garchplot(ggmod.errorsest, ggmod.h, title='Garch estimated')
ggmod0 = Garch0(errgjr4-errgjr4.mean())#hgjr4[:nobs])#-hgjr4.mean()) #errgjr4)
ggmod0.nar = 1
ggmod.nma = 1
start_params = np.array([-0.6, 0.2, 0.1])
ggmod0._start_params = start_params #np.array([-0.6, 0.1, 0.2, 0.0])
ggres0 = ggmod0.fit(start_params=start_params, maxiter=2000)
print 'ggres0.params', ggres0.params
ggmod0 = Garch0(errgjr4-errgjr4.mean())#hgjr4[:nobs])#-hgjr4.mean()) #errgjr4)
ggmod0.nar = 1
ggmod.nma = 1
start_params = np.array([-0.6, 0.2, 0.1])
ggmod0._start_params = start_params #np.array([-0.6, 0.1, 0.2, 0.0])
ggres0 = ggmod0.fit(start_params=start_params, method='bfgs', maxiter=2000)
print 'ggres0.params', ggres0.params
g11res = optimize.fmin(lambda params: -loglike_GARCH11(params, errgjr4-errgjr4.mean())[0], [0.93, 0.9, 0.2])
print g11res
llf = loglike_GARCH11(g11res, errgjr4-errgjr4.mean())
print llf[0]
if 'rpyfit' in examples:
from rpy import r
r.library('fGarch')
f = r.formula('~garch(1, 1)')
fit = r.garchFit(f, data = errgjr4-errgjr4.mean(), include_mean=False)
if 'rpysim' in examples:
from rpy import r
f = r.formula('~garch(1, 1)')
#fit = r.garchFit(f, data = errgjr4)
x = r.garchSim( n = 500)
print 'R acf', tsa.acf(np.power(x,2))[:15]
arma3 = Arma(np.power(x,2))
arma3res = arma3.fit(start_params=[-0.2,0.1,0.5],maxiter=5000)
print arma3res.params
arma3b = Arma(np.power(x,2))
arma3bres = arma3b.fit(start_params=[-0.2,0.1,0.5],maxiter=5000, method='bfgs')
print arma3bres.params
xr = r.garchSim( n = 100)
x = np.asarray(xr)
ggmod = Garch(x-x.mean())
ggmod.nar = 1
ggmod.nma = 1
ggmod._start_params = np.array([-0.6, 0.1, 0.2, 0.0])
ggres = ggmod.fit(start_params=np.array([-0.6, 0.1, 0.2, 0.0]), maxiter=1000)
print 'ggres.params', ggres.params
g11res = optimize.fmin(lambda params: -loglike_GARCH11(params, x-x.mean())[0], [0.6, 0.6, 0.2])
print g11res
llf = loglike_GARCH11(g11res, x-x.mean())
print llf[0]
garchplot(ggmod.errorsest, ggmod.h, title='Garch estimated')
fit = r.garchFit(f, data = x-x.mean(), include_mean=False, trace=False)
print r.summary(fit)
'''based on R default simulation
model = list(omega = 1e-06, alpha = 0.1, beta = 0.8)
nobs = 1000
(with nobs=500, gjrgarch doesn't do well
>>> ggres = ggmod.fit(start_params=np.array([-0.6, 0.1, 0.2, 0.0]), maxiter=1000)
Optimization terminated successfully.
Current function value: -448.861335
Iterations: 385
Function evaluations: 690
>>> print 'ggres.params', ggres.params
ggres.params [ -7.75090330e-01 1.57714749e-01 -9.60223930e-02 8.76021411e-07]
rearranged
8.76021411e-07 1.57714749e-01(-9.60223930e-02) 7.75090330e-01
>>> print g11res
[ 2.97459808e-06 7.83128600e-01 2.41110860e-01]
>>> llf = loglike_GARCH11(g11res, x-x.mean())
>>> print llf[0]
442.603541936
Log Likelihood:
-448.9376 normalized: -4.489376
omega alpha1 beta1
1.01632e-06 1.02802e-01 7.57537e-01
'''
''' the following is for errgjr4-errgjr4.mean()
ggres.params [-0.54510407 0.22723132 0.06482633 0.82325803]
Final Estimate:
LLH: 2065.56 norm LLH: 2.06556
mu omega alpha1 beta1
0.07229732 0.83069480 0.26313883 0.53986167
ggres.params [-0.50779163 0.2236606 0.00700036 1.154832
Final Estimate:
LLH: 2116.084 norm LLH: 2.116084
mu omega alpha1 beta1
-4.759227e-17 1.145404e+00 2.288348e-01 5.085949e-01
run3
DGP
0.4/?? 0.8 0.7
gjrgarch:
ggres.params [-0.45196579 0.2569641 0.02201904 1.11942636]
rearranged
const/omega ma1/alpha1 ar1/beta1
1.11942636 0.2569641(+0.02201904) 0.45196579
g11:
[ 1.10262688 0.26680468 0.45724957]
-2055.73912687
R:
Final Estimate:
LLH: 2055.738 norm LLH: 2.055738
mu omega alpha1 beta1
-1.665226e-17 1.102396e+00 2.668712e-01 4.573224e-01
fit = r.garchFit(f, data = errgjr4-errgjr4.mean())
rpy.RPy_RException: Error in solve.default(fit$hessian) :
Lapack routine dgesv: system is exactly singular
run4
DGP:
mu=0.4, scale=1.01
ma = [[1., 0, 0],[0, 0.8,0.0]], ar = [1.0, -0.7]
maybe something wrong with simulation
gjrgarch
ggres.params [-0.50554663 0.24449867 -0.00521004 1.00796791]
rearranged
1.00796791 0.24449867(-0.00521004) 0.50554663
garch11:
[ 1.01258264 0.24149155 0.50479994]
-2056.3877404
R include_constant=False
Final Estimate:
LLH: 2056.397 norm LLH: 2.056397
omega alpha1 beta1
1.0123560 0.2409589 0.5049154
'''
erro,ho, etaxo = generate_gjrgarch(20, ar, ma, mu=0.04, scale=0.01,
varinnovation = np.ones(20))
if 'sp500' in examples:
import tabular as tb
import scikits.timeseries as ts
a = tb.loadSV(r'C:\Josef\work-oth\gspc_table.csv')
s = ts.time_series(a[0]['Close'][::-1],
dates=ts.date_array(a[0]['Date'][::-1],freq="D"))
sp500 = a[0]['Close'][::-1]
sp500r = np.diff(np.log(sp500))
#plt.show()
| bsd-3-clause | -5,336,992,565,001,653,000 | 31.269697 | 108 | 0.6302 | false | 2.360151 | false | false | false |
JCardenasRdz/Machine-Learning-4-MRI | Infection_vs_Inflammation/Code/Process_Data.py | 1 | 2713 | # Import Modules as needed
import numpy as np
#import seaborn as sn
import pandas as pd
from pylab import *
from mylocal_functions import *
# ======== T2 MSME============= #
# Make list of all T2.txt files
T2_list = get_ipython().getoutput('ls ../Study_03_CBA/*T2.txt')
# Allocate variables needed for analysis
T2DF=pd.DataFrame()
TR=np.linspace(.012,.012*12,12)
# Fit T2 and construct dataframe
for names in T2_list:
#Convert txt file to array
YDataMatrix=txt_2_array(names)
#Estimate T2
T2time=fitT2(TR,YDataMatrix)
#convert to data frame
df_T2=pd.DataFrame(T2time.T,columns=["Infected","Healthy_R","St_Inf","Healthy_L"])
#df_T2=pd.DataFrame(T2time.T,columns=["ROI-1","ROI-2","ROI-3","ROI-4"])
df_info=name_2_df(names)
df_final=pd.concat([df_T2,df_info], axis=1)
T2DF=T2DF.append(df_final,ignore_index=True)
# Plot T2 Density ROIs 1 and 2
#T2DF[T2DF.Slice==1].iloc[:,:4].plot.density(); title("Slice 01"); xlim((0.025,.15))
#T2DF[T2DF.Slice==2].iloc[:,:4].plot.density(); title("Slice 02"); xlim((0.025,.15))
#T2DF[T2DF.Slice==3].iloc[:,:4].plot.density(); title("Slice 03"); xlim((0.025,.15))
#T2DF[T2DF.Slice==4].iloc[:,:4].plot.density(); title("Slice 04"); xlim((0.025,.15))
#T2DF[T2DF.Slice==5].iloc[:,:4].plot.density(); title("Slice 05"); xlim((0.025,.15))
# ======== CEST============= #
# Make list of all T2.txt files
CEST_list=get_ipython().getoutput('ls ../Study_03_CBA/*CEST.txt')
CEST_DF=pd.DataFrame()
Z=np.zeros((4,110))
def normalize_data(DataMatrix):
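    # Note (added): each row (one Z-spectrum) is divided by its value at column
    # index 8, presumably an off-resonance reference point; this interpretation
    # is inferred from the code rather than documented in the original.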
rows,cols = DataMatrix.shape
newData = np.zeros_like(DataMatrix)
for row in range(rows):
newData[row,:]=DataMatrix[row,:]/DataMatrix[row,8]
return newData
for names in CEST_list:
#Convert txt file to array
D=txt_2_array(names);
Zn=normalize_data(D.T)
Z=np.concatenate((Z,Zn))
Z=Z[4::,9::]
# define offsets in ppm
a1=np.linspace(-55,-50,9)
ppm=np.linspace(-8,8,101)
full_ppm = np.concatenate((a1, ppm))
# fit CEST data.
y=Z[12,:]
p=fit_L2_scale(ppm,y)
Yhat=Lscale(ppm,p[0],p[1],p[2],p[3],p[4],p[5],p[6]);
plt.figure(figsize=(10,6))
plt.plot(ppm,y,'o',label='Signal');
plt.plot(ppm,1-Yhat,'-',label='Fit');
plt.legend()
## ====== BUILD CEST Predictors ======== #####
CEST_predictors=np.zeros_like(Z)
rows,cols = CEST_predictors.shape
Tissue_Class=np.zeros((4,rows))
for i in range(rows):
p=fit_L2_scale(ppm,Z[i,:])
CEST_predictors[i,:]=Lscale(ppm,p[0],p[1],p[2],p[3],p[4],p[5],p[6]);
Tissue_Class=np.zeros((64,1))
for i in range(4):
Tissue_Class[i::4]=i
CEST_Dataframe=pd.DataFrame(CEST_predictors)
CEST_Dataframe["Tissue_Class"]=Tissue_Class
pd.DataFrame.to_csv(CEST_Dataframe,"CEST_infections.csv",header=True,index=False)
| mit | 3,666,962,277,034,696,000 | 28.172043 | 86 | 0.647991 | false | 2.448556 | false | false | false |
s-tar/just-a-chat | modules/chat/chat.py | 1 | 5141 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'mr.S'
from kernel.module import Module
from kernel.server import app
from bottle import jinja2_template as template, request
from entities.s_chat import Chat
from entities.s_message import Message
from kernel.validator import Validator
from kernel.socket import Rooms
import opengraph
import urlparse
import kernel.widget
module = Module('chat', route="/chat")
@app.route('/')
@app.route('/chat/<chat_id:int>')
def main(chat_id=None):
user = request.user.get()
if user:
chats = request.db(Chat).get_all_by_user(user)
current_chat = request.db(Chat).get_by_id(chat_id) if chat_id else None
return template('page', {'content': template('chat/main', {
'chats': chats,
'chat_id': chat_id,
'current_chat': current_chat,
'current_is_new': current_chat and current_chat not in chats
})})
return template('page', {'content': template('index')})
@module.post('/new')
def new_chat():
user = request.user.get()
if user:
data = request.forms
v = Validator(data)
v.field("chat_name").required(message='Назовите как-то чат')
if v.is_valid():
data = v.valid_data
chat = Chat()
chat.name = data.get("chat_name")
chat.members.append(user)
request.db.add(chat)
request.db.flush()
request.db.commit()
Rooms.get('user.'+str(user.usr_id)).emit('chat.new', {
'chat': chat.as_dict(),
'chat_item': kernel.widget.get('chat.item', {'chat': chat})})
return {"status": "ok"}
return {"status": "fail", "errors": v.errors}
@module.post('/<chat_id:int>/join')
def join_chat(chat_id=None):
user = request.user.get()
if user:
chat = request.db(Chat).get_by_id(chat_id)
if chat and user not in chat.members:
chat.members.append(user)
request.db.add(chat)
request.db.commit()
Rooms.get('user.'+str(user.usr_id)).emit('chat.join', { "chat": chat.as_dict(),
"chat_item": kernel.widget.get('chat.item', {'chat': chat})})
new_message(request.db, chat, '%s %s присоединяется к чату.' % (user.usr_firstname, user.usr_lastname), user, True)
return {"status": "ok",
"chat": chat.as_dict(),
"chat_item": kernel.widget.get('chat.item', {'chat': chat}),
"messages": kernel.widget.get('chat.messages', {'chat_id': chat.id})
}
return {"status": "fail"}
@module.post('/<chat_id:int>/leave')
def leave_chat(chat_id=None):
user = request.user.get()
if user:
chat = request.db(Chat).get_by_id(chat_id)
if chat:
chat.members.remove(user)
if len(chat.members) == 0:
chat.deleted = True
request.db.add(chat)
request.db.commit()
new_message(request.db, chat, '%s %s покидает чат.' % (user.usr_firstname, user.usr_lastname), user, True)
return {"status": "ok"}
return {"status": "fail"}
@module.post('/new_message')
def new_message_route():
user = request.user.get()
if user:
data = request.forms
v = Validator(data)
v.field("chat_id").integer()
if v.valid_data.get('chat_id'):
data = v.valid_data
chat = request.db(Chat).get_by_id(data.get('chat_id'))
if chat:
text = data.get('message').strip()
new_message(request.db, chat, text, user)
return {"status": "ok"}
return {"status": "fail"}
@module.post('/search')
def search():
user = request.user.get()
text = request.forms.get('text')
chats = request.db().query(Chat).filter(Chat.deleted == False, Chat.name.ilike(text.strip()+'%'), ~Chat.members.contains(user)).all()
return {
'chats': [c.as_dict() for c in chats],
'chat_items': [kernel.widget.get('chat.item', {'chat': chat}) for chat in chats]
}
def to_url(text):
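    # Illustrative behaviour (examples are assumptions): 'www.example.com' becomes
    # 'http://www.example.com', an explicit http(s) URL is returned unchanged, and
    # anything else yields None so the caller treats it as plain text.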
text = 'http://'+text if text.startswith('www.') else text
return text if text.startswith('http://') or text.startswith('https://') else None
def new_message(db, chat, text, user, system=False):
data = None
url = to_url(text)
if url:
try:
og = opengraph.OpenGraph(url=url)
text = url
data = str(og if og.is_valid() else {})
except:
data = str({})
message = Message()
message.chat = chat
message.text = text
message.data = data
message.sender = user
message.is_system = system
chat.messages.append(message)
db.add(chat)
db.flush()
db.commit()
for member in chat.members:
Rooms.get('user.'+str(member.usr_id)).emit('chat.new_message', {
'is_sender': member.usr_id == user.usr_id,
'message': message.as_dict(),
'message_item': kernel.widget.get('chat.message', {'message': message})}) | mit | -6,152,623,473,492,803,000 | 32.090909 | 137 | 0.564475 | false | 3.414879 | false | false | false |
BeataBak/project-euler-problems | 008.py | 1 | 3404 | """
Project Euler Problem 8
=======================
Find the greatest product of thirteen consecutive digits in the 1000-digit
number.
73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450
"""
from functools import reduce
BIG_CUBE = ''.join("""
73167176531330624919225119674426574742355349194934
96983520312774506326239578318016984801869478851843
85861560789112949495459501737958331952853208805511
12540698747158523863050715693290963295227443043557
66896648950445244523161731856403098711121722383113
62229893423380308135336276614282806444486645238749
30358907296290491560440772390713810515859307960866
70172427121883998797908792274921901699720888093776
65727333001053367881220235421809751254540594752243
52584907711670556013604839586446706324415722155397
53697817977846174064955149290862569321978468622482
83972241375657056057490261407972968652414535100474
82166370484403199890008895243450658541227588666881
16427171479924442928230863465674813919123162824586
17866458359124566529476545682848912883142607690042
24219022671055626321111109370544217506941658960408
07198403850962455444362981230987879927244284909188
84580156166097919133875499200524063689912560717606
05886116467109405077541002256983155200055935729725
71636269561882670428252483600823257530420752963450
""".split())
def scoper(s, width, pos=0):
"""
    Takes a string `s` and a width, and yields consecutive chunks of `s`
    of size `width` until the end of `s` is reached.
"""
while True:
yield s[pos:pos + width]
if pos + width == len(s):
break
pos += 1
def product_of_string(s):
"""
Takes a string containing integers and returns the product.
"""
return reduce(lambda x, y: x * y, [int(i) for i in s])
def main(length=13):
return max([product_of_string(s) for s in scoper(BIG_CUBE, length)])
def test_scoper():
assert list(scoper('Beata', 2)) == ['Be', 'ea', 'at', 'ta']
assert list(scoper('Beata', 3)) == ['Bea', 'eat', 'ata']
def test_product_of_string():
assert product_of_string('245') == 40
def test_main():
assert main(4) == 5832
print(main())
| mit | 7,358,781,582,324,919,000 | 35.602151 | 74 | 0.765864 | false | 2.803954 | false | false | false |
CMUSV-VisTrails/WorkflowRecommendation | vistrails/db/versions/v1_0_0/domain/log.py | 1 | 3092 | ###############################################################################
##
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the University of Utah nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from auto_gen import DBLog as _DBLog
from auto_gen import DBAbstraction, DBModule, DBGroup, DBLoopExec, \
DBGroupExec, DBModuleExec
from id_scope import IdScope
import copy
class DBLog(_DBLog):
def __init__(self, *args, **kwargs):
_DBLog.__init__(self, *args, **kwargs)
self.id_scope = IdScope(1,
{DBLoopExec.vtType: 'item_exec',
DBModuleExec.vtType: 'item_exec',
DBGroupExec.vtType: 'item_exec',
DBAbstraction.vtType: DBModule.vtType,
DBGroup.vtType: DBModule.vtType})
def __copy__(self):
return DBLog.do_copy(self)
def do_copy(self, new_ids=False, id_scope=None, id_remap=None):
cp = _DBLog.do_copy(self, new_ids, id_scope, id_remap)
cp.__class__ = DBLog
cp.id_scope = copy.copy(self.id_scope)
        return cp
@staticmethod
def update_version(old_obj, trans_dict, new_obj=None):
if new_obj is None:
new_obj = DBLog()
new_obj = _DBLog.update_version(old_obj, trans_dict, new_obj)
new_obj.update_id_scope()
return new_obj
def update_id_scope(self):
pass
| bsd-3-clause | -260,436,497,900,556,860 | 42.549296 | 79 | 0.635834 | false | 4.13369 | false | false | false |
pypa/warehouse | warehouse/utils/db/windowed_query.py | 1 | 2050 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Taken from "Theatrum Chemicum" at
# https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/WindowedRangeQuery
from sqlalchemy import and_, func, text
def column_windows(session, column, windowsize):
"""
Return a series of WHERE clauses against a given column that break it into
windows.
Result is an iterable of tuples, consisting of ((start, end), whereclause),
where (start, end) are the ids.
    Requires a database that supports window functions, e.g. Postgresql,
SQL Server, Oracle.
Enhance this yourself ! Add a "where" argument so that windows of just a
subset of rows can be computed.
"""
def int_for_range(start_id, end_id):
if end_id:
return and_(column >= start_id, column < end_id)
else:
return column >= start_id
q = session.query(
column, func.row_number().over(order_by=column).label("rownum")
).from_self(column)
if windowsize > 1:
q = q.filter(text("rownum %% %d=1" % windowsize))
intervals = [row[0] for row in q]
while intervals:
start = intervals.pop(0)
if intervals:
end = intervals[0]
else:
end = None
yield int_for_range(start, end)
def windowed_query(q, column, windowsize):
"""
Break a Query into windows on a given column.
"""
for whereclause in column_windows(q.session, column, windowsize):
for row in q.filter(whereclause).order_by(column):
yield row
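# --- Illustrative usage sketch (added; not part of the original module) -----
# A hypothetical walk over a large table in windows of 1000 rows; ``Project``
# and ``session`` are stand-ins, and a backend with window functions
# (e.g. PostgreSQL) is assumed, as the docstring above notes.
#
#     query = session.query(Project)
#     for project in windowed_query(query, Project.id, 1000):
#         do_something_with(project)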
| apache-2.0 | 1,703,204,328,040,316,700 | 30.060606 | 79 | 0.666829 | false | 3.867925 | false | false | false |
UfSoft/ISPManCCP | extra-packages/pyperl-1.0.1d/t/apply.py | 1 | 2941 | import perl
#if (perl.MULTI_PERL):
# print "1..0"
# raise SystemExit
print "1..14"
def ok(a, b=None):
return "a=" + str(a) + ", b=" + str(b)
perl.eval("""
use Python qw(apply);
$| = 1;
sub {
my $f = shift;
# First some tests that are expected to blow up
eval {
apply($f);
};
#print $@;
# XXX For some strange reason =~ is not to force $@ to stingify, so
# I had to help it with "$@" =~.
# Hmmm, something to fix some other time :-(
print "not " unless "$@" =~ /^python\.<type 'exceptions.TypeError'>: ok\(\) takes at least 1 argument \(0 given\)/;
print "ok 1\n";
eval {
apply($f, undef);
};
#print $@;
print "not " unless "$@" =~ /^python\.<type 'exceptions.TypeError'>: ok\(\) takes at least 1 argument \(0 given\)/;
print "ok 2\n";
eval {
apply($f, undef, undef);
};
#print $@;
print "not " unless "$@" =~ /^python\.<type 'exceptions.TypeError'>: ok\(\) takes at least 1 argument \(0 given\)/;
print "ok 3\n";
eval {
apply($f, undef, undef, undef);
};
#print $@;
print "not " unless "$@" =~ /^Too many arguments at \(eval 1\) line \d+./;
print "ok 4\n";
eval {
apply($f, [1,2,3]);
};
#print $@;
print "not " unless "$@" =~ /^python\.<type 'exceptions.TypeError'>: ok\(\) takes at most 2 arguments \(3 given\)/;
print "ok 5\n";
eval {
apply($f, [], {b => 2});
};
#print $@;
print "not " unless "$@" =~ /^python\.<type 'exceptions.TypeError'>: ok\(\) takes at least 1 non-keyword argument \(0 given\)/;
print "ok 6\n";
eval {
apply($f, [1], {a => 2});
};
#print $@;
print "not " unless "$@" =~ /^python\.<type 'exceptions.TypeError'>: ok\(\) got multiple values for keyword argument 'a'/;
print "ok 7\n";
eval {
apply($f, [], {a => 2, b => 3, c => 4});
};
#print $@;
print "not " unless "$@" =~ /^python\.<type 'exceptions.TypeError'>: ok\(\) got an unexpected keyword argument 'c'/;
print "ok 8\n";
eval {
apply($f, 1);
};
#print $@;
print "not " unless "$@" =~ /^/;
print "ok 9\n";
# Then some tests that are expected to work
$res = apply($f, undef, { a => 101, b => 102 });
#print "$res\\n";
print "not " unless $res eq "a=101, b=102";
print "ok 10\n";
$res = apply($f, undef, { a => 101 });
#print "$res\\n";
print "not " unless $res eq "a=101, b=None";
print "ok 11\n";
$res = apply($f, [101, 102]);
#print "$res\\n";
print "not " unless $res eq "a=101, b=102";
print "ok 12\n";
$res = apply($f, Python::list(101, 102), Python::dict());
#print "$res\\n";
print "not " unless $res eq "a=101, b=102";
print "ok 13\n";
$res = apply($f, [], Python::dict(a => 101));
#print "$res\\n";
print "not " unless $res eq "a=101, b=None";
print "ok 14\n";
}
""")(ok)
| bsd-3-clause | -2,264,924,902,036,801,500 | 24.136752 | 131 | 0.50119 | false | 2.946894 | false | false | false |
a710128/Lesson9 | API/course.py | 1 | 3339 | import re
class CourseException(Exception):
def __init__(self, msg, err):
super(CourseException, self).__init__()
self.msg = msg
self.err = err
def __str__(self):
return "CourseError : " + self.msg
def __repr__(self):
        return '<CourseException msg : "%s", errcode : %d>' % (self.msg, self.err)
def courseTimeParser(timeStr):
assert isinstance(timeStr, str), "Parameter type error"
timeStr, _ = re.subn('\([^)]*\)', '', timeStr)
ret = []
for item in timeStr.split(","):
if item == '':
continue
ws, pd = item.split('-')
ret.append((int(ws), int(pd)))
return ret
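# Illustrative example (added; the input format is assumed from the regex above):
# courseTimeParser("3-2,5-1(odd weeks only)") strips the parenthesised note and
# returns [(3, 2), (5, 1)], interpreted here as (day, period) integer pairs.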
class Course:
def __init__(self, **kwargs):
if 'kch' in kwargs and 'kxh' in kwargs:
self.kch = kwargs['kch']
self.kxh = kwargs['kxh']
elif 'kid' in kwargs:
vs = kwargs['kid'].split(':')
if len(vs) != 2:
raise CourseException("Wrong Course id parameter", 0)
self.kch = vs[0]
self.kxh = vs[1]
else:
raise CourseException("Invalid parameters when Course __init__!", 1)
self.name = ''
self.teacher = ''
self.time = []
self.score = 0
self.feature = ''
self.other = ''
params = {
'name': 'Unknown',
'teacher': 'Unknown',
'time': [],
'score': 0,
'feature': '',
'other': ''
}
for key in params:
if key in kwargs:
if isinstance(kwargs[key], type(params[key])):
self.__dict__[key] = kwargs[key]
else:
raise CourseException("Invalid parameters when Course __init__!", 1)
else:
self.__dict__[key] = params[key]
for item in self.time:
if (not isinstance(item, tuple)) or len(item) != 2 or (not isinstance(item[0], int)) or (not isinstance(item[1], int)):
raise CourseException("Invalid parameters when Course __init__!", 1)
def __eq__(self, other):
if self.kxh == '*' or other.kxh == '*':
return self.kch == other.kch
return self.kch == other.kch and self.kxh == other.kxh
def timeClash(self, other):
if isinstance(other, tuple):
for time in self.time:
if time == other:
return True
return False
elif isinstance(other, Course):
for time in self.time:
if other.timeClash(time):
return True
return False
else:
raise CourseException("Invalid parameters when Course timeClash!", 2)
def __str__(self):
ret = 'Course: %s:%s; Time : ' % (self.kch, self.kxh)
first = True
for wk, pd in self.time:
if first:
first = False
else:
ret += ','
ret += '%d-%d' % (wk, pd)
ret += '; Name: %s; Teacher: %s; Score: %d; Feature: %s; Other: %s' % (self.name, self.teacher, self.score, self.feature, self.other)
return ret
def __repr__(self):
return "<" + self.__str__() + ">"
def __hash__(self):
return hash(self.kch + ":" + self.kxh)
| mit | 2,873,179,843,956,898,300 | 29.633028 | 141 | 0.48248 | false | 3.946809 | false | false | false |
robwarm/gpaw-symm | gpaw/cluster.py | 1 | 6122 | """Extensions to the ase Atoms class
"""
import numpy as np
from ase import Atoms
from ase.io import read, write
from ase.data import covalent_radii
from ase.calculators.neighborlist import NeighborList
class Cluster(Atoms):
"""A class for cluster structures
to enable simplified manipulation"""
def __init__(self, *args, **kwargs):
self.data = {}
if len(args) > 0:
filename = args[0]
if isinstance(filename, str):
self.read(filename, kwargs.get('filetype'))
return
else:
Atoms.__init__(self, [])
if kwargs.get('filename') is not None:
filename = kwargs.pop('filename')
Atoms.__init__(self, *args, **kwargs)
self.read(filename, kwargs.get('filetype'))
else:
Atoms.__init__(self, *args, **kwargs)
def extreme_positions(self):
"""get the extreme positions of the structure"""
pos = self.get_positions()
return np.array([np.minimum.reduce(pos), np.maximum.reduce(pos)])
def find_connected(self, index, dmax=None, scale=1.5):
"""Find the atoms connected to self[index] and return them.
If dmax is not None:
Atoms are defined to be connected if they are nearer than dmax
to each other.
If dmax is None:
Atoms are defined to be connected if they are nearer than the
sum of their covalent radii * scale to each other.
"""
# set neighbor lists
neighborlist = []
if dmax is None:
# define neighbors according to covalent radii
radii = scale * covalent_radii[self.get_atomic_numbers()]
for atom in self:
positions = self.positions - atom.position
distances = np.sqrt(np.sum(positions**2, axis=1))
radius = scale * covalent_radii[atom.number]
neighborlist.append(np.where(distances < radii + radius)[0])
else:
# define neighbors according to distance
nl = NeighborList([0.5 * dmax] * len(self), skin=0)
nl.update(self)
for i, atom in enumerate(self):
neighborlist.append(list(nl.get_neighbors(i)[0]))
connected = list(neighborlist[index])
isolated = False
while not isolated:
isolated = True
for i in connected:
for j in neighborlist[i]:
if j in connected:
pass
else:
connected.append(j)
isolated = False
atoms = Cluster()
for i in connected:
atoms.append(self[i])
return atoms
def minimal_box(self, border=0, h=None, multiple=4):
"""The box needed to fit the structure in.
The structure is moved to fit into the box [(0,x),(0,y),(0,z)]
        with x,y,z > 0 (fitting the ASE convention).
The border argument can be used to add a border of empty space
around the structure.
If h is set, the box is extended to ensure that box/h is
a multiple of 'multiple'.
This ensures that GPAW uses the desired h.
The shift applied to the structure is returned.
"""
if len(self) == 0:
return None
extr = self.extreme_positions()
# add borders
if type(border)==type([]):
b = border
else:
b = [border, border, border]
for c in range(3):
extr[0][c] -= b[c]
extr[1][c] += b[c] - extr[0][c] # shifted already
# check for multiple of 4
if h is not None:
if not hasattr(h, '__len__'):
h = np.array([h, h, h])
for c in range(3):
# apply the same as in paw.py
L = extr[1][c] # shifted already
N = np.ceil(L / h[c] / multiple) * multiple
# correct L
dL = N * h[c] - L
# move accordingly
extr[1][c] += dL # shifted already
extr[0][c] -= dL / 2.
# move lower corner to (0, 0, 0)
shift = tuple(-1. * np.array(extr[0]))
self.translate(shift)
self.set_cell(tuple(extr[1]))
return shift
def get(self, name):
"""General get"""
attr = 'get_' + name
if hasattr(self, attr):
            return getattr(self, attr)()
elif self.data.has_key(name):
return self.data[name]
else:
return None
def set(self, name, data):
"""General set"""
attr = 'set_' + name
if hasattr(self, attr):
getattr(self, attr)(data)
else:
self.data[name] = data
def read(self, filename, format=None):
"""Read the structure from some file. The type can be given
or it will be guessed from the filename."""
self.__init__(read(filename, format=format))
return len(self)
def write(self, filename=None, format=None, repeat=None):
"""Write the structure to file.
Parameters
----------
format: string
can be given or it will be guessed from the filename
repeat: array, eg.: [1,0,1]
can be used to repeat the structure
"""
if filename is None:
if format is None:
raise RuntimeError('Please specify either filename or format.')
else:
filename = self.get_name() + '.' + format
out = self
if repeat is None:
out = self
else:
out = Cluster([])
cell = self.get_cell().diagonal()
for i in range(repeat[0] + 1):
for j in range(repeat[1] + 1):
for k in range(repeat[2] + 1):
copy = self.copy()
copy.translate(np.array([i, j, k]) * cell)
out += copy
write(filename, out, format)
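# --- Hedged usage sketch (added; not part of the original module) -----------
# Minimal example of the intended workflow: wrap an ASE Atoms object, then let
# minimal_box() translate it and size the cell for a given grid spacing.  The
# geometry and the h value below are assumptions for illustration only.
if __name__ == '__main__':
    water = Cluster(Atoms('OH2', positions=[(0.0, 0.0, 0.0),
                                            (0.96, 0.0, 0.0),
                                            (-0.24, 0.93, 0.0)]))
    shift = water.minimal_box(border=4.0, h=0.2)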
| gpl-3.0 | -2,736,821,158,500,044,300 | 30.556701 | 79 | 0.516335 | false | 4.198903 | false | false | false |
cggh/DQXServer | responders/recordinfo.py | 1 | 1698 | # This file is part of DQXServer - (C) Copyright 2014, Paul Vauterin, Ben Jeffery, Alistair Miles <[email protected]>
# This program is free software licensed under the GNU Affero General Public License.
# You can find a copy of this license in LICENSE in the top directory of the source code or at <http://opensource.org/licenses/AGPL-3.0>
import DQXDbTools
import DQXUtils
from DQXDbTools import DBCOLESC
from DQXDbTools import DBTBESC
import config
def response(returndata):
mytablename = returndata['tbname']
encodedquery = returndata['qry']
databaseName = None
if 'database' in returndata:
databaseName = returndata['database']
with DQXDbTools.DBCursor(returndata, databaseName, read_timeout=config.TIMEOUT) as cur:
whc = DQXDbTools.WhereClause()
        whc.ParameterPlaceHolder = '%s'  # NOTE: MySQL via PyODBC seems to require this nonstandard coding
whc.Decode(encodedquery)
whc.CreateSelectStatement()
sqlquery = "SELECT * FROM {0} WHERE {1}".format(
DBTBESC(mytablename),
whc.querystring_params
)
if DQXDbTools.LogRequests:
DQXUtils.LogServer('###QRY:'+sqlquery)
DQXUtils.LogServer('###PARAMS:'+str(whc.queryparams))
cur.execute(sqlquery, whc.queryparams)
therow = cur.fetchone()
if therow is None:
returndata['Error'] = 'Record not found'
else:
data={}
colnr=0
for column in cur.description:
data[column[0]] = str(therow[colnr])
colnr += 1
returndata['Data'] = data
return returndata
| agpl-3.0 | -3,628,282,154,136,333,300 | 34.913043 | 136 | 0.630153 | false | 3.691304 | false | false | false |
hudora/huDjango | hudjango/management/commands/couchdb-init.py | 1 | 1234 | # encoding: utf-8
import couchdb
from optparse import make_option
from hudjango.management.couchdb.support import CouchDBBaseCommand
from django.core.management.base import CommandError
class Command(CouchDBBaseCommand):
help = """ Creates a new couchdb database. """
option_list = CouchDBBaseCommand.option_list + (
make_option('--purge', action='store_true', help='Delete existing database [default: %default]'),
)
def handle(self, *args, **options):
# get the name of the database to create
if len(args) != 1:
raise CommandError("You need to specify exactly one argument as database name")
database = args[0]
# drop a possibly existing database if the user wants us to.
couch = self._server(options)
if options['purge']:
try:
couch.delete(database)
except couchdb.client.ResourceNotFound:
pass
# then create the new database
try:
couch.create(database)
except couchdb.client.PreconditionFailed, exception:
raise CommandError("%s: %s" % (database, str(exception)))
print "database '%s' created succesfully" % database
| bsd-2-clause | -6,668,120,594,051,540,000 | 35.294118 | 105 | 0.636143 | false | 4.487273 | false | false | false |
T-R0D/JustForFun | aoc2016/aoc2016/day18/solution.py | 1 | 2158 | # This file is part of aoc2016.
#
# aoc2016 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# aoc2016 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with aoc2016. If not, see <http://www.gnu.org/licenses/>.
SAFE_TILE = '.'
TRAP_TILE = '^'
def part_one(puzzle_input):
return count_safe_tiles_in_room(first_row_of_tiles=puzzle_input, n_rows=40)
def part_two(puzzle_input):
return count_safe_tiles_in_room(first_row_of_tiles=puzzle_input, n_rows=400000)
def count_safe_tiles_in_room(first_row_of_tiles, n_rows):
current_row = list(first_row_of_tiles)
n_safe_tiles = count_safe_tiles(current_row)
for _ in range(n_rows - 1):
current_row = decode_next_row_of_tiles(current_row)
n_safe_tiles += count_safe_tiles((current_row))
return n_safe_tiles
def count_safe_tiles(row_of_tiles):
    n_safe_tiles = 0
    for tile in row_of_tiles:
        if tile == SAFE_TILE:
            n_safe_tiles += 1
    return n_safe_tiles
def decode_next_row_of_tiles(input_row):
new_row = ['' for _ in range(len(input_row))]
new_row[0] = determine_tile(SAFE_TILE, input_row[0], input_row[1])
new_row[-1] = determine_tile(input_row[-2], input_row[-1], SAFE_TILE)
for i in range(1, len(input_row) - 1):
new_row[i] = determine_tile(*input_row[i - 1: i + 2])
return new_row
def determine_tile(left, center, right):
if (left == TRAP_TILE and center == SAFE_TILE and right == SAFE_TILE) or \
(left == SAFE_TILE and center == SAFE_TILE and right == TRAP_TILE) or \
(left == TRAP_TILE and center == TRAP_TILE and right == SAFE_TILE) or \
(left == SAFE_TILE and center == TRAP_TILE and right == TRAP_TILE):
return TRAP_TILE
return SAFE_TILE
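# Hedged check (added): the small example from the puzzle statement, a starting
# row of '..^^.' expanded to 3 rows, should contain 6 safe tiles.
if __name__ == '__main__':
    assert count_safe_tiles_in_room('..^^.', 3) == 6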
| gpl-2.0 | 2,805,949,098,795,429,000 | 34.966667 | 83 | 0.667285 | false | 3.022409 | false | false | false |
pyfa-org/Pyfa | gui/builtinViews/implantEditor.py | 1 | 12699 | import re
# noinspection PyPackageRequirements
import wx
# noinspection PyPackageRequirements
from wx.lib.buttons import GenBitmapButton
import gui.builtinMarketBrowser.pfSearchBox as SBox
import gui.display as d
from gui.bitmap_loader import BitmapLoader
from gui.marketBrowser import SearchBox
from service.market import Market
def stripHtml(text):
text = re.sub('<\s*br\s*/?\s*>', '\n', text)
text = re.sub('</?[^/]+?(/\s*)?>', '', text)
return text
class BaseImplantEditorView(wx.Panel):
def addMarketViewImage(self, iconFile):
if iconFile is None:
return -1
bitmap = BitmapLoader.getBitmap(iconFile, "icons")
if bitmap is None:
return -1
else:
return self.availableImplantsImageList.Add(bitmap)
def __init__(self, parent):
wx.Panel.__init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize,
style=wx.TAB_TRAVERSAL)
self.SetBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOW))
pmainSizer = wx.BoxSizer(wx.HORIZONTAL)
availableSizer = wx.BoxSizer(wx.VERTICAL)
self.searchBox = SearchBox(self)
self.itemView = ItemView(self)
self.itemView.Hide()
availableSizer.Add(self.searchBox, 0, wx.EXPAND)
availableSizer.Add(self.itemView, 1, wx.EXPAND)
self.availableImplantsTree = wx.TreeCtrl(self, wx.ID_ANY, style=wx.TR_DEFAULT_STYLE | wx.TR_HIDE_ROOT)
root = self.availableRoot = self.availableImplantsTree.AddRoot("Available")
self.availableImplantsImageList = wx.ImageList(16, 16)
self.availableImplantsTree.SetImageList(self.availableImplantsImageList)
availableSizer.Add(self.availableImplantsTree, 1, wx.EXPAND)
pmainSizer.Add(availableSizer, 1, wx.ALL | wx.EXPAND, 5)
buttonSizer = wx.BoxSizer(wx.VERTICAL)
buttonSizer.AddStretchSpacer()
self.btnAdd = GenBitmapButton(self, wx.ID_ADD, BitmapLoader.getBitmap("fit_add_small", "gui"),
style=wx.BORDER_NONE)
buttonSizer.Add(self.btnAdd, 0)
self.btnRemove = GenBitmapButton(self, wx.ID_REMOVE, BitmapLoader.getBitmap("fit_delete_small", "gui"),
style=wx.BORDER_NONE)
buttonSizer.Add(self.btnRemove, 0)
buttonSizer.AddStretchSpacer()
pmainSizer.Add(buttonSizer, 0, wx.EXPAND, 0)
characterImplantSizer = wx.BoxSizer(wx.VERTICAL)
self.pluggedImplantsTree = AvailableImplantsView(self)
characterImplantSizer.Add(self.pluggedImplantsTree, 1, wx.ALL | wx.EXPAND, 5)
pmainSizer.Add(characterImplantSizer, 1, wx.EXPAND, 5)
self.SetSizer(pmainSizer)
self.hoveredLeftTreeTypeID = None
self.hoveredRightListRow = None
# Populate the market tree
sMkt = Market.getInstance()
for mktGrp in sMkt.getImplantTree():
iconId = self.addMarketViewImage(sMkt.getIconByMarketGroup(mktGrp))
childId = self.availableImplantsTree.AppendItem(root, mktGrp.name, iconId, data=mktGrp.ID)
if sMkt.marketGroupHasTypesCheck(mktGrp) is False:
self.availableImplantsTree.AppendItem(childId, "dummy")
self.availableImplantsTree.SortChildren(self.availableRoot)
# Bind the event to replace dummies by real data
self.availableImplantsTree.Bind(wx.EVT_TREE_ITEM_EXPANDING, self.expandLookup)
self.availableImplantsTree.Bind(wx.EVT_TREE_ITEM_ACTIVATED, self.itemSelected)
self.availableImplantsTree.Bind(wx.EVT_MOTION, self.OnLeftTreeMouseMove)
self.availableImplantsTree.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeftTreeMouseLeave)
self.itemView.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.itemSelected)
self.pluggedImplantsTree.Bind(wx.EVT_MOTION, self.OnRightListMouseMove)
# Bind add & remove buttons
self.btnAdd.Bind(wx.EVT_BUTTON, self.itemSelected)
self.btnRemove.Bind(wx.EVT_BUTTON, self.removeItem)
# We update with an empty list first to set the initial size for Layout(), then update later with actual
# implants for character. This helps with sizing issues.
self.pluggedImplantsTree.update([])
self.bindContext()
self.Layout()
self.update()
def bindContext(self):
# Binds self.contextChanged to whatever changes the context
raise NotImplementedError()
def getImplantsFromContext(self):
""" Gets list of implants from current context """
raise NotImplementedError()
def addImplantToContext(self, item):
""" Adds implant to the current context"""
raise NotImplementedError()
def removeImplantFromContext(self, implant):
""" Removes implant from the current context"""
raise NotImplementedError()
def update(self):
"""Updates implant list based off the current context"""
self.implants = self.getImplantsFromContext()[:]
self.implants.sort(key=lambda i: int(i.getModifiedItemAttr("implantness")))
self.pluggedImplantsTree.update(self.implants)
def contextChanged(self, event):
self.update()
event.Skip()
def expandLookup(self, event):
tree = self.availableImplantsTree
sMkt = Market.getInstance()
parent = event.Item
child, _ = tree.GetFirstChild(parent)
text = tree.GetItemText(child)
if text == "dummy" or text == "itemdummy":
tree.Delete(child)
# if the dummy item is a market group, replace with actual market groups
if text == "dummy":
# Add 'real stoof!' instead
currentMktGrp = sMkt.getMarketGroup(tree.GetItemData(parent), eager="children")
for childMktGrp in sMkt.getMarketGroupChildren(currentMktGrp):
iconId = self.addMarketViewImage(sMkt.getIconByMarketGroup(childMktGrp))
childId = tree.AppendItem(parent, childMktGrp.name, iconId, data=childMktGrp.ID)
if sMkt.marketGroupHasTypesCheck(childMktGrp) is False:
tree.AppendItem(childId, "dummy")
else:
tree.AppendItem(childId, "itemdummy")
# replace dummy with actual items
if text == "itemdummy":
currentMktGrp = sMkt.getMarketGroup(tree.GetItemData(parent))
items = sMkt.getItemsByMarketGroup(currentMktGrp)
for item in items:
iconId = self.addMarketViewImage(item.iconID)
tree.AppendItem(parent, item.name, iconId, data=item)
tree.SortChildren(parent)
def itemSelected(self, event):
if event.EventObject is self.btnAdd:
# janky fix that sets EventObject so that we don't have similar code elsewhere.
if self.itemView.IsShown():
event.EventObject = self.itemView
else:
event.EventObject = self.availableImplantsTree
if event.EventObject is self.itemView:
curr = event.EventObject.GetFirstSelected()
while curr != -1:
item = self.itemView.items[curr]
self.addImplantToContext(item)
curr = event.EventObject.GetNextSelected(curr)
else:
root = self.availableImplantsTree.GetSelection()
if not root.IsOk():
return
nchilds = self.availableImplantsTree.GetChildrenCount(root)
if nchilds == 0:
item = self.availableImplantsTree.GetItemData(root)
self.addImplantToContext(item)
else:
event.Skip()
return
self.update()
def removeItem(self, event):
pos = self.pluggedImplantsTree.GetFirstSelected()
if pos != -1:
self.removeImplantFromContext(self.implants[pos])
self.update()
# Due to https://github.com/wxWidgets/Phoenix/issues/1372 we cannot set tooltips on
# tree itself; work this around with following two methods, by setting tooltip to
# parent window
def OnLeftTreeMouseMove(self, event):
event.Skip()
treeItemId, _ = self.availableImplantsTree.HitTest(event.Position)
if not treeItemId:
if self.hoveredLeftTreeTypeID is not None:
self.hoveredLeftTreeTypeID = None
self.SetToolTip(None)
return
item = self.availableImplantsTree.GetItemData(treeItemId)
isImplant = getattr(item, 'isImplant', False)
if not isImplant:
if self.hoveredLeftTreeTypeID is not None:
self.hoveredLeftTreeTypeID = None
self.SetToolTip(None)
return
if self.hoveredLeftTreeTypeID == item.ID:
return
if self.ToolTip is not None:
self.SetToolTip(None)
else:
self.hoveredLeftTreeTypeID = item.ID
toolTip = wx.ToolTip(stripHtml(item.description))
toolTip.SetMaxWidth(self.GetSize().Width)
self.SetToolTip(toolTip)
def OnLeftTreeMouseLeave(self, event):
event.Skip()
self.SetToolTip(None)
def OnRightListMouseMove(self, event):
event.Skip()
row, _, col = self.pluggedImplantsTree.HitTestSubItem(event.Position)
if row != self.hoveredRightListRow:
if self.pluggedImplantsTree.ToolTip is not None:
self.pluggedImplantsTree.SetToolTip(None)
else:
self.hoveredRightListRow = row
try:
implant = self.implants[row]
except IndexError:
self.pluggedImplantsTree.SetToolTip(None)
else:
toolTip = wx.ToolTip(stripHtml(implant.item.description))
toolTip.SetMaxWidth(self.pluggedImplantsTree.GetSize().Width)
self.pluggedImplantsTree.SetToolTip(toolTip)
class AvailableImplantsView(d.Display):
DEFAULT_COLS = ["attr:implantness",
"Base Name"]
def __init__(self, parent):
d.Display.__init__(self, parent, style=wx.LC_SINGLE_SEL)
self.Bind(wx.EVT_LEFT_DCLICK, parent.removeItem)
class ItemView(d.Display):
DEFAULT_COLS = ["Base Icon",
"Base Name"]
def __init__(self, parent):
d.Display.__init__(self, parent)
self.parent = parent
self.searchBox = parent.searchBox
self.hoveredRow = None
self.items = []
# Bind search actions
self.searchBox.Bind(SBox.EVT_TEXT_ENTER, self.scheduleSearch)
self.searchBox.Bind(SBox.EVT_SEARCH_BTN, self.scheduleSearch)
self.searchBox.Bind(SBox.EVT_CANCEL_BTN, self.clearSearch)
self.searchBox.Bind(SBox.EVT_TEXT, self.scheduleSearch)
self.Bind(wx.EVT_MOTION, self.OnMouseMove)
def clearSearch(self, event=None):
if self.IsShown():
self.parent.availableImplantsTree.Show()
self.Hide()
self.parent.Layout()
if event:
self.searchBox.Clear()
self.items = []
self.update(self.items)
def scheduleSearch(self, event=None):
sMkt = Market.getInstance()
search = self.searchBox.GetLineText(0)
# Make sure we do not count wildcards as search symbol
realsearch = search.replace('*', '').replace('?', '')
# Show nothing if query is too short
if len(realsearch) < 3:
self.clearSearch()
return
sMkt.searchItems(search, self.populateSearch, 'implants')
def populateSearch(self, itemIDs):
if not self.IsShown():
self.parent.availableImplantsTree.Hide()
self.Show()
self.parent.Layout()
items = Market.getItems(itemIDs)
items = [i for i in items if i.group.name != 'Booster']
self.items = sorted(list(items), key=lambda i: i.name)
self.update(self.items)
def OnMouseMove(self, event):
event.Skip()
row, _, col = self.HitTestSubItem(event.Position)
if row != self.hoveredRow:
if self.ToolTip is not None:
self.SetToolTip(None)
else:
self.hoveredRow = row
try:
item = self.items[row]
except IndexError:
self.SetToolTip(None)
else:
toolTip = wx.ToolTip(stripHtml(item.description))
toolTip.SetMaxWidth(self.GetSize().Width)
self.SetToolTip(toolTip)
| gpl-3.0 | 8,534,346,245,961,577,000 | 36.35 | 112 | 0.62438 | false | 3.907385 | false | false | false |
bchareyre/ratchet | py/ymport.py | 1 | 14686 | """
Import geometry from various formats ('import' is python keyword, hence the name 'ymport').
"""
from yade.wrapper import *
from yade import utils
try:
from minieigen import *
except ImportError:
from miniEigen import *
def textExt(fileName,format='x_y_z_r',shift=Vector3.Zero,scale=1.0,**kw):
"""Load sphere coordinates from file in specific format, returns a list of corresponding bodies; that may be inserted to the simulation with O.bodies.append().
:param str filename: file name
:param str format: the name of output format. Supported `x_y_z_r`(default), `x_y_z_r_matId`
:param [float,float,float] shift: [X,Y,Z] parameter moves the specimen.
:param float scale: factor scales the given data.
:param \*\*kw: (unused keyword arguments) is passed to :yref:`yade.utils.sphere`
:returns: list of spheres.
Lines starting with # are skipped
"""
infile = open(fileName,"r")
lines = infile.readlines()
infile.close()
ret=[]
for line in lines:
data = line.split()
if (data[0] == "#format"):
format=data[1]
continue
elif (data[0][0] == "#"): continue
if (format=='x_y_z_r'):
pos = Vector3(float(data[0]),float(data[1]),float(data[2]))
ret.append(utils.sphere(shift+scale*pos,scale*float(data[3]),**kw))
elif (format=='x_y_z_r_matId'):
pos = Vector3(float(data[0]),float(data[1]),float(data[2]))
ret.append(utils.sphere(shift+scale*pos,scale*float(data[3]),material=int(data[4]),**kw))
elif (format=='id_x_y_z_r_matId'):
pos = Vector3(float(data[1]),float(data[2]),float(data[3]))
ret.append(utils.sphere(shift+scale*pos,scale*float(data[4]),material=int(data[5]),**kw))
else:
raise RuntimeError("Please, specify a correct format output!");
return ret
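# Illustrative example (assumes a hypothetical input file 'spheres.txt' whose lines contain "x y z r"):
#   from yade import ymport
#   O.bodies.append(ymport.textExt('spheres.txt',format='x_y_z_r'))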
def textClumps(fileName,shift=Vector3.Zero,discretization=0,orientation=Quaternion((0,1,0),0.0),scale=1.0,**kw):
"""Load clumps-members from file, insert them to the simulation.
:param str filename: file name
:param str format: the name of output format. Supported `x_y_z_r`(default), `x_y_z_r_clumpId`
:param [float,float,float] shift: [X,Y,Z] parameter moves the specimen.
:param float scale: factor scales the given data.
:param \*\*kw: (unused keyword arguments) is passed to :yref:`yade.utils.sphere`
:returns: list of spheres.
Lines starting with # are skipped
"""
infile = open(fileName,"r")
lines = infile.readlines()
infile.close()
ret=[]
curClump=[]
newClumpId = -1
for line in lines:
data = line.split()
if (data[0][0] == "#"): continue
pos = orientation*Vector3(float(data[0]),float(data[1]),float(data[2]))
if (newClumpId<0 or newClumpId==int(data[4])):
idD = curClump.append(utils.sphere(shift+scale*pos,scale*float(data[3]),**kw))
newClumpId = int(data[4])
else:
newClumpId = int(data[4])
ret.append(O.bodies.appendClumped(curClump,discretization=discretization))
curClump=[]
idD = curClump.append(utils.sphere(shift+scale*pos,scale*float(data[3]),**kw))
	if (len(curClump)!=0):
ret.append(O.bodies.appendClumped(curClump,discretization=discretization))
# Set the mask to a clump the same as the first member of it
for i in range(len(ret)):
O.bodies[ret[i][0]].mask = O.bodies[ret[i][1][0]].mask
return ret
def text(fileName,shift=Vector3.Zero,scale=1.0,**kw):
"""Load sphere coordinates from file, returns a list of corresponding bodies; that may be inserted to the simulation with O.bodies.append().
:param string filename: file which has 4 colums [x, y, z, radius].
:param [float,float,float] shift: [X,Y,Z] parameter moves the specimen.
:param float scale: factor scales the given data.
:param \*\*kw: (unused keyword arguments) is passed to :yref:`yade.utils.sphere`
:returns: list of spheres.
Lines starting with # are skipped
"""
return textExt(fileName=fileName,format='x_y_z_r',shift=shift,scale=scale,**kw)
def stl(file, dynamic=None,fixed=True,wire=True,color=None,highlight=False,noBound=False,material=-1):
""" Import geometry from stl file, return list of created facets."""
imp = STLImporter()
facets=imp.ymport(file)
for b in facets:
b.shape.color=color if color else utils.randomColor()
b.shape.wire=wire
b.shape.highlight=highlight
pos=b.state.pos
utils._commonBodySetup(b,0,Vector3(0,0,0),material=material,pos=pos,noBound=noBound,dynamic=dynamic,fixed=fixed)
b.aspherical=False
return facets
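# Illustrative example (assumes a hypothetical input file 'model.stl'):
#   O.bodies.append(ymport.stl('model.stl',wire=True))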
def gts(meshfile,shift=(0,0,0),scale=1.0,**kw):
""" Read given meshfile in gts format.
:Parameters:
`meshfile`: string
name of the input file.
`shift`: [float,float,float]
[X,Y,Z] parameter moves the specimen.
`scale`: float
factor scales the given data.
`**kw`: (unused keyword arguments)
is passed to :yref:`yade.utils.facet`
:Returns: list of facets.
"""
import gts,yade.pack
surf=gts.read(open(meshfile))
surf.scale(scale)
surf.translate(shift)
yade.pack.gtsSurface2Facets(surf,**kw)
def gmsh(meshfile="file.mesh",shift=Vector3.Zero,scale=1.0,orientation=Quaternion((0,1,0),0.0),**kw):
""" Imports geometry from mesh file and creates facets.
:Parameters:
`shift`: [float,float,float]
[X,Y,Z] parameter moves the specimen.
`scale`: float
factor scales the given data.
`orientation`: quaternion
orientation of the imported mesh
`**kw`: (unused keyword arguments)
is passed to :yref:`yade.utils.facet`
:Returns: list of facets forming the specimen.
mesh files can be easily created with `GMSH <http://www.geuz.org/gmsh/>`_.
Example added to :ysrc:`examples/regular-sphere-pack/regular-sphere-pack.py`
Additional examples of mesh-files can be downloaded from
http://www-roc.inria.fr/gamma/download/download.php
"""
infile = open(meshfile,"r")
lines = infile.readlines()
infile.close()
nodelistVector3=[]
findVerticesString=0
	while (lines[findVerticesString].split()[0]!='Vertices'): #Find the string with the number of Vertices
findVerticesString+=1
findVerticesString+=1
numNodes = int(lines[findVerticesString].split()[0])
for i in range(numNodes):
nodelistVector3.append(Vector3(0.0,0.0,0.0))
id = 0
for line in lines[findVerticesString+1:numNodes+findVerticesString+1]:
data = line.split()
nodelistVector3[id] = orientation*Vector3(float(data[0])*scale,float(data[1])*scale,float(data[2])*scale)+shift
id += 1
findTriangleString=findVerticesString+numNodes
	while (lines[findTriangleString].split()[0]!='Triangles'): #Find the string with the number of Triangles
findTriangleString+=1
findTriangleString+=1
numTriangles = int(lines[findTriangleString].split()[0])
triList = []
for i in range(numTriangles):
triList.append([0,0,0,0])
tid = 0
for line in lines[findTriangleString+1:findTriangleString+numTriangles+1]:
data = line.split()
id1 = int(data[0])-1
id2 = int(data[1])-1
id3 = int(data[2])-1
triList[tid][0] = tid
triList[tid][1] = id1
triList[tid][2] = id2
triList[tid][3] = id3
tid += 1
ret=[]
for i in triList:
a=nodelistVector3[i[1]]
b=nodelistVector3[i[2]]
c=nodelistVector3[i[3]]
ret.append(utils.facet((nodelistVector3[i[1]],nodelistVector3[i[2]],nodelistVector3[i[3]]),**kw))
return ret
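# Illustrative example (assumes a hypothetical GMSH mesh file 'file.mesh'):
#   facets = ymport.gmsh('file.mesh',scale=0.001)
#   O.bodies.append(facets)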
def gengeoFile(fileName="file.geo",shift=Vector3.Zero,scale=1.0,orientation=Quaternion((0,1,0),0.0),**kw):
""" Imports geometry from LSMGenGeo .geo file and creates spheres.
Since 2012 the package is available in Debian/Ubuntu and known as python-demgengeo
http://packages.qa.debian.org/p/python-demgengeo.html
:Parameters:
`filename`: string
file which has 4 colums [x, y, z, radius].
`shift`: Vector3
Vector3(X,Y,Z) parameter moves the specimen.
`scale`: float
factor scales the given data.
`orientation`: quaternion
orientation of the imported geometry
`**kw`: (unused keyword arguments)
is passed to :yref:`yade.utils.sphere`
:Returns: list of spheres.
LSMGenGeo library allows one to create pack of spheres
with given [Rmin:Rmax] with null stress inside the specimen.
Can be useful for Mining Rock simulation.
Example: :ysrc:`examples/packs/packs.py`, usage of LSMGenGeo library in :ysrc:`examples/test/genCylLSM.py`.
* https://answers.launchpad.net/esys-particle/+faq/877
* http://www.access.edu.au/lsmgengeo_python_doc/current/pythonapi/html/GenGeo-module.html
* https://svn.esscc.uq.edu.au/svn/esys3/lsm/contrib/LSMGenGeo/"""
from yade.utils import sphere
infile = open(fileName,"r")
lines = infile.readlines()
infile.close()
numSpheres = int(lines[6].split()[0])
ret=[]
for line in lines[7:numSpheres+7]:
data = line.split()
pos = orientation*Vector3(float(data[0]),float(data[1]),float(data[2]))
ret.append(utils.sphere(shift+scale*pos,scale*float(data[3]),**kw))
return ret
def gengeo(mntable,shift=Vector3.Zero,scale=1.0,**kw):
""" Imports geometry from LSMGenGeo library and creates spheres.
Since 2012 the package is available in Debian/Ubuntu and known as python-demgengeo
http://packages.qa.debian.org/p/python-demgengeo.html
:Parameters:
`mntable`: mntable
object, which creates by LSMGenGeo library, see example
`shift`: [float,float,float]
[X,Y,Z] parameter moves the specimen.
`scale`: float
factor scales the given data.
`**kw`: (unused keyword arguments)
is passed to :yref:`yade.utils.sphere`
LSMGenGeo library allows one to create pack of spheres
with given [Rmin:Rmax] with null stress inside the specimen.
Can be useful for Mining Rock simulation.
Example: :ysrc:`examples/packs/packs.py`, usage of LSMGenGeo library in :ysrc:`examples/test/genCylLSM.py`.
* https://answers.launchpad.net/esys-particle/+faq/877
* http://www.access.edu.au/lsmgengeo_python_doc/current/pythonapi/html/GenGeo-module.html
* https://svn.esscc.uq.edu.au/svn/esys3/lsm/contrib/LSMGenGeo/"""
try:
from GenGeo import MNTable3D,Sphere
except ImportError:
from gengeo import MNTable3D,Sphere
ret=[]
sphereList=mntable.getSphereListFromGroup(0)
for i in range(0, len(sphereList)):
r=sphereList[i].Radius()
c=sphereList[i].Centre()
ret.append(utils.sphere([shift[0]+scale*float(c.X()),shift[1]+scale*float(c.Y()),shift[2]+scale*float(c.Z())],scale*float(r),**kw))
return ret
def unv(fileName,shift=(0,0,0),scale=1.0,returnConnectivityTable=False,**kw):
""" Import geometry from unv file, return list of created facets.
:param string fileName: name of unv file
:param (float,float,float)|Vector3 shift: (X,Y,Z) parameter moves the specimen.
:param float scale: factor scales the given data.
:param \*\*kw: (unused keyword arguments) is passed to :yref:`yade.utils.facet`
:param bool returnConnectivityTable: if True, apart from facets returns also nodes (list of (x,y,z) nodes coordinates) and elements (list of (id1,id2,id3) element nodes ids). If False (default), returns only facets
unv files are mainly used for FEM analyses (are used by `OOFEM <http://www.oofem.org/>`_ and `Abaqus <http://www.simulia.com/products/abaqus_fea.html>`_), but triangular elements can be imported as facets.
	These files can be created e.g. with open-source free software `Salome <http://salome-platform.org>`_.
Example: :ysrc:`examples/test/unv-read/unvRead.py`."""
class UNVReader:
# class used in ymport.unv function
# reads and evaluate given unv file and extracts all triangles
# can be extended to read tetrahedrons as well
def __init__(self,fileName,shift=(0,0,0),scale=1.0,returnConnectivityTable=False,**kw):
self.shift = shift
self.scale = scale
self.unvFile = open(fileName,'r')
self.flag = 0
self.line = self.unvFile.readline()
self.lineSplit = self.line.split()
self.nodes = []
self.elements = []
self.read(**kw)
def readLine(self):
self.line = self.unvFile.readline()
self.lineSplit = self.line.split()
def read(self,**kw):
while self.line:
self.evalLine()
self.line = self.unvFile.readline()
self.unvFile.close()
self.createFacets(**kw)
def evalLine(self):
self.lineSplit = self.line.split()
if len(self.lineSplit) <= 1: # eval special unv format
if self.lineSplit[0] == '-1': pass
elif self.lineSplit[0] == '2411': self.flag = 1; # nodes
elif self.lineSplit[0] == '2412': self.flag = 2; # edges (lines)
else: self.flag = 4; # volume elements or other, not interesting for us (at least yet)
elif self.flag == 1: self.evalNodes()
elif self.flag == 2: self.evalEdge()
elif self.flag == 3: self.evalFacet()
#elif self.flag == 4: self.evalGroup()
def evalNodes(self):
self.readLine()
self.nodes.append((
self.shift[0]+self.scale*float(self.lineSplit[0]),
self.shift[1]+self.scale*float(self.lineSplit[1]),
self.shift[2]+self.scale*float(self.lineSplit[2])))
def evalEdge(self):
if self.lineSplit[1]=='41':
self.flag = 3
self.evalFacet()
else:
self.readLine()
self.readLine()
def evalFacet(self):
if self.lineSplit[1]=='41': # triangle
self.readLine()
self.elements.append((
int(self.lineSplit[0])-1,
int(self.lineSplit[1])-1,
int(self.lineSplit[2])-1))
else: # is not triangle
self.readLine()
self.flag = 4
# can be added function to handle tetrahedrons
def createFacets(self,**kw):
self.facets = [utils.facet(tuple(self.nodes[i] for i in e),**kw) for e in self.elements]
#
unvReader = UNVReader(fileName,shift,scale,returnConnectivityTable,**kw)
if returnConnectivityTable:
return unvReader.facets, unvReader.nodes, unvReader.elements
	return unvReader.facets
def iges(fileName,shift=(0,0,0),scale=1.0,returnConnectivityTable=False,**kw):
""" Import triangular mesh from .igs file, return list of created facets.
:param string fileName: name of iges file
:param (float,float,float)|Vector3 shift: (X,Y,Z) parameter moves the specimen.
:param float scale: factor scales the given data.
:param \*\*kw: (unused keyword arguments) is passed to :yref:`yade.utils.facet`
:param bool returnConnectivityTable: if True, apart from facets returns also nodes (list of (x,y,z) nodes coordinates) and elements (list of (id1,id2,id3) element nodes ids). If False (default), returns only facets
"""
nodes,elems = [],[]
f = open(fileName)
for line in f:
if line.startswith('134,'): # read nodes coordinates
ls = line.split(',')
v = Vector3(
float(ls[1])*scale + shift[0],
float(ls[2])*scale + shift[1],
float(ls[3])*scale + shift[2]
)
nodes.append(v)
if line.startswith('136,'): # read elements
ls = line.split(',')
i1,i2,i3 = int(ls[3])/2, int(ls[4])/2, int(ls[5])/2 # the numbering of nodes is 1,3,5,7,..., hence this int(ls[*])/2
elems.append( (i1,i2,i3) )
facets = [utils.facet( ( nodes[e[0]], nodes[e[1]], nodes[e[2]] ), **kw) for e in elems]
if returnConnectivityTable:
return facets, nodes, elems
return facets
| gpl-2.0 | 2,926,112,471,460,078,000 | 35.899497 | 216 | 0.702778 | false | 2.862768 | false | false | false |
nimiq/moogle-project | magpie/response.py | 1 | 3599 | from abc import ABCMeta, abstractmethod
from utils.exceptions import ResponseError, InconsistentItemError, EntryNotToBeIndexed
class AbstractApiResponse(metaclass=ABCMeta):
"""
    Response received after a query to a `Provider`.
Parameters:
response -- a `requests.models.Response` instance.
"""
def __init__(self, response):
self.response = response
self.updates_cursor = ''
self.has_more = False
self.pagination_cursor = ''
self._sanity_check()
def _sanity_check(self):
"""
        Check whether the received response is an error response.
"""
# If the HTTP status code is not 200, then it is an error.
if self.response.status_code != 200:
msg = 'HTTP Status: {}\n{}'.format(self.response.status_code, self.response.json())
raise ResponseError(msg)
def parse(self, bearertoken_id):
redis = self._init_redis_list(bearertoken_id)
self._hook_parse_entire_response(redis)
is_first_entry = True
entry = None
for entry in self._entries_to_apientries():
# `entry` is a `Api<Provider>Entry` instance.
redis.buffer(entry)
# Updates cursor: the `updated_time` of the most recent post.
if is_first_entry:
self._hook_parse_first_entry(entry)
is_first_entry = False
if entry: # if there is at least 1 `entry`.
self._hook_parse_last_entry(entry)
redis.flush_buffer()
@abstractmethod
def _init_redis_list(self, *args, **kwargs):
pass
def _hook_parse_entire_response(self, redis):
pass
def _hook_parse_first_entry(self, entry):
pass
def _hook_parse_last_entry(self, entry):
pass
@abstractmethod
def _build_pagination_cursor(self):
pass
@abstractmethod
def _build_updates_cursor(self):
pass
def _entries_to_apientries(self):
"""
Iter over all entries in the response.
Each entry in the response is converted to a `Api<Provider>Entry` instance.
"""
entries_list = self._extract_entries_list()
def _lpop():
"""
Pop from the head of the list.
Convert the item to `Api<Provider>Entry`.
"""
while True:
try:
entry = entries_list.pop(0) # Raise IndexError when completely consumed.
entry = self._init_api_provider_entry(entry)
return entry
except IndexError:
# `self.response` is empty, return None to stop the iter.
return None
except EntryNotToBeIndexed:
# The entry is probably a dir or not a textual file and we don't need to
# index it
continue
except InconsistentItemError as e:
# The entry is not consistent, like some important metadata are missing,
# we just skip it
# TODO log it anyway
continue
# The first argument of iter must be a callable, that's why we created the _lpop()
# closure. This closure will be called for each iteration and the result is returned
# until the result is None.
return iter(_lpop, None)
def _extract_entries_list(self):
return self.response.json()
@abstractmethod
def _init_api_provider_entry(self, *args, **kwargs):
pass | apache-2.0 | 3,897,381,863,946,606,000 | 30.304348 | 95 | 0.571548 | false | 4.465261 | false | false | false |
pdsteele/DES-Python | rvms.py | 1 | 20759 |
# -------------------------------------------------------------------------
# * This is an ANSI C library that can be used to evaluate the probability
# * density functions (pdf's), cumulative distribution functions (cdf's), and
# * inverse distribution functions (idf's) for a variety of discrete and
# * continuous random variables.
# *
# * The following notational conventions are used
# * x : possible value of the random variable
# * u : real variable (probability) between 0.0 and 1.0
# * a, b, n, p, m, s : distribution-specific parameters
# *
# * There are pdf's, cdf's and idf's for 6 discrete random variables
# *
# * Random Variable Range (x) Mean Variance
# *
# * Bernoulli(p) 0..1 p p*(1-p)
# * Binomial(n, p) 0..n n*p n*p*(1-p)
# * Equilikely(a, b) a..b (a+b)/2 ((b-a+1)*(b-a+1)-1)/12
# * Geometric(p) 0... p/(1-p) p/((1-p)*(1-p))
# * Pascal(n, p) 0... n*p/(1-p) n*p/((1-p)*(1-p))
# * Poisson(m) 0... m m
# *
# * and for 7 continuous random variables
# *
# * Uniform(a, b) a < x < b (a+b)/2 (b-a)*(b-a)/12
# * Exponential(m) x > 0 m m*m
# * Erlang(n, b) x > 0 n*b n*b*b
# * Normal(m, s) all x m s*s
# * Lognormal(a, b) x > 0 see below
# * Chisquare(n) x > 0 n 2*n
# * Student(n) all x 0 (n > 1) n/(n-2) (n > 2)
# *
# * For the Lognormal(a, b), the mean and variance are
# *
# * mean = Exp(a + 0.5*b*b)
# *      variance = (Exp(b*b) - 1)*Exp(2*a + b*b)
# *
# * Name : rvms.c (Random Variable ModelS)
# * Author : Steve Park & Dave Geyer
# * Language : ANSI C
# * Latest Revision : 11-22-97
# Translated by : Philip Steele
# Language : Python 3.3
# Latest Revision : 3/26/14
# * -------------------------------------------------------------------------
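# Example values (illustrative, using functions defined below):
#   cdfNormal(0.0, 1.0, 1.96)  is approximately 0.975
#   idfNormal(0.0, 1.0, 0.975) is approximately 1.96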
from math import exp, log, fabs, sqrt
#from rvgs import
TINY= 1.0e-10
SQRT2PI= 2.506628274631 # #/* sqrt(2 * pi) */
# static double pdfStandard(x)
# static double cdfStandard(x)
# static double idfStandard(u)
# static double LogGamma(a)
# static double LogBeta(a, b)
# static double InGamma(a, b)
# static double InBeta(a, b, x)
def pdfBernoulli(p,x):
# =======================================
# * NOTE: use 0.0 < p < 1.0 and 0 <= x <= 1
# * =======================================
if(x==0):
return (1.0-p)
else:
return (p)
def cdfBernoulli(p,x):
# =======================================
# * NOTE: use 0.0 < p < 1.0 and 0 <= x <= 1
# * =======================================
if(x==0):
return (1.0-p)
else:
return (1)
def idfBernoulli(p,u):
# =========================================
# * NOTE: use 0.0 < p < 1.0 and 0.0 < u < 1.0
# * =========================================
if (u < 1.0 - p):
return(0)
else:
return(1)
def pdfEquilikely(a,b,x):
# ============================================
# * NOTE: use a <= x <= b
# * ============================================
return (1.0 / (b - a + 1.0))
def cdfEquilikely(a,b,x):
# ============================================
# * NOTE: use a <= x <= b
# * ============================================
return ((x - a + 1.0) / (b - a + 1.0))
def idfEquilikely(a,b,u):
# ============================================
# * NOTE: use a <= b and 0.0 < u < 1.0
# * ============================================
#LIKELY NEEDS TEST
return (a + int(u * (b - a + 1)))
def pdfBinomial(n,p,x):
# ============================================
# * NOTE: use 0 <= x <= n and 0.0 < p < 1.0
# * ============================================
# TEST
s = LogChoose(n, x)
t = x * log(p) + (n - x) * log(1.0 - p)
return (exp(s + t))
def cdfBinomial(n,p,x):
# ============================================
# * NOTE: use 0 <= x <= n and 0.0 < p < 1.0
# * ============================================
if (x < n):
return (1.0 - InBeta(x + 1, n - x, p))
else:
return (1.0)
def idfBinomial(n,p,u):
# =================================================
# * NOTE: use 0 <= n, 0.0 < p < 1.0 and 0.0 < u < 1.0
# * =================================================
x = int(n * p) #/* start searching at the mean */
if (cdfBinomial(n, p, x) <= u):
while (cdfBinomial(n, p, x) <= u):
x += 1
elif (cdfBinomial(n, p, 0) <= u):
while (cdfBinomial(n, p, x - 1) > u):
x -= 1
else:
x = 0
return (x)
def pdfGeometric(p,x):
# =====================================
# * NOTE: use 0.0 < p < 1.0 and x >= 0
# * =====================================
return ((1.0 - p) * exp(x * log(p)))
def cdfGeometric(p,x):
# =====================================
# * NOTE: use 0.0 < p < 1.0 and x >= 0
# * =====================================
return (1.0 - exp((x + 1) * log(p)))
def idfGeometric(p,u):
# =========================================
# * NOTE: use 0.0 < p < 1.0 and 0.0 < u < 1.0
# * =========================================
  return (int(log(1.0 - u) / log(p)))
def pdfPascal(n,p,x):
# ===========================================
# * NOTE: use n >= 1, 0.0 < p < 1.0, and x >= 0
# * ===========================================
s = LogChoose(n + x - 1, x)
t = x * log(p) + n * log(1.0 - p)
return (exp(s + t))
def cdfPascal(n,p,x):
# ===========================================
# * NOTE: use n >= 1, 0.0 < p < 1.0, and x >= 0
# * ===========================================
return (1.0 - InBeta(x + 1, n, p))
def idfPascal(n,p,u):
# ==================================================
# * NOTE: use n >= 1, 0.0 < p < 1.0, and 0.0 < u < 1.0
# * ==================================================
x = int(n * p / (1.0 - p)) #/* start searching at the mean */
if (cdfPascal(n, p, x) <= u):
while (cdfPascal(n, p, x) <= u):
x += 1
elif (cdfPascal(n, p, 0) <= u):
while (cdfPascal(n, p, x - 1) > u):
x -= 1
else:
x = 0
return (x)
def pdfPoisson(m,x):
# ===================================
# * NOTE: use m > 0 and x >= 0
# * ===================================
t = - m + x * log(m) - LogFactorial(x)
return (exp(t))
def cdfPoisson(m,x):
# ===================================
# * NOTE: use m > 0 and x >= 0
# * ===================================
return (1.0 - InGamma(x + 1, m))
def idfPoisson(m,u):
# ===================================
# * NOTE: use m > 0 and 0.0 < u < 1.0
# * ===================================
x = int(m) #/* start searching at the mean */
if (cdfPoisson(m, x) <= u):
while (cdfPoisson(m, x) <= u):
x += 1
elif (cdfPoisson(m, 0) <= u):
while (cdfPoisson(m, x - 1) > u):
x -= 1
else:
x = 0
return (x)
def pdfUniform(a, b, x):
# ===============================================
# * NOTE: use a < x < b
# * ===============================================
return (1.0 / (b - a))
def cdfUniform(a, b, x):
# ===============================================
# * NOTE: use a < x < b
# * ===============================================
return ((x - a) / (b - a))
def idfUniform(a, b, u):
# ===============================================
# * NOTE: use a < b and 0.0 < u < 1.0
# * ===============================================
return (a + (b - a) * u)
def pdfExponential(m, x):
# =========================================
# * NOTE: use m > 0 and x > 0
# * =========================================
return ((1.0 / m) * exp(- x / m))
def cdfExponential(m, x):
# =========================================
# * NOTE: use m > 0 and x > 0
# * =========================================
return (1.0 - exp(- x / m))
def idfExponential(m, u):
# =========================================
# * NOTE: use m > 0 and 0.0 < u < 1.0
# * =========================================
return (- m * log(1.0 - u))
def pdfErlang(n, b, x):
# ============================================
# * NOTE: use n >= 1, b > 0, and x > 0
# * ============================================
t = (n - 1) * log(x / b) - (x / b) - log(b) - LogGamma(n)
return (exp(t))
def cdfErlang(n, b, x):
# ============================================
# * NOTE: use n >= 1, b > 0, and x > 0
# * ============================================
return (InGamma(n, x / b))
def idfErlang(n, b, u):
# ============================================
# * NOTE: use n >= 1, b > 0 and 0.0 < u < 1.0
# * ============================================
x = n*b
condition = True
while(condition == True): #/* use Newton-Raphson iteration */
t = x
x = t + (u - cdfErlang(n, b, t)) / pdfErlang(n, b, t)
if (x <= 0.0):
x = 0.5 * t
condition = (fabs(x - t) >= TINY)
return (x)
def pdfStandard(x):
# ===================================
# * NOTE: x can be any value
# * ===================================
return (exp(- 0.5 * x * x) / SQRT2PI)
def cdfStandard(x):
# ===================================
# * NOTE: x can be any value
# * ===================================
t = InGamma(0.5, 0.5 * x * x)
if (x < 0.0):
return (0.5 * (1.0 - t))
else:
return (0.5 * (1.0 + t))
def idfStandard(u):
# ===================================
# * NOTE: 0.0 < u < 1.0
# * ===================================
t = 0.0
x = 0.0 #/* initialize to the mean, then */
condition = True
while(condition == True): #/* use Newton-Raphson iteration */
t = x
x = t + (u - cdfStandard(t)) / pdfStandard(t)
condition = (fabs(x - t) >= TINY)
return (x)
def pdfNormal(m, s, x):
# ==============================================
# * NOTE: x and m can be any value, but s > 0.0
# * =============================================
t = (x - m) / s
return (pdfStandard(t) / s)
def cdfNormal(m, s, x):
# ==============================================
# * NOTE: x and m can be any value, but s > 0.0
# * ==============================================
t = (x - m) / s
return (cdfStandard(t))
def idfNormal(m, s, u):
# =======================================================
# * NOTE: m can be any value, but s > 0.0 and 0.0 < u < 1.0
# * =======================================================
return (m + s * idfStandard(u))
def pdfLognormal(a, b, x):
# ===================================================
# * NOTE: a can have any value, but b > 0.0 and x > 0.0
# * ===================================================
t = (log(x) - a) / b
return (pdfStandard(t) / (b * x))
def cdfLognormal(a, b, x):
# ===================================================
# * NOTE: a can have any value, but b > 0.0 and x > 0.0
# * ===================================================
t = (log(x) - a) / b
return (cdfStandard(t))
def idfLognormal(a, b, u):
# =========================================================
# * NOTE: a can have any value, but b > 0.0 and 0.0 < u < 1.0
# * =========================================================
t = a + b * idfStandard(u)
return (exp(t))
def pdfChisquare(n, x):
# =====================================
# * NOTE: use n >= 1 and x > 0.0
# * =====================================
t= n/2.0
s = n / 2.0
t = (s - 1.0) * log(x / 2.0) - (x / 2.0) - log(2.0) - LogGamma(s)
return (exp(t))
def cdfChisquare(n, x):
# =====================================
# * NOTE: use n >= 1 and x > 0.0
# * ====================================
return (InGamma(n / 2.0, x / 2))
def idfChisquare(n, u):
# =====================================
# * NOTE: use n >= 1 and 0.0 < u < 1.0
# * =====================================
x = n #/* initialize to the mean, then */
condition = True
while(condition == True): #/* use Newton-Raphson iteration */
t = x
x = t + (u - cdfChisquare(n, t)) / pdfChisquare(n, t)
if (x <= 0.0):
x = 0.5 * t
condition = (fabs(x - t) >= TINY)
return (x)
def pdfStudent(n, x):
# ===================================
# * NOTE: use n >= 1 and x > 0.0
# * ===================================
s = -0.5 * (n + 1) * log(1.0 + ((x * x) / float(n)))
t = -1*LogBeta(0.5, n / 2.0)
return (exp(s + t) / sqrt(float(n)))
def cdfStudent(n, x):
# ===================================
# * NOTE: use n >= 1 and x > 0.0
# * ===================================
t = (x * x) / (n + x * x)
s = InBeta(0.5, n / 2.0, t)
if (x >= 0.0):
return (0.5 * (1.0 + s))
else:
return (0.5 * (1.0 - s))
def idfStudent(n, u):
# ===================================
# * NOTE: use n >= 1 and 0.0 < u < 1.0
# * ===================================
t = 0.0
x = 0.0 #/* initialize to the mean, then */
condition = True
while(condition == True): #/* use Newton-Raphson iteration */
t = x
# print("t is set to "+ t)
x = t + (u - cdfStudent(n, t)) / pdfStudent(n, t)
# print("x is set to "+x)
# print(fabs(x-t))
condition = (fabs(x - t) >= TINY)
return (x)
# ===================================================================
# * The six functions that follow are a 'special function' mini-library
# * used to support the evaluation of pdf, cdf and idf functions.
# * ===================================================================
def LogGamma(a):
# ========================================================================
# * LogGamma returns the natural log of the gamma function.
# * NOTE: use a > 0.0
# *
# * The algorithm used to evaluate the natural log of the gamma function is
# * based on an approximation by C. Lanczos, SIAM J. Numerical Analysis, B,
# * vol 1, 1964. The constants have been selected to yield a relative error
# * which is less than 2.0e-10 for all positive values of the parameter a.
# * ========================================================================
s = []
s.append(76.180091729406 / a)
s.append(-86.505320327112 / (a + 1.0))
s.append(24.014098222230 / (a + 2.0))
s.append(-1.231739516140 / (a + 3.0))
s.append(0.001208580030 / (a + 4.0))
s.append(-0.000005363820 / (a + 5.0))
sum = 1.000000000178
for i in range(0,6):
sum += s[i]
temp = (a - 0.5) * log(a + 4.5) - (a + 4.5) + log(SQRT2PI * sum)
return (temp)
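# For example, LogGamma(5.0) equals log(4!) = log(24), which is approximately 3.178.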
def LogFactorial(n):
# ==================================================================
# * LogFactorial(n) returns the natural log of n!
# * NOTE: use n >= 0
# *
# * The algorithm used to evaluate the natural log of n! is based on a
# * simple equation which relates the gamma and factorial functions.
# * ==================================================================
return (LogGamma(n + 1))
def LogBeta(a,b):
# ======================================================================
# * LogBeta returns the natural log of the beta function.
# * NOTE: use a > 0.0 and b > 0.0
# *
# * The algorithm used to evaluate the natural log of the beta function is
# * based on a simple equation which relates the gamma and beta functions.
# *
return (LogGamma(a) + LogGamma(b) - LogGamma(a + b))
def LogChoose(n,m):
# ========================================================================
# * LogChoose returns the natural log of the binomial coefficient C(n,m).
# * NOTE: use 0 <= m <= n
# *
# * The algorithm used to evaluate the natural log of a binomial coefficient
# * is based on a simple equation which relates the beta function to a
# * binomial coefficient.
# * ========================================================================
if (m > 0):
return (-LogBeta(m, n - m + 1) - log(m))
else:
return (0.0)
def InGamma(a,x):
# ========================================================================
# * Evaluates the incomplete gamma function.
# * NOTE: use a > 0.0 and x >= 0.0
# *
# * The algorithm used to evaluate the incomplete gamma function is based on
# * Algorithm AS 32, J. Applied Statistics, 1970, by G. P. Bhattacharjee.
# * See also equations 6.5.29 and 6.5.31 in the Handbook of Mathematical
# * Functions, Abramowitz and Stegum (editors). The absolute error is less
# * than 1e-10 for all non-negative values of x.
# * ========================================================================
if (x > 0.0):
factor = exp(-1*x + a*log(x) - LogGamma(a))
else:
factor = 0.0
if (x < a + 1.0): ##/* evaluate as an infinite series - */
t = a ##/* A & S equation 6.5.29 */
term = 1.0 / a
sum = term
while (term >= TINY * sum): ##/* sum until 'term' is small */
t += 1
term = term*(x / t)
sum += term
#EndWhile
return (factor * sum)
else: ##/* evaluate as a continued fraction - */
p = [0.0,1.0, -1] ##/* A & S eqn 6.5.31 with the extended */
q = [1.0,x, -1] ##/* pattern 2-a, 2, 3-a, 3, 4-a, 4,... */
##/* - see also A & S sec 3.10, eqn (3) */
f = p[1] / q[1]
n = 0
condition = True
while(condition == True): ##/* recursively generate the continued */
g = f ##/* fraction 'f' until two consecutive */
n += 1 ##/* values are small */
if ((n % 2) > 0):
c=[(((n + 1) / 2.0) - a), 1]
else:
c=[(n / 2.0),x]
p[2] = (c[1] * p[1] + c[0] * p[0])
q[2] = (c[1] * q[1] + c[0] * q[0])
if (q[2] != 0.0): ##/* rescale to avoid overflow */
p[0] = p[1] / q[2]
q[0] = q[1] / q[2]
p[1] = p[2] / q[2]
q[1] = 1.0
f = p[1]
condition = (fabs(f - g) >= TINY) or (q[1] != 1.0)
return (1.0 - factor * f)
def InBeta(a,b,x):
# =======================================================================
# * Evaluates the incomplete beta function.
# * NOTE: use a > 0.0, b > 0.0 and 0.0 <= x <= 1.0
# *
# * The algorithm used to evaluate the incomplete beta function is based on
# * equation 26.5.8 in the Handbook of Mathematical Functions, Abramowitz
# * and Stegum (editors). The absolute error is less than 1e-10 for all x
# * between 0 and 1.
# * =======================================================================
if (x > (a + 1.0) / (a + b + 1.0)): # #/* to accelerate convergence */
swap = 1 ##/* complement x and swap a & b */
x = 1.0 - x
t = a
a = b
b = t
else: ##/* do nothing */
swap = 0
if (x > 0):
factor = exp(a * log(x) + b * log(1.0 - x) - LogBeta(a,b)) / a
else:
factor = 0.0
p = [0.0,1.0, -1]
q = [1.0,1.0, -1]
f = p[1] / q[1]
n = 0
condition = True
while (condition==True): ##/* recursively generate the continued */
g = f ##/* fraction 'f' until two consecutive */
n += 1 ##/* values are small */
if ((n % 2) > 0):
t = (n - 1) / 2.0
c = -(a + t) * (a + b + t) * x / ((a + n - 1.0) * (a + n))
else:
t = n / 2.0
c = t * (b - t) * x / ((a + n - 1.0) * (a + n))
p[2] = (p[1] + c * p[0])
q[2] = (q[1] + c * q[0])
if (q[2] != 0.0): ##/* rescale to avoid overflow */
p[0] = p[1] / q[2]
q[0] = q[1] / q[2]
p[1] = p[2] / q[2]
q[1] = 1.0
f = p[1]
condition = ((fabs(f - g) >= TINY) or (q[1] != 1.0))
#endWhile
if (swap == 1):
return (1.0 - factor * f)
else:
return (factor * f)
# C output:
# IDFSTU(10,.8) is 0.879058 - PASS
# IDFStud(10,.975) is 2.228139 - PASS
# IDFStud(100,.975) is 1.983972 - PASS
# IDFchisq(10,.5) is 9.341818 - PASS
# IDFchisq(15,.8) is 19.310657 - PASS
# IDFerlang(16,4,.878) is 82.934761 - PASS
# IDFerlang(20,7,.113) is 103.476309 - PASS
# IDFpoisson(16,.878) is 21.000000 - PASS
# IDFpoisson(19,.231) is 16.000000 - PASS
# IDFNorm(9,2,.66) is 9.824926 - PASS
# IDFNorm(-19,3.4,.81) is -16.015153 - PASS
# idfPascal(23,.11,.90) is 5.000000 - PASS
# idfPascal(6,.5,.5) is 6.000000 - PASS
# idfBinomial(23,.11,.90) is 5.000000 - PASS
# idfBinomial(6,.5,.5) is 3.000000 - PASS | mit | 617,795,352,525,077,500 | 28.446809 | 86 | 0.366877 | false | 3.23047 | false | false | false |
NicholasColotouros/RaspiCorder | RaspiCorder/Menus.py | 1 | 2928 | #!/usr/bin/python
from time import sleep
from Adafruit_CharLCDPlate import Adafruit_CharLCDPlate
class Instrument:
drums = 1
guitar = 2
bass = 3
other = 4
@staticmethod
def instrumentName(num):
if num == 1:
return "drums"
elif num == 2:
return "guitar"
elif num == 3:
return "bass"
else:
return "other"
class ConfirmationMenu:
menuText = None
selected = None
lcd = None
def __init__(self, plcd, instrument):
self.menuText = " START REC " + instrument + "\n RESELECT instr"
self.lcd = plcd
self.selected = 0
def InstrumentConfirm(self):
lcd = self.lcd
lcd.clear()
lcd.message(self.menuText)
lcd.blink()
while True:
lcd.setCursor(0, self.selected)
if lcd.buttonPressed(lcd.UP):
self.selected = 0
elif lcd.buttonPressed(lcd.DOWN):
self.selected = 1
elif lcd.buttonPressed(lcd.SELECT):
lcd.noBlink()
if self.selected == 1:
return False
else:
return True
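# Illustrative usage (assumes an Adafruit_CharLCDPlate instance named 'lcd'):
#   menu = InstrumentMenu()
#   instrument = menu.getInstrumentInput(lcd)
#   confirmed = ConfirmationMenu(lcd, Instrument.instrumentName(instrument)).InstrumentConfirm()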
class InstrumentMenu:
instrumentSelection = " Drums Bass\n Guitar Other"
selected = 1
delayTime = 0.5 # The time it takes to look for another button press
def __init__(self):
selected = Instrument.drums
delayTime = 0.5
def updateCursor(self, lcd):
if self.selected == Instrument.drums:
lcd.setCursor(0,0)
elif self.selected == Instrument.guitar:
lcd.setCursor(0,1)
elif self.selected == Instrument.bass:
lcd.setCursor(10,0)
else:
lcd.setCursor(10,1)
def getInstrumentInput(self, lcd):
lcd.clear()
lcd.message(self.instrumentSelection)
lcd.blink()
while True:
self.updateCursor(lcd)
# Move left
if lcd.buttonPressed(lcd.LEFT):
if self.selected == Instrument.bass:
self.selected = Instrument.drums
sleep(self.delayTime)
elif self.selected == Instrument.other:
self.selected = Instrument.guitar
sleep(self.delayTime)
# Move right
elif lcd.buttonPressed(lcd.RIGHT):
if self.selected == Instrument.drums:
self.selected = Instrument.bass
sleep(self.delayTime)
elif self.selected == Instrument.guitar:
self.selected = Instrument.other
sleep(self.delayTime)
# Move up
elif lcd.buttonPressed(lcd.UP):
if self.selected == Instrument.guitar:
self.selected = Instrument.drums
sleep(self.delayTime)
elif self.selected == Instrument.other:
self.selected = Instrument.bass
sleep(self.delayTime)
# Move down
elif lcd.buttonPressed(lcd.DOWN):
if self.selected == Instrument.drums:
self.selected = Instrument.guitar
sleep(self.delayTime)
elif self.selected == Instrument.bass:
self.selected = Instrument.other
sleep(self.delayTime)
# Select the current entry
elif lcd.buttonPressed(lcd.SELECT):
lcd.noBlink()
return self.selected | gpl-2.0 | 9,203,748,813,401,811,000 | 22.246032 | 74 | 0.644467 | false | 3.078864 | false | false | false |
ppwwyyxx/tensorpack | examples/DoReFa-Net/resnet-dorefa.py | 1 | 6148 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: resnet-dorefa.py
import argparse
import numpy as np
import os
import cv2
import tensorflow as tf
from tensorpack import *
from tensorpack.dataflow import dataset
from tensorpack.tfutils.varreplace import remap_variables
from dorefa import get_dorefa
from imagenet_utils import ImageNetModel, eval_classification, fbresnet_augmentor
"""
This script loads the pre-trained ResNet-18 model with (W,A,G) = (1,4,32)
It has 59.2% top-1 and 81.5% top-5 validation error on ILSVRC12 validation set.
To run on images:
./resnet-dorefa.py --load ResNet-18-14f.npz --run a.jpg b.jpg
To eval on ILSVRC validation set:
./resnet-dorefa.py --load ResNet-18-14f.npz --eval --data /path/to/ILSVRC
"""
BITW = 1
BITA = 4
BITG = 32
class Model(ModelDesc):
def inputs(self):
return [tf.TensorSpec([None, 224, 224, 3], tf.float32, 'input'),
tf.TensorSpec([None], tf.int32, 'label')]
def build_graph(self, image, label):
image = image / 256.0
fw, fa, fg = get_dorefa(BITW, BITA, BITG)
def new_get_variable(v):
name = v.op.name
# don't binarize first and last layer
if not name.endswith('W') or 'conv1' in name or 'fct' in name:
return v
else:
logger.info("Binarizing weight {}".format(v.op.name))
return fw(v)
def nonlin(x):
return tf.clip_by_value(x, 0.0, 1.0)
def activate(x):
return fa(nonlin(x))
def resblock(x, channel, stride):
def get_stem_full(x):
return (LinearWrap(x)
.Conv2D('c3x3a', channel, 3)
.BatchNorm('stembn')
.apply(activate)
.Conv2D('c3x3b', channel, 3)())
channel_mismatch = channel != x.get_shape().as_list()[3]
if stride != 1 or channel_mismatch or 'pool1' in x.name:
# handling pool1 is to work around an architecture bug in our model
if stride != 1 or 'pool1' in x.name:
x = AvgPooling('pool', x, stride, stride)
x = BatchNorm('bn', x)
x = activate(x)
shortcut = Conv2D('shortcut', x, channel, 1)
stem = get_stem_full(x)
else:
shortcut = x
x = BatchNorm('bn', x)
x = activate(x)
stem = get_stem_full(x)
return shortcut + stem
def group(x, name, channel, nr_block, stride):
with tf.variable_scope(name + 'blk1'):
x = resblock(x, channel, stride)
for i in range(2, nr_block + 1):
with tf.variable_scope(name + 'blk{}'.format(i)):
x = resblock(x, channel, 1)
return x
with remap_variables(new_get_variable), \
argscope(BatchNorm, decay=0.9, epsilon=1e-4), \
argscope(Conv2D, use_bias=False, nl=tf.identity):
logits = (LinearWrap(image)
# use explicit padding here, because our private training framework has
# different padding mechanisms from TensorFlow
.tf.pad([[0, 0], [3, 2], [3, 2], [0, 0]])
.Conv2D('conv1', 64, 7, stride=2, padding='VALID', use_bias=True)
.tf.pad([[0, 0], [1, 1], [1, 1], [0, 0]], 'SYMMETRIC')
.MaxPooling('pool1', 3, 2, padding='VALID')
.apply(group, 'conv2', 64, 2, 1)
.apply(group, 'conv3', 128, 2, 2)
.apply(group, 'conv4', 256, 2, 2)
.apply(group, 'conv5', 512, 2, 2)
.BatchNorm('lastbn')
.apply(nonlin)
.GlobalAvgPooling('gap')
.tf.multiply(49) # this is due to a bug in our model design
.FullyConnected('fct', 1000)())
tf.nn.softmax(logits, name='output')
ImageNetModel.compute_loss_and_error(logits, label)
def get_inference_augmentor():
return fbresnet_augmentor(False)
def run_image(model, sess_init, inputs):
pred_config = PredictConfig(
model=model,
session_init=sess_init,
input_names=['input'],
output_names=['output']
)
predict_func = OfflinePredictor(pred_config)
meta = dataset.ILSVRCMeta()
words = meta.get_synset_words_1000()
transformers = get_inference_augmentor()
for f in inputs:
assert os.path.isfile(f)
        img = cv2.imread(f)
        assert img is not None
        img = img.astype('float32')
img = transformers.augment(img)[np.newaxis, :, :, :]
o = predict_func(img)
prob = o[0][0]
ret = prob.argsort()[-10:][::-1]
names = [words[i] for i in ret]
print(f + ":")
print(list(zip(names, prob[ret])))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', help='the physical ids of GPUs to use')
parser.add_argument('--load', help='load a npz pretrained model')
parser.add_argument('--data', help='ILSVRC dataset dir')
parser.add_argument('--dorefa',
help='number of bits for W,A,G, separated by comma. Defaults to \'1,4,32\'',
default='1,4,32')
parser.add_argument(
'--run', help='run on a list of images with the pretrained model', nargs='*')
parser.add_argument('--eval', action='store_true')
args = parser.parse_args()
BITW, BITA, BITG = map(int, args.dorefa.split(','))
if args.gpu:
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
if args.eval:
ds = dataset.ILSVRC12(args.data, 'val', shuffle=False)
ds = AugmentImageComponent(ds, get_inference_augmentor())
ds = BatchData(ds, 192, remainder=True)
eval_classification(Model(), SmartInit(args.load), ds)
elif args.run:
assert args.load.endswith('.npz')
run_image(Model(), SmartInit(args.load), args.run)
| apache-2.0 | 7,932,895,381,036,762,000 | 35.378698 | 100 | 0.544242 | false | 3.541475 | false | false | false |
ani2404/ee6761cloud | inference.py | 1 | 1332 | # Build the model, restore the variables and run the inference
# Need to use SavedModel builder and loader instead - future work
import sys
sys.path.append('/home/ani2404/Desktop/ee6761cloud/')
import numpy as np
#Need to replace with the actual model
from code_ref.model import Model
class infer(object):
def __init__(self,session,checkpoint_dir,image_size_x,image_size_y,resolution_factor=4,batch_size=1):
#Build the model based on resolution factor
self.session = session
self.model = Model(session, checkpoint_dir=checkpoint_dir,batch_size=batch_size,
image_size_x=image_size_x,image_size_y=image_size_y,resolution_factor=resolution_factor)
self.resolution_factor = resolution_factor
# Restores the variables from the checkpoint dir
if self.model.load(checkpoint_dir):
print(" [*] Load SUCCESS")
else:
print(" [*] Load Failed")
def super_resolute(self,input_image):
# Super resolutes the input image
output_images,up_input = self.session.run([self.model.ESCNN,self.model.interpolation],
feed_dict={self.model.inputs:input_image})
output_images = np.array(output_images).astype(np.float32)
return output_images,up_input
| mit | 3,867,639,695,548,613,600 | 27.340426 | 115 | 0.660661 | false | 3.940828 | false | false | false |
jodygarnett/qgis-geoserver-plugin | src/geoserverexplorer/gui/gsoperations.py | 1 | 6538 | from PyQt4 import QtCore
from qgis.core import *
from geoserverexplorer.qgis import layers as qgislayers
from geoserverexplorer.qgis.catalog import CatalogWrapper
from geoserverexplorer.gui.confirm import publishLayer
from geoserverexplorer.gui.dialogs.projectdialog import PublishProjectDialog
from geoserver.catalog import ConflictingDataError
from geoserverexplorer.gui.dialogs.layerdialog import PublishLayersDialog
def publishDraggedGroup(explorer, groupItem, catalog, workspace):
groupName = groupItem.element
groups = qgislayers.getGroups()
group = groups[groupName]
gslayers= [layer.name for layer in catalog.get_layers()]
missing = []
overwrite = bool(QtCore.QSettings().value("/GeoServer/Settings/GeoServer/OverwriteGroupLayers", True, bool))
for layer in group:
if layer.name() not in gslayers or overwrite:
missing.append(layer)
if missing:
explorer.setProgressMaximum(len(missing), "Publish layers")
progress = 0
cat = CatalogWrapper(catalog)
for layer in missing:
explorer.setProgress(progress)
explorer.run(cat.publishLayer,
None,
[],
layer, workspace, True)
progress += 1
explorer.setProgress(progress)
explorer.resetActivity()
names = [layer.name() for layer in group]
layergroup = catalog.create_layergroup(groupName, names, names)
explorer.run(catalog.save, "Create layer group from group '" + groupName + "'",
[], layergroup)
def publishDraggedLayer(explorer, layer, workspace):
cat = workspace.catalog
cat = CatalogWrapper(cat)
ret = explorer.run(publishLayer,
"Publish layer from layer '" + layer.name() + "'",
[],
cat, layer, workspace)
return ret
def addDraggedLayerToGroup(explorer, layer, groupItem):
group = groupItem.element
styles = group.styles
layers = group.layers
if layer.name not in layers:
layers.append(layer.name)
styles.append(layer.default_style.name)
group.dirty.update(layers = layers, styles = styles)
explorer.run(layer.catalog.save,
"Update group '" + group.name + "'",
[groupItem],
group)
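# Publishes a set of dragged URIs (file paths or layer URIs) as layers in the given
# workspace, trying each one as a raster layer first and falling back to vector;
# 'explorer' is used for progress and error reporting.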
def addDraggedUrisToWorkspace(uris, catalog, workspace, explorer, tree):
if uris:
if len(uris) > 1:
explorer.setProgressMaximum(len(uris))
for i, uri in enumerate(uris):
if isinstance(uri, basestring):
layerName = QtCore.QFileInfo(uri).completeBaseName()
layer = QgsRasterLayer(uri, layerName)
else:
layer = QgsRasterLayer(uri.uri, uri.name)
if not layer.isValid() or layer.type() != QgsMapLayer.RasterLayer:
if isinstance(uri, basestring):
layerName = QtCore.QFileInfo(uri).completeBaseName()
layer = QgsVectorLayer(uri, layerName, "ogr")
else:
layer = QgsVectorLayer(uri.uri, uri.name, uri.providerKey)
if not layer.isValid() or layer.type() != QgsMapLayer.VectorLayer:
layer.deleteLater()
name = uri if isinstance(uri, basestring) else uri.uri
explorer.setError("Error reading file {} or it is not a valid layer file".format(name))
else:
if not publishDraggedLayer(explorer, layer, workspace):
return []
else:
if not publishDraggedLayer(explorer, layer, workspace):
return []
            explorer.setProgress(i + 1)
        explorer.resetActivity()
return [tree.findAllItems(catalog)[0]]
else:
return []
def addDraggedStyleToLayer(tree, explorer, styleItem, layerItem):
catalog = layerItem.element.catalog
catItem = tree.findFirstItem(catalog)
style = styleItem.element
layer = layerItem.element
if not hasattr(layer, "default_style") or layer.default_style is None:
# if default style is missing, make dragged style the layer's default
# without a default style, some GeoServer operations may fail
layer.default_style = style
else:
# add to layer's additional styles
styles = layer.styles
styles.append(style)
layer.styles = styles
explorer.run(catalog.save,
"Add style '" + style.name + "' to layer '" + layer.name + "'",
[catItem],
layer)
def publishProject(tree, explorer, catalog):
layers = qgislayers.getAllLayers()
dlg = PublishProjectDialog(catalog)
dlg.exec_()
if not dlg.ok:
return
workspace = dlg.workspace
groupName = dlg.groupName
explorer.setProgressMaximum(len(layers), "Publish layers")
progress = 0
cat = CatalogWrapper(catalog)
for layer in layers:
explorer.setProgress(progress)
explorer.run(publishLayer,
None,
[],
cat, layer, workspace)
progress += 1
explorer.setProgress(progress)
explorer.resetActivity()
groups = qgislayers.getGroups()
for group in groups:
names = [layer.name() for layer in groups[group]]
try:
layergroup = catalog.create_layergroup(group, names, names)
explorer.run(catalog.save, "Create layer group '" + group + "'",
[], layergroup)
except ConflictingDataError, e:
explorer.setWarning(str(e))
if groupName is not None:
names = [layer.name() for layer in layers]
layergroup = catalog.create_layergroup(groupName, names, names)
explorer.run(catalog.save, "Create global layer group",
[], layergroup)
tree.findAllItems(catalog)[0].refreshContent(explorer)
explorer.resetActivity()
def publishLayers(tree, explorer, catalog):
dlg = PublishLayersDialog(catalog)
dlg.exec_()
if dlg.topublish is None:
return
cat = CatalogWrapper(catalog)
progress = 0
explorer.setProgressMaximum(len(dlg.topublish), "Publish layers")
for layer, workspace, name in dlg.topublish:
explorer.run(cat.publishLayer,
None,
[],
layer, workspace, True, name)
progress += 1
explorer.setProgress(progress)
catItem = tree.findAllItems(catalog)[0]
catItem.refreshContent(explorer)
explorer.resetActivity()
| gpl-2.0 | -6,896,900,767,553,893,000 | 38.149701 | 112 | 0.620526 | false | 4.270411 | false | false | false |
crckyl/pixplus | tools/conf-parser.py | 1 | 1750 | import sys
import json
from xml.sax.saxutils import escape
from xml.sax.saxutils import quoteattr
format = ' <preference name="%(name)s" value=%(value)s />'
if sys.argv[1] == 'safari':
format = ''' <dict>
<key>Title</key>
<string>%(name)s</string>
<key>Key</key>
<string>%(name)s</string>
<key>DefaultValue</key>
%(value_safari)s
<key>Type</key>
<string>%(type_safari)s</string>%(more)s
</dict>'''
pass
def print_conf(conf):
for sec in conf:
for item in sec['items']:
name = 'conf_%s_%s' % (sec['name'], item['key'])
value = item['value']
type_safari = 'TextField'
value_safari = '<string>%s</string>' % escape(str(value))
more = ''
if 'hint' in item:
type_safari = 'PopUpButton'
more = '''
<key>Titles</key>
<array>'''
for hint in item['hint']:
more += '\n <string>%s</string>' % hint['title']
pass
more += '\n </array>'
more += '''
<key>Values</key>
<array>'''
for hint in item['hint']:
more += '\n <string>%s</string>' % str(hint['value'])
pass
more += '\n </array>'
elif isinstance(value, bool):
type_safari = 'CheckBox'
if value:
value = 'true'
else:
value = 'false'
pass
value_safari = '<%s/>' % value
pass
params = {
'name': name,
'value': quoteattr(str(value)),
'type_safari': type_safari,
'value_safari': value_safari,
'more': more
}
print(format % params)
pass
pass
pass
print_conf(json.loads(sys.stdin.read()))
| mit | -6,539,849,018,826,677,000 | 25.119403 | 70 | 0.487429 | false | 3.411306 | false | false | false |
deejross/python3-pywbem | lex.py | 1 | 39948 | # -----------------------------------------------------------------------------
# ply: lex.py
#
# Author: David M. Beazley ([email protected])
#
# Copyright (C) 2001-2009, David M. Beazley
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See the file COPYING for a complete copy of the LGPL.
# -----------------------------------------------------------------------------
__version__ = "3.0"
__tabversion__ = "3.0" # Version of table file used
import re, sys, types, copy, os
# This tuple contains known string types
try:
# Python 2.6
StringTypes = (types.StringType, types.UnicodeType)
except AttributeError:
# Python 3.0
StringTypes = (str, bytes)
# Extract the code attribute of a function. Different implementations
# are for Python 2/3 compatibility.
if sys.version_info[0] < 3:
def func_code(f):
return f.func_code
else:
def func_code(f):
return f.__code__
# This regular expression is used to match valid token names
_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
# Exception thrown when invalid token encountered and no default error
# handler is defined.
class LexError(Exception):
def __init__(self,message,s):
self.args = (message,)
self.text = s
# Token class. This class is used to represent the tokens produced.
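# Each token carries four attributes: type, value, lineno and lexpos.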
class LexToken(object):
def __str__(self):
return "LexToken(%s,%r,%d,%d)" % (self.type,self.value,self.lineno,self.lexpos)
def __repr__(self):
return str(self)
# This object is a stand-in for a logging object created by the
# logging module.
class PlyLogger(object):
def __init__(self,f):
self.f = f
def critical(self,msg,*args,**kwargs):
self.f.write((msg % args) + "\n")
def warning(self,msg,*args,**kwargs):
self.f.write("WARNING: "+ (msg % args) + "\n")
def error(self,msg,*args,**kwargs):
self.f.write("ERROR: " + (msg % args) + "\n")
info = critical
debug = critical
# Null logger is used when no output is generated. Does nothing.
class NullLogger(object):
def __getattribute__(self,name):
return self
def __call__(self,*args,**kwargs):
return self
# -----------------------------------------------------------------------------
# === Lexing Engine ===
#
# The following Lexer class implements the lexer runtime. There are only
# a few public methods and attributes:
#
# input() - Store a new string in the lexer
# token() - Get the next token
# clone() - Clone the lexer
#
# lineno - Current line number
# lexpos - Current position in the input string
# -----------------------------------------------------------------------------
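# Typical usage sketch (assumes the calling module defines its t_* token rules and
# builds the lexer with the module-level lex() function provided by this module):
#
#     lexer = lex()                  # lex.lex() when called from outside this module
#     lexer.input("some input text")
#     while True:
#         tok = lexer.token()
#         if not tok: break
#         print(tok)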
class Lexer:
def __init__(self):
self.lexre = None # Master regular expression. This is a list of
# tuples (re,findex) where re is a compiled
# regular expression and findex is a list
# mapping regex group numbers to rules
self.lexretext = None # Current regular expression strings
self.lexstatere = {} # Dictionary mapping lexer states to master regexs
self.lexstateretext = {} # Dictionary mapping lexer states to regex strings
self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names
self.lexstate = "INITIAL" # Current lexer state
self.lexstatestack = [] # Stack of lexer states
self.lexstateinfo = None # State information
self.lexstateignore = {} # Dictionary of ignored characters for each state
self.lexstateerrorf = {} # Dictionary of error functions for each state
self.lexreflags = 0 # Optional re compile flags
self.lexdata = None # Actual input data (as a string)
self.lexpos = 0 # Current position in input text
self.lexlen = 0 # Length of the input text
self.lexerrorf = None # Error rule (if any)
self.lextokens = None # List of valid tokens
self.lexignore = "" # Ignored characters
self.lexliterals = "" # Literal characters that can be passed through
self.lexmodule = None # Module
self.lineno = 1 # Current line number
self.lexoptimize = 0 # Optimized mode
def clone(self,object=None):
c = copy.copy(self)
# If the object parameter has been supplied, it means we are attaching the
# lexer to a new object. In this case, we have to rebind all methods in
# the lexstatere and lexstateerrorf tables.
if object:
newtab = { }
for key, ritem in self.lexstatere.items():
newre = []
for cre, findex in ritem:
newfindex = []
for f in findex:
if not f or not f[0]:
newfindex.append(f)
continue
newfindex.append((getattr(object,f[0].__name__),f[1]))
newre.append((cre,newfindex))
newtab[key] = newre
c.lexstatere = newtab
c.lexstateerrorf = { }
for key, ef in self.lexstateerrorf.items():
c.lexstateerrorf[key] = getattr(object,ef.__name__)
c.lexmodule = object
return c
# ------------------------------------------------------------
# writetab() - Write lexer information to a table file
# ------------------------------------------------------------
def writetab(self,tabfile,outputdir=""):
if isinstance(tabfile,types.ModuleType):
return
basetabfilename = tabfile.split(".")[-1]
filename = os.path.join(outputdir,basetabfilename)+".py"
tf = open(filename,"w")
tf.write("# %s.py. This file automatically created by PLY (version %s). Don't edit!\n" % (tabfile,__version__))
tf.write("_tabversion = %s\n" % repr(__version__))
tf.write("_lextokens = %s\n" % repr(self.lextokens))
tf.write("_lexreflags = %s\n" % repr(self.lexreflags))
tf.write("_lexliterals = %s\n" % repr(self.lexliterals))
tf.write("_lexstateinfo = %s\n" % repr(self.lexstateinfo))
tabre = { }
# Collect all functions in the initial state
initial = self.lexstatere["INITIAL"]
initialfuncs = []
for part in initial:
for f in part[1]:
if f and f[0]:
initialfuncs.append(f)
for key, lre in self.lexstatere.items():
titem = []
for i in range(len(lre)):
titem.append((self.lexstateretext[key][i],_funcs_to_names(lre[i][1],self.lexstaterenames[key][i])))
tabre[key] = titem
tf.write("_lexstatere = %s\n" % repr(tabre))
tf.write("_lexstateignore = %s\n" % repr(self.lexstateignore))
taberr = { }
for key, ef in self.lexstateerrorf.items():
if ef:
taberr[key] = ef.__name__
else:
taberr[key] = None
tf.write("_lexstateerrorf = %s\n" % repr(taberr))
tf.close()
# ------------------------------------------------------------
# readtab() - Read lexer information from a tab file
# ------------------------------------------------------------
def readtab(self,tabfile,fdict):
if isinstance(tabfile,types.ModuleType):
lextab = tabfile
else:
if sys.version_info[0] < 3:
exec("import %s as lextab" % tabfile)
else:
env = { }
exec("import %s as lextab" % tabfile, env,env)
lextab = env['lextab']
if getattr(lextab,"_tabversion","0.0") != __version__:
raise ImportError("Inconsistent PLY version")
self.lextokens = lextab._lextokens
self.lexreflags = lextab._lexreflags
self.lexliterals = lextab._lexliterals
self.lexstateinfo = lextab._lexstateinfo
self.lexstateignore = lextab._lexstateignore
self.lexstatere = { }
self.lexstateretext = { }
for key,lre in lextab._lexstatere.items():
titem = []
txtitem = []
for i in range(len(lre)):
titem.append((re.compile(lre[i][0],lextab._lexreflags),_names_to_funcs(lre[i][1],fdict)))
txtitem.append(lre[i][0])
self.lexstatere[key] = titem
self.lexstateretext[key] = txtitem
self.lexstateerrorf = { }
for key,ef in lextab._lexstateerrorf.items():
self.lexstateerrorf[key] = fdict[ef]
self.begin('INITIAL')
# ------------------------------------------------------------
# input() - Push a new string into the lexer
# ------------------------------------------------------------
def input(self,s):
# Pull off the first character to see if s looks like a string
c = s[:1]
if not isinstance(c,StringTypes):
raise ValueError("Expected a string")
self.lexdata = s
self.lexpos = 0
self.lexlen = len(s)
# ------------------------------------------------------------
# begin() - Changes the lexing state
# ------------------------------------------------------------
def begin(self,state):
if not state in self.lexstatere:
raise ValueError("Undefined state")
self.lexre = self.lexstatere[state]
self.lexretext = self.lexstateretext[state]
self.lexignore = self.lexstateignore.get(state,"")
self.lexerrorf = self.lexstateerrorf.get(state,None)
self.lexstate = state
# ------------------------------------------------------------
# push_state() - Changes the lexing state and saves old on stack
# ------------------------------------------------------------
def push_state(self,state):
self.lexstatestack.append(self.lexstate)
self.begin(state)
# ------------------------------------------------------------
# pop_state() - Restores the previous state
# ------------------------------------------------------------
def pop_state(self):
self.begin(self.lexstatestack.pop())
# ------------------------------------------------------------
# current_state() - Returns the current lexing state
# ------------------------------------------------------------
def current_state(self):
return self.lexstate
# ------------------------------------------------------------
# skip() - Skip ahead n characters
# ------------------------------------------------------------
def skip(self,n):
self.lexpos += n
# ------------------------------------------------------------
    # token() - Return the next token from the Lexer
#
# Note: This function has been carefully implemented to be as fast
# as possible. Don't make changes unless you really know what
# you are doing
# ------------------------------------------------------------
def token(self):
# Make local copies of frequently referenced attributes
lexpos = self.lexpos
lexlen = self.lexlen
lexignore = self.lexignore
lexdata = self.lexdata
while lexpos < lexlen:
# This code provides some short-circuit code for whitespace, tabs, and other ignored characters
if lexdata[lexpos] in lexignore:
lexpos += 1
continue
# Look for a regular expression match
for lexre,lexindexfunc in self.lexre:
m = lexre.match(lexdata,lexpos)
if not m: continue
# Create a token for return
tok = LexToken()
tok.value = m.group()
tok.lineno = self.lineno
tok.lexpos = lexpos
i = m.lastindex
func,tok.type = lexindexfunc[i]
if not func:
# If no token type was set, it's an ignored token
if tok.type:
self.lexpos = m.end()
return tok
else:
lexpos = m.end()
break
lexpos = m.end()
# If token is processed by a function, call it
tok.lexer = self # Set additional attributes useful in token rules
self.lexmatch = m
self.lexpos = lexpos
newtok = func(tok)
                    # Every rule function must return a token; if it returns nothing, we move on to the next token
if not newtok:
lexpos = self.lexpos # This is here in case user has updated lexpos.
lexignore = self.lexignore # This is here in case there was a state change
break
# Verify type of the token. If not in the token map, raise an error
if not self.lexoptimize:
if not newtok.type in self.lextokens:
raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
func_code(func).co_filename, func_code(func).co_firstlineno,
func.__name__, newtok.type),lexdata[lexpos:])
return newtok
else:
# No match, see if in literals
if lexdata[lexpos] in self.lexliterals:
tok = LexToken()
tok.value = lexdata[lexpos]
tok.lineno = self.lineno
tok.type = tok.value
tok.lexpos = lexpos
self.lexpos = lexpos + 1
return tok
# No match. Call t_error() if defined.
if self.lexerrorf:
tok = LexToken()
tok.value = self.lexdata[lexpos:]
tok.lineno = self.lineno
tok.type = "error"
tok.lexer = self
tok.lexpos = lexpos
self.lexpos = lexpos
newtok = self.lexerrorf(tok)
if lexpos == self.lexpos:
# Error method didn't change text position at all. This is an error.
raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
lexpos = self.lexpos
if not newtok: continue
return newtok
self.lexpos = lexpos
raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos],lexpos), lexdata[lexpos:])
self.lexpos = lexpos + 1
if self.lexdata is None:
raise RuntimeError("No input string given with input()")
return None
# Iterator interface
def __iter__(self):
return self
def next(self):
t = self.token()
if t is None:
raise StopIteration
return t
__next__ = next
# -----------------------------------------------------------------------------
# === Lex Builder ===
#
# The functions and classes below are used to collect lexing information
# and build a Lexer object from it.
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# get_caller_module_dict()
#
# This function returns a dictionary containing all of the symbols defined within
# a caller further down the call stack. This is used to get the environment
# associated with the yacc() call if none was provided.
# -----------------------------------------------------------------------------
def get_caller_module_dict(levels):
try:
raise RuntimeError
except RuntimeError:
e,b,t = sys.exc_info()
f = t.tb_frame
while levels > 0:
f = f.f_back
levels -= 1
ldict = f.f_globals.copy()
if f.f_globals != f.f_locals:
ldict.update(f.f_locals)
return ldict
# -----------------------------------------------------------------------------
# _funcs_to_names()
#
# Given a list of regular expression functions, this converts it to a list
# suitable for output to a table file
# -----------------------------------------------------------------------------
def _funcs_to_names(funclist,namelist):
result = []
for f,name in zip(funclist,namelist):
if f and f[0]:
result.append((name, f[1]))
else:
result.append(f)
return result
# -----------------------------------------------------------------------------
# _names_to_funcs()
#
# Given a list of regular expression function names, this converts it back to
# functions.
# -----------------------------------------------------------------------------
def _names_to_funcs(namelist,fdict):
result = []
for n in namelist:
if n and n[0]:
result.append((fdict[n[0]],n[1]))
else:
result.append(n)
return result
# -----------------------------------------------------------------------------
# _form_master_re()
#
# This function takes a list of all of the regex components and attempts to
# form the master regular expression. Given limitations in the Python re
# module, it may be necessary to break the master regex into separate expressions.
# -----------------------------------------------------------------------------
def _form_master_re(relist,reflags,ldict,toknames):
if not relist: return []
regex = "|".join(relist)
try:
lexre = re.compile(regex,re.VERBOSE | reflags)
# Build the index to function map for the matching engine
lexindexfunc = [ None ] * (max(lexre.groupindex.values())+1)
lexindexnames = lexindexfunc[:]
for f,i in lexre.groupindex.items():
handle = ldict.get(f,None)
if type(handle) in (types.FunctionType, types.MethodType):
lexindexfunc[i] = (handle,toknames[f])
lexindexnames[i] = f
elif handle is not None:
lexindexnames[i] = f
if f.find("ignore_") > 0:
lexindexfunc[i] = (None,None)
else:
lexindexfunc[i] = (None, toknames[f])
return [(lexre,lexindexfunc)],[regex],[lexindexnames]
except Exception:
m = int(len(relist)/2)
if m == 0: m = 1
llist, lre, lnames = _form_master_re(relist[:m],reflags,ldict,toknames)
rlist, rre, rnames = _form_master_re(relist[m:],reflags,ldict,toknames)
return llist+rlist, lre+rre, lnames+rnames
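# For illustration (rule names and patterns below are assumptions): two rules
# t_NUMBER = r'\d+' and t_PLUS = r'\+' would be combined into the single
# master expression
#     (?P<t_NUMBER>\d+)|(?P<t_PLUS>\+)
# and the compiled expression's groupindex maps each named group back to its rule.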
# -----------------------------------------------------------------------------
# def _statetoken(s,names)
#
# Given a declaration name s of the form "t_" and a dictionary whose keys are
# state names, this function returns a tuple (states,tokenname) where states
# is a tuple of state names and tokenname is the name of the token. For example,
# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
# -----------------------------------------------------------------------------
def _statetoken(s,names):
nonstate = 1
parts = s.split("_")
for i in range(1,len(parts)):
if not parts[i] in names and parts[i] != 'ANY': break
if i > 1:
states = tuple(parts[1:i])
else:
states = ('INITIAL',)
if 'ANY' in states:
states = tuple(names)
tokenname = "_".join(parts[i:])
return (states,tokenname)
# -----------------------------------------------------------------------------
# LexerReflect()
#
# This class represents information needed to build a lexer as extracted from a
# user's input file.
# -----------------------------------------------------------------------------
class LexerReflect(object):
def __init__(self,ldict,log=None,reflags=0):
self.ldict = ldict
self.error_func = None
self.tokens = []
self.reflags = reflags
self.stateinfo = { 'INITIAL' : 'inclusive'}
self.files = {}
self.error = 0
if log is None:
self.log = PlyLogger(sys.stderr)
else:
self.log = log
# Get all of the basic information
def get_all(self):
self.get_tokens()
self.get_literals()
self.get_states()
self.get_rules()
# Validate all of the information
def validate_all(self):
self.validate_tokens()
self.validate_literals()
self.validate_rules()
return self.error
# Get the tokens map
def get_tokens(self):
tokens = self.ldict.get("tokens",None)
if not tokens:
self.log.error("No token list is defined")
self.error = 1
return
if not isinstance(tokens,(list, tuple)):
self.log.error("tokens must be a list or tuple")
self.error = 1
return
if not tokens:
self.log.error("tokens is empty")
self.error = 1
return
self.tokens = tokens
# Validate the tokens
def validate_tokens(self):
terminals = {}
for n in self.tokens:
if not _is_identifier.match(n):
self.log.error("Bad token name '%s'",n)
self.error = 1
if n in terminals:
self.log.warning("Token '%s' multiply defined", n)
terminals[n] = 1
# Get the literals specifier
def get_literals(self):
self.literals = self.ldict.get("literals","")
# Validate literals
def validate_literals(self):
try:
for c in self.literals:
if not isinstance(c,StringTypes) or len(c) > 1:
self.log.error("Invalid literal %s. Must be a single character", repr(c))
self.error = 1
continue
except TypeError:
self.log.error("Invalid literals specification. literals must be a sequence of characters")
self.error = 1
def get_states(self):
self.states = self.ldict.get("states",None)
# Build statemap
if self.states:
if not isinstance(self.states,(tuple,list)):
self.log.error("states must be defined as a tuple or list")
self.error = 1
else:
for s in self.states:
if not isinstance(s,tuple) or len(s) != 2:
self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')",repr(s))
self.error = 1
continue
name, statetype = s
if not isinstance(name,StringTypes):
self.log.error("State name %s must be a string", repr(name))
self.error = 1
continue
if not (statetype == 'inclusive' or statetype == 'exclusive'):
self.log.error("State type for state %s must be 'inclusive' or 'exclusive'",name)
self.error = 1
continue
if name in self.stateinfo:
self.log.error("State '%s' already defined",name)
self.error = 1
continue
self.stateinfo[name] = statetype
# Get all of the symbols with a t_ prefix and sort them into various
# categories (functions, strings, error functions, and ignore characters)
def get_rules(self):
tsymbols = [f for f in self.ldict if f[:2] == 't_' ]
# Now build up a list of functions and a list of strings
self.toknames = { } # Mapping of symbols to token names
self.funcsym = { } # Symbols defined as functions
self.strsym = { } # Symbols defined as strings
self.ignore = { } # Ignore strings by state
self.errorf = { } # Error functions by state
for s in self.stateinfo:
self.funcsym[s] = []
self.strsym[s] = []
if len(tsymbols) == 0:
self.log.error("No rules of the form t_rulename are defined")
self.error = 1
return
for f in tsymbols:
t = self.ldict[f]
states, tokname = _statetoken(f,self.stateinfo)
self.toknames[f] = tokname
if hasattr(t,"__call__"):
if tokname == 'error':
for s in states:
self.errorf[s] = t
elif tokname == 'ignore':
line = func_code(t).co_firstlineno
file = func_code(t).co_filename
self.log.error("%s:%d: Rule '%s' must be defined as a string",file,line,t.__name__)
self.error = 1
else:
for s in states:
self.funcsym[s].append((f,t))
elif isinstance(t, StringTypes):
if tokname == 'ignore':
for s in states:
self.ignore[s] = t
if "\\" in t:
self.log.warning("%s contains a literal backslash '\\'",f)
elif tokname == 'error':
self.log.error("Rule '%s' must be defined as a function", f)
self.error = 1
else:
for s in states:
self.strsym[s].append((f,t))
else:
self.log.error("%s not defined as a function or string", f)
self.error = 1
# Sort the functions by line number
for f in self.funcsym.values():
if sys.version_info[0] < 3:
f.sort(lambda x,y: cmp(func_code(x[1]).co_firstlineno,func_code(y[1]).co_firstlineno))
else:
# Python 3.0
f.sort(key=lambda x: func_code(x[1]).co_firstlineno)
# Sort the strings by regular expression length
for s in self.strsym.values():
if sys.version_info[0] < 3:
s.sort(lambda x,y: (len(x[1]) < len(y[1])) - (len(x[1]) > len(y[1])))
else:
# Python 3.0
s.sort(key=lambda x: len(x[1]),reverse=True)
# Validate all of the t_rules collected
def validate_rules(self):
for state in self.stateinfo:
# Validate all rules defined by functions
for fname, f in self.funcsym[state]:
line = func_code(f).co_firstlineno
file = func_code(f).co_filename
self.files[file] = 1
tokname = self.toknames[fname]
if isinstance(f, types.MethodType):
reqargs = 2
else:
reqargs = 1
nargs = func_code(f).co_argcount
if nargs > reqargs:
self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__)
self.error = 1
continue
if nargs < reqargs:
self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__)
self.error = 1
continue
if not f.__doc__:
self.log.error("%s:%d: No regular expression defined for rule '%s'",file,line,f.__name__)
self.error = 1
continue
try:
c = re.compile("(?P<%s>%s)" % (fname,f.__doc__), re.VERBOSE | self.reflags)
if c.match(""):
self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file,line,f.__name__)
self.error = 1
except re.error:
_etype, e, _etrace = sys.exc_info()
self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file,line,f.__name__,e)
if '#' in f.__doc__:
self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'",file,line, f.__name__)
self.error = 1
# Validate all rules defined by strings
for name,r in self.strsym[state]:
tokname = self.toknames[name]
if tokname == 'error':
self.log.error("Rule '%s' must be defined as a function", name)
self.error = 1
continue
if not tokname in self.tokens and tokname.find("ignore_") < 0:
self.log.error("Rule '%s' defined for an unspecified token %s",name,tokname)
self.error = 1
continue
try:
c = re.compile("(?P<%s>%s)" % (name,r),re.VERBOSE | self.reflags)
if (c.match("")):
self.log.error("Regular expression for rule '%s' matches empty string",name)
self.error = 1
except re.error:
_etype, e, _etrace = sys.exc_info()
self.log.error("Invalid regular expression for rule '%s'. %s",name,e)
if '#' in r:
self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'",name)
self.error = 1
if not self.funcsym[state] and not self.strsym[state]:
self.log.error("No rules defined for state '%s'",state)
self.error = 1
# Validate the error function
efunc = self.errorf.get(state,None)
if efunc:
f = efunc
line = func_code(f).co_firstlineno
file = func_code(f).co_filename
self.files[file] = 1
if isinstance(f, types.MethodType):
reqargs = 2
else:
reqargs = 1
nargs = func_code(f).co_argcount
if nargs > reqargs:
self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__)
self.error = 1
if nargs < reqargs:
self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__)
self.error = 1
for f in self.files:
self.validate_file(f)
# -----------------------------------------------------------------------------
# validate_file()
#
# This checks to see if there are duplicated t_rulename() functions or strings
# in the parser input file. This is done using a simple regular expression
# match on each line in the given file.
# -----------------------------------------------------------------------------
def validate_file(self,filename):
import os.path
base,ext = os.path.splitext(filename)
if ext != '.py': return # No idea what the file is. Return OK
try:
f = open(filename)
lines = f.readlines()
f.close()
except IOError:
return # Couldn't find the file. Don't worry about it
fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
counthash = { }
linen = 1
for l in lines:
m = fre.match(l)
if not m:
m = sre.match(l)
if m:
name = m.group(1)
prev = counthash.get(name)
if not prev:
counthash[name] = linen
else:
self.log.error("%s:%d: Rule %s redefined. Previously defined on line %d",filename,linen,name,prev)
self.error = 1
linen += 1
# -----------------------------------------------------------------------------
# lex(module)
#
# Build all of the regular expression rules from definitions in the supplied module
# -----------------------------------------------------------------------------
def lex(module=None,object=None,debug=0,optimize=0,lextab="lextab",reflags=0,nowarn=0,outputdir="", debuglog=None, errorlog=None):
global lexer
ldict = None
stateinfo = { 'INITIAL' : 'inclusive'}
lexobj = Lexer()
lexobj.lexoptimize = optimize
global token,input
if errorlog is None:
errorlog = PlyLogger(sys.stderr)
if debug:
if debuglog is None:
debuglog = PlyLogger(sys.stderr)
# Get the module dictionary used for the lexer
if object: module = object
if module:
_items = [(k,getattr(module,k)) for k in dir(module)]
ldict = dict(_items)
else:
ldict = get_caller_module_dict(2)
# Collect parser information from the dictionary
linfo = LexerReflect(ldict,log=errorlog,reflags=reflags)
linfo.get_all()
if not optimize:
if linfo.validate_all():
raise SyntaxError("Can't build lexer")
if optimize and lextab:
try:
lexobj.readtab(lextab,ldict)
token = lexobj.token
input = lexobj.input
lexer = lexobj
return lexobj
except ImportError:
pass
# Dump some basic debugging information
if debug:
debuglog.info("lex: tokens = %r", linfo.tokens)
debuglog.info("lex: literals = %r", linfo.literals)
debuglog.info("lex: states = %r", linfo.stateinfo)
# Build a dictionary of valid token names
lexobj.lextokens = { }
for n in linfo.tokens:
lexobj.lextokens[n] = 1
# Get literals specification
if isinstance(linfo.literals,(list,tuple)):
lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
else:
lexobj.lexliterals = linfo.literals
# Get the stateinfo dictionary
stateinfo = linfo.stateinfo
regexs = { }
# Build the master regular expressions
for state in stateinfo:
regex_list = []
# Add rules defined by functions first
for fname, f in linfo.funcsym[state]:
line = func_code(f).co_firstlineno
file = func_code(f).co_filename
regex_list.append("(?P<%s>%s)" % (fname,f.__doc__))
if debug:
debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",fname,f.__doc__, state)
# Now add all of the simple rules
for name,r in linfo.strsym[state]:
regex_list.append("(?P<%s>%s)" % (name,r))
if debug:
debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",name,r, state)
regexs[state] = regex_list
# Build the master regular expressions
if debug:
debuglog.info("lex: ==== MASTER REGEXS FOLLOW ====")
for state in regexs:
lexre, re_text, re_names = _form_master_re(regexs[state],reflags,ldict,linfo.toknames)
lexobj.lexstatere[state] = lexre
lexobj.lexstateretext[state] = re_text
lexobj.lexstaterenames[state] = re_names
if debug:
for i in range(len(re_text)):
debuglog.info("lex: state '%s' : regex[%d] = '%s'",state, i, re_text[i])
# For inclusive states, we need to add the regular expressions from the INITIAL state
for state,stype in stateinfo.items():
if state != "INITIAL" and stype == 'inclusive':
lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])
lexobj.lexstateinfo = stateinfo
lexobj.lexre = lexobj.lexstatere["INITIAL"]
lexobj.lexretext = lexobj.lexstateretext["INITIAL"]
# Set up ignore variables
lexobj.lexstateignore = linfo.ignore
lexobj.lexignore = lexobj.lexstateignore.get("INITIAL","")
# Set up error functions
lexobj.lexstateerrorf = linfo.errorf
lexobj.lexerrorf = linfo.errorf.get("INITIAL",None)
if not lexobj.lexerrorf:
errorlog.warning("No t_error rule is defined")
# Check state information for ignore and error rules
for s,stype in stateinfo.items():
if stype == 'exclusive':
if not s in linfo.errorf:
errorlog.warning("No error rule is defined for exclusive state '%s'", s)
if not s in linfo.ignore and lexobj.lexignore:
errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
elif stype == 'inclusive':
if not s in linfo.errorf:
linfo.errorf[s] = linfo.errorf.get("INITIAL",None)
if not s in linfo.ignore:
linfo.ignore[s] = linfo.ignore.get("INITIAL","")
# Create global versions of the token() and input() functions
token = lexobj.token
input = lexobj.input
lexer = lexobj
# If in optimize mode, we write the lextab
if lextab and optimize:
lexobj.writetab(lextab,outputdir)
return lexobj
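# A hedged end-to-end sketch of a calling module (every name below is an
# assumption): lex() scans the caller's namespace for 'tokens', 'literals',
# 'states', 't_*' rules, 't_ignore' and 't_error', so a minimal module could be:
#
#     tokens = ('NUMBER', 'PLUS')
#     t_PLUS = r'\+'
#     t_ignore = ' \t'
#     def t_NUMBER(t):
#         r'\d+'
#         t.value = int(t.value)
#         return t
#     def t_error(t):
#         t.lexer.skip(1)
#     lexer = lex()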
# -----------------------------------------------------------------------------
# runmain()
#
# This runs the lexer as a main program
# -----------------------------------------------------------------------------
def runmain(lexer=None,data=None):
if not data:
try:
filename = sys.argv[1]
f = open(filename)
data = f.read()
f.close()
except IndexError:
sys.stdout.write("Reading from standard input (type EOF to end):\n")
data = sys.stdin.read()
if lexer:
_input = lexer.input
else:
_input = input
_input(data)
if lexer:
_token = lexer.token
else:
_token = token
while 1:
tok = _token()
if not tok: break
sys.stdout.write("(%s,%r,%d,%d)\n" % (tok.type, tok.value, tok.lineno,tok.lexpos))
# -----------------------------------------------------------------------------
# @TOKEN(regex)
#
# This decorator function can be used to set the regex expression on a function
# when its docstring might need to be set in an alternative way
# -----------------------------------------------------------------------------
def TOKEN(r):
def set_doc(f):
if hasattr(r,"__call__"):
f.__doc__ = r.__doc__
else:
f.__doc__ = r
return f
return set_doc
# Alternative spelling of the TOKEN decorator
Token = TOKEN
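# A usage sketch for @TOKEN (identifier names here are assumptions): it copies
# the supplied regex (or the docstring of a supplied callable) onto the decorated
# rule, which is useful when the pattern is built dynamically.
#
#     digits = r'\d+'
#     @TOKEN(digits)
#     def t_NUMBER(t):
#         t.value = int(t.value)
#         return t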
| lgpl-2.1 | -8,469,962,769,702,211,000 | 37.118321 | 134 | 0.495519 | false | 4.346899 | false | false | false |
4Quant/tensorflow | tensorflow/python/ops/rnn.py | 1 | 21568 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""RNN helpers for TensorFlow models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
def rnn(cell, inputs, initial_state=None, dtype=None,
sequence_length=None, scope=None):
"""Creates a recurrent neural network specified by RNNCell "cell".
The simplest form of RNN network generated is:
state = cell.zero_state(...)
outputs = []
for input_ in inputs:
output, state = cell(input_, state)
outputs.append(output)
return (outputs, state)
However, a few other options are available:
An initial state can be provided.
If the sequence_length vector is provided, dynamic calculation is performed.
This method of calculation does not compute the RNN steps past the maximum
sequence length of the minibatch (thus saving computational time),
and properly propagates the state at an example's sequence length
to the final state output.
The dynamic calculation performed is, at time t for batch row b,
(output, state)(b, t) =
(t >= sequence_length(b))
? (zeros(cell.output_size), states(b, sequence_length(b) - 1))
: cell(input(b, t), state(b, t - 1))
Args:
cell: An instance of RNNCell.
inputs: A length T list of inputs, each a tensor of shape
[batch_size, cell.input_size].
initial_state: (optional) An initial state for the RNN. This must be
a tensor of appropriate type and shape [batch_size x cell.state_size].
dtype: (optional) The data type for the initial state. Required if
initial_state is not provided.
sequence_length: Specifies the length of each sequence in inputs.
An int32 or int64 vector (tensor) size [batch_size]. Values in [0, T).
scope: VariableScope for the created subgraph; defaults to "RNN".
Returns:
A pair (outputs, state) where:
outputs is a length T list of outputs (one for each input)
state is the final state
Raises:
TypeError: If "cell" is not an instance of RNNCell.
ValueError: If inputs is None or an empty list.
"""
if not isinstance(cell, rnn_cell.RNNCell):
raise TypeError("cell must be an instance of RNNCell")
if not isinstance(inputs, list):
raise TypeError("inputs must be a list")
if not inputs:
raise ValueError("inputs must not be empty")
outputs = []
# Create a new scope in which the caching device is either
# determined by the parent scope, or is set to place the cached
# Variable using the same placement as for the rest of the RNN.
with vs.variable_scope(scope or "RNN") as varscope:
if varscope.caching_device is None:
varscope.set_caching_device(lambda op: op.device)
fixed_batch_size = inputs[0].get_shape().with_rank_at_least(1)[0]
if fixed_batch_size.value:
batch_size = fixed_batch_size.value
else:
batch_size = array_ops.shape(inputs[0])[0]
if initial_state is not None:
state = initial_state
else:
if not dtype:
raise ValueError("If no initial_state is provided, dtype must be.")
state = cell.zero_state(batch_size, dtype)
if sequence_length is not None:
sequence_length = math_ops.to_int32(sequence_length)
if sequence_length: # Prepare variables
zero_output = array_ops.zeros(
array_ops.pack([batch_size, cell.output_size]), inputs[0].dtype)
zero_output.set_shape(
tensor_shape.TensorShape([fixed_batch_size.value, cell.output_size]))
min_sequence_length = math_ops.reduce_min(sequence_length)
max_sequence_length = math_ops.reduce_max(sequence_length)
for time, input_ in enumerate(inputs):
if time > 0: vs.get_variable_scope().reuse_variables()
# pylint: disable=cell-var-from-loop
call_cell = lambda: cell(input_, state)
# pylint: enable=cell-var-from-loop
if sequence_length:
(output, state) = _rnn_step(
time, sequence_length, min_sequence_length, max_sequence_length,
zero_output, state, call_cell)
else:
(output, state) = call_cell()
outputs.append(output)
return (outputs, state)
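# A minimal usage sketch (shapes and names below are assumptions, not part of
# this module): unroll an LSTM over a fixed number of time steps.
#
#   cell = rnn_cell.BasicLSTMCell(num_units=64)
#   inputs = [tf.placeholder(tf.float32, [batch_size, input_size])
#             for _ in range(num_steps)]
#   outputs, state = rnn(cell, inputs, dtype=tf.float32)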
def state_saving_rnn(cell, inputs, state_saver, state_name,
sequence_length=None, scope=None):
"""RNN that accepts a state saver for time-truncated RNN calculation.
Args:
cell: An instance of RNNCell.
inputs: A length T list of inputs, each a tensor of shape
[batch_size, cell.input_size].
state_saver: A state saver object with methods `state` and `save_state`.
state_name: The name to use with the state_saver.
sequence_length: (optional) An int32/int64 vector size [batch_size].
See the documentation for rnn() for more details about sequence_length.
scope: VariableScope for the created subgraph; defaults to "RNN".
Returns:
A pair (outputs, state) where:
outputs is a length T list of outputs (one for each input)
states is the final state
Raises:
TypeError: If "cell" is not an instance of RNNCell.
ValueError: If inputs is None or an empty list.
"""
initial_state = state_saver.state(state_name)
(outputs, state) = rnn(cell, inputs, initial_state=initial_state,
sequence_length=sequence_length, scope=scope)
save_state = state_saver.save_state(state_name, state)
with ops.control_dependencies([save_state]):
outputs[-1] = array_ops.identity(outputs[-1])
return (outputs, state)
def _rnn_step(
time, sequence_length, min_sequence_length, max_sequence_length,
zero_output, state, call_cell):
"""Calculate one step of a dynamic RNN minibatch.
Returns an (output, state) pair conditioned on the sequence_lengths.
The pseudocode is something like:
if t >= max_sequence_length:
return (zero_output, state)
if t < min_sequence_length:
return call_cell()
# Selectively output zeros or output, old state or new state depending
# on if we've finished calculating each row.
new_output, new_state = call_cell()
final_output = np.vstack([
zero_output if time >= sequence_lengths[r] else new_output_r
for r, new_output_r in enumerate(new_output)
])
final_state = np.vstack([
state[r] if time >= sequence_lengths[r] else new_state_r
for r, new_state_r in enumerate(new_state)
])
return (final_output, final_state)
Args:
time: Python int, the current time step
sequence_length: int32 `Tensor` vector of size [batch_size]
min_sequence_length: int32 `Tensor` scalar, min of sequence_length
max_sequence_length: int32 `Tensor` scalar, max of sequence_length
zero_output: `Tensor` vector of shape [output_size]
state: `Tensor` matrix of shape [batch_size, state_size]
call_cell: lambda returning tuple of (new_output, new_state) where
new_output is a `Tensor` matrix of shape [batch_size, output_size]
new_state is a `Tensor` matrix of shape [batch_size, state_size]
Returns:
A tuple of (final_output, final_state) as given by the pseudocode above:
final_output is a `Tensor` matrix of shape [batch_size, output_size]
final_state is a `Tensor` matrix of shape [batch_size, state_size]
"""
# Step 1: determine whether we need to call_cell or not
empty_update = lambda: (zero_output, state)
state_shape = state.get_shape()
output, new_state = control_flow_ops.cond(
time < max_sequence_length, call_cell, empty_update)
# Step 2: determine whether we need to copy through state and/or outputs
existing_output_state = lambda: (output, new_state)
def copy_through():
# Use broadcasting select to determine which values should get
# the previous state & zero output, and which values should get
# a calculated state & output.
copy_cond = (time >= sequence_length)
return (math_ops.select(copy_cond, zero_output, output),
math_ops.select(copy_cond, state, new_state))
(output, state) = control_flow_ops.cond(
time < min_sequence_length, existing_output_state, copy_through)
output.set_shape(zero_output.get_shape())
state.set_shape(state_shape)
return (output, state)
def _reverse_seq(input_seq, lengths):
"""Reverse a list of Tensors up to specified lengths.
Args:
input_seq: Sequence of seq_len tensors of dimension (batch_size, depth)
lengths: A tensor of dimension batch_size, containing lengths for each
sequence in the batch. If "None" is specified, simply reverses
the list.
Returns:
time-reversed sequence
"""
if lengths is None:
return list(reversed(input_seq))
for input_ in input_seq:
input_.set_shape(input_.get_shape().with_rank(2))
# Join into (time, batch_size, depth)
s_joined = array_ops.pack(input_seq)
# Reverse along dimension 0
s_reversed = array_ops.reverse_sequence(s_joined, lengths, 0, 1)
# Split again into list
result = array_ops.unpack(s_reversed)
return result
def bidirectional_rnn(cell_fw, cell_bw, inputs,
initial_state_fw=None, initial_state_bw=None,
dtype=None, sequence_length=None, scope=None):
"""Creates a bidirectional recurrent neural network.
Similar to the unidirectional case above (rnn) but takes input and builds
independent forward and backward RNNs with the final forward and backward
outputs depth-concatenated, such that the output will have the format
[time][batch][cell_fw.output_size + cell_bw.output_size]. The input_size of
forward and backward cell must match. The initial state for both directions
is zero by default (but can be set optionally) and no intermediate states are
ever returned -- the network is fully unrolled for the given (passed in)
length(s) of the sequence(s) or completely unrolled if length(s) is not given.
Args:
cell_fw: An instance of RNNCell, to be used for forward direction.
cell_bw: An instance of RNNCell, to be used for backward direction.
inputs: A length T list of inputs, each a tensor of shape
[batch_size, cell.input_size].
initial_state_fw: (optional) An initial state for the forward RNN.
This must be a tensor of appropriate type and shape
[batch_size x cell.state_size].
initial_state_bw: (optional) Same as for initial_state_fw.
dtype: (optional) The data type for the initial state. Required if either
of the initial states are not provided.
sequence_length: (optional) An int32/int64 vector, size [batch_size],
containing the actual lengths for each of the sequences.
scope: VariableScope for the created subgraph; defaults to "BiRNN"
Returns:
A tuple (outputs, output_state_fw, output_state_bw) where:
outputs is a length T list of outputs (one for each input), which
are depth-concatenated forward and backward outputs
output_state_fw is the final state of the forward rnn
output_state_bw is the final state of the backward rnn
Raises:
TypeError: If "cell_fw" or "cell_bw" is not an instance of RNNCell.
ValueError: If inputs is None or an empty list.
"""
if not isinstance(cell_fw, rnn_cell.RNNCell):
raise TypeError("cell_fw must be an instance of RNNCell")
if not isinstance(cell_bw, rnn_cell.RNNCell):
raise TypeError("cell_bw must be an instance of RNNCell")
if not isinstance(inputs, list):
raise TypeError("inputs must be a list")
if not inputs:
raise ValueError("inputs must not be empty")
name = scope or "BiRNN"
# Forward direction
with vs.variable_scope(name + "_FW") as fw_scope:
output_fw, output_state_fw = rnn(cell_fw, inputs, initial_state_fw, dtype,
sequence_length, scope=fw_scope)
# Backward direction
with vs.variable_scope(name + "_BW") as bw_scope:
tmp, output_state_bw = rnn(cell_bw, _reverse_seq(inputs, sequence_length),
initial_state_bw, dtype, sequence_length, scope=bw_scope)
output_bw = _reverse_seq(tmp, sequence_length)
# Concat each of the forward/backward outputs
outputs = [array_ops.concat(1, [fw, bw])
for fw, bw in zip(output_fw, output_bw)]
return (outputs, output_state_fw, output_state_bw)
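# Illustrative only (cell sizes and names are assumptions): each output step is
# the depth-concatenation of the forward and backward passes over the same inputs.
#
#   outputs, state_fw, state_bw = bidirectional_rnn(
#       rnn_cell.GRUCell(32), rnn_cell.GRUCell(32), inputs, dtype=tf.float32)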
def dynamic_rnn(cell, inputs, sequence_length, initial_state=None, dtype=None,
parallel_iterations=None, swap_memory=False, time_major=False,
scope=None):
"""Creates a recurrent neural network specified by RNNCell "cell".
This function is functionally identical to the function `rnn` above, but
performs fully dynamic unrolling of `inputs`.
Unlike `rnn`, the input `inputs` is not a Python list of `Tensors`. Instead,
it is a single `Tensor` where the maximum time is either the first or second
dimension (see the parameter `time_major`). The corresponding output is
a single `Tensor` having the same number of time steps and batch size.
The parameter `sequence_length` is required and dynamic calculation is
automatically performed.
Args:
cell: An instance of RNNCell.
inputs: The RNN inputs.
If time_major == False (default), this must be a tensor of shape:
`[batch_size, max_time, cell.input_size]`.
If time_major == True, this must be a tensor of shape:
`[max_time, batch_size, cell.input_size]`.
sequence_length: An int32/int64 vector (tensor) size [batch_size].
initial_state: (optional) An initial state for the RNN. This must be
a tensor of appropriate type and shape [batch_size x cell.state_size].
dtype: (optional) The data type for the initial state. Required if
initial_state is not provided.
parallel_iterations: (Default: 32). The number of iterations to run in
parallel. Those operations which do not have any temporal dependency
and can be run in parallel, will be. This parameter trades off
time for space. Values >> 1 use more memory but take less time,
while smaller values use less memory but computations take longer.
swap_memory: Swap the tensors produced in forward inference but needed
for back prop from GPU to CPU.
time_major: The shape format of the `inputs` and `outputs` Tensors.
If true, these `Tensors` must be shaped `[max_time, batch_size, depth]`.
If false, these `Tensors` must be shaped `[batch_size, max_time, depth]`.
Using time_major = False is a bit more efficient because it avoids
transposes at the beginning and end of the RNN calculation. However,
most TensorFlow data is batch-major, so by default this function
accepts input and emits output in batch-major form.
scope: VariableScope for the created subgraph; defaults to "RNN".
Returns:
A pair (outputs, state) where:
outputs: The RNN output `Tensor`.
If time_major == False (default), this will be a `Tensor` shaped:
`[batch_size, max_time, cell.output_size]`.
If time_major == True, this will be a `Tensor` shaped:
`[max_time, batch_size, cell.output_size]`.
state: The final state, shaped:
`[batch_size, cell.state_size]`.
Raises:
TypeError: If "cell" is not an instance of RNNCell.
ValueError: If inputs is None or an empty list.
"""
if not isinstance(cell, rnn_cell.RNNCell):
raise TypeError("cell must be an instance of RNNCell")
# By default, time_major==False and inputs are batch-major: shaped
# [batch, time, depth]
# For internal calculations, we transpose to [time, batch, depth]
if not time_major:
inputs = array_ops.transpose(inputs, [1, 0, 2]) # (B,T,D) => (T,B,D)
parallel_iterations = parallel_iterations or 32
sequence_length = math_ops.to_int32(sequence_length)
sequence_length = array_ops.identity(sequence_length, name="sequence_length")
# Create a new scope in which the caching device is either
# determined by the parent scope, or is set to place the cached
# Variable using the same placement as for the rest of the RNN.
with vs.variable_scope(scope or "RNN") as varscope:
if varscope.caching_device is None:
varscope.set_caching_device(lambda op: op.device)
input_shape = array_ops.shape(inputs)
batch_size = input_shape[1]
if initial_state is not None:
state = initial_state
else:
if not dtype:
raise ValueError("If no initial_state is provided, dtype must be.")
state = cell.zero_state(batch_size, dtype)
def _assert_has_shape(x, shape):
x_shape = array_ops.shape(x)
packed_shape = array_ops.pack(shape)
return logging_ops.Assert(
math_ops.reduce_all(math_ops.equal(x_shape, packed_shape)),
["Expected shape for Tensor %s is " % x.name,
packed_shape, " but saw shape: ", x_shape])
# Perform some shape validation
with ops.control_dependencies(
[_assert_has_shape(sequence_length, [batch_size])]):
sequence_length = array_ops.identity(sequence_length, name="CheckSeqLen")
(outputs, final_state) = _dynamic_rnn_loop(
cell, inputs, state, sequence_length,
parallel_iterations=parallel_iterations,
swap_memory=swap_memory)
# Outputs of _dynamic_rnn_loop are always shaped [time, batch, depth].
# If we are performing batch-major calculations, transpose output back
# to shape [batch, time, depth]
if not time_major:
outputs = array_ops.transpose(outputs, [1, 0, 2]) # (T,B,D) => (B,T,D)
return (outputs, final_state)
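# Illustrative only (names assumed): batch-major inputs shaped
# [batch_size, max_time, input_size] together with per-example lengths.
#
#   cell = rnn_cell.GRUCell(num_units=128)
#   outputs, state = dynamic_rnn(cell, inputs, sequence_length=lengths,
#                                dtype=tf.float32)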
def _dynamic_rnn_loop(cell, inputs, initial_state, sequence_length,
parallel_iterations, swap_memory):
"""Internal implementation of Dynamic RNN.
Args:
cell: An instance of RNNCell.
inputs: A `Tensor` of shape [time, batch_size, depth].
initial_state: A `Tensor` of shape [batch_size, depth].
sequence_length: An `int32` `Tensor` of shape [batch_size].
parallel_iterations: Positive Python int.
swap_memory: A Python boolean
Returns:
Tuple (final_outputs, final_state).
final_outputs:
A `Tensor` of shape [time, batch_size, depth]`.
final_state:
A `Tensor` of shape [batch_size, depth].
"""
state = initial_state
assert isinstance(parallel_iterations, int), "parallel_iterations must be int"
# Construct an initial output
input_shape = array_ops.shape(inputs)
(time_steps, batch_size, unused_depth) = array_ops.unpack(input_shape, 3)
inputs_got_shape = inputs.get_shape().with_rank(3)
(const_time_steps, const_batch_size, const_depth) = inputs_got_shape.as_list()
# Prepare dynamic conditional copying of state & output
zero_output = array_ops.zeros(
array_ops.pack([batch_size, cell.output_size]), inputs.dtype)
min_sequence_length = math_ops.reduce_min(sequence_length)
max_sequence_length = math_ops.reduce_max(sequence_length)
time = array_ops.constant(0, dtype=dtypes.int32, name="time")
with ops.op_scope([], "dynamic_rnn") as scope:
base_name = scope
output_ta = tensor_array_ops.TensorArray(
dtype=inputs.dtype, size=time_steps,
tensor_array_name=base_name + "output")
input_ta = tensor_array_ops.TensorArray(
dtype=inputs.dtype, size=time_steps,
tensor_array_name=base_name + "input")
input_ta = input_ta.unpack(inputs)
def _time_step(time, state, output_ta_t):
"""Take a time step of the dynamic RNN.
Args:
time: int32 scalar Tensor.
state: Vector.
output_ta_t: `TensorArray`, the output with existing flow.
Returns:
The tuple (time + 1, new_state, output_ta_t with updated flow).
"""
input_t = input_ta.read(time)
# Restore some shape information
input_t.set_shape([const_batch_size, const_depth])
(output, new_state) = _rnn_step(
time, sequence_length, min_sequence_length, max_sequence_length,
zero_output, state, lambda: cell(input_t, state))
output_ta_t = output_ta_t.write(time, output)
return (time + 1, new_state, output_ta_t)
(unused_final_time, final_state, output_final_ta) = control_flow_ops.While(
cond=lambda time, _1, _2: time < time_steps,
body=_time_step,
loop_vars=(time, state, output_ta),
parallel_iterations=parallel_iterations,
swap_memory=swap_memory)
final_outputs = output_final_ta.pack()
# Restore some shape information
final_outputs.set_shape([
const_time_steps, const_batch_size, cell.output_size])
return (final_outputs, final_state)
| apache-2.0 | 5,813,275,549,810,683,000 | 39.314019 | 80 | 0.688335 | false | 3.777233 | false | false | false |
sknepneklab/SAMoS | utils/make_circular_patch.py | 1 | 3349 | # ***************************************************************************
# *
# * Copyright (C) 2013-2016 University of Dundee
# * All rights reserved.
# *
# * This file is part of SAMoS (Soft Active Matter on Surfaces) program.
# *
# * SAMoS is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2 of the License, or
# * (at your option) any later version.
# *
# * SAMoS is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program. If not, see <http://www.gnu.org/licenses/>.
# *
# *****************************************************************************
# Utility code for generating an initial configuration for cell simulations.
# This code places N cells in a patch of radius R keeping in mind that the
# minimum distance between two cells should be greater than a certain value.
import sys
import argparse
import numpy as np
from random import uniform
from datetime import *
import math as m
from CellList2D import *
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output", type=str, default='patch.dat', help="output file name")
parser.add_argument("-R", "--radius", type=float, default=20.0, help="patch radius")
parser.add_argument("-N", "--num", type=int, default=100, help="number of particles")
parser.add_argument("-m", "--min_dist", type=float, default=1.5, help="minium distance between particles")
parser.add_argument("-A", "--A0", type=float, default=m.pi, help="native cell area")
args = parser.parse_args()
print
print "\tSoft Actve Matter on Surfaces (SAMoS)"
print "\tGenerates a circial cell patch"
print
print "\tRastko Sknepnek"
print "\tUniversity of Dundee"
print "\t(c) 2015"
print "\t----------------------------------------------"
print
print "\tOutput files : ", args.output
print "\tPatch radius : ", args.radius
print "\tNumber of cells : ", args.num
print "\tMinimum distance between cells : ", args.min_dist
print
start = datetime.now()
R = args.radius
cl = CellList2D([2.2*R,2.2*R],2*args.min_dist)
particles = []
i = 0
while i < args.num:
x, y = uniform(-R,R), uniform(-R,R)
if (x**2 + y**2 < R**2):
cid = cl.get_cell_idx((x,y))
can_add = True
for nb in cl.cell_list[cid].neighbors:
for idx in cl.cell_list[nb].indices:
xi, yi = particles[idx]
dx, dy = x-xi, y-yi
if dx*dx + dy*dy < args.min_dist**2:
can_add = False
break
if not can_add:
break
if can_add:
print "Successfully added particle : ", i
particles.append((x,y))
cl.add_particle((x,y),i)
i += 1
out = open(args.output,'w')
out.write('keys: id x y nx ny nvx nvy nvz area\n')
for i in range(len(particles)):
x,y = particles[i]
phi = uniform(0,2*m.pi)
out.write('%4d %f %f %f %f %f %f %f %f\n' % (i,x,y, m.cos(phi),m.sin(phi), 0, 0, 1.0, args.A0))
out.close()
end = datetime.now()
total = end - start
print
print " *** Completed in ", total.total_seconds(), " seconds *** "
print | gpl-3.0 | 5,762,786,436,947,331,000 | 31.843137 | 106 | 0.622574 | false | 3.229508 | false | false | false |
chrisjrn/registrasion | registrasion/tests/controller_helpers.py | 1 | 2034 | from registrasion.controllers.cart import CartController
from registrasion.controllers.credit_note import CreditNoteController
from registrasion.controllers.invoice import InvoiceController
from registrasion.models import commerce
from django.core.exceptions import ObjectDoesNotExist
class TestingCartController(CartController):
def set_quantity(self, product, quantity, batched=False):
''' Sets the _quantity_ of the given _product_ in the cart to the given
_quantity_. '''
self.set_quantities(((product, quantity),))
def add_to_cart(self, product, quantity):
''' Adds _quantity_ of the given _product_ to the cart. Raises
ValidationError if constraints are violated.'''
try:
product_item = commerce.ProductItem.objects.get(
cart=self.cart,
product=product)
old_quantity = product_item.quantity
except ObjectDoesNotExist:
old_quantity = 0
self.set_quantity(product, old_quantity + quantity)
def next_cart(self):
if self.cart.status == commerce.Cart.STATUS_ACTIVE:
self.cart.status = commerce.Cart.STATUS_PAID
self.cart.save()
class TestingInvoiceController(InvoiceController):
def pay(self, reference, amount, pre_validate=True):
        ''' Testing method for simulating an invoice payment of the given
amount. '''
if pre_validate:
# Manual payments don't pre-validate; we should test that things
# still work if we do silly things.
self.validate_allowed_to_pay()
''' Adds a payment '''
commerce.PaymentBase.objects.create(
invoice=self.invoice,
reference=reference,
amount=amount,
)
self.update_status()
class TestingCreditNoteController(CreditNoteController):
def refund(self):
commerce.CreditNoteRefund.objects.create(
parent=self.credit_note,
reference="Whoops."
)
| apache-2.0 | -5,205,401,486,481,097,000 | 31.285714 | 79 | 0.652901 | false | 4.421739 | false | false | false |
shfengcj/pyminer | pyminer_setting.py | 1 | 1142 | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 23 06:53:32 2015
@author: chaojun
"""
from pyminer_cos_model import lcdm
from pyminer_residual import JLAresiCal, CMBresiCal, BAOresiCal
# General settings
divMax = 15 # for romberg integral
ogh2 = 2.469e-5
JLA_DIR = '/Users/chaojun/Documents/Research/2015/grb/pycode/data/jla'
# Cosmological model
model = lcdm(divmax = divMax)
# Data setting
use_sn_data = True
use_cmb_data = True
use_bao_data = True
resobj=[]
if use_sn_data : resobj.append( JLAresiCal(cosModel = model, DATA_DIR_JLA = JLA_DIR) )
if use_cmb_data: resobj.append( CMBresiCal(cosModel = model) )
if use_bao_data: resobj.append( BAOresiCal(cosModel = model) )
# Residual function
def residual(p, resobj = resobj, fjac=None):
import numpy as np
res = np.array([])
for obj in resobj:
tmp = obj.residual(p)
res = np.append(res, tmp)
status = 0
return [status, res]
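# Illustrative only: the fjac keyword and the [status, residuals] return value
# follow the mpfit convention, so a fit might look like (names assumed):
#   m = mpfit(residual, p0)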
# some other functions
def clear_env():
for key in globals().keys():
if not key.startswith("__"):
globals().pop(key)
| gpl-2.0 | -945,975,103,707,670,100 | 18.689655 | 86 | 0.622592 | false | 2.862155 | false | false | false |
priyaganti/rockstor-core | src/rockstor/storageadmin/views/clone_helpers.py | 1 | 2535 | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from storageadmin.models import (Share, Snapshot)
from storageadmin.util import handle_exception
from fs.btrfs import (add_clone, share_id, update_quota)
from rest_framework.response import Response
from storageadmin.serializers import ShareSerializer
import re
from django.conf import settings
def create_clone(share, new_name, request, logger, snapshot=None):
# if snapshot is None, create clone of the share.
# If it's not, then clone it.
if (re.match(settings.SHARE_REGEX + '$', new_name) is None):
e_msg = ('Clone name is invalid. It must start with a letter and can'
' contain letters, digits, _, . and - characters')
handle_exception(Exception(e_msg), request)
if (Share.objects.filter(name=new_name).exists()):
e_msg = ('Another Share with name: %s already exists.' % new_name)
handle_exception(Exception(e_msg), request)
if (Snapshot.objects.filter(share=share, name=new_name).exists()):
e_msg = ('Snapshot with name: %s already exists for the '
'share: %s. Choose a different name' %
(new_name, share.name))
handle_exception(Exception(e_msg), request)
try:
share_name = share.subvol_name
snap = None
if (snapshot is not None):
snap = snapshot.real_name
add_clone(share.pool, share_name, new_name, snapshot=snap)
snap_id = share_id(share.pool, new_name)
qgroup_id = ('0/%s' % snap_id)
update_quota(share.pool, qgroup_id, share.size * 1024)
new_share = Share(pool=share.pool, qgroup=qgroup_id, name=new_name,
size=share.size, subvol_name=new_name)
new_share.save()
return Response(ShareSerializer(new_share).data)
except Exception as e:
handle_exception(e, request)
| gpl-3.0 | -311,084,012,256,732,200 | 42.706897 | 77 | 0.680473 | false | 3.738938 | false | false | false |
lpredova/pybookie | server/sources/footbal_db.py | 1 | 3991 | # coding=utf-8
import json
import os
class FootballDB:
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
groups_file = BASE_DIR + '/sources/groups.json'
wc_history_file = BASE_DIR + '/sources/wc_history'
wc_team_file = BASE_DIR + '/sources/squads/'
top_teams = ['RealMadrid(ESP)', 'Barcelona(ESP)', 'Chelsea(ENG)', 'ManchesterCity(ENG)', 'ParisSaint-Germain(FRA)',
'BayernMunich(GER)', 'Internazionale(ITA)', 'Napoli(ITA)', 'ManchesterUnited(ENG)', 'Arsenal(ENG)',
'Liverpool(ENG)', 'Juventus(ITA)', 'BorussiaDortmund(GER)', 'AtléticoMadrid(ESP)']
def __init__(self):
pass
@staticmethod
def get_team_by_id(team_id):
data = json.loads(FootballDB.get_games())
result = None
for group in data:
for team in group['teams']:
if int(team['id']) == int(team_id):
result = team['team']
return result
@staticmethod
def get_ranking(team_name):
return int(FootballDB.get_wc_history(team_name, 0))
@staticmethod
def get_wc_games_played(team_name):
return int(FootballDB.get_wc_history(team_name, 2))
@staticmethod
def get_won_wc_games_played(team_name):
return int(FootballDB.get_wc_history(team_name, 3))
@staticmethod
def get_draw_wc_games_played(team_name):
return int(FootballDB.get_wc_history(team_name, 4))
@staticmethod
def get_lost_wc_games_played(team_name):
return int(FootballDB.get_wc_history(team_name, 5))
@staticmethod
def get_goal_difference_wc_games_played(team_name):
gd = FootballDB.get_wc_history(team_name, 6)
gd = gd.split(':')
goals_for = int(gd[0])
goals_against = int(gd[1])
return goals_for - goals_against
@staticmethod
def get_wc_points(team_name):
return int(FootballDB.get_wc_history(team_name, 7))
@staticmethod
def get_wc_participations(team_name):
return int(FootballDB.get_wc_history(team_name, 8))
@staticmethod
def get_wc_titles(team_name):
titles = FootballDB.get_wc_history(team_name, 9)
try:
            # The titles column may carry extra text after the count (e.g. the
            # years won), so fall back to the leading digit when needed.
            if titles and not str(titles).isdigit():
                titles = str(titles)[0]
            return int(titles)
except Exception:
return 0
@staticmethod
def get_wc_history(team, result_row_index):
path = FootballDB.wc_history_file
if os.path.isfile(path):
f = open(path)
for line in f:
if line[0].isdigit():
row = line.replace('\n', '')
row = row.replace(' ', '')
row = row.split('|')
if row[1] == team.replace(' ', ''):
f.close()
try:
return row[result_row_index]
except BaseException:
return 0
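    # Illustrative row format assumed by get_wc_history (values are made up);
    # after spaces are stripped the '|'-separated columns map to rank, team,
    # games, won, draw, lost, goals, points, participations and titles,
    # i.e. result_row_index 0-9:
    #   1 | Brazil | 104 | 70 | 17 | 17 | 221:102 | 227 | 20 | 5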
@staticmethod
def get_wc_team_player_ratings(team):
path = '%s%s.txt' % (FootballDB.wc_team_file, (team.replace(' ', '-')))
path = path.lower()
team_rating = 0
if os.path.isfile(path):
f = open(path)
for line in f:
try:
row = line.split('##')
row = row[1].replace(' ', '').split(',')
team_rating += int(row[0])
team_name = row[1].replace('\n', '')
if team_name in FootballDB.top_teams:
team_rating += 10
except Exception:
pass
return team_rating
@staticmethod
def get_games():
data = None
path = FootballDB.groups_file
if os.path.isfile(path):
with open(path, 'r') as football_teams:
data = football_teams.read().replace('\n', '')
return data
| apache-2.0 | -716,865,174,706,552,200 | 29.458015 | 119 | 0.525063 | false | 3.591359 | false | false | false |
flavour/eden | modules/plugins/__init__.py | 5 | 8807 | # -*- coding: utf-8 -*-
import os
import sys
from gluon import current
from gluon.storage import Storage
from s3compat import reload
__all__ = ("PluginLoader",
)
# Name of the plugin directory in modules
PLUGINS = "plugins"
# Module names to ignore when scanning for plugins
IGNORE = ("skeleton", "__init__")
# Name of the setup function in plugins
SETUP = "setup"
# Name of the variable that contains the version info in plugins
VERSION = "__version__"
# =============================================================================
class PluginLoader(object):
"""
Simple plugin loader (experimental)
Plugins are python modules or packages in the modules/plugins
directory.
Each plugin defines a setup() function which is called during
the request cycle immediately before entering the controller.
Plugins can be added by simply placing them in the plugins
directory, without any code change required.
The plugin directory will be scanned for new or updated plugins
whenever a new session starts, or by calling explicitly:
PluginLoader.detect(reset_all=True)
NB the reloading of the plugins can only be enforced in the
current interpreter thread - while other threads may still
run the old version. Therefore, it is recommended to restart
all threads (=reloading the server) after installing or updating
a plugin.
NB failing setup() methods will not be tried again until the next
reload (new session, restart, or explicit call)
session.s3.plugins contains a dict of all current plugins, like:
{name: (version, status)}
where:
- name is the python module name of the plugin
- version is the version string provided by the plugin (or
"unknown" if not present)
- status is:
None = newly detected plugin, not set up yet
True = plugin has been set up successfully
False = plugin setup failed in the last attempt, deactivated
"""
# -------------------------------------------------------------------------
@classmethod
def setup_all(cls, reload_all=False):
"""
Setup all plugins
@param reload_all: reload all plugins and reset the registry
"""
if reload_all:
cls.detect(reset_all=True)
for name in list(cls._registry().keys()):
cls.load(name)
# -------------------------------------------------------------------------
@classmethod
def detect(cls, reset_all=False):
"""
Detect new plugins and update the registry
@param reset_all: reset all entries in the registry
"""
default = (None, None)
if reset_all:
plugin = lambda name: default
else:
registry = cls._registry()
plugin = lambda name: registry.get(name, default)
plugins = dict((name, plugin(name)) for name in cls._scan())
cls._registry(plugins)
# -------------------------------------------------------------------------
@classmethod
def load(cls, name, force=False):
"""
Run the setup method of a particular plugin
@param name: the name of the plugin
@param force: enforce the plugin to be reloaded and its
setup method to be re-run regardless of the
previous status
"""
if name[0] == "_":
return False
log = current.log
registry = cls._registry()
        if name not in registry:
            cls.detect()
            # detect() rebuilds the registry dict, so re-read it before re-checking
            registry = cls._registry()
            if name not in registry:
                raise NameError("plugin '%s' not found" % name)
# Get version and status info from registry
plugin_info = registry[name]
if force or not isinstance(plugin_info, tuple):
version, status = None, None
else:
version, status = plugin_info
if status is None:
new = True
if not (cls._reload(name)):
version, status = "unknown", False
else:
version, status = None, True
else:
new = False
if status is False:
# Skip plugins which have failed in previous attempts
registry[name] = (version, status)
return False
status = True
setup = None
# Import manifest
package = "%s.%s" % (PLUGINS, name)
try:
setup = getattr(__import__(package, fromlist=[SETUP]), SETUP)
except (ImportError, AttributeError):
# This may not be a plugin at all => remove from registry
if new:
log.debug("Plugin '%s' not found" % name)
registry.pop(name, None)
return False
except SyntaxError:
if new:
log.error("Skipping invalid plugin '%s'" % name)
if current.response.s3.debug:
raise
version, status = "invalid", False
if version is None:
# Update version info if plugin has been reloaded
try:
version = getattr(__import__(package, fromlist=[VERSION]), VERSION)
except (ImportError, AttributeError):
version = "unknown"
if status and not callable(setup):
# Is a module => find setup function
try:
setup = setup.setup
except AttributeError:
# No setup function found => treat as failed
if new:
log.debug("No setup function found for plugin '%s'" % name)
status = False
if status:
# Execute setup method
if new:
log.info("Setting up plugin '%s'" % name)
try:
setup()
except Exception:
log.error("Plugin '%s' setup failed" % name)
if current.response.s3.debug:
raise
status = False
# Update the registry
registry[name] = (version, status)
return status
# -------------------------------------------------------------------------
@classmethod
def _registry(cls, plugins=None):
"""
Get (or replace) the current plugin registry
@param plugins: the new registry
"""
session_s3 = current.session.s3
if plugins:
registry = session_s3.plugins = plugins
else:
registry = session_s3.plugins
if registry is None:
# New session => run detect
# - initialize registry first to prevent infinite recursion
registry = session_s3.plugins = {}
cls.detect()
return registry
# -------------------------------------------------------------------------
@staticmethod
def _scan():
"""
Iterator scanning the plugin directory for available plugins
@return: the names of the plugins
"""
folder = current.request.folder
path = os.path.join(folder, "modules", PLUGINS)
names = os.listdir(path)
for name in names:
name_, extension = os.path.splitext(name)
if name_ in IGNORE:
continue
path_ = os.path.join(path, name)
if os.path.isdir(path_) or extension == ".py":
yield(name_)
# -------------------------------------------------------------------------
@staticmethod
def _reload(name):
"""
Reload a plugin
@param name: the plugin name
@note: this works only within the current thread, other
threads may still be bound to the old version of
the plugin
"""
if name in IGNORE:
return
success = True
appname = current.request.application
plugin_name = "applications.%s.modules.%s.%s" % (appname, PLUGINS, name)
plugin = sys.modules.get(plugin_name)
if plugin is not None:
try:
reload(plugin)
except ImportError:
current.log.error("Reloading plugin '%s' failed" % name)
success = False
return success
# =============================================================================
# Do a full scan when reloading the module (=when the thread starts)
PluginLoader.detect(reset_all=True)
# =============================================================================
| mit | 2,472,458,515,809,089,000 | 29.901754 | 83 | 0.507551 | false | 5.318237 | false | false | false |
bdeak/taskmgr | fabfile/execute/install_package.py | 1 | 2064 | from fabric.api import *
import re
import os.path
import logging
import utils.log
l = logging.getLogger()
l = utils.log.CustomLogAdapter(l, None)
@task(default=True)
def check(input_params, cluster):
""" Install a given version of a given package
Can support multiple backends
input_params parameter is a string, with the following fields:
package:version
The backend to be used for package management is autodetected.
For adapting to various systems this needs to be extended.
"""
# split up the input_params, and make sense of it
m = re.search("^([^:]+)(?::(.+))?$", input_params)
if not m:
raise AttributeError("The given input_params '%s' doesn't match the requirements!" % input_params)
package = m.group(1)
version = m.group(2) if m.group(2) else None
# auto detect the backend
try:
result = run("test -e /usr/bin/apt-get")
except:
return False
if result.failed:
raise RuntimeError("%s: Failed to execute remote command for detecting backend" % env.command)
if result.return_code == 0:
backend = "apt_get"
else:
        # check for other backends - not yet implemented
raise SystemError("%s: only backend 'apt_get' is currently supported." % env.command)
backends = { 'apt_get': install_package_apt_get }
    if backend not in backends:
raise ValueError("function for detected backend '%s' is not found!" % backend)
return backends[backend](package, version)
def install_package_apt_get(package, version):
""" Install the package, internal function, not exposed via @task """
if version is None:
# just install the package
command = "apt-get -qq update && apt-get -qq install -y %s" % package
else:
command = "apt-get -qq update && apt-get -qq install -y %s=%s" % (package, version)
try:
result = sudo(command)
except:
return False
if result.succeeded:
return True
else:
return False
| gpl-2.0 | -5,122,457,050,969,303,000 | 28.913043 | 106 | 0.637597 | false | 4.007767 | false | false | false |
okolisny/integration_tests | scripts/post_jenkins_result.py | 1 | 2181 | #!/usr/bin/env python2
import json
import os
import os.path
from datetime import datetime
from artifactor.plugins.post_result import test_report
from cfme.utils import read_env
from cfme.utils.path import project_path
from cfme.utils.trackerbot import post_jenkins_result
job_name = os.environ['JOB_NAME']
number = int(os.environ['BUILD_NUMBER'])
date = str(datetime.now())
# reduce returns to bools for easy logic
runner_src = read_env(project_path.join('.jenkins_runner_result'))
runner_return = runner_src.get('RUNNER_RETURN', '1') == '0'
test_return = runner_src.get('TEST_RETURN', '1') == '0'
# 'stream' environ is set by jenkins for all stream test jobs
# but not in the template tester
if job_name not in ('template-tester', 'template-tester-openstack',
'template-tester-rhevm', 'template-tester-virtualcenter'):
# try to pull out the appliance template name
template_src = read_env(project_path.join('.appliance_template'))
template = template_src.get('appliance_template', 'Unknown')
stream = os.environ['stream']
else:
tester_src = read_env(project_path.join('.template_tester'))
stream = tester_src['stream']
template = tester_src['appliance_template']
if test_report.check():
with test_report.open() as f:
artifact_report = json.load(f)
else:
raise RuntimeError('Unable to post to jenkins without test report: '
'{} does not exist!'.format(test_report.strpath))
if runner_return and test_return:
build_status = 'success'
elif runner_return:
build_status = 'unstable'
else:
build_status = 'failed'
result_attrs = ('job_name', 'number', 'stream', 'date', 'template',
'build_status', 'artifact_report')
# pack the result attr values into the jenkins post
post_jenkins_result(*[eval(attr) for attr in result_attrs])
# vain output padding calculation
# get len of longest string, pad with an extra space to make the output pretty
max_len = len(max(result_attrs, key=len)) + 1
# now print all the attrs so we can see what we posted (and *that* we
# posted) in the jenkins log
for attr in result_attrs[:-1]:
print('{:>{width}}: {}'.format(attr, eval(attr), width=max_len))
| gpl-2.0 | 5,412,878,123,027,363,000 | 34.177419 | 78 | 0.703347 | false | 3.4896 | true | false | false |
lukeolson/clean-latex-to-arxiv | parxiv.py | 1 | 12460 | #! /usr/bin/env python
from __future__ import print_function
import glob
import re
import os
import errno
import io
import time
import shutil
import tempfile
import subprocess
import ply.lex
# Python2 FileNotFoundError support
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
"""
usage:
python parxiv.py file.tex
this will make arxiv-somelongdatestring with
- file_strip.tex (where includegraphics paths are stripped)
- file_strip.bbl (you should have the .bbl file already)
- all figures
- the class file if custom
- the bib style if custom
- extra files listed in extra.txt
"""
def strip_comments(source):
"""
from https://gist.github.com/dzhuang/dc34cdd7efa43e5ecc1dc981cc906c85
"""
tokens = (
'PERCENT', 'BEGINCOMMENT', 'ENDCOMMENT',
'BACKSLASH', 'CHAR', 'BEGINVERBATIM',
'ENDVERBATIM', 'NEWLINE', 'ESCPCT',
'MAKEATLETTER', 'MAKEATOTHER',
)
states = (
('makeatblock', 'exclusive'),
('makeatlinecomment', 'exclusive'),
('linecomment', 'exclusive'),
('commentenv', 'exclusive'),
('verbatim', 'exclusive')
)
# Deal with escaped backslashes, so we don't
# think they're escaping %
def t_BACKSLASH(t):
r"\\\\"
return t
# Leaving all % in makeatblock
def t_MAKEATLETTER(t):
r"\\makeatletter"
t.lexer.begin("makeatblock")
return t
# One-line comments
def t_PERCENT(t):
r"\%"
t.lexer.begin("linecomment")
# Escaped percent signs
def t_ESCPCT(t):
r"\\\%"
return t
# Comment environment, as defined by verbatim package
def t_BEGINCOMMENT(t):
r"\\begin\s*{\s*comment\s*}"
t.lexer.begin("commentenv")
#Verbatim environment (different treatment of comments within)
def t_BEGINVERBATIM(t):
r"\\begin\s*{\s*verbatim\s*}"
t.lexer.begin("verbatim")
return t
#Any other character in initial state we leave alone
def t_CHAR(t):
r"."
return t
def t_NEWLINE(t):
r"\n"
return t
# End comment environment
def t_commentenv_ENDCOMMENT(t):
r"\\end\s*{\s*comment\s*}"
#Anything after \end{comment} on a line is ignored!
t.lexer.begin('linecomment')
# Ignore comments of comment environment
def t_commentenv_CHAR(t):
r"."
pass
def t_commentenv_NEWLINE(t):
r"\n"
pass
#End of verbatim environment
def t_verbatim_ENDVERBATIM(t):
r"\\end\s*{\s*verbatim\s*}"
t.lexer.begin('INITIAL')
return t
#Leave contents of verbatim environment alone
def t_verbatim_CHAR(t):
r"."
return t
def t_verbatim_NEWLINE(t):
r"\n"
return t
#End a % comment when we get to a new line
def t_linecomment_ENDCOMMENT(t):
r"\n"
t.lexer.begin("INITIAL")
        # Newline at the end of a line comment is preserved.
return t
#Ignore anything after a % on a line
def t_linecomment_CHAR(t):
r"."
pass
def t_makeatblock_MAKEATOTHER(t):
r"\\makeatother"
t.lexer.begin('INITIAL')
return t
def t_makeatblock_BACKSLASH(t):
r"\\\\"
return t
# Escaped percent signs in makeatblock
def t_makeatblock_ESCPCT(t):
r"\\\%"
return t
    # preserve % in makeatblock
def t_makeatblock_PERCENT(t):
r"\%"
t.lexer.begin("makeatlinecomment")
return t
def t_makeatlinecomment_NEWLINE(t):
r"\n"
t.lexer.begin('makeatblock')
return t
# Leave contents of makeatblock alone
def t_makeatblock_CHAR(t):
r"."
return t
def t_makeatblock_NEWLINE(t):
r"\n"
return t
# For bad characters, we just skip over it
def t_ANY_error(t):
t.lexer.skip(1)
lexer = ply.lex.lex()
lexer.input(source)
return u"".join([tok.value for tok in lexer])
def find_class(source):
"""
(unused)
look for \documentclass[review]{siamart}
then return 'siamart.cls'
"""
classname = re.search(r'\\documentclass.*{(.*)}', source)
if classname:
classname = classname.group(1) + '.cls'
return classname
def find_bibstyle(source):
"""
look for \ bibliographystyle{siamplain}
then return 'siamplain.bst'
"""
bibstylename = re.search(r'\\bibliographystyle{(.*)}', source)
if bibstylename:
bibstylename = bibstylename.group(1) + '.bst'
return bibstylename
def find_figs(source):
"""
look for \graphicspath{{subdir}} (a single subdir)
find figures in \includegraphics[something]{PATH/filename.ext}
\includegraphics{PATH/filename.ext}
make them \includegraphics[something]{PATH-filename.ext}
\includegraphics{PATH-filename.ext}
later: copy figures to arxivdir
"""
findgraphicspath = re.search(r'\\graphicspath{(.*)}', source)
if findgraphicspath:
graphicspaths = findgraphicspath.group(1)
graphicspaths = re.findall('{(.*?)}', graphicspaths)
else:
graphicspaths = []
# keep a list of (figname, figpath)
figlist = []
def repl(m):
figpath = ''
figname = os.path.basename(m.group(2))
figpath = os.path.dirname(m.group(2)).lstrip('./')
if figpath:
newfigname = figpath.replace(' ', '_').replace('/', '_')+'_'+figname
else:
newfigname = figname
newincludegraphics = m.group(1) + newfigname + m.group(3)
figlist.append((figname, figpath, newfigname))
return newincludegraphics
source = re.sub(r'(\\includegraphics.*?{)(.*?)(})', repl, source)
return figlist, source, graphicspaths
def flatten(source):
"""
    replace arguments of include{} and input{}
    only input can be nested
    include adds a clearpage before and after
    includeonly is not supported
"""
def repl(m):
inputname = m.group(2)
if not os.path.isfile(inputname):
inputname = inputname + '.tex'
with io.open(inputname, encoding='utf-8') as f:
newtext = f.read()
newtext = re.sub(r'(\\input{)(.*?)(})', repl, newtext)
return newtext
def repl_include(m):
inputname = m.group(2)
if not os.path.isfile(inputname):
inputname = inputname + '.tex'
with io.open(inputname, encoding='utf-8') as f:
newtext = f.read()
newtext = '\\clearpage\n' + newtext
newtext = re.sub(r'(\\input{)(.*?)(})', repl, newtext)
newtext += '\\clearpage\n'
return newtext
    dest = re.sub(r'(\\include{)(.*?)(})', repl_include, source)
dest = re.sub(r'(\\input{)(.*?)(})', repl, dest)
return dest
def main(fname):
print('[parxiv] reading %s' % fname)
with io.open(fname, encoding='utf-8') as f:
source = f.read()
print('[parxiv] stripping comments')
source = strip_comments(source)
print('[parxiv] flattening source')
source = flatten(source)
print('[parxiv] stripping comments again')
source = strip_comments(source)
print('[parxiv] finding figures...')
figlist, source, graphicspaths = find_figs(source)
# print('[parxiv] finding article class and bib style')
# localbibstyle = find_bibstyle(source)
print('[parxiv] making directory', end='')
dirname = 'arxiv-' + time.strftime('%c').replace(' ', '-')
dirname = dirname.replace(':', '-')
print(' %s' % dirname)
os.makedirs(dirname)
print('[parxiv] copying class/style files')
# shutil.copy2(localclass, os.path.join(dirname, localclass))
# if localbibstyle is not None:
# shutil.copy2(localbibstyle, os.path.join(dirname, localbibstyle))
for bst in glob.glob('*.bst'):
shutil.copy2(bst, os.path.join(dirname, bst))
for sty in glob.glob('*.sty'):
shutil.copy2(sty, os.path.join(dirname, sty))
for cls in glob.glob('*.cls'):
shutil.copy2(cls, os.path.join(dirname, cls))
print('[parxiv] copying figures')
for figname, figpath, newfigname in figlist:
        allpaths = graphicspaths + ['./']
_, ext = os.path.splitext(figname)
if ext == '':
figname += '.pdf'
newfigname += '.pdf'
if figpath:
allpaths = [os.path.join(p, figpath) for p in allpaths]
for p in allpaths:
#if 'quartz' in newfigname:
# print(p)
src = os.path.join(p, figname)
dest = os.path.join(dirname, os.path.basename(newfigname))
try:
shutil.copy2(src, dest)
except IOError:
# attempts multiple graphics paths
pass
# copy bbl file
print('[parxiv] copying bbl file')
bblfile = fname.replace('.tex', '.bbl')
newbblfile = fname.replace('.tex', '_strip.bbl')
bblflag = False
try:
shutil.copy2(bblfile, os.path.join(dirname, newbblfile))
bblflag = True
except FileNotFoundError:
print(' ...skipping, not found')
# copy extra files
try:
with io.open('extra.txt', encoding='utf-8') as f:
inputsource = f.read()
except IOError:
print('[parxiv] copying no extra files')
else:
print('[parxiv] copying extra file(s): ', end='')
for f in inputsource.split('\n'):
if os.path.isfile(f):
localname = os.path.basename(f)
print(' %s' % localname, end='')
shutil.copy2(f, os.path.join(dirname, localname))
print('\n')
newtexfile = fname.replace('.tex', '_strip.tex')
print('[parxiv] writing %s' % newtexfile)
with io.open(
os.path.join(dirname, newtexfile), 'w') as fout:
fout.write(source)
print('[parxiv] attempting to generate bbl file')
if not bblflag:
# attempt to generate
# with tempfile.TemporaryDirectory() as d:
# python2 support
try:
d = tempfile.mkdtemp()
try:
args = ['pdflatex',
'-interaction', 'nonstopmode',
'-recorder',
'-output-directory', d,
newtexfile]
# python2 support
try:
from subprocess import DEVNULL
except ImportError:
DEVNULL = open(os.devnull, 'wb')
p = subprocess.Popen(args,
cwd=dirname,
stdin=DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = p.communicate()
# copy .bib files
for bib in glob.glob('*.bib'):
shutil.copy2(bib, os.path.join(d, bib))
for bib in glob.glob('*.bst'):
shutil.copy2(bib, os.path.join(d, bib))
args = ['bibtex', newtexfile.replace('.tex', '.aux')]
p = subprocess.Popen(args,
cwd=d,
stdin=DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = p.communicate()
except OSError as e:
raise RuntimeError(e)
bblfile = newtexfile.replace('.tex', '.bbl')
if os.path.isfile(os.path.join(d, bblfile)):
print(' ... generated')
shutil.copy2(os.path.join(d, bblfile),
os.path.join(dirname, bblfile))
else:
print(' ... could not generate')
finally:
try:
shutil.rmtree(d)
except OSError as e:
if e.errno != errno.ENOENT:
raise
return source
if __name__ == '__main__':
import sys
if len(sys.argv) != 2:
print('usage: python parxiv.py <filename.tex>')
sys.exit(-1)
fname = sys.argv[1]
source = main(fname)
| mit | -5,527,309,183,598,388,000 | 27.190045 | 80 | 0.54374 | false | 3.837388 | false | false | false |
sameenjalal/mavenize-beta | mavenize/apps/item/models.py | 1 | 2128 | from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.core.exceptions import ObjectDoesNotExist
class Item(models.Model):
item_type = models.CharField(max_length=30, default="")
four_star = models.IntegerField(default=0)
three_star = models.IntegerField(default=0)
two_star = models.IntegerField(default=0)
one_star = models.IntegerField(default=0)
reviews = models.IntegerField(default=0)
bookmarks = models.IntegerField(default=0)
def __unicode__(self):
return str(self.id)
def get_popularity(self):
"""
Returns the Popularity model for this item.
"""
if not hasattr(self, '_popularity_cache'):
try:
self._popularity_cache = Popularity.objects.get(
item__id__exact=self.id)
self._popularity_cache.item = self
            except Popularity.DoesNotExist:
raise ObjectDoesNotExist
return self._popularity_cache
    def get_rating(self):
        votes = self.get_votes()
        if not votes:
            return 0
        return (self.four_star*4 + self.three_star*3 +
                self.two_star*2 + self.one_star) / votes
def get_votes(self):
return (self.four_star + self.three_star + self.two_star +
self.one_star)
class Link(models.Model):
item = models.ForeignKey(Item)
partner = models.CharField(max_length=20)
url = models.CharField(max_length=200)
def __unicode__(self):
return self.url
class Popularity(models.Model):
item = models.OneToOneField(Item, primary_key=True)
today = models.IntegerField(default=0, db_index=True)
week = models.IntegerField(default=0, db_index=True)
month = models.IntegerField(default=0, db_index=True)
alltime = models.IntegerField(default=0, db_index=True)
class Meta:
verbose_name_plural = "Popularities"
def __unicode__(self):
return "Item #%s: %s" % (self.item.id, self.alltime)
@receiver(post_save, sender=Item)
def create_item(sender, instance, created, **kwargs):
if created:
Popularity.objects.create(item=instance)
| mit | 1,467,266,724,365,762,000 | 32.25 | 67 | 0.648496 | false | 3.753086 | false | false | false |
mancoast/CPythonPyc_test | fail/314_test_normalization.py | 1 | 3162 | from test.support import run_unittest, open_urlresource
import unittest
import sys
import os
from unicodedata import normalize, unidata_version
TESTDATAFILE = "NormalizationTest.txt"
TESTDATAURL = "http://www.unicode.org/Public/" + unidata_version + "/ucd/" + TESTDATAFILE
if os.path.exists(TESTDATAFILE):
f = open(TESTDATAFILE, encoding='utf-8')
l = f.readline()
f.close()
if not unidata_version in l:
os.unlink(TESTDATAFILE)
class RangeError(Exception):
pass
def NFC(str):
return normalize("NFC", str)
def NFKC(str):
return normalize("NFKC", str)
def NFD(str):
return normalize("NFD", str)
def NFKD(str):
return normalize("NFKD", str)
def unistr(data):
data = [int(x, 16) for x in data.split(" ")]
for x in data:
if x > sys.maxunicode:
raise RangeError
return "".join([chr(x) for x in data])
class NormalizationTest(unittest.TestCase):
def test_main(self):
part1_data = {}
# Hit the exception early
try:
open_urlresource(TESTDATAURL, encoding="utf-8")
except IOError:
self.skipTest("Could not retrieve " + TESTDATAURL)
for line in open_urlresource(TESTDATAURL, encoding="utf-8"):
if '#' in line:
line = line.split('#')[0]
line = line.strip()
if not line:
continue
if line.startswith("@Part"):
part = line.split()[0]
continue
try:
c1,c2,c3,c4,c5 = [unistr(x) for x in line.split(';')[:-1]]
except RangeError:
# Skip unsupported characters;
                # try at least adding c1 if we are in part1
if part == "@Part1":
try:
c1 = unistr(line.split(';')[0])
except RangeError:
pass
else:
part1_data[c1] = 1
continue
# Perform tests
self.assertTrue(c2 == NFC(c1) == NFC(c2) == NFC(c3), line)
self.assertTrue(c4 == NFC(c4) == NFC(c5), line)
self.assertTrue(c3 == NFD(c1) == NFD(c2) == NFD(c3), line)
self.assertTrue(c5 == NFD(c4) == NFD(c5), line)
self.assertTrue(c4 == NFKC(c1) == NFKC(c2) == \
NFKC(c3) == NFKC(c4) == NFKC(c5),
line)
self.assertTrue(c5 == NFKD(c1) == NFKD(c2) == \
NFKD(c3) == NFKD(c4) == NFKD(c5),
line)
# Record part 1 data
if part == "@Part1":
part1_data[c1] = 1
# Perform tests for all other data
for c in range(sys.maxunicode+1):
X = chr(c)
if X in part1_data:
continue
self.assertTrue(X == NFC(X) == NFD(X) == NFKC(X) == NFKD(X), c)
def test_bug_834676(self):
# Check for bug 834676
normalize('NFC', '\ud55c\uae00')
def test_main():
run_unittest(NormalizationTest)
if __name__ == "__main__":
test_main()
| gpl-3.0 | -3,841,246,843,036,727,300 | 29.699029 | 89 | 0.504428 | false | 3.482379 | true | false | false |
kaltura/server | alpha/scripts/utils/apiGrep.py | 1 | 4097 | #!/usr/bin/python
from optparse import OptionParser
import sys
import os
def isLineLogStart(curLine):
if len(curLine) < 20:
return False
if (curLine[4] == '-' and curLine[7] == '-' and curLine[10] == ' ' and
curLine[13] == ':' and curLine[16] == ':'):
return True
return False
def parseCmdLine():
parser = OptionParser(usage='%prog [OPTION]... PATTERN [FILE]...', add_help_option=False)
parser.add_option("--help", help="display this help and exit", action="help")
parser.add_option("-h", "--no-filename",
action="store_true", dest="noFilename", default=False,
help="suppress the file name prefix on output")
parser.add_option("-H", "--with-filename",
action="store_true", dest="withFilename", default=False,
help="print the file name for each match")
parser.add_option("--label", dest="stdinLabel", default="(standard input)", metavar="LABEL",
help="use LABEL as the standard input file name prefix")
parser.add_option("-i", "--ignore-case",
action="store_true", dest="ignoreCase", default=False,
help="ignore case distinctions")
parser.add_option("--match-any",
action="store_true", dest="matchAny", default=False,
help="match the pattern against any line (default is to match only starting log lines)")
parser.add_option("-v", "--invert-match",
action="store_true", dest="invertMatch", default=False,
help="select non-matching lines")
return parser.parse_args()
def shellQuote(s):
return "'" + s.replace("'", "'\\''") + "'"
def matchCaseSensitive(pattern, block):
return pattern in block
def matchCaseInsensitive(pattern, block):
return pattern in block.lower()
def processFileMatchStart(inputFile, pattern, prefix):
output = False
for curLine in inputFile:
logStart = isLineLogStart(curLine)
if output:
if not logStart:
print prefix + curLine.rstrip()
continue
output = False
if logStart and match(pattern, curLine):
print prefix + curLine.rstrip()
output = True
def processFileMatchAny(inputFile, pattern, prefix):
block = ''
for curLine in inputFile:
if isLineLogStart(curLine):
if match(pattern, block):
print prefix + block.rstrip().replace('\n', '\n' + prefix)
block = curLine
elif len(block) < 10485760: # 10MB
block += curLine
if match(pattern, block):
print prefix + block.rstrip().replace('\n', '\n' + prefix)
# parse the command line
(options, args) = parseCmdLine()
if len(args) < 1:
baseName = os.path.basename(__file__)
print 'Usage: python %s [OPTION]... PATTERN [FILE]...' % baseName
print 'Try `python %s --help` for more information.' % baseName
sys.exit(1)
pattern = args[0]
fileNames = args[1:]
if len(fileNames) == 0:
fileNames = ['-']
if options.withFilename:
outputFileName = True
elif options.noFilename:
outputFileName = False
else:
outputFileName = len(fileNames) > 1
if options.matchAny:
processFile = processFileMatchAny
else:
processFile = processFileMatchStart
if options.ignoreCase:
match = matchCaseInsensitive
pattern = pattern.lower()
else:
match = matchCaseSensitive
if options.invertMatch:
originalMatch = match
match = lambda p, b: not originalMatch(p, b)
prefix = ''
for fileName in fileNames:
if fileName.endswith('.gz'):
# using zcat | python is faster than using python's gzip module
params = [__file__, '--label=' + fileName]
if outputFileName:
params.append('-H')
if options.matchAny:
params.append('--match-any')
if options.ignoreCase:
params.append('-i')
if options.invertMatch:
params.append('-v')
params.append(pattern)
params = ' '.join(map(shellQuote, params))
cmdLine = "gzip -cd %s | python %s" % (shellQuote(fileName), params)
if os.system(cmdLine) != 0:
break
continue
if fileName == '-':
inputFile = sys.stdin
else:
inputFile = file(fileName, 'r')
# get the prefix
if outputFileName:
if fileName == '-':
prefix = options.stdinLabel + ':'
else:
prefix = '%s:' % fileName
try:
processFile(inputFile, pattern, prefix)
except IOError: # broken pipe
sys.exit(1)
| agpl-3.0 | 4,614,323,749,777,093,000 | 28.056738 | 95 | 0.672687 | false | 3.27498 | false | false | false |
flgiordano/netcash | +/google-cloud-sdk/lib/surface/sql/instances/patch.py | 1 | 13800 | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Updates the settings of a Cloud SQL instance."""
from googlecloudsdk.api_lib.sql import errors
from googlecloudsdk.api_lib.sql import instances
from googlecloudsdk.api_lib.sql import operations
from googlecloudsdk.api_lib.sql import validate
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import log
from googlecloudsdk.core import resource_printer
from googlecloudsdk.core.console import console_io
from googlecloudsdk.third_party.apitools.base.py import encoding
class _BasePatch(object):
"""Updates the settings of a Cloud SQL instance."""
@classmethod
def Args(cls, parser):
"""Args is called by calliope to gather arguments for this command.
Please add arguments in alphabetical order except for no- or a clear-
pair for that argument which can follow the argument itself.
Args:
parser: An argparse parser that you can use to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
parser.add_argument(
'--activation-policy',
required=False,
choices=['ALWAYS', 'NEVER', 'ON_DEMAND'],
help='The activation policy for this instance. This specifies when the '
'instance should be activated and is applicable only when the '
'instance state is RUNNABLE.')
parser.add_argument(
'--assign-ip',
action='store_true',
default=None, # Tri-valued: None => don't change the setting.
help='The instance must be assigned an IP address.')
gae_apps_group = parser.add_mutually_exclusive_group()
gae_apps_group.add_argument(
'--authorized-gae-apps',
type=arg_parsers.ArgList(min_length=1),
metavar='APP',
required=False,
action=arg_parsers.FloatingListValuesCatcher(),
help='A list of App Engine app IDs that can access this instance.')
gae_apps_group.add_argument(
'--clear-gae-apps',
required=False,
action='store_true',
help=('Specified to clear the list of App Engine apps that can access '
'this instance.'))
networks_group = parser.add_mutually_exclusive_group()
networks_group.add_argument(
'--authorized-networks',
type=arg_parsers.ArgList(min_length=1),
metavar='NETWORK',
required=False,
action=arg_parsers.FloatingListValuesCatcher(),
help='The list of external networks that are allowed to connect to the '
'instance. Specified in CIDR notation, also known as \'slash\' '
'notation (e.g. 192.168.100.0/24).')
networks_group.add_argument(
'--clear-authorized-networks',
required=False,
action='store_true',
help='Clear the list of external networks that are allowed to connect '
'to the instance.')
backups_group = parser.add_mutually_exclusive_group()
backups_group.add_argument(
'--backup-start-time',
required=False,
help='The start time of daily backups, specified in the 24 hour format '
'- HH:MM, in the UTC timezone.')
backups_group.add_argument(
'--no-backup',
required=False,
action='store_true',
help='Specified if daily backup should be disabled.')
database_flags_group = parser.add_mutually_exclusive_group()
database_flags_group.add_argument(
'--database-flags',
type=arg_parsers.ArgDict(min_length=1),
metavar='FLAG=VALUE',
required=False,
action=arg_parsers.FloatingListValuesCatcher(),
help='A comma-separated list of database flags to set on the instance. '
'Use an equals sign to separate flag name and value. Flags without '
'values, like skip_grant_tables, can be written out without a value '
'after, e.g., `skip_grant_tables=`. Use on/off for '
'booleans. View the Instance Resource API for allowed flags. '
'(e.g., `--database-flags max_allowed_packet=55555,skip_grant_tables=,'
'log_output=1`)')
database_flags_group.add_argument(
'--clear-database-flags',
required=False,
action='store_true',
help='Clear the database flags set on the instance. '
'WARNING: Instance will be restarted.')
parser.add_argument(
'--enable-bin-log',
action='store_true',
default=None, # Tri-valued: None => don't change the setting.
help='Enable binary log. If backup configuration is disabled, binary '
'log should be disabled as well.')
parser.add_argument(
'--follow-gae-app',
required=False,
help='The App Engine app this instance should follow. It must be in '
'the same region as the instance. '
'WARNING: Instance may be restarted.')
parser.add_argument(
'--gce-zone',
required=False,
help='The preferred Compute Engine zone (e.g. us-central1-a, '
'us-central1-b, etc.). '
'WARNING: Instance may be restarted.')
parser.add_argument(
'instance',
completion_resource='sql.instances',
help='Cloud SQL instance ID.')
parser.add_argument(
'--pricing-plan',
'-p',
required=False,
choices=['PER_USE', 'PACKAGE'],
help='The pricing plan for this instance.')
parser.add_argument(
'--replication',
required=False,
choices=['SYNCHRONOUS', 'ASYNCHRONOUS'],
help='The type of replication this instance uses.')
parser.add_argument(
'--require-ssl',
action='store_true',
default=None, # Tri-valued: None => don't change the setting.
help='mysqld should default to \'REQUIRE X509\' for users connecting '
'over IP.')
parser.add_argument(
'--tier',
'-t',
required=False,
help='The tier of service for this instance, for example D0, D1. '
'WARNING: Instance will be restarted.')
parser.add_argument(
'--enable-database-replication',
action='store_true',
default=None, # Tri-valued: None => don't change the setting.
help='Enable database replication. Applicable only '
'for read replica instance(s). WARNING: Instance will be restarted.')
parser.add_argument(
'--async',
action='store_true',
help='Do not wait for the operation to complete.')
parser.add_argument(
'--diff',
action='store_true',
help='Show what changed as a result of the update.')
def Display(self, args, result):
"""Display prints information about what just happened to stdout.
Args:
args: The same as the args in Run.
result: A dict object representing the operations resource describing the
patch operation if the patch was successful.
"""
if args.diff:
resource_printer.Print(result, 'text')
def _PrintAndConfirmWarningMessage(self, args):
"""Print and confirm warning indicating the effect of applying the patch."""
continue_msg = None
if any([args.tier, args.database_flags, args.clear_database_flags,
args.enable_database_replication is not None]):
continue_msg = ('WARNING: This patch modifies a value that requires '
'your instance to be restarted. Submitting this patch '
'will immediately restart your instance if it\'s running.'
)
else:
if any([args.follow_gae_app, args.gce_zone]):
continue_msg = ('WARNING: This patch modifies the zone your instance '
'is set to run in, which may require it to be moved. '
'Submitting this patch will restart your instance '
'if it is running in a different zone.')
if continue_msg and not console_io.PromptContinue(continue_msg):
raise exceptions.ToolException('canceled by the user.')
def _GetConfirmedClearedFields(self, args, patch_instance):
"""Clear fields according to args and confirm with user."""
cleared_fields = []
if args.clear_gae_apps:
cleared_fields.append('settings.authorizedGaeApplications')
if args.clear_authorized_networks:
cleared_fields.append('settings.ipConfiguration.authorizedNetworks')
if args.clear_database_flags:
cleared_fields.append('settings.databaseFlags')
log.status.write(
'The following message will be used for the patch API method.\n')
log.status.write(
encoding.MessageToJson(
patch_instance, include_fields=cleared_fields)+'\n')
self._PrintAndConfirmWarningMessage(args)
return cleared_fields
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Patch(_BasePatch, base.Command):
"""Updates the settings of a Cloud SQL instance."""
@errors.ReraiseHttpException
def Run(self, args):
"""Updates settings of a Cloud SQL instance using the patch api method.
Args:
args: argparse.Namespace, The arguments that this command was invoked
with.
Returns:
A dict object representing the operations resource describing the patch
operation if the patch was successful.
Raises:
      HttpException: An HTTP error response was received while executing the API
          request.
      ToolException: An error other than an HTTP error occurred while executing
          the command.
"""
sql_client = self.context['sql_client']
sql_messages = self.context['sql_messages']
resources = self.context['registry']
validate.ValidateInstanceName(args.instance)
instance_ref = resources.Parse(args.instance, collection='sql.instances')
original_instance_resource = sql_client.instances.Get(
instance_ref.Request())
patch_instance = instances.InstancesV1Beta3.ConstructInstanceFromArgs(
sql_messages, args, original=original_instance_resource)
patch_instance.project = instance_ref.project
patch_instance.instance = instance_ref.instance
cleared_fields = self._GetConfirmedClearedFields(args, patch_instance)
with sql_client.IncludeFields(cleared_fields):
result = sql_client.instances.Patch(patch_instance)
operation_ref = resources.Create(
'sql.operations',
operation=result.operation,
project=instance_ref.project,
instance=instance_ref.instance,
)
if args.async:
return sql_client.operations.Get(operation_ref.Request())
operations.OperationsV1Beta3.WaitForOperation(
sql_client, operation_ref, 'Patching Cloud SQL instance')
log.UpdatedResource(instance_ref)
if args.diff:
changed_instance_resource = sql_client.instances.Get(
instance_ref.Request())
return resource_printer.ResourceDiff(
original_instance_resource, changed_instance_resource)
return sql_client.instances.Get(instance_ref.Request())
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class PatchBeta(_BasePatch, base.Command):
"""Updates the settings of a Cloud SQL instance."""
@errors.ReraiseHttpException
def Run(self, args):
"""Updates settings of a Cloud SQL instance using the patch api method.
Args:
args: argparse.Namespace, The arguments that this command was invoked
with.
Returns:
A dict object representing the operations resource describing the patch
operation if the patch was successful.
Raises:
      HttpException: An HTTP error response was received while executing the API
          request.
      ToolException: An error other than an HTTP error occurred while executing
          the command.
"""
sql_client = self.context['sql_client']
sql_messages = self.context['sql_messages']
resources = self.context['registry']
validate.ValidateInstanceName(args.instance)
instance_ref = resources.Parse(args.instance, collection='sql.instances')
original_instance_resource = sql_client.instances.Get(
instance_ref.Request())
patch_instance = instances.InstancesV1Beta3.ConstructInstanceFromArgs(
sql_messages, args, original=original_instance_resource)
patch_instance.project = instance_ref.project
patch_instance.name = instance_ref.instance
cleared_fields = self._GetConfirmedClearedFields(args, patch_instance)
with sql_client.IncludeFields(cleared_fields):
result_operation = sql_client.instances.Patch(patch_instance)
operation_ref = resources.Create(
'sql.operations',
operation=result_operation.name,
project=instance_ref.project,
instance=instance_ref.instance,
)
if args.async:
return sql_client.operations.Get(operation_ref.Request())
operations.OperationsV1Beta4.WaitForOperation(
sql_client, operation_ref, 'Patching Cloud SQL instance')
log.UpdatedResource(instance_ref)
if args.diff:
changed_instance_resource = sql_client.instances.Get(
instance_ref.Request())
return resource_printer.ResourceDiff(
original_instance_resource, changed_instance_resource)
return sql_client.instances.Get(instance_ref.Request())
| bsd-3-clause | 5,171,648,414,095,974,000 | 37.547486 | 80 | 0.672681 | false | 4.250077 | false | false | false |
tensorflow/estimator | tensorflow_estimator/python/estimator/canned/dnn_test_fc_v2.py | 1 | 19054 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for dnn.py with feature_column_v2."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import tempfile
from unittest.mock import patch
from absl.testing import parameterized
import numpy as np
import six
import tensorflow as tf
from tensorflow.core.example import example_pb2
from tensorflow.core.example import feature_pb2
from tensorflow.python.feature_column import feature_column_v2
from tensorflow_estimator.python.estimator.canned import dnn
from tensorflow_estimator.python.estimator.canned import dnn_testing_utils
from tensorflow_estimator.python.estimator.canned import prediction_keys
from tensorflow_estimator.python.estimator.export import export
from tensorflow_estimator.python.estimator.inputs import numpy_io
from tensorflow_estimator.python.estimator.inputs import pandas_io
try:
# pylint: disable=g-import-not-at-top
import pandas as pd
HAS_PANDAS = True
except IOError:
# Pandas writes a temporary file during import. If it fails, don't use pandas.
HAS_PANDAS = False
except ImportError:
HAS_PANDAS = False
def _dnn_classifier_fn(*args, **kwargs):
return dnn.DNNClassifierV2(*args, **kwargs)
class DNNModelFnV2Test(dnn_testing_utils.BaseDNNModelFnTest, tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNModelFnTest.__init__(
self, dnn.dnn_model_fn_v2, fc_impl=feature_column_v2)
class DNNLogitFnV2Test(dnn_testing_utils.BaseDNNLogitFnTest, tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNLogitFnTest.__init__(
self, dnn.dnn_logit_fn_builder_v2, fc_impl=feature_column_v2)
class DNNWarmStartingV2Test(dnn_testing_utils.BaseDNNWarmStartingTest,
tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNWarmStartingTest.__init__(
self, _dnn_classifier_fn, _dnn_regressor_fn, fc_impl=feature_column_v2)
class DNNClassifierEvaluateV2Test(
dnn_testing_utils.BaseDNNClassifierEvaluateTest, tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierEvaluateTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column_v2)
class DNNClassifierPredictV2Test(dnn_testing_utils.BaseDNNClassifierPredictTest,
tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierPredictTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column_v2)
class DNNClassifierTrainV2Test(dnn_testing_utils.BaseDNNClassifierTrainTest,
tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNClassifierTrainTest.__init__(
self, _dnn_classifier_fn, fc_impl=feature_column_v2)
def _dnn_regressor_fn(*args, **kwargs):
return dnn.DNNRegressorV2(*args, **kwargs)
class DNNRegressorEvaluateV2Test(dnn_testing_utils.BaseDNNRegressorEvaluateTest,
tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorEvaluateTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column_v2)
class DNNRegressorPredictV2Test(dnn_testing_utils.BaseDNNRegressorPredictTest,
tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorPredictTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column_v2)
class DNNRegressorTrainV2Test(dnn_testing_utils.BaseDNNRegressorTrainTest,
tf.test.TestCase):
def __init__(self, methodName='runTest'): # pylint: disable=invalid-name
tf.test.TestCase.__init__(self, methodName)
dnn_testing_utils.BaseDNNRegressorTrainTest.__init__(
self, _dnn_regressor_fn, fc_impl=feature_column_v2)
def _queue_parsed_features(feature_map):
tensors_to_enqueue = []
keys = []
for key, tensor in six.iteritems(feature_map):
keys.append(key)
tensors_to_enqueue.append(tensor)
queue_dtypes = [x.dtype for x in tensors_to_enqueue]
input_queue = tf.queue.FIFOQueue(capacity=100, dtypes=queue_dtypes)
tf.compat.v1.train.queue_runner.add_queue_runner(
tf.compat.v1.train.queue_runner.QueueRunner(
input_queue, [input_queue.enqueue(tensors_to_enqueue)]))
dequeued_tensors = input_queue.dequeue()
return {keys[i]: dequeued_tensors[i] for i in range(len(dequeued_tensors))}
class DNNRegressorIntegrationTest(tf.test.TestCase, parameterized.TestCase):
def setUp(self):
self._model_dir = tempfile.mkdtemp()
def tearDown(self):
if self._model_dir:
tf.compat.v1.summary.FileWriterCache.clear()
shutil.rmtree(self._model_dir)
def _test_complete_flow(self, train_input_fn, eval_input_fn, predict_input_fn,
input_dimension, label_dimension, batch_size):
feature_columns = [
tf.feature_column.numeric_column('x', shape=(input_dimension,))
]
est = dnn.DNNRegressorV2(
hidden_units=(2, 2),
feature_columns=feature_columns,
label_dimension=label_dimension,
model_dir=self._model_dir)
# TRAIN
num_steps = 10
est.train(train_input_fn, steps=num_steps)
# EVALUATE
scores = est.evaluate(eval_input_fn)
self.assertEqual(num_steps, scores[tf.compat.v1.GraphKeys.GLOBAL_STEP])
self.assertIn('loss', six.iterkeys(scores))
# PREDICT
predictions = np.array([
x[prediction_keys.PredictionKeys.PREDICTIONS]
for x in est.predict(predict_input_fn)
])
self.assertAllEqual((batch_size, label_dimension), predictions.shape)
# EXPORT
feature_spec = tf.feature_column.make_parse_example_spec(feature_columns)
serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
feature_spec)
export_dir = est.export_saved_model(tempfile.mkdtemp(),
serving_input_receiver_fn)
self.assertTrue(tf.compat.v1.gfile.Exists(export_dir))
def test_numpy_input_fn(self):
"""Tests complete flow with numpy_input_fn."""
label_dimension = 2
batch_size = 10
data = np.linspace(0., 2., batch_size * label_dimension, dtype=np.float32)
data = data.reshape(batch_size, label_dimension)
# learn y = x
train_input_fn = numpy_io.numpy_input_fn(
x={'x': data},
y=data,
batch_size=batch_size,
num_epochs=None,
shuffle=True)
eval_input_fn = numpy_io.numpy_input_fn(
x={'x': data}, y=data, batch_size=batch_size, shuffle=False)
predict_input_fn = numpy_io.numpy_input_fn(
x={'x': data}, batch_size=batch_size, shuffle=False)
self._test_complete_flow(
train_input_fn=train_input_fn,
eval_input_fn=eval_input_fn,
predict_input_fn=predict_input_fn,
input_dimension=label_dimension,
label_dimension=label_dimension,
batch_size=batch_size)
def test_pandas_input_fn(self):
"""Tests complete flow with pandas_input_fn."""
if not HAS_PANDAS:
return
label_dimension = 1
batch_size = 10
data = np.linspace(0., 2., batch_size, dtype=np.float32)
x = pd.DataFrame({'x': data})
y = pd.Series(data)
train_input_fn = pandas_io.pandas_input_fn(
x=x, y=y, batch_size=batch_size, num_epochs=None, shuffle=True)
eval_input_fn = pandas_io.pandas_input_fn(
x=x, y=y, batch_size=batch_size, shuffle=False)
predict_input_fn = pandas_io.pandas_input_fn(
x=x, batch_size=batch_size, shuffle=False)
self._test_complete_flow(
train_input_fn=train_input_fn,
eval_input_fn=eval_input_fn,
predict_input_fn=predict_input_fn,
input_dimension=label_dimension,
label_dimension=label_dimension,
batch_size=batch_size)
def test_input_fn_from_parse_example(self):
"""Tests complete flow with input_fn constructed from parse_example."""
label_dimension = 2
batch_size = 10
data = np.linspace(0., 2., batch_size * label_dimension, dtype=np.float32)
data = data.reshape(batch_size, label_dimension)
serialized_examples = []
for datum in data:
example = example_pb2.Example(
features=feature_pb2.Features(
feature={
'x':
feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=datum)),
'y':
feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=datum)),
}))
serialized_examples.append(example.SerializeToString())
feature_spec = {
'x': tf.io.FixedLenFeature([label_dimension], tf.dtypes.float32),
'y': tf.io.FixedLenFeature([label_dimension], tf.dtypes.float32),
}
def _train_input_fn():
feature_map = tf.compat.v1.io.parse_example(serialized_examples,
feature_spec)
features = _queue_parsed_features(feature_map)
labels = features.pop('y')
return features, labels
def _eval_input_fn():
feature_map = tf.compat.v1.io.parse_example(
tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
feature_spec)
features = _queue_parsed_features(feature_map)
labels = features.pop('y')
return features, labels
def _predict_input_fn():
feature_map = tf.compat.v1.io.parse_example(
tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
feature_spec)
features = _queue_parsed_features(feature_map)
features.pop('y')
return features, None
self._test_complete_flow(
train_input_fn=_train_input_fn,
eval_input_fn=_eval_input_fn,
predict_input_fn=_predict_input_fn,
input_dimension=label_dimension,
label_dimension=label_dimension,
batch_size=batch_size)
class DNNClassifierIntegrationTest(tf.test.TestCase):
def setUp(self):
self._model_dir = tempfile.mkdtemp()
def tearDown(self):
if self._model_dir:
tf.compat.v1.summary.FileWriterCache.clear()
shutil.rmtree(self._model_dir)
def _as_label(self, data_in_float):
return np.rint(data_in_float).astype(np.int64)
def _test_complete_flow(self, train_input_fn, eval_input_fn, predict_input_fn,
input_dimension, n_classes, batch_size):
feature_columns = [
tf.feature_column.numeric_column('x', shape=(input_dimension,))
]
est = dnn.DNNClassifierV2(
hidden_units=(2, 2),
feature_columns=feature_columns,
n_classes=n_classes,
model_dir=self._model_dir)
# TRAIN
num_steps = 10
est.train(train_input_fn, steps=num_steps)
# EVALUATE
scores = est.evaluate(eval_input_fn)
self.assertEqual(num_steps, scores[tf.compat.v1.GraphKeys.GLOBAL_STEP])
self.assertIn('loss', six.iterkeys(scores))
# PREDICT
predicted_proba = np.array([
x[prediction_keys.PredictionKeys.PROBABILITIES]
for x in est.predict(predict_input_fn)
])
self.assertAllEqual((batch_size, n_classes), predicted_proba.shape)
# EXPORT
feature_spec = tf.feature_column.make_parse_example_spec(feature_columns)
serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
feature_spec)
export_dir = est.export_saved_model(tempfile.mkdtemp(),
serving_input_receiver_fn)
self.assertTrue(tf.compat.v1.gfile.Exists(export_dir))
def test_numpy_input_fn(self):
"""Tests complete flow with numpy_input_fn."""
n_classes = 3
input_dimension = 2
batch_size = 10
data = np.linspace(
0., n_classes - 1., batch_size * input_dimension, dtype=np.float32)
x_data = data.reshape(batch_size, input_dimension)
y_data = np.reshape(self._as_label(data[:batch_size]), (batch_size, 1))
# learn y = x
train_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data},
y=y_data,
batch_size=batch_size,
num_epochs=None,
shuffle=True)
eval_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data}, y=y_data, batch_size=batch_size, shuffle=False)
predict_input_fn = numpy_io.numpy_input_fn(
x={'x': x_data}, batch_size=batch_size, shuffle=False)
self._test_complete_flow(
train_input_fn=train_input_fn,
eval_input_fn=eval_input_fn,
predict_input_fn=predict_input_fn,
input_dimension=input_dimension,
n_classes=n_classes,
batch_size=batch_size)
def test_pandas_input_fn(self):
"""Tests complete flow with pandas_input_fn."""
if not HAS_PANDAS:
return
input_dimension = 1
n_classes = 3
batch_size = 10
data = np.linspace(0., n_classes - 1., batch_size, dtype=np.float32)
x = pd.DataFrame({'x': data})
y = pd.Series(self._as_label(data))
train_input_fn = pandas_io.pandas_input_fn(
x=x, y=y, batch_size=batch_size, num_epochs=None, shuffle=True)
eval_input_fn = pandas_io.pandas_input_fn(
x=x, y=y, batch_size=batch_size, shuffle=False)
predict_input_fn = pandas_io.pandas_input_fn(
x=x, batch_size=batch_size, shuffle=False)
self._test_complete_flow(
train_input_fn=train_input_fn,
eval_input_fn=eval_input_fn,
predict_input_fn=predict_input_fn,
input_dimension=input_dimension,
n_classes=n_classes,
batch_size=batch_size)
def test_input_fn_from_parse_example(self):
"""Tests complete flow with input_fn constructed from parse_example."""
input_dimension = 2
n_classes = 3
batch_size = 10
data = np.linspace(
0., n_classes - 1., batch_size * input_dimension, dtype=np.float32)
data = data.reshape(batch_size, input_dimension)
serialized_examples = []
for datum in data:
example = example_pb2.Example(
features=feature_pb2.Features(
feature={
'x':
feature_pb2.Feature(
float_list=feature_pb2.FloatList(value=datum)),
'y':
feature_pb2.Feature(
int64_list=feature_pb2.Int64List(
value=self._as_label(datum[:1]))),
}))
serialized_examples.append(example.SerializeToString())
feature_spec = {
'x': tf.io.FixedLenFeature([input_dimension], tf.dtypes.float32),
'y': tf.io.FixedLenFeature([1], tf.dtypes.int64),
}
def _train_input_fn():
feature_map = tf.compat.v1.io.parse_example(serialized_examples,
feature_spec)
features = _queue_parsed_features(feature_map)
labels = features.pop('y')
return features, labels
def _eval_input_fn():
feature_map = tf.compat.v1.io.parse_example(
tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
feature_spec)
features = _queue_parsed_features(feature_map)
labels = features.pop('y')
return features, labels
def _predict_input_fn():
feature_map = tf.compat.v1.io.parse_example(
tf.compat.v1.train.limit_epochs(serialized_examples, num_epochs=1),
feature_spec)
features = _queue_parsed_features(feature_map)
features.pop('y')
return features, None
self._test_complete_flow(
train_input_fn=_train_input_fn,
eval_input_fn=_eval_input_fn,
predict_input_fn=_predict_input_fn,
input_dimension=input_dimension,
n_classes=n_classes,
batch_size=batch_size)
class DNNTrainingMode(tf.test.TestCase):
"""Tests that training mode propagates to feature columns correctly."""
def setUp(self):
self._model_dir = tempfile.mkdtemp()
self._label_dimension = 1
self._batch_size = 10
def tearDown(self):
if self._model_dir:
tf.compat.v1.summary.FileWriterCache.clear()
shutil.rmtree(self._model_dir)
def _create_data(self):
data = np.linspace(
0., 2., self._batch_size * self._label_dimension, dtype=np.float32)
return data.reshape(self._batch_size, self._label_dimension)
def _get_estimator(self):
feature_columns = [
tf.feature_column.numeric_column('x', shape=(self._label_dimension,))
]
return dnn.DNNRegressorV2(
hidden_units=(2, 2),
feature_columns=feature_columns,
label_dimension=self._label_dimension,
model_dir=self._model_dir)
def test_train_vs_eval_mode(self):
data = self._create_data()
train_input_fn = numpy_io.numpy_input_fn(
x={'x': data},
y=data,
batch_size=self._batch_size,
num_epochs=None,
shuffle=True)
eval_input_fn = numpy_io.numpy_input_fn(
x={'x': data}, y=data, batch_size=self._batch_size, shuffle=False)
est = self._get_estimator()
with patch.object(
tf.compat.v2.keras.layers.DenseFeatures, 'call',
return_value=data) as mock_dense_features_call:
est.train(train_input_fn, steps=10)
est.evaluate(eval_input_fn)
train_args, eval_args = mock_dense_features_call.call_args_list
# DenseFeature should have been called with training = True in train.
_, train_training_kwarg = train_args
self.assertTrue(train_training_kwarg['training'])
# DenseFeature should have been called with training = False in eval.
_, eval_training_kwarg = eval_args
self.assertFalse(eval_training_kwarg['training'])
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | -8,548,565,812,858,429,000 | 35.293333 | 80 | 0.65031 | false | 3.53769 | true | false | false |